branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>// Copyright © 2017-2018 <NAME>. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package cmdutil implements command line utilities.
package cmdutil
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// WrapCommand wraps a cobra command function with an error handler.
// The returned closure runs fn and, if it fails, reports the error and
// terminates the process with a non-zero exit status.
func WrapCommand(fn func(cmd *cobra.Command, args []string) error) func(cmd *cobra.Command, args []string) {
	return func(cmd *cobra.Command, args []string) {
		if err := fn(cmd, args); err != nil {
			// Diagnostics belong on stderr so they do not pollute piped stdout.
			fmt.Fprintf(os.Stderr, "error: %v\n\n", err)
			os.Exit(1)
		}
	}
}
| 5750c525e68a0f0129ad66563614a70783e2bd26 | [
"Go"
] | 1 | Go | ipfn/go-ipfn-cmd-util | aae6bad167de7743a2eb5e194865e70790d88af8 | f538ae1e84a25825287cec43de0fe6502c308278 |
refs/heads/master | <file_sep>class SessionsController < ApplicationController
def new
# Renders the login form; nothing is assigned because no record is saved here.
end
# Authenticates the user from the login form and starts a fresh session.
def create
  email = params[:session][:email]
  password = params[:session][:password]
  @user = User.find_by(email: email)
  # && instead of `and`: same result here, but avoids the low-precedence trap.
  if @user.present? && @user.authenticate(password)
    # wipe the session clean (guards against session fixation)
    reset_session
    # log in
    session[:user_id] = @user.id
    flash[:success] = "Logged in"
    redirect_to root_path
  else
    flash[:error] = "Try again"
    render :new
  end
end
def show
# Only logged-in users may view the current session.
require_user
end
def destroy
# Log out: drop all session state, then bounce to the home page.
reset_session
flash[:success] = "Logged out. see you again soon"
redirect_to root_path
end
end
<file_sep>class Order < ActiveRecord::Base
  belongs_to :user
  belongs_to :watch
  validates :stripe_token, presence: true

  # Charges the buyer's card via Stripe, then persists the order.
  # Returns the result of #save on success; on a card error the message is
  # added to the model's errors and false is returned.
  def save_and_charge
    Stripe.api_key = Rails.application.secrets.stripe_secret_key
    Stripe::Charge.create(
      :amount => amount,
      :currency => "gbp",
      :source => stripe_token, # obtained with Stripe.js
      :description => "#{user.email} charged for #{watch.name}"
    )
    self.save
  rescue Stripe::CardError => error
    body = error.json_body
    err = body[:error]
    # Surface the two common card failures verbatim; everything else gets a
    # generic support message. (The original compared a string literal to a
    # string literal — `"err[:message" == "..."` — so this branch never fired.)
    if err[:message] == "Your card has insufficient funds" || err[:message] == "Your card was declined"
      errors.add :base, err[:message]
    else
      errors.add :base, "There was a problem charging your card. Try contacting us at <EMAIL> and we'll help you out"
    end
    false
  end

  # Charge amount in the smallest currency unit (pence).
  def amount
    watch.price_in_pence
  end
end
<file_sep># This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# Seed the catalogue with a few demo watches, all owned by the first user.
Watch.create([
{name: "Backup Black", description: "lorem ipsum", price_in_pence: 5000, style: "black", gender: "unisex", brand: "Swatch", pre_owned: false, user: User.first },
{name: "Classic Digital Silver/Yellow", description: "lorem ipsum", price_in_pence: 4999, style: "silver with yellow Face", gender: "unisex", brand: "Casio", pre_owned: false, user: User.first },
{name: "<NAME>", description: "lorem ipsum", price_in_pence: 12000, style: "leather strap", brand: "Raketa", gender: "Unisex", pre_owned: true, user: User.first },
{name: "<NAME>", description: "lorem ipsum", price_in_pence: 200000, style: "white face", gender: "unisex", brand: "Jungens", pre_owned: false, user: User.first }
])<file_sep>class User < ActiveRecord::Base
has_secure_password
has_many :watches
has_many :orders
validates :email, presence:true
def owns(resource)
self == resource.user
end
def full_name
if first_name.present? and last_name.present?
[first_name, last_name].join(" ")
else
name
end
end
def name
first_name || username || email
if first_name.present?
first_name
elsif username.present?
username
else
email
end
end
end
<file_sep>class UsersController < ApplicationController
# Sign-up form.
def new
@user = User.new
end
# Creates the account and logs the new user straight in.
def create
@user = User.new(user_params)
if @user.save
# wipe the session clean (guards against session fixation)
reset_session
# Log in user
session[:user_id] = @user.id
flash[:success] = "Thanks for signing up"
redirect_to root_path
else
flash[:error] = "Please check the form for errors and try again"
render :new
end
end
def edit
end
private
# Strong parameters: attributes a user may set about themselves.
def user_params
params.require(:user).permit(:first_name, :last_name, :password, :password_confirmation, :username, :email, :address_line_1, :address_line_2, :address_line_3, :address_line_4, :postcode)
end
end
<file_sep>class WatchesController < ApplicationController
before_action :require_user, only: [:new, :create]
def index
@watches = Watch.all
def show
@watch = Watch.find(params[:id])
end
def new
@watch = Watch.new
end
def create
@watch = Watch.new(watch_params)
@watch.user_id = current_user.id
if @watch.save
flash[:success]= "Thanks for uploading"
redirect_to @watch
else
flash[:error] = "Please check form for error and try again"
render :new
end
end
def edit
@watch = Watch.find(params[:id])
require_owner(@watch)
end
def update
@watch = Watch.find(params[:id])
require_owner(@watch)
if @watch.update(watch_params)
flash[:success] = "Updated #{@watch.name}"
redirect_to @watch
else
flash[:error] = "Please check for errors and trye again"
render :edit
end
end
def destroy
require_owner(@watch)
@watch = Watch.find(params[:id])
flash[:success] = "#{@watch.name} was removed successfully" if @watch.destroy
redirect_to root_path
end
end
end
private
def watch_params
params.require(:watch).permit(:name, :description, :price_in_pence, :gender, :style, :brand, :pre_owned, :image)
end<file_sep>class OrdersController < ApplicationController
before_action :require_user, except: [:show]
# Checkout form for a watch.
def new
@order = Order.new
end
# Creates the order and charges the card (see Order#save_and_charge).
def create
# find the watch
@watch = Watch.find(params[:watch_id])
# create a new order with watch_id = @watch.id
@order = @watch.orders.new(order_params)
# set user_id - current_user.id
@order.user = current_user
# save + charge; a declined card lands on @order.errors and we re-render
if @order.save_and_charge
flash[:success] = "Order made"
redirect_to @order
else
flash[:error] = 'Please check the form for errors and try again'
render :new
end
end
def show
@order = Order.find(params[:id])
require_owner(@order) # make sure order.user_id == current_user.id
end
private
# Strong parameters: only the Stripe token is accepted from the client.
def order_params
params.require(:order).permit(:stripe_token)
end
end
<file_sep>Rails.application.routes.draw do
root "watches#index"
resources :watches do
# Orders are created in the context of a watch:
# GET  /watches/:watch_id/orders/new
# POST /watches/:watch_id/orders
resources :orders, only: [:new, :create]
end
# Orders are viewed on their own: GET /orders/:id
resources :orders, only: :show
resources :users, except: [:index, :show]
resource :session
end
<file_sep>class CreateUsers < ActiveRecord::Migration
  # Creates the users table: identity fields, the bcrypt password digest
  # (used by has_secure_password) and a four-line postal address.
  def change
    create_table :users do |t|
      %i[first_name last_name email username password_digest
         address_line_1 address_line_2 address_line_3 address_line_4
         postcode].each { |column| t.string column }
      t.timestamps null: false
    end
  end
end
<file_sep>class Watch < ActiveRecord::Base
# A watch listed for sale. Prices are stored in pence (integer).
belongs_to :user
has_many :orders
# Image attachment with resized styles and a default placeholder image.
has_attached_file :image, :styles => { :medium => "300x300#>", :thumb => "100x100#>" }, :default_url => "/images/:style/missing.svg"
validates_attachment_content_type :image, :content_type => /\Aimage\/.*\Z/
validates :user, presence: true
validates :name, presence: true, uniqueness:true
validates :description, presence: true
validates :brand, presence: true
# Minimum price is 100 pence (GBP 1.00).
validates :price_in_pence, presence: true, numericality: {greater_than_or_equal_to: 100}
# Price in pounds as a float (for display; money maths should stay in pence).
def price
price_in_pence.to_f / 100
end
end
| 017d8fc84560997eb29a8c239bea74ca974c8787 | [
"Ruby"
] | 10 | Ruby | alexdaish/Steer-Timekeep | c17bb778ff31042ac166929b2909ecaf61cb9e0d | 18ee9360cf8611fb57966263fb478aff8bbd6816 |
refs/heads/master | <repo_name>dgrat/voxelizer<file_sep>/rules.h
#pragma once
#include "glm_ext/glm_extensions.h"
#include <tuple>
#include <vector>
#include <array>
#include "stl/stl_import.h"
#include "mesh/polyhedron.h"
#include "checks.h"
#include "xml_config.h"
// A single voxel: its centre position and the result of the in/out test.
struct voxel_t {
    glm::vec3 _position;
    // presumably set once the inside/outside mesh test has run — confirm with the voxelizer
    bool _is_in_mesh = false;

    // Lexicographic (x, y, z) ordering so voxels can live in ordered containers (std::set/std::map).
    friend bool operator< (const voxel_t& lhs, const voxel_t& rhs) {
        return std::make_tuple(lhs._position.x, lhs._position.y, lhs._position.z) < std::make_tuple(rhs._position.x, rhs._position.y, rhs._position.z);
    }
};
struct build_stl_cube {
    // Emits the 12 triangles (2 per side) of an axis-aligned cuboid centred at
    // `pos` with edge lengths `cube_size`. Normals are passed as zero vectors;
    // stl::face's constructor derives them from the triangle winding instead.
    static std::vector<stl::face> mesh(const glm::vec3 &pos, const glm::vec3 cube_size) {
        // The 8 corners: indices 0-3 are the -z face, 4-7 the +z face.
        auto get_corners = [=](const glm::vec3 &size) {
            std::array<glm::vec3, 8> arr = {
                glm::vec3(pos.x - size.x/2, pos.y - size.y/2, pos.z - size.z/2),
                glm::vec3(pos.x + size.x/2, pos.y - size.y/2, pos.z - size.z/2),
                glm::vec3(pos.x + size.x/2, pos.y + size.y/2, pos.z - size.z/2),
                glm::vec3(pos.x - size.x/2, pos.y + size.y/2, pos.z - size.z/2),
                glm::vec3(pos.x - size.x/2, pos.y - size.y/2, pos.z + size.z/2),
                glm::vec3(pos.x + size.x/2, pos.y - size.y/2, pos.z + size.z/2),
                glm::vec3(pos.x + size.x/2, pos.y + size.y/2, pos.z + size.z/2),
                glm::vec3(pos.x - size.x/2, pos.y + size.y/2, pos.z + size.z/2)
            };
            return arr;
        };
        auto c = get_corners(cube_size);
        return {
            { glm::vec3(0), c[1], c[0], c[3] },
            { glm::vec3(0), c[3], c[2], c[1] },
            { glm::vec3(0), c[5], c[1], c[2] },
            { glm::vec3(0), c[2], c[6], c[5] },
            { glm::vec3(0), c[5], c[6], c[7] },
            { glm::vec3(0), c[7], c[4], c[5] },
            { glm::vec3(0), c[3], c[0], c[4] },
            { glm::vec3(0), c[4], c[7], c[3] },
            { glm::vec3(0), c[6], c[2], c[3] },
            { glm::vec3(0), c[3], c[7], c[6] },
            { glm::vec3(0), c[0], c[1], c[5] },
            { glm::vec3(0), c[5], c[4], c[0] }
        };
    }
};
<file_sep>/xml_config.h
#pragma once
#include "glm_ext/glm_extensions.h"
#include <pugixml.hpp>
#include <string>
#include <iostream>
#include <filesystem>
#include <vector>
#include <regex>
#include <set>
namespace cfg {
//! shape settings
struct shape_settings {
std::string _file_in;
std::string _file_out;
int _merge_priority;
int _material_inside;
int _material_shell;
};
//! project settings
class xml_project {
std::string _project_file = "";
std::vector<shape_settings> _shapes;
int _grid_size; // maximum number of voxels (either along: w, h, d)
float _voxel_size; // alternatively use voxel size
std::string _target_dir = "";
std::string _raw_fname = "";
// internal state members
bool _voxel_size_defined = false;
bool _grid_size_defined = false;
static bool endsWithIgnoreCase(const std::string& str, const std::string& suffix) {
return std::regex_search(str, std::regex(std::string(suffix) + "$", std::regex_constants::icase));
}
pugi::xml_parse_result read_project() {
pugi::xml_document doc;
pugi::xml_parse_result result = doc.load_file(_project_file.c_str());
if (result) {
for (pugi::xml_node tool : doc.child("project").children("stl")) {
const std::string file_in = tool.attribute("file_in").as_string();
const std::string file_out = tool.attribute("file_out").as_string();
const int prio = tool.attribute("merge_priority").as_int();
const int mat_in = tool.attribute("material_inside").as_int();
const int mat_out = tool.attribute("material_outside").as_int();
_shapes.push_back({file_in, file_out, prio, mat_in, mat_out});
}
pugi::xml_node g = doc.child("project").child("grid");
if(!g.empty()) {
_grid_size_defined = true;
_grid_size = g.attribute("max_voxels").as_int();
_voxel_size = g.attribute("stl_cube_size").as_float();
}
pugi::xml_node r = doc.child("project").child("voxel_size");
if(!r.empty()) {
_voxel_size_defined = true;
_voxel_size = r.attribute("cube_size").as_float();
}
pugi::xml_node t = doc.child("project").child("target");
if(!t.empty()) {
_target_dir = t.attribute("dir_out").as_string();
_raw_fname = t.attribute("raw_fname").as_string();
}
}
return result;
}
public:
const bool voxel_size_defined() const {
return _voxel_size_defined;
}
const bool grid_size_defined() const {
return _grid_size_defined;
}
const float voxel_size() const {
return _voxel_size;
}
const int max_grid_size() const {
return _grid_size;
}
const std::vector<shape_settings> &shapes() const {
return _shapes;
}
const std::string target_dir() const {
return _target_dir;
}
const std::string raw_fname() const {
return _raw_fname;
}
const std::string project_file() const {
return _project_file;
}
const std::string project_path() const {
return std::filesystem::path(_project_file).parent_path().string();
}
void init(const std::string &project_dir) {
if(!std::filesystem::exists(project_dir)) {
std::cerr << "xml_project::init() - invalid project config: " << project_dir << std::endl;
return;
}
std::vector<std::string> xml_files;
for (const auto& entry : std::filesystem::directory_iterator(project_dir)) {
if(endsWithIgnoreCase(entry.path().string(), ".xml")) {
std::cout << "add file: " << entry.path().string() << std::endl;
xml_files.push_back(entry.path().string());
}
}
if(xml_files.size() > 0) {
_project_file = xml_files.at(0);
read_project();
}
}
xml_project() = default;
xml_project(const std::string &project_dir) {
init(project_dir);
if(!std::filesystem::exists(_target_dir)) {
std::cout << _target_dir << " does not exist. Create new directory" << std::endl;
std::filesystem::create_directory(_target_dir);
}
}
};
};
<file_sep>/stl/stl_import.h
#pragma once
#include "../glm_ext/glm_extensions.h"
#include <limits>
#include <vector>
#include <fstream>
#include <cstdio>
#include <iostream>
#include <cassert>
#include <set>
#include <map>
namespace mesh {
template<class T>
struct polyhedron;
}
namespace stl {
#pragma pack(push, 1)
struct face {
glm::vec3 _norm;
glm::vec3 _vert_1;
glm::vec3 _vert_2;
glm::vec3 _vert_3;
uint16_t _attribute = 0;
face() = default;
face(const glm::vec3 &n, const glm::vec3 &p1, const glm::vec3 &p2, const glm::vec3 &p3) {
if(glm::length(n) < std::numeric_limits<float>::epsilon()) {
const glm::vec3 u = p2 - p1;
const glm::vec3 v = p3 - p1;
_norm = glm::cross(u, v);
}
else {
_norm = n;
}
_vert_1 = p1;
_vert_2 = p2;
_vert_3 = p3;
}
};
#pragma pack(pop)
template<typename base_t>
struct bbox {
glm::vec<3, base_t> _min = glm::vec3(FLT_MAX);
glm::vec<3, base_t> _max = glm::vec3(-FLT_MAX);
base_t scale() const;
glm::vec<3, base_t> offset() const;
//! For iterating over vertex lists
//! pass a vertex and calculate the bounding box on the fly
void extend(const glm::vec<3, base_t> &v) {
_min = glm::min(_min, v);
_max = glm::max(_max, v);
}
};
class format {
// small check to guarantee sanity
static_assert(sizeof(face) == sizeof(glm::vec3) * sizeof(float) + sizeof(uint16_t), "size mismatch: face not compatible with stl format");
uint8_t _header[80] = { 0 };
std::vector<face> _faces;
public:
format() = default;
format(const std::string &file);
static mesh::polyhedron<float> to_polyhedron(const std::vector<face> &faces);
static void save(const std::vector<face> &, const std::string &filename);
static std::ofstream open(const std::string &file);
static void close(std::ofstream &f);
template<typename T>
static void append(std::ofstream &f, const T & data) {
f.write((char*)(&data), sizeof(T));
}
bool load(const std::string &filename);
const std::vector<face> &faces() const;
//! calculates a bounding box
static bbox<float> estimate_bbox(const std::vector<face> &);
//! this functions centers the mesh automatically around around {0,0,0}
static std::vector<face> remove_offset(const bbox<float> &, const std::vector<face> &);
//! this functions centers the mesh automatically around around {0,0,0}
//! and scales it down to 0 <= {x,y,z} <= 1
static std::vector<face> normalized(const bbox<float> &, const std::vector<face> &, const glm::vec3 &transl = glm::vec3(0));
// operator overload to access face
face& operator[](std::size_t idx) { return _faces[idx]; }
const face& operator[](std::size_t idx) const { return _faces[idx]; }
};
};
<file_sep>/CMakeLists.txt
cmake_minimum_required(VERSION 3.5)
project(VoxelMagick LANGUAGES CXX)
find_package( OpenCV REQUIRED )
find_package(OpenMP)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
#set ( CMAKE_CXX_FLAGS "-msse -mavx")
find_program(CCACHE_PROGRAM ccache)
if(CCACHE_PROGRAM)
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE "${CCACHE_PROGRAM}")
endif()
include_directories(
glm
pugixml/src
)
set(voxel_lib
glm_ext/glm_extensions.h
mesh/polyhedron.h
mesh/polyhedron.tpp
stl/stl_import.h
stl/stl_import.cpp
tree/tree.h
voxelizer.h
enums.h
rasterizer.h
buffer.h
timer.h
rules.h
checks.h
checks.cpp
xml_config.h
vox_file.h
)
find_package(Git QUIET)
if(GIT_FOUND AND EXISTS "${PROJECT_SOURCE_DIR}/.git")
# Update submodules as needed
option(GIT_SUBMODULE "Check submodules during build" ON)
if(GIT_SUBMODULE)
message(STATUS "Submodule update")
execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
RESULT_VARIABLE GIT_SUBMOD_RESULT)
if(NOT GIT_SUBMOD_RESULT EQUAL "0")
message(FATAL_ERROR "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules")
endif()
endif()
endif()
add_subdirectory(pugixml)
link_directories(pugixml)
add_executable(VoxelMagick main.cpp ${voxel_lib})
if(OpenMP_CXX_FOUND)
target_link_libraries(VoxelMagick pugixml OpenMP::OpenMP_CXX ${OpenCV_LIBS})
else()
target_link_libraries(VoxelMagick pugixml ${OpenCV_LIBS})
endif()
<file_sep>/checks.h
#pragma once
#include "mesh/polyhedron.h"
#include <glm/glm.hpp>
#include <iostream>
#include "glm_ext/glm_extensions.h"
#include "xml_config.h"
#include "buffer.h"
#include "enums.h"
namespace hidden {
constexpr float precision_bias = 100*FLT_EPSILON;
template<typename base_t> constexpr glm::vec<3, base_t>
swizzle_vector(
const glm::vec<3, base_t> &v,
const swizzle_mode mode
)
{
switch(mode) {
case xzy:
return v.xzy();
case yxz:
return v.yxz();
case yzx:
return v.yzx();
case zxy:
return v.zxy();
case zyx:
return v.zyx();
default:
return v;
};
}
struct plane_t {
glm::vec3 normal;
float d;
};
};
namespace checks {
namespace raycast {
inline bool point_collision(const glm::vec3 &ray_start, const glm::vec3 ray_direction, const float distance,
const glm::vec3 &v1, const glm::vec3 &v2, const glm::vec3 &v3) {
const glm::vec3 ray_trg = ray_start + (ray_direction * distance);
for(const auto &v : {v1,v2,v3}) {
if(glm::length(ray_trg - v) < hidden::precision_bias) {
return true;
}
}
return false;
}
// check whether and where a ray crosses an edge from a face
template<typename base_t>
bool edge_collision(const glm::vec3 &pos, const glm::vec3 ray_direction, const mesh::polyhedron<base_t> &poly) {
// 1) Calculate angle between the ray and an arbitrary vector used for a projection of the later points
const glm::vec3 ref = glm::vec3(0,0,1);
const glm::vec3 axis = glm::cross(ref, ray_direction);
const float angle = glm::orientedAngle(ray_direction, ref, axis);
const glm::mat3 rotation = std::abs(angle) > hidden::precision_bias ? (glm::mat3)glm::rotate(angle, axis) : glm::mat3(1);
// 2) go over all edges
for(const auto &e : poly._edges) {
const glm::vec3 &e1 = poly._vertices._vertex_arr.at(e.first._id1);
const glm::vec3 &e2 = poly._vertices._vertex_arr.at(e.first._id2);
// 3) rotate all points
const glm::vec2 rot_e1 = rotation * e1;
const glm::vec2 rot_e2 = rotation * e2;
const glm::vec2 p = rotation * pos;
// 5) check whether the ray is close to the edge
const glm::vec2 pt_on_line = glm::closestPointOnLine(p, rot_e1, rot_e2);
if(glm::length(pt_on_line - p) < hidden::precision_bias) {
return true;
}
}
return false;
}
template<typename base_t>
std::set<float> ray_intersections_safe(const glm::vec3 &pos, const glm::vec3 &dir, const mesh::polyhedron<base_t> &poly) {
const auto &vertex_arr = poly._vertices._vertex_arr;
const auto &index_buf = poly._indices._buffer;
const size_t faces = poly._indices._buffer.size() / poly._indices._stride;
std::set<float> inters_dist;
for(size_t face_id = 0; face_id < faces; face_id++) {
const glm::ivec3 id = glm::ivec3(face_id) * poly._indices._stride + glm::ivec3(0,1,2);
const size_t vid1 = index_buf.at(id.x);
const size_t vid2 = index_buf.at(id.y);
const size_t vid3 = index_buf.at(id.z);
glm::vec2 bary_pos(0);
float distance = 0;
bool is_inters = glm::intersectRayTriangle(
pos,
dir,
vertex_arr.at(vid1),
vertex_arr.at(vid2),
vertex_arr.at(vid3),
bary_pos,
distance
);
//! BUG in glm 9.9
// the intersectRayTriangle behaves like a line intersection,
// always returning two opposing intersecting faces
const bool positive = distance > 0 ? true : false;
if(is_inters && positive) {
inters_dist.insert(distance);
}
}
return inters_dist;
}
template<typename base_t>
bool pt_in_triangle (
const glm::vec<2, base_t> &in_pt,
const glm::vec<3, base_t> &in_v1,
const glm::vec<3, base_t> &in_v2,
const glm::vec<3, base_t> &in_v3,
int &out_distance )
{
glm::vec<3, base_t> dist_1 = glm::vec<3, base_t>(in_pt.x, in_pt.y, 0) - in_v1;
glm::vec<3, base_t> dist_2 = glm::vec<3, base_t>(in_pt.x, in_pt.y, 0) - in_v2;
glm::vec<3, base_t> dist_3 = glm::vec<3, base_t>(in_pt.x, in_pt.y, 0) - in_v3;
glm::vec<3, float> e1 = in_v1 - in_v2;
glm::vec<3, float> e2 = in_v2 - in_v3;
glm::vec<3, float> e3 = in_v3 - in_v1;
const base_t d1 = dist_2.x * e1.y - e1.x * dist_2.y;
const base_t d2 = dist_3.x * e2.y - e2.x * dist_3.y;
const base_t d3 = dist_1.x * e3.y - e3.x * dist_1.y;
const bool has_neg = (d1 < 0) || (d2 < 0) || (d3 < 0);
const bool has_pos = (d1 > 0) || (d2 > 0) || (d3 > 0);
const bool is_in = !(has_neg && has_pos);
if(is_in) {
const glm::vec3 n = glm::cross(glm::normalize(e1), glm::normalize(e2));
const float k = glm::compAdd(n*(glm::vec3)in_v1);
out_distance = (k - n.x*in_pt.x-n.y*in_pt.y) / n.z;
}
return is_in;
}
template<swizzle_mode mode, typename base_t, typename ind_buf_t>
std::set<int> get_intersections(const glm::vec<2, base_t> &pos,
const std::vector<glm::vec<3, base_t>> &vert_buffer,
const ind_buf_t &face_indices)
{
std::set<int> inters_dist;
for(auto &f : face_indices) {
const glm::vec<3, base_t> &v1 = hidden::swizzle_vector(vert_buffer[f[0]], mode);
const glm::vec<3, base_t> &v2 = hidden::swizzle_vector(vert_buffer[f[1]], mode);
const glm::vec<3, base_t> &v3 = hidden::swizzle_vector(vert_buffer[f[2]], mode);
int d = 0;
if(pt_in_triangle(pos, v1, v2, v3, d)) {
inters_dist.insert(d);
continue;
}
}
return inters_dist;
}
};
namespace intersection_3d {
inline bool axis_test_x02(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v0 = face[0] - voxel_center;
auto v2 = face[2] - voxel_center;
float fa = std::abs(e.z);
float fb = std::abs(e.y);
float p1 = e.z * v0.y - e.y * v0.z;
float p3 = e.z * v2.y - e.y * v2.z;
float min = p1 > p3 ? p3 : p1;
float max = p1 > p3 ? p1 : p3;
float rad = fa * half_box_size.y + fb * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool axis_test_x01(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v0 = face[0] - voxel_center;
auto v1 = face[1] - voxel_center;
float fa = std::abs(e.z);
float fb = std::abs(e.y);
float p1 = e.z * v0.y - e.y * v0.z;
float p2 = e.z * v1.y - e.y * v1.z;
float min = p1 > p2 ? p2 : p1;
float max = p1 > p2 ? p1 : p2;
float rad = (fa + fb) * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool axis_test_y02(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v0 = face[0] - voxel_center;
auto v2 = face[2] - voxel_center;
float fa = std::abs(e.z);
float fb = std::abs(e.x);
float p1 = -e.z * v0.x + e.x * v0.z;
float p2 = -e.z * v2.x + e.x * v2.z;
float min = p1 > p2 ? p2 : p1;
float max = p1 > p2 ? p1 : p2;
float rad = (fa + fb) * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool axis_test_y01(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v0 = face[0] - voxel_center;
auto v1 = face[1] - voxel_center;
float fa = std::abs(e.z);
float fb = std::abs(e.x);
float p1 = -e.z * v0.x + e.x * v0.z;
float p2 = -e.z * v1.x + e.x * v1.z;
float min = p1 > p2 ? p2 : p1;
float max = p1 > p2 ? p1 : p2;
float rad = (fa + fb) * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool axis_test_z12(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v1 = face[1] - voxel_center;
auto v2 = face[2] - voxel_center;
float fa = std::abs(e.y);
float fb = std::abs(e.x);
float p1 = e.y * v1.x - e.x * v1.y;
float p2 = e.y * v2.x - e.x * v2.y;
float min = p1 > p2 ? p2 : p1;
float max = p1 > p2 ? p1 : p2;
float rad = (fa + fb) * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool axis_test_z01(const glm::vec3 &e, const glm::vec3 &voxel_center, const std::array<glm::vec3, 3> &face, const glm::vec3 &half_box_size) {
auto v1 = face[0] - voxel_center;
auto v2 = face[1] - voxel_center;
float fa = std::abs(e.y);
float fb = std::abs(e.x);
float p1 = e.y * v1.x - e.x * v1.y;
float p2 = e.y * v2.x - e.x * v2.y;
float min = p1 > p2 ? p2 : p1;
float max = p1 > p2 ? p1 : p2;
float rad = (fa + fb) * half_box_size.z;
if (min > rad || max < -rad) {
return true;
}
return false;
}
inline bool allGreaterThan(const std::array<float, 3> &v, const float c) {
for(auto &p : v) {
if(p < c) return false;
}
return true;
}
inline bool allSmallerThan(const std::array<float, 3> &v, const float c) {
for(auto &p : v) {
if(p > c) return false;
}
return true;
}
inline bool plane_box_overlap(const hidden::plane_t &plane, const glm::vec3 &voxel_center, const glm::vec3 &halfboxsize) {
glm::vec3 vmax = voxel_center;
glm::vec3 vmin = voxel_center;
for (int dim = 0; dim < 3; dim++) {
if (plane.normal[dim] > 0) {
vmin[dim] += -halfboxsize[dim];
vmax[dim] += halfboxsize[dim];
}
else {
vmin[dim] += halfboxsize[dim];
vmax[dim] += -halfboxsize[dim];
}
}
if (glm::dot(plane.normal, vmin) + plane.d > 0) {
return false;
}
if (glm::dot(plane.normal, vmax) + plane.d >= 0) {
return true;
}
return false;
}
//! return true if outside
//! return false if maybe inside
inline bool face_in_hexahedron(const std::array<glm::vec3, 3> &face, const glm::vec3 voxel_center, const glm::vec3 &half_box_size) {
const glm::vec3 e1 = face[2] - face[1];
const glm::vec3 e2 = face[0] - face[2];
const glm::vec3 norm = glm::cross(e1, e2);
if(!plane_box_overlap({norm, -glm::dot(norm, face[0])}, voxel_center, half_box_size)) {
return false;
}
const glm::vec3 e0 = face[1] - face[0];
if(axis_test_x02(e0, voxel_center, face, half_box_size)) return false;
if(axis_test_x02(e1, voxel_center, face, half_box_size)) return false;
if(axis_test_x01(e2, voxel_center, face, half_box_size)) return false;
if(axis_test_y02(e0, voxel_center, face, half_box_size)) return false;
if(axis_test_y02(e1, voxel_center, face, half_box_size)) return false;
if(axis_test_y01(e2, voxel_center, face, half_box_size)) return false;
if(axis_test_z12(e0, voxel_center, face, half_box_size)) return false;
if(axis_test_z01(e1, voxel_center, face, half_box_size)) return false;
if(axis_test_z12(e2, voxel_center, face, half_box_size)) return false;
const auto max = voxel_center + half_box_size;
if(allGreaterThan({face[0].x, face[1].x, face[2].x}, max.x)) return false;
if(allGreaterThan({face[0].y, face[1].y, face[2].y}, max.y)) return false;
if(allGreaterThan({face[0].z, face[1].z, face[2].z}, max.z)) return false;
const auto min = voxel_center - half_box_size;
if(allSmallerThan({face[0].x, face[1].x, face[2].x}, min.x)) return false;
if(allSmallerThan({face[0].y, face[1].y, face[2].y}, min.y)) return false;
if(allSmallerThan({face[0].z, face[1].z, face[2].z}, min.z)) return false;
return true;
}
// for voxel shell calculation we invert the test
// we do not check wheter a voxel is in the mesh, but whether a face is in the bbox of the voxel
template<typename base_t>
bool is_shell(const glm::vec3 &pos, const mesh::polyhedron<base_t> &poly, const glm::vec3 &voxel_size) {
const auto &vertex_arr = poly._vertices._vertex_arr;
const auto &index_buf = poly._indices._buffer;
const size_t faces = poly._indices._buffer.size() / poly._indices._stride;
const glm::vec3 half_box_size = voxel_size / 2.f;
for(size_t face_id = 0; face_id < faces; face_id++) {
// walk over faces
const glm::ivec3 id = glm::ivec3(face_id) * poly._indices._stride + glm::ivec3(0,1,2);
const size_t vid1 = index_buf.at(id.x);
const size_t vid2 = index_buf.at(id.y);
const size_t vid3 = index_buf.at(id.z);
// face vertices
const std::array<glm::vec3, 3> face = {
vertex_arr.at(vid1),
vertex_arr.at(vid2),
vertex_arr.at(vid3)
};
if(face_in_hexahedron(face, pos, half_box_size)) return true;
}
return false;
}
};
namespace intersection_2d {
inline bool edge_cutting_rectangle(const std::array<glm::vec2, 3> &triangle, const glm::vec2 &pos, const glm::vec2 &box_hlf) {
const std::array<glm::vec2, 3> edges = {
triangle[2] - triangle[0],
triangle[2] - triangle[1],
triangle[1] - triangle[0]
};
for(const auto &e : edges) {
// line equation
const float m = e.y / e.x;
const float n = e.y - m * e.x;
// solved line equations for rectangle intersections
const float px = (box_hlf.y - n) / m;
const float nx = (-box_hlf.y - n) / m;
const float py = box_hlf.x * m + n;
const float ny = -box_hlf.x * m + n;
int c = 0;
if(std::abs(px) < box_hlf.x)
c++;
if(std::abs(nx) < box_hlf.x)
c++;
if(std::abs(py) < box_hlf.y)
c++;
if(std::abs(ny) < box_hlf.y)
c++;
if(c < 2) return false;
}
return true;
}
inline bool face_in_hexahedron(const std::array<glm::vec3, 3> &face, const glm::vec3 pos, const glm::vec3 &box_hlf) {
const std::array<glm::vec3, 3> f = {
face[0]-pos,
face[1]-pos,
face[2]-pos
};
const glm::vec2 b_x = { box_hlf.y, box_hlf.z };
const std::array<glm::vec2, 3> f_x = {
f[0].yz(),
f[1].yz(),
f[2].yz()
};
if(!edge_cutting_rectangle(f_x, { 0, 0 }, b_x)) return false;
const glm::vec2 b_y = { box_hlf.x, box_hlf.z };
const std::array<glm::vec2, 3> f_y = {
f[0].xz(),
f[1].xz(),
f[2].xz()
};
if(!edge_cutting_rectangle(f_y, { 0, 0 }, b_y)) return false;
const glm::vec2 b_z = { box_hlf.x, box_hlf.y };
const std::array<glm::vec2, 3> f_z = {
f[0].xy(),
f[1].xy(),
f[2].xy()
};
if(!edge_cutting_rectangle(f_z, { 0, 0 }, b_z)) return false;
return true;
}
};
};
<file_sep>/checks.cpp
#include "checks.h"
#include <chrono>
#include "glm_ext/glm_extensions.h"
<file_sep>/mesh/polyhedron.tpp
#include "../timer.h"
namespace mesh {
template<typename base_t>
// Uniformly scales every vertex about the origin by `dim`.
// (Removed a bounding_box()/cur_size computation whose result was never
// used — it cost a full extra pass over the vertices on every call.)
void polyhedron<base_t>::scale(const base_t dim) {
    benchmark::timer tmp("scale()");
    for(size_t i = 0; i < _vertices.size(); i++) {
        _vertices[i] *= dim;
    }
}
template<typename base_t>
// Scales every vertex about the origin, per-axis, by `dim`.
// (Removed a bounding_box()/cur_size computation whose result was never
// used — it cost a full extra pass over the vertices on every call.)
void polyhedron<base_t>::scale(const glm::vec<3, base_t> &dim) {
    benchmark::timer tmp("scale()");
    for(size_t i = 0; i < _vertices.size(); i++) {
        _vertices[i] *= dim;
    }
}
//! Translates the mesh so its bbox minimum sits at the origin and uniformly
//! rescales it so the longest bbox edge becomes 1.
//! Only valid for floating point base types (enforced at compile time).
template<typename base_t>
void polyhedron<base_t>::normalize() {
    constexpr bool is_flt = std::is_floating_point<base_t>::value;
    static_assert(is_flt, "normalize(): type must be floating point");

    benchmark::timer tmp("normalize()");

    const stl::bbox<base_t> bbox = bounding_box();
    // longest edge of the bounding box defines the uniform scale factor
    const base_t longest_edge = glm::compMax(bbox._max - bbox._min);
    for(auto &vertex : _vertices) {
        vertex = (vertex - bbox._min) / longest_edge;
    }
}
//! Non-mutating variant of scale(): returns a uniformly scaled copy of <in>.
template<typename base_t>
polyhedron<base_t> polyhedron<base_t>::scaled(const polyhedron<base_t> &in, const base_t dim) {
    polyhedron<base_t> out = in;
    out.scale(dim);
    return out;
}
//! Non-mutating variant of scale(): returns a component-wise scaled copy of <in>.
template<typename base_t>
polyhedron<base_t> polyhedron<base_t>::scaled(const polyhedron<base_t> &in, const glm::vec<3, base_t> &dim) {
    polyhedron<base_t> out = in;
    out.scale(dim);
    return out;
}
//! Non-mutating variant of normalize(): returns a normalized copy of <in>.
template<typename base_t>
polyhedron<base_t> polyhedron<base_t>::normalized(const polyhedron<base_t> &in){
    polyhedron<base_t> out = in;
    out.normalize();
    return out;
}
//! Writes the mesh to <file> as a Wavefront OBJ ("v" and "f" records only).
//! OBJ face indices are 1-based, hence the +1 on every index.
//! Fixed: the vertex loop previously iterated by value, copying every vertex;
//! it now iterates by const reference.
template<typename base_t>
void polyhedron<base_t>::to_obj(const std::string &file) const {
    std::ofstream obj_file(file);
    for(const auto &v : this->_vertices) {
        obj_file << "v " << v.x << " " << v.y << " " << v.z << std::endl;
    }
    for(const auto &f : _indices._buffer) {
        obj_file << "f " << f[0]+1 << " " << f[1]+1 << " " << f[2]+1 << std::endl;
    }
    obj_file.close();
}
//! Computes the axis-aligned bounding box by folding every vertex into a
//! default-constructed bbox via extend().
template<typename base_t>
stl::bbox<base_t> polyhedron<base_t>::bounding_box() const {
    stl::bbox<base_t> box;
    for(const auto &vertex : this->_vertices) {
        box.extend(vertex);
    }
    return box;
}
//! Returns the edge lengths of the axis-aligned bounding box.
template<typename base_t>
glm::vec<3, base_t> polyhedron<base_t>::dim() const {
    const auto box = bounding_box();
    return box._max - box._min;
}
//! Edge-manifoldness check: every edge must be shared by exactly two faces.
//! NOTE(review): despite the name, this returns true when the mesh has NO
//! issues (is 2-manifold along every edge) and false otherwise — callers
//! depend on that polarity, so the name cannot be flipped here.
template<typename base_t>
bool polyhedron<base_t>::issues() const {
    for(const auto &p : _edges) {
        // _edges maps an edge to the number of faces adjacent to it
        const size_t _face_count = p.second;
        if(_face_count != 2) return false;
    }
    return true;
}
};
<file_sep>/rasterizer.h
#pragma once
#include "glm_ext/glm_extensions.h"
#include "mesh/polyhedron.h"
#include "checks.h"
#include "xml_config.h"
#include "buffer.h"
#include "timer.h"
#include "tree/tree.h"
#include "enums.h"
#include <set>
#include <vector>
namespace rasterize {
//! Result container of a rasterization pass.
template<typename base_t>
struct voxel_arr {
    glm::ivec3 _arr_dim;            // voxel grid dimensions (x, y, z)
    buffer3d<int8_t> _voxels;       // per-voxel state (see voxel_type)
    mesh::polyhedron<base_t> _mesh; // rescaled input mesh
    size_t _num_voxels = 0;         // number of set voxels; default member
                                    // initializer guarantees a defined value
                                    // even when the field is omitted from an
                                    // aggregate initializer
};
//! Triangle area via Heron's formula.
//! v1, v2, v3: triangle corners (any glm vector type usable with glm::length).
//! Fixed: for degenerate/needle triangles floating point rounding can make the
//! radicand slightly negative, which previously produced NaN from std::sqrt;
//! it is now clamped to zero.
template<typename vec_t>
float area(const vec_t &v1, const vec_t &v2, const vec_t &v3) {
    const float le1 = glm::length(v2-v1);
    const float le2 = glm::length(v3-v1);
    const float le3 = glm::length(v3-v2);
    const float p = 0.5f * (le1 + le2 + le3);   // semi-perimeter
    const float radicand = p * (p - le1) * (p - le2) * (p - le3);
    return std::sqrt(radicand > 0.f ? radicand : 0.f);
}
//! Normalizes and scales <in_mesh> to <in_scale> and builds, for each of the
//! three principal planes, a 2D grid of candidate faces: cell (a,b) holds
//! every face whose projected bounding box covers that cell. The rasterizer
//! later only ray-casts against these per-cell candidate lists instead of
//! testing all faces.
//! out_xy / out_yz / out_xz: per-plane search buffers (overwritten here).
//! Returns the normalized + scaled copy of the mesh.
//! NOTE(review): <id_t> is not the class-local alias declared inside all_fast;
//! presumably it resolves to a typedef visible at namespace scope — confirm it
//! matches mesh::polyhedron<base_t>::index_t.
template<typename base_t>
mesh::polyhedron<base_t> prepare_index_buffers(
    const mesh::polyhedron<base_t> &in_mesh,
    const glm::ivec3 &in_scale,
    buffer3d<mesh::face<id_t>> &out_xy,
    buffer3d<mesh::face<id_t>> &out_yz,
    buffer3d<mesh::face<id_t>> &out_xz
)
{
    benchmark::timer tmp("prepare_index_buffers()");

    mesh::polyhedron<base_t> mesh;
    mesh = mesh::polyhedron<base_t>::normalized(in_mesh);
    mesh = mesh::polyhedron<base_t>::scaled(mesh, in_scale);
    const glm::ivec3 dim = glm::ceil(mesh.dim());

    auto &indices = mesh._indices;
    out_xy = buffer3d<mesh::face<id_t>>(dim.x, dim.y, 0);
    out_yz = buffer3d<mesh::face<id_t>>(dim.y, dim.z, 0);
    out_xz = buffer3d<mesh::face<id_t>>(dim.x, dim.z, 0);

    // run over target voxel coordinates
    // and check whether a face cuts a position in one of the planes
    size_t num_elems = 0;
    for(const mesh::face<id_t> &f : indices._buffer) {
        const auto &v1 = mesh._vertices[f[0]];
        const auto &v2 = mesh._vertices[f[1]];
        const auto &v3 = mesh._vertices[f[2]];

        // projected face areas: faces nearly parallel to a ray direction
        // (projected area <= area_bias) are skipped for that plane
        constexpr float area_bias = 0.1;
        const float a_xz = area(v1.xz(), v2.xz(), v3.xz());
        const float a_xy = area(v1.xy(), v2.xy(), v3.xy());
        const float a_yz = area(v1.yz(), v2.yz(), v3.yz());

        // calc bbox around the face
        // we use the bbox to create a simple search buffer (for later rasterization)
        const glm::ivec3 min = glm::floor(glm::min(glm::min(v1, v2), v3));
        const glm::ivec3 max = glm::ceil(glm::max(glm::max(v1, v2), v3));

        // using the bbox:
        // insert the face indices into the buffers
        // do this for each major plane..
        // xy
        if(a_xy > area_bias) {
            for(int x = min.x; x < max.x; x++)
            for(int y = min.y; y < max.y; y++) {
                out_xy[x][y].push_back(f);
                num_elems += 3;
            }
        }
        // yz
        if(a_yz > area_bias) {
            for(int y = min.y; y < max.y; y++)
            for(int z = min.z; z < max.z; z++) {
                out_yz[y][z].push_back(f);
                num_elems += 3;
            }
        }
        // xz
        if(a_xz > area_bias) {
            for(int x = min.x; x < max.x; x++)
            for(int z = min.z; z < max.z; z++) {
                out_xz[x][z].push_back(f);
                num_elems += 3;
            }
        }
    }

    // rough memory footprint of the three buffers (diagnostic output only)
    float s_mb = (float)((num_elems * sizeof(id_t)) / std::pow(1024, 2));
    std::cout << "search buffers are " << (size_t)s_mb << " MBytes" << std::endl;
    return mesh;
}
//! generates a voxel mesh from an arbitrary polyhedron
//! is very fast but memory consumption does not scale well
//! Strategy: ray casting along all three principal axes using the per-plane
//! candidate buffers from prepare_index_buffers(). Voxels at intersection
//! coordinates become shell voxels; voxels between intersection pairs are
//! tagged via the even/odd (parity) rule once per pass.
template<typename base_t>
class all_fast {
    using id_t = typename mesh::polyhedron<base_t>::index_t;

    mesh::polyhedron<base_t> _polyhedron;   // normalized + scaled copy of the input
    // per-plane candidate face lists (built in the constructor)
    buffer3d<mesh::face<id_t>> _xy_plane_buffer;
    buffer3d<mesh::face<id_t>> _yz_plane_buffer;
    buffer3d<mesh::face<id_t>> _xz_plane_buffer;

public:
    all_fast(const mesh::polyhedron<base_t> &poly, glm::ivec3 dim) {
        _polyhedron = prepare_index_buffers(poly, dim, _xy_plane_buffer, _yz_plane_buffer, _xz_plane_buffer);
    }

    //! Classifies every voxel as empty, shell or interior.
    //! Interior detection uses three passes: pass 1 writes 1 for "inside",
    //! passes 2 and 3 left-shift that value by the pass's inside flag, so only
    //! voxels found inside in all three passes end up as 1 << 1 << 1 —
    //! presumably equal to voxel_type::interior (verify against enums.h).
    voxel_arr<base_t> rasterize() const {
        const glm::ivec3 dim = glm::ceil(_polyhedron.dim());
        voxel_arr<base_t> res = { dim, buffer3d<int8_t>(dim.x, dim.y, dim.z, 0), _polyhedron };

        // create timer object
        benchmark::timer tmp("rasterize()");

        // pass 1: rays along x through every (y,z) cell.
        // NOTE(review): the initial parity is flipped when the total
        // intersection count is odd — presumably a guard for open meshes or
        // grazing hits; confirm the intent.
#pragma omp parallel for
        for(int y = 0; y < dim.y; y++)
        for(int z = 0; z < dim.z; z++) {
            std::set<int> intersections = checks::raycast::get_intersections<yzx>(glm::vec2(y,z), _polyhedron._vertices, _yz_plane_buffer[y][z]);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, dim.x-1, inters); // ensure we do not exceed array boundaries
                res._voxels[inters][y][z] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.x ? inters : dim.x-1;   // redundant re-clamp (already constrained above)
                    if(res._voxels[i][y][z] == voxel_type::shell) continue;
                    res._voxels[i][y][z] = is_in; // initially set voxel to 1
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        // pass 2: rays along y through every (x,z) cell
#pragma omp parallel for
        for(int x = 0; x < dim.x; x++)
        for(int z = 0; z < dim.z; z++) {
            std::set<int> intersections = checks::raycast::get_intersections<xzy>(glm::vec2(x,z), _polyhedron._vertices, _xz_plane_buffer[x][z]);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, dim.y-1, inters); // ensure we do not exceed array boundaries
                res._voxels[x][inters][z] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.y ? inters : dim.y-1;   // redundant re-clamp (already constrained above)
                    if(res._voxels[x][i][z] == voxel_type::shell) continue;
                    res._voxels[x][i][z] <<= is_in; // first bit shift; now the value should be either 0 or voxel_type::interior
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        // pass 3: rays along z through every (x,y) cell
#pragma omp parallel for
        for(int x = 0; x < dim.x; x++)
        for(int y = 0; y < dim.y; y++) {
            std::set<int> intersections = checks::raycast::get_intersections<xyz>(glm::vec2(x,y), _polyhedron._vertices, _xy_plane_buffer[x][y]);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, dim.z-1, inters); // ensure we do not exceed array boundaries
                res._voxels[x][y][inters] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.z ? inters : dim.z-1;   // redundant re-clamp (already constrained above)
                    if(res._voxels[x][y][i] == voxel_type::shell) continue;
                    res._voxels[x][y][i] <<= is_in; // second bit shift; now the value should be either 0 or voxel_type::interior
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        return res;
    }
};
//! buffer for the intersections
//! used by rasterize::all_rle
//! for memory optimization
//! Each 2D cell stores ray/mesh intersection coordinates along the remaining
//! (third) axis — presumably filled by get_intersections<>() ray casts; see
//! the (currently disabled) code in all_oct::rasterize() for the intended use.
struct intersections {
    //! xy plane
    buffer3d<int> xy;
    //! yz plane
    buffer3d<int> yz;
    //! xz plane
    buffer3d<int> xz;
};
//! generates a voxel mesh from an arbitrary polyhedron
//! uses fast run length encoding to minimize the size
//! Builds one quad tree per principal plane over the projected faces; the
//! trees act as spatial search structures for the subsequent ray casts. Trees
//! are capped at 1024 units per edge and the mesh coordinates are scaled down
//! accordingly when the model is larger.
//! NOTE(review): rasterize() is entirely commented out at the moment and
//! returns a default-constructed (empty) voxel_arr — the all_oct path does not
//! yet produce voxels.
template<typename base_t>
class all_oct {
    using id_t = typename mesh::polyhedron<base_t>::index_t;
    using payload_t = point<glm::vec<2, base_t>, mesh::face<id_t>>;

    mesh::polyhedron<base_t> _polyhedron;   // normalized + scaled copy of the input
    // one quad tree per principal plane
    tree<quad::boundary, payload_t> _xy_tree;
    tree<quad::boundary, payload_t> _yz_tree;
    tree<quad::boundary, payload_t> _xz_tree;

    // scale factors for quad trees
    glm::vec<3, base_t> _tree_scale_factors = glm::vec<3, base_t>(1);
    float _tree_scale_factor = 1;

public:
    all_oct(const mesh::polyhedron<base_t> &poly, glm::ivec3 scale) {
        benchmark::timer t("Generate quad trees");
        _polyhedron = mesh::polyhedron<base_t>::normalized(poly);
        _polyhedron = mesh::polyhedron<base_t>::scaled(_polyhedron, scale);

        glm::vec<3, base_t> dim = glm::ceil(_polyhedron.dim());
        glm::vec<3, base_t> dim_half = dim / 2.f;

        // search trees always scaled to <max_tree_size> (save some memory)
        constexpr int max_tree_size = 1024;
        const glm::vec<3, base_t> scale_factors = glm::vec<3, base_t>(max_tree_size) / dim;
        const float scale_factor = glm::compMin(scale_factors);
        if(scale_factor < 1) {
            _tree_scale_factor = scale_factor;
            _tree_scale_factors = scale_factors;
        }

        // boundaries are centered at half the (scaled) model size
        _xy_tree = tree<quad::boundary, payload_t>(quad::boundary(dim_half.xy()*_tree_scale_factor, dim_half.xy()*_tree_scale_factor));
        _yz_tree = tree<quad::boundary, payload_t>(quad::boundary(dim_half.yz()*_tree_scale_factor, dim_half.yz()*_tree_scale_factor));
        _xz_tree = tree<quad::boundary, payload_t>(quad::boundary(dim_half.xz()*_tree_scale_factor, dim_half.xz()*_tree_scale_factor));

        // build quad trees: for every face, insert it at each integer cell of
        // its projected bbox that actually lies inside the projected triangle
        for (const auto &f : _polyhedron._indices._buffer) {
            const glm::vec<3, base_t> v1 = _polyhedron._vertices[f[0]] * _tree_scale_factor;
            const glm::vec<3, base_t> v2 = _polyhedron._vertices[f[1]] * _tree_scale_factor;
            const glm::vec<3, base_t> v3 = _polyhedron._vertices[f[2]] * _tree_scale_factor;

            glm::ivec3 min = glm::floor(glm::min(v1, glm::min(v2, v3)));
            glm::ivec3 max = glm::ceil(glm::max(v1, glm::max(v2, v3)));

            for(int x = min.x; x < max.x; x++)
            for(int y = min.y; y < max.y; y++) {
                int d = 0;
                const bool is_in = checks::raycast::pt_in_triangle(glm::vec2(x,y), v1.xyz(), v2.xyz(), v3.xyz(), d);
                if(is_in) {
                    _xy_tree.insert(point((glm::vec2(x,y)), f));
                }
            }
            for(int y = min.y; y < max.y; y++)
            for(int z = min.z; z < max.z; z++) {
                int d = 0;
                const bool is_in = checks::raycast::pt_in_triangle(glm::vec2(y,z), v1.yzx(), v2.yzx(), v3.yzx(), d);
                if(is_in) {
                    _yz_tree.insert(point((glm::vec2(y,z)), f));
                }
            }
            for(int x = min.x; x < max.x; x++)
            for(int z = min.z; z < max.z; z++) {
                int d = 0;
                const bool is_in = checks::raycast::pt_in_triangle(glm::vec2(x,z), v1.xzy(), v2.xzy(), v3.xzy(), d);
                if(is_in) {
                    _xz_tree.insert(point((glm::vec2(x,z)), f));
                }
            }
        }
        test_tree(_xy_tree);
        test_tree(_yz_tree);
        test_tree(_xz_tree);
    }

    //! NOTE(review): the whole body below is disabled; both draft
    //! implementations (intersection export and full voxel classification)
    //! are kept as comments. The method currently returns an empty result.
    voxel_arr<base_t> rasterize() const {
        // create timer object
        benchmark::timer tmp("rasterize()");
        const glm::vec3 dim = glm::ceil(_polyhedron.dim());
        // the quad tree is scaled up or down to 128 x Y x Z voxels
        // thus, we calc the scale factors to access the tree
        const glm::vec3 search_dist = (1.f / _tree_scale_factors); // slighty bigger than 1/2 which equals a whole step in case of some rounding noise
/*
        intersections r;
        r.yz = buffer3d<int>(dim.y, dim.z, 0);
#pragma omp parallel for
        for(int y = 0; y < dim.y; y++)
        for(int z = 0; z < dim.z; z++) {
            auto buffer = _yz_tree.find(glm::vec2(y*_tree_scale_factor, z*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<yzx>(glm::ivec2(y,z), _polyhedron._vertices, buffer);
            std::copy(intersections.begin(), intersections.end(), std::back_inserter(r.yz[y][z]));
        }
        r.xz = buffer3d<int>(dim.x, dim.z, 0);
#pragma omp parallel for
        for(int x = 0; x < dim.x; x++)
        for(int z = 0; z < dim.z; z++) {
            auto buffer = _xz_tree.find(glm::vec2(x*_tree_scale_factor, z*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<xzy>(glm::ivec2(x,z), _polyhedron._vertices, buffer);
            std::copy(intersections.begin(), intersections.end(), std::back_inserter(r.xz[x][z]));
        }
        r.xy = buffer3d<int>(dim.x, dim.y, 0);
#pragma omp parallel for
        for(int x = 0; x < dim.x; x++)
        for(int y = 0; y < dim.y; y++) {
            auto buffer = _xy_tree.find(glm::vec2(x*_tree_scale_factor, y*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<xyz>(glm::ivec2(x,y), _polyhedron._vertices, buffer);
            std::copy(intersections.begin(), intersections.end(), std::back_inserter(r.xy[x][y]));
        }
        return r;
*/
/*
        voxel_arr<base_t> res = { dim, buffer3d<int8_t>(dim.x, dim.y, dim.z, 0), _polyhedron };
        for(int y = 0; y < dim.y; y++)
        for(int z = 0; z < dim.z; z++) {
            auto buffer = _yz_tree.find(glm::vec2(y*_tree_scale_factor, z*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<yzx>(glm::vec2(y,z), _polyhedron._vertices, buffer);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, (int)dim.x-1, inters); // ensure we do not exceed array boundaries
                res._voxels[inters][y][z] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.x ? inters : dim.x-1;
                    if(res._voxels[i][y][z] == voxel_type::shell) continue;
                    res._voxels[i][y][z] = is_in; // initially set voxel to 1
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        for(int x = 0; x < dim.x; x++)
        for(int z = 0; z < dim.z; z++) {
            auto buffer = _xz_tree.find(glm::vec2(x*_tree_scale_factor, z*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<xzy>(glm::vec2(x,z), _polyhedron._vertices, buffer);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, (int)dim.y-1, inters); // ensure we do not exceed array boundaries
                res._voxels[x][inters][z] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.y ? inters : dim.y-1;
                    if(res._voxels[x][i][z] == voxel_type::shell) continue;
                    res._voxels[x][i][z] <<= is_in; // first bit shift; now the value should be either 0 or voxel_type::interior
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        for(int x = 0; x < dim.x; x++)
        for(int y = 0; y < dim.y; y++) {
            auto buffer = _xy_tree.find(glm::vec2(x*_tree_scale_factor, y*_tree_scale_factor), search_dist);
            std::set<int> intersections = checks::raycast::get_intersections<xyz>(glm::vec2(x,y), _polyhedron._vertices, buffer);
            bool is_in = intersections.size() % 2 == 0 ? false : true;
            int from = 0;
            for(int inters : intersections) {
                inters = constrain(0, (int)dim.z-1, inters); // ensure we do not exceed array boundaries
                res._voxels[x][y][inters] = voxel_type::shell;
                for(int i = from; i < inters; i++) {
                    inters = inters < dim.z ? inters : dim.z-1;
                    if(res._voxels[x][y][i] == voxel_type::shell) continue;
                    res._voxels[x][y][i] <<= is_in; // second bit shift; now the value should be either 0 or voxel_type::interior
                }
                from = inters+1;
                is_in = !is_in;
            }
        }
        return res;
*/
        return {};
    }
};
//! generates a voxel mesh from an arbitrary polyhedron
//! Produces only the shell: for every face, each voxel inside the face's
//! bounding box is tested with checks::intersection_3d::face_in_hexahedron().
//! No interior classification is performed.
template<typename base_t>
class shell_only {
    mesh::polyhedron<base_t> _polyhedron;   // normalized + scaled copy of the input

public:
    shell_only(const mesh::polyhedron<base_t> &poly, const glm::ivec3 &dim) {
        _polyhedron = poly;
        _polyhedron.normalize();
        _polyhedron.scale(dim);
    }

    //! NOTE(review): `voxel_arr res = {...}` relies on class template argument
    //! deduction for an aggregate (C++20) — confirm the project's language
    //! standard, otherwise this needs an explicit <base_t>.
    voxel_arr<base_t> rasterize() const {
        benchmark::timer tmp("rasterize()");
        const glm::ivec3 dim = glm::ceil(_polyhedron.dim());
        voxel_arr res = { dim, buffer3d<int8_t>(dim.x, dim.y, dim.z, 0), _polyhedron };

        const size_t stride = _polyhedron._indices._stride;
        const auto &index_buf = _polyhedron._indices._buffer;
        const auto &vertex_buffer = _polyhedron._vertices;
        // NOTE(review): other code paths treat _indices._buffer as a list of
        // face objects, but here it is indexed as a flat scalar index buffer
        // (face_id * stride + 0..2, assigned to uint32_t) — verify these two
        // views of the buffer are actually compatible.
        for(size_t face_id = 0; face_id < index_buf.size() / stride; face_id++) {
            const glm::ivec3 id = glm::ivec3(face_id) * _polyhedron._indices._stride;
            const uint32_t vid1 = index_buf[id.x+0];
            const uint32_t vid2 = index_buf[id.y+1];
            const uint32_t vid3 = index_buf[id.z+2];
            std::array<glm::vec<3, base_t>, 3> face = {
                vertex_buffer[vid1],
                vertex_buffer[vid2],
                vertex_buffer[vid3]
            };

            // local bbox around the face, padded by half a voxel on each side
            const glm::vec3 lmin = glm::floor(glm::min(face[2], glm::min(face[0], face[1]))) - glm::vec3(0.5f);
            const glm::vec3 lmax = glm::ceil(glm::max(face[2], glm::max(face[0], face[1]))) + glm::vec3(0.5f);
            const glm::ivec3 lsteps = lmax - lmin;

            // move the face into the local bbox frame
            face[0] -= lmin;
            face[1] -= lmin;
            face[2] -= lmin;

            const glm::ivec3 offs = lmin;
            // NOTE(review): nx/ny/nz = local index + lmin can become negative
            // for faces touching the model's minimum corner (lmin carries the
            // -0.5 padding) — potential out-of-bounds access; confirm.
            for(int x = 0; x < lsteps.x; x++)
            for(int y = 0; y < lsteps.y; y++)
            for(int z = 0; z < lsteps.z; z++) {
                int nx = x + lmin.x;
                int ny = y + lmin.y;
                int nz = z + lmin.z;
                if(res._voxels[nx][ny][nz]) continue;
                if(checks::intersection_3d::face_in_hexahedron(face, {x,y,z}, glm::vec3(0.5))) {
                    res._voxels[nx][ny][nz] = voxel_type::shell;
                    res._num_voxels++;
                }
            }
        }
        std::cout << "created " << (float)res._num_voxels/1000000 << " M voxels" << std::endl;
        return res;
    }
};
};
<file_sep>/glm_ext/glm_extensions.h
#pragma once
//#define GLM_FORCE_SSE2
//#define GLM_FORCE_AVX
#define GLM_FORCE_SWIZZLE
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/closest_point.hpp>
#include <glm/gtx/intersect.hpp>
#include <glm/gtc/epsilon.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/projection.hpp>
#include <glm/gtx/vector_angle.hpp>
#include <glm/gtx/transform.hpp>
#include <glm/gtx/component_wise.hpp>
#include <tuple>
// Strict weak lexicographic ordering for glm 2D vectors (x first, then y);
// allows vec2 to be used as a key in std::map / std::set.
template <typename base_t>
struct compare_glm_vec2 {
    bool operator() (const glm::vec<2, base_t>& lhs, const glm::vec<2, base_t>& rhs) const {
        return std::tie(lhs.x, lhs.y) < std::tie(rhs.x, rhs.y);
    }
};
// Strict weak lexicographic ordering for glm 3D vectors (x, then y, then z);
// allows vec3 to be used as a key in std::map / std::set.
template <typename base_t>
struct compare_glm_vec3 {
    bool operator() (const glm::vec<3, base_t>& lhs, const glm::vec<3, base_t>& rhs) const {
        return std::tie(lhs.x, lhs.y, lhs.z) < std::tie(rhs.x, rhs.y, rhs.z);
    }
};
// Clamps <val> into the inclusive range [min, max].
template <typename base_t>
base_t constrain(const base_t &min, const base_t &max, const base_t &val) {
    if(val < min) return min;
    if(val > max) return max;
    return val;
}
// Component-wise clamp for 3D vectors.
// Fixed: <val> is now taken by const reference — the previous non-const
// reference never modified the argument, rejected temporaries, and was
// inconsistent with the scalar overload above.
template <typename base_t>
glm::vec<3, base_t> constrain(const glm::vec<3, base_t> &min, const glm::vec<3, base_t> &max, const glm::vec<3, base_t> &val) {
    return {
        constrain(min.x, max.x, val.x),
        constrain(min.y, max.y, val.y),
        constrain(min.z, max.z, val.z)
    };
}
<file_sep>/main.cpp
#include <iostream>
#include "voxelizer.h"
#include "rules.h"
#include "checks.h"
#include "stl/stl_import.h"
#include "xml_config.h"
int main()
{
    // load the project description from the current working directory
    cfg::xml_project pro(".");
    // voxelize every shape listed in the project
    voxelize::voxelizer v(pro);
    v.run();
    // export: OBJ of the rescaled input meshes, STL of the voxel hulls
    // (build_stl_cube emits the faces of a single voxel cube)
    v.to_fs_obj();
    v.to_fs_stl<build_stl_cube>();
    //v.to_fs_bytes();
    return 0;
}
<file_sep>/voxelizer.h
#pragma once
#include "glm_ext/glm_extensions.h"
#include "mesh/polyhedron.h"
#include "stl/stl_import.h"
#include "checks.h"
#include "xml_config.h"
#include "rules.h"
#include "buffer.h"
#include "timer.h"
#include "enums.h"
#include "rasterizer.h"
#include "vox_file.h"
#include <string>
#include <set>
#include <map>
#include <vector>
#include <tuple>
#include <filesystem>
#include <fstream>
namespace voxelize {
class voxelizer {
cfg::xml_project _project_cfg;
struct voxel_data_t {
mesh::polyhedron<float> mesh;
cfg::shape_settings cfg;
stl::bbox<float> bbox;
// rasterizer results
rasterize::voxel_arr<float> voxels; // high memory usage
rasterize::intersections intersections; // intersections only
};
std::vector<voxel_data_t> _rasterizer_res;
stl::bbox<float> _prj_bbox;
float _max_grid_size = 0;
float _voxel_size = 0;
private:
//! calculates the project bbox
//! buffers the meshes after creation
void project_bbox() {
const std::filesystem::path path = _project_cfg.project_path();
glm::vec3 glob_min(FLT_MAX);
glm::vec3 glob_max(-FLT_MAX);
for(const cfg::shape_settings &shape : _project_cfg.shapes()) {
const std::filesystem::path file = path / shape._file_in;
const stl::format stl(file.string());
mesh::polyhedron<float> p_flt = stl.to_polyhedron(stl.faces());
const stl::bbox<float> bbox = p_flt.bounding_box();
glob_min = glm::min(glob_min, bbox._min);
glob_max = glm::max(glob_max, bbox._max);
_rasterizer_res.push_back({p_flt, shape, bbox});
}
_prj_bbox = { glob_min, glob_max };
}
int _proj_file_cntr = 0;
std::filesystem::path make_fname(const voxel_data_t &d, std::string ext) {
std::filesystem::path p;
if(d.cfg._file_out.empty()) {
p = std::filesystem::path(_project_cfg.target_dir()) / (std::to_string(_proj_file_cntr++) + ext);
}
else {
p = std::filesystem::path(_project_cfg.target_dir()) / (d.cfg._file_out + ext);
}
return p;
}
public:
voxelizer(const cfg::xml_project &cfg) {
_project_cfg = cfg;
// calc the meshes and the project bbox
project_bbox();
if(_project_cfg.grid_size_defined()) {
_max_grid_size = _project_cfg.max_grid_size();
_voxel_size = _project_cfg.voxel_size();
}
else if(_project_cfg.voxel_size_defined()) {
_voxel_size = _project_cfg.voxel_size();
const float max_bbox_edge = glm::compMax(_prj_bbox._max - _prj_bbox._min);
_max_grid_size = glm::ceil(max_bbox_edge / _voxel_size);
}
}
void to_fs_obj() {
for(const voxel_data_t &mdata : _rasterizer_res) {
const std::filesystem::path p = make_fname(mdata, ".obj");
mdata.mesh.to_obj(p.string());
}
}
template<typename rule_t>
void to_fs_stl() {
const glm::vec3 proj_dim = (_prj_bbox._max - _prj_bbox._min);
const float scalef = _max_grid_size / glm::compMax(proj_dim);
for(const voxel_data_t &mdata : _rasterizer_res) {
const rasterize::voxel_arr<float> &arr = mdata.voxels;
const std::filesystem::path p = make_fname(mdata, ".stl");
benchmark::timer t("voxelizer::to_stl() - " + p.string() + " export took");
auto stlf = stl::format::open(p.string());
for(int i = 0; i < 80; i++)
stl::format::append(stlf, char(0));
// first count hull cubes
int num_cubes = 0;
for(int x = 0; x < arr._arr_dim.x; x++)
for(int y = 0; y < arr._arr_dim.y; y++)
for(int z = 0; z < arr._arr_dim.z; z++) {
if(arr._voxels[x][y][z] != voxel_type::shell) continue;
num_cubes++;
}
uint32_t faces = num_cubes * 12;
stl::format::append(stlf, faces);
glm::vec3 offset;
if(_project_cfg.voxel_size_defined()) {
offset = glm::round(mdata.bbox._min * scalef) * _voxel_size;
}
else if(_project_cfg.grid_size_defined()) {
offset = glm::round((mdata.bbox._min - _prj_bbox._min) * scalef) * _voxel_size;
}
// now write faces of hull cubes into stl
for(int x = 0; x < arr._arr_dim.x; x++)
for(int y = 0; y < arr._arr_dim.y; y++)
for(int z = 0; z < arr._arr_dim.z; z++) {
if(arr._voxels[x][y][z] != voxel_type::shell) continue;
for(stl::face &f : rule_t::mesh(glm::vec3(x,y,z)*_voxel_size+offset, glm::vec3(_voxel_size))) {
stl::format::append(stlf, f);
}
}
stl::format::close(stlf);
}
}
template<array_order order = row_major>
std::vector<uint8_t> to_bytes() const {
glm::vec3 prj_dim_unit = _prj_bbox._max - _prj_bbox._min;
const float scalef = _max_grid_size / glm::compMax(prj_dim_unit);
float voxel_size;
if(_project_cfg.voxel_size_defined()) {
voxel_size = _voxel_size;
}
else if(_project_cfg.grid_size_defined()) {
voxel_size = glm::compMax(prj_dim_unit) / _max_grid_size;
}
else {
std::cerr << "to_bytes() - No valid voxel size" << std::endl;
return {};
}
const glm::ivec3 prj_dim_voxels = glm::ceil(prj_dim_unit / voxel_size);
const glm::ivec3 prj_dim_bound = prj_dim_voxels - 1;
std::vector<uint8_t> buffer(glm::compMul(prj_dim_voxels), 0);
for(const voxel_data_t &mdata : _rasterizer_res) {
benchmark::timer t("voxelizer::to_bytes() - " + mdata.cfg._file_in + " export took");
const glm::vec3 offset = glm::round(mdata.bbox._min * scalef) * _voxel_size;
const rasterize::voxel_arr<float> &arr = mdata.voxels;
const auto &vox_size = arr._arr_dim;
const auto &cfg = mdata.cfg;
for(int x = 0; x < vox_size.x; x++)
for(int y = 0; y < vox_size.y; y++)
for(int z = 0; z < vox_size.z; z++) {
// calculate position in project wide voxel model
// position in project unit
const glm::vec3 pos_unit = glm::vec3(x,y,z)*voxel_size+offset;
// voxel coordinate
glm::ivec3 pos_vox = glm::round(pos_unit / voxel_size);
// ensure boundaries are not exceeded
pos_vox = constrain(glm::ivec3(0), prj_dim_bound, pos_vox);
// 1d index
size_t id;
if constexpr(order == row_major) {
id = pos_vox.z * prj_dim_voxels.x * prj_dim_voxels.y + pos_vox.y * prj_dim_voxels.x + pos_vox.z;
}
else {
id = pos_vox.x * prj_dim_voxels.y * prj_dim_voxels.z + pos_vox.y * prj_dim_voxels.z + pos_vox.x;
}
switch(arr._voxels[x][y][z]) {
case voxel_type::interior:
buffer[id] = cfg._material_inside;
break;
case voxel_type::shell:
buffer[id] = cfg._material_shell;
break;
default:
break;
};
}
}
return buffer;
}
template<array_order order = row_major>
void to_fs_bytes() const {
std::string trg_file = "unamed_prj.raw";
if(!_project_cfg.raw_fname().empty()) {
trg_file = _project_cfg.raw_fname();
}
const std::filesystem::path p = std::filesystem::path(_project_cfg.target_dir()) / trg_file;
benchmark::timer t("voxelizer::to_fs_bytes() - " + p.string() + " export took");
std::vector<uint8_t> buffer = to_bytes();
std::ofstream f(p, std::ios::out | std::ios::binary);
f.write((char*)&buffer[0], buffer.size());
f.close();
}
/*
void to_fs_vox(const std::string &out_file) {
for(const voxel_data_t &mdata : _rasterizer_res) {
const rasterize::voxel_arr<float> &arr = mdata.voxels;
const auto &vox_size = arr._arr_dim;
const auto &cfg = mdata.cfg;
vox::chunk::MAIN chunk_main;
vox::chunk::SIZE chunk_size(vox_size);
vox::chunk::XYZI chunk_xyzi;
for(int x = 0; x < vox_size.x; x++)
for(int y = 0; y < vox_size.y; y++)
for(int z = 0; z < vox_size.z; z++) {
uint8_t v = arr._voxels[x][y][z];
switch(v) {
case voxel_type::interior:
chunk_xyzi.data.push_back(vox::xyzi_t(x, y, z, cfg._material_inside));
break;
case voxel_type::shell:
chunk_xyzi.data.push_back(vox::xyzi_t(x, y, z, cfg._material_shell));
break;
default:
break;
};
}
chunk_main.models.push_back({chunk_size, chunk_xyzi});
std::ofstream f(out_file, std::ios::out | std::ios::binary);
chunk_main.write(f);
f.close();
}
}
*/
void clear() {
_rasterizer_res.clear();
}
void run() {
if(!_project_cfg.voxel_size_defined() && !_project_cfg.grid_size_defined()) {
std::cerr << "Neither maximum grid size, nor voxel size defined in *.xml file" << std::endl;
return;
}
const glm::vec3 proj_dim = (_prj_bbox._max - _prj_bbox._min);
const float scalef = _max_grid_size / glm::compMax(proj_dim);
for(voxel_data_t &mdata : _rasterizer_res) {
// calculate uniorm scale factor (+/- 1 voxel)
// keeping the size ratio of each voxel model in the project constant to each other
const glm::vec3 mesh_dim = (mdata.bbox._max - mdata.bbox._min) * scalef;
const float dim = glm::round(glm::compMax(mesh_dim));
std::cout << "Process file: " << mdata.cfg._file_in << std::endl;
//mdata.voxels = rasterize::all_fast(mdata.mesh, glm::ivec3(dim)).rasterize();
mdata.voxels = rasterize::all_oct(mdata.mesh, glm::ivec3(dim)).rasterize();
//mdata.voxels = rasterize::shell_only(mdata.mesh, _max_grid_size).rasterize();
}
}
};
};
| 478203afff48983ffcb3e329dc83f23cab30ae85 | [
"CMake",
"C++"
] | 11 | C++ | dgrat/voxelizer | 7cc166a71a39db5317c71dd676cf81a0a93c53fb | a9c53fc95b168156447362c228eee7221cf6319b |
refs/heads/main | <repo_name>ccsc-tools/RNN-CME-prediction<file_sep>/README.md
## Predicting Coronal Mass Ejections Using SDO/HMI Vector Magnetic Data Products and Recurrent Neural Networks
[](https://zenodo.org/badge/latestdoi/432874495)
## Authors
<NAME>, <NAME>, <NAME>, and <NAME>
## Contributors
<NAME> and <NAME>
## Abstract
We present two recurrent neural networks (RNNs), one based on gated recurrent units and the other based on long short-term memory, for predicting whether an active region (AR) that produces an M- or X-class flare will also produce a coronal mass ejection (CME). We model data samples in an AR as time series and use the RNNs to capture temporal information of the data samples. Each data sample has 18 physical parameters, or features, derived from photospheric vector magnetic field data taken by the Helioseismic and Magnetic Imager (HMI) on board the Solar Dynamics Observatory (SDO). We survey M- and X-class flares that occurred from 2010 May to 2019 May using the Geostationary Operational Environmental Satellite's X-ray flare catalogs provided by the National Centers for Environmental Information (NCEI), and select those flares with identified ARs in the NCEI catalogs. In addition, we extract the associations of flares and CMEs from the Space Weather Database Of Notifications, Knowledge, Information (DONKI). We use the information gathered above to build the labels (positive versus negative) of the data samples at hand. Experimental results demonstrate the superiority of our RNNs over closely related machine learning methods in predicting the labels of the data samples. We also discuss an extension of our approach to predict a probabilistic estimate of how likely an M- or X-class flare will initiate a CME, with good performance results. To our knowledge this is the first time that RNNs have been used for CME prediction.
## Binder
This notebook is Binder enabled and can be run on [mybinder.org](https://mybinder.org/) by using the link below.
### ccsc_CMEpredict.ipynb (Jupyter Notebook for CMEPredict)
[](https://mybinder.org/v2/gh/ccsc-tools/RNN-CME-prediction/HEAD?labpath=ccsc_CMEpredict.ipynb)
Please note that it may take some time for Binder to create and start the image.
For the latest updates of CMEPredict refer to [https://github.com/deepsuncode/RNN-CME-prediction](https://github.com/deepsuncode/RNN-CME-prediction)
## Installation on local machine
Requires `Python==3.9.x` (was tested on 3.9.13)
Run `pip install -r requirements.txt` (recommended), or manually install the following packages and specified versions:
| Library | Version | Description |
|--------------|---------|--------------------------------|
| numpy | 1.21.6 | Data processing |
| pandas | 1.4.4 | Data analysis |
| scikit-learn | 1.0.2 | Neural network libraries |
| matplotlib | 3.5.2 | Plotting and graphs |
| tensorflow | 2.11.0 | Neural network libraries |
| keras | 2.11.0 | Artificial neural networks API |
## References
Predicting Coronal Mass Ejections Using SDO/HMI Vector Magnetic Data Products and Recurrent Neural Networks. <NAME>., <NAME>., <NAME>., <NAME>., ApJ., 890:12, 2020
https://iopscience.iop.org/article/10.3847/1538-4357/ab6850
https://arxiv.org/abs/2002.10953
https://web.njit.edu/~wangj/RNNcme/
<file_sep>/CME_data_samples/ReadMe.txt
This zip contains one folder, named CME_data_samples, containing data samples shown in Table 1 of the paper. In the folder, there are two types of csv files named normalized_training_x and normalized_testing_x respectively, which contain training and testing data samples (after normalization) used for predicting CMEs within the next x hours (x = 12, 24, 36, 48 or 60).
Each file has 22 columns.
The first column is titled Label. This column has two values: N and P. N means there is a >=M class flare within the next x hours but this flare is not associated with a CME. P means there is a >=M class flare within the next x hours and this flare is associated with a CME.
The second column is titled Timestamp. The third column and fourth column are titled NOAA active region number and HARP number, respectively. Starting from the fifth column, you can see physical parameters of data samples, which include 18 SHARP parameters: TOTUSJH, TOTPOT, TOTUSJZ, ABSNJZH, SAVNCPP, USFLUX, AREA_ACR, MEANPOT, R_VALUE, SHRGT45, MEANGAM, MEANJZH, MEANGBT, MEANGBZ, MEANJZD, MEANGBH, MEANSHR, MEANALP.
<file_sep>/CMEpredict/CMEpredict.py
# =========================================================================
# (c) Copyright 2020
# All rights reserved
# Programs written by <NAME>
# Department of Computer Science
# New Jersey Institute of Technology
# University Heights, Newark, NJ 07102, USA
#
# Permission to use, copy, modify, and distribute this
# software and its documentation for any purpose and without
# fee is hereby granted, provided that this copyright
# notice appears in all copies. Programmer(s) makes no
# representations about the suitability of this
# software for any purpose. It is provided "as is" without
# express or implied warranty.
# =========================================================================
import warnings
warnings.filterwarnings('ignore')
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import sys
import pandas as pd
from sklearn.utils import class_weight
# https://github.com/keras-team/keras/issues/1406
stderr = sys.stderr
sys.stderr = open(os.devnull, 'w')
from keras.models import *
from keras.layers import *
from tensorflow.keras import regularizers
sys.stderr = stderr
import numpy as np
import sys
import csv
try:
import tensorflow as tf
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
except Exception as e:
pass
# Column permutations per (model type, prediction window in hours).
# Each list keeps the first four columns [label, timestamp, NOAA AR num,
# HARP num] and then orders the 18 SHARP features by model-specific rank.
# Note: ('gru', 60) and ('lstm', 60) intentionally share the same order.
_COLUMN_ORDERS = {
    ('gru', 12): [0, 1, 2, 3, 11, 13, 7, 8, 15, 18, 21, 6, 9, 10, 17, 5, 16, 4, 12, 19, 20, 14],
    ('gru', 24): [0, 1, 2, 3, 11, 13, 15, 5, 20, 9, 21, 7, 8, 6, 17, 18, 10, 14, 4, 12, 16, 19],
    ('gru', 36): [0, 1, 2, 3, 11, 5, 13, 20, 9, 21, 15, 8, 7, 4, 6, 14, 12, 17, 10, 18, 16, 19],
    ('gru', 48): [0, 1, 2, 3, 11, 5, 13, 20, 9, 14, 8, 7, 21, 6, 4, 15, 12, 17, 16, 10, 18, 19],
    ('gru', 60): [0, 1, 2, 3, 11, 5, 13, 20, 7, 15, 8, 14, 6, 21, 4, 9, 12, 10, 19, 18, 16, 17],
    ('lstm', 12): [0, 1, 2, 3, 11, 13, 20, 7, 15, 8, 21, 6, 18, 5, 10, 9, 17, 16, 19, 12, 14, 4],
    ('lstm', 24): [0, 1, 2, 3, 20, 11, 13, 9, 15, 14, 8, 7, 5, 21, 6, 17, 18, 10, 12, 16, 4, 19],
    ('lstm', 36): [0, 1, 2, 3, 11, 20, 13, 5, 14, 8, 15, 7, 9, 21, 6, 4, 12, 17, 18, 10, 16, 19],
    ('lstm', 48): [0, 1, 2, 3, 11, 5, 20, 13, 9, 14, 7, 15, 8, 6, 4, 21, 12, 17, 18, 16, 10, 19],
    ('lstm', 60): [0, 1, 2, 3, 11, 5, 13, 20, 7, 15, 8, 14, 6, 21, 4, 9, 12, 10, 19, 18, 16, 17],
}


def get_df_values(type, time_window, df_values0):
    """Reorder the columns of the raw sample matrix for the chosen model.

    :param type: 'gru' or 'lstm'
    :param time_window: prediction window in hours (12, 24, 36, 48 or 60)
    :param df_values0: 2-D array whose 22 columns follow the CSV layout
        described in the ReadMe (label, timestamp, NOAA AR num, HARP num,
        then 18 SHARP parameters)
    :return: the array with columns permuted into model-specific rank order
    :raises KeyError: for an unsupported (type, time_window) combination
        (the original if/elif cascade raised UnboundLocalError instead)
    """
    return df_values0[:, _COLUMN_ORDERS[(type, time_window)]]
def load_data(datafile, series_len, start_feature, n_features, mask_value, type, time_window):
    """Build fixed-length time series samples from a normalized CSV file.

    For every usable row (label 'N' or 'P', no missing feature), walks
    backwards through earlier rows of the same HARP region to assemble a
    series of `series_len` timesteps; gaps and low-quality rows are padded
    with `mask_value` vectors at the front.

    :param datafile: path to a normalized_training_x / normalized_testing_x CSV
    :param series_len: number of timesteps per sample
    :param start_feature: index of the first feature column (4 in this file)
    :param n_features: how many top-ranked features to keep
    :param mask_value: filler value for padded timesteps
    :param type: 'gru' or 'lstm' (selects the column ordering)
    :param time_window: prediction window in hours
    :return: (X, y, nb) where X has shape (nb, series_len, n_features),
        y holds 0 for 'N' and 1 for 'P', and nb is the sample count
    """
    df = pd.read_csv(datafile, header=None)
    df_values0 = df.values
    df_values = get_df_values(type, time_window, df_values0)
    X = []
    y = []
    # A fully-masked feature vector used to pad short series.
    tmp = []
    for k in range(start_feature, start_feature + n_features):
        tmp.append(mask_value)
    n_neg = 0
    n_pos = 0
    for idx in range(0, len(df_values)):
        each_series_data = []
        row = df_values[idx]
        label = row[0]
        if label == 'padding':
            continue
        has_zero_record = False
        # if one of the physical feature values is missing, then discard it.
        for k in range(start_feature, start_feature + n_features):
            if float(row[k]) == 0.0:
                has_zero_record = True
                break
        if has_zero_record is False:
            cur_harp_num = int(row[3])
            each_series_data.append(row[start_feature:start_feature + n_features].tolist())
            # Walk backwards, collecting earlier timesteps from the same HARP
            # region until the series is full or the region changes.
            itr_idx = idx - 1
            while itr_idx >= 0 and len(each_series_data) < series_len:
                prev_row = df_values[itr_idx]
                prev_harp_num = int(prev_row[3])
                if prev_harp_num != cur_harp_num:
                    break
                has_zero_record_tmp = False
                for k in range(start_feature, start_feature + n_features):
                    if float(prev_row[k]) == 0.0:
                        has_zero_record_tmp = True
                        break
                # NOTE(review): the 3500 / 65536 / 70 cut-offs on the last
                # columns look like data-quality thresholds — confirm their
                # meaning against the data description before changing them.
                if float(prev_row[-5]) >= 3500 or float(prev_row[-4]) >= 65536 or \
                        abs(float(prev_row[-1]) - float(prev_row[-2])) > 70:
                    has_zero_record_tmp = True
                # Bad timesteps become mask vectors; good ones keep their data.
                if len(each_series_data) < series_len and has_zero_record_tmp is True:
                    each_series_data.insert(0, tmp)
                if len(each_series_data) < series_len and has_zero_record_tmp is False:
                    each_series_data.insert(0, prev_row[start_feature:start_feature + n_features].tolist())
                itr_idx -= 1
            # Left-pad with mask vectors if the region had too little history.
            while len(each_series_data) > 0 and len(each_series_data) < series_len:
                each_series_data.insert(0, tmp)
            if (label == 'N' or label == 'P') and len(each_series_data) > 0:
                X.append(np.array(each_series_data).reshape(series_len, n_features).tolist())
                if label == 'N':
                    y.append(0)
                    n_neg += 1
                elif label == 'P':
                    y.append(1)
                    n_pos += 1
    X_arr = np.array(X)
    y_arr = np.array(y)
    nb = n_neg + n_pos
    return X_arr, y_arr, nb
def attention_3d_block(hidden_states, series_len):
    """Attention over an RNN's hidden-state sequence.

    Scores every timestep's hidden state against the final timestep's state,
    softmaxes the scores into attention weights, forms the weighted context
    vector, and fuses it with the last hidden state through a tanh Dense
    layer.

    :param hidden_states: output of an RNN layer with return_sequences=True
        (batch, series_len, hidden_size)
    :param series_len: number of timesteps in the sequence
    :return: attention vector tensor of shape (batch, hidden_size)
    """
    hidden_size = int(hidden_states.shape[2])
    # Move to (hidden_size, series_len) layout for the scoring projection.
    hidden_states_t = Permute((2, 1), name='attention_input_t')(hidden_states)
    hidden_states_t = Reshape((hidden_size, series_len), name='attention_input_reshape')(hidden_states_t)
    score_first_part = Dense(series_len, use_bias=False, name='attention_score_vec')(hidden_states_t)
    score_first_part_t = Permute((2, 1), name='attention_score_vec_t')(score_first_part)
    # h_t: hidden state of the last timestep.
    h_t = Lambda(lambda x: x[:, :, -1], output_shape=(hidden_size, 1), name='last_hidden_state')(hidden_states_t)
    score = dot([score_first_part_t, h_t], [2, 1], name='attention_score')
    attention_weights = Activation('softmax', name='attention_weight')(score)
    # Weighted sum of all hidden states under the attention distribution.
    context_vector = dot([hidden_states_t, attention_weights], [2, 1], name='context_vector')
    context_vector = Reshape((hidden_size,))(context_vector)
    h_t = Reshape((hidden_size,))(h_t)
    pre_activation = concatenate([context_vector, h_t], name='attention_output')
    attention_vector = Dense(hidden_size, use_bias=False, activation='tanh', name='attention_vector')(pre_activation)
    return attention_vector
def lstm(n_features, series_len):
    """Build the attention-based LSTM binary classifier.

    :param n_features: features per timestep
    :param series_len: timesteps per sample
    :return: an uncompiled Keras Model emitting a sigmoid probability
    """
    net_input = Input(shape=(series_len, n_features,))
    recurrent = LSTM(10, return_sequences=True, dropout=0.5, recurrent_dropout=0.3)(net_input)
    attended = attention_3d_block(recurrent, series_len)
    hidden = Dropout(0.25)(Dense(100, activation='relu')(attended))
    prediction = Dense(1, activation='sigmoid', activity_regularizer=regularizers.l2(0.0001))(hidden)
    return Model(inputs=[net_input], outputs=prediction)
def gru(n_features, series_len):
    """Build the attention-based GRU binary classifier.

    :param n_features: features per timestep
    :param series_len: timesteps per sample
    :return: an uncompiled Keras Model emitting a sigmoid probability
    """
    net_input = Input(shape=(series_len, n_features,))
    recurrent = GRU(10, return_sequences=True, dropout=0.5, recurrent_dropout=0.3)(net_input)
    attended = attention_3d_block(recurrent, series_len)
    hidden = Dropout(0.25)(Dense(100, activation='relu')(attended))
    prediction = Dense(1, activation='sigmoid', activity_regularizer=regularizers.l2(0.0001))(hidden)
    return Model(inputs=[net_input], outputs=prediction)
def output_result(test_data_file, result_file, type, time_window, start_feature, n_features, thresh, prob=None):
    """Write the prediction CSV for the testing set.

    Each surviving testing row gets a leading 'Predicted Label' column:
    'P' when the model probability reaches ``thresh``, otherwise 'N'.
    'padding' rows, rows failing the quality cut-offs, and rows with any
    missing (zero) feature value are skipped.

    :param prob: per-sample positive-class probabilities, ordered like the
        surviving rows. Defaults to the module-level ``prob`` computed in the
        ``__main__`` section — the original version read that global
        implicitly, which this keyword makes explicit and testable.
    """
    if prob is None:
        # Backward-compatible fallback to the global used by the old code.
        prob = globals()['prob']
    df = pd.read_csv(test_data_file, header=None)
    df_values0 = df.values
    df_values = get_df_values(type, time_window, df_values0)
    with open(result_file, 'w', encoding='UTF-8') as result_csv:
        w = csv.writer(result_csv)
        w.writerow(['Predicted Label', 'Label', 'Timestamp', 'NOAA AR NUM', 'HARP NUM',
                    'TOTUSJH', 'TOTPOT', 'TOTUSJZ', 'ABSNJZH', 'SAVNCPP', 'USFLUX', 'AREA_ACR',
                    'MEANPOT', 'R_VALUE', 'SHRGT45', 'MEANGAM', 'MEANJZH', 'MEANGBT', 'MEANGBZ',
                    'MEANJZD', 'MEANGBH', 'MEANSHR', 'MEANALP'])
        idx = 0
        for i in range(len(df_values)):
            line = df_values[i].tolist()
            # NOTE(review): the 3500 / 65536 / 70 cut-offs mirror load_data's
            # quality filter — confirm they stay in sync with that function.
            if line[0] == 'padding' or float(line[-5]) >= 3500 or float(line[-4]) >= 65536 \
                    or abs(float(line[-1]) - float(line[-2])) > 70:
                continue
            has_zero_record = False
            # if one of the physical feature values is missing, then discard it.
            for k in range(start_feature, start_feature + n_features):
                if float(line[k]) == 0.0:
                    has_zero_record = True
                    break
            if has_zero_record:
                continue
            if prob[idx] >= thresh:
                line.insert(0, 'P')
            else:
                line.insert(0, 'N')
            idx += 1
            w.writerow(line)
# (model type, prediction window in hours) -> (number of top-ranked SHARP
# features to feed the model, probability threshold for a positive call).
_MODEL_SETTINGS = {
    ('gru', 12): (16, 0.45),
    ('gru', 24): (12, 0.4),
    ('gru', 36): (9, 0.45),
    ('gru', 48): (14, 0.45),
    ('gru', 60): (5, 0.5),
    ('lstm', 12): (15, 0.4),
    ('lstm', 24): (12, 0.45),
    ('lstm', 36): (8, 0.45),
    ('lstm', 48): (15, 0.45),
    ('lstm', 60): (6, 0.5),
}


def get_n_features_thresh(type, time_window):
    """Return (n_features, thresh) for the given model type and time window.

    :param type: 'gru' or 'lstm'
    :param time_window: 12, 24, 36, 48 or 60 (hours)
    :return: tuple (n_features, thresh); unknown combinations return (0, 0),
        matching the fall-through behaviour of the original if/elif cascade
    """
    return _MODEL_SETTINGS.get((type, time_window), (0, 0))
# Command-line entry point.
# Usage: python3 CMEpredict.py {gru|lstm} {12|24|36|48|60} {0|1}
# The third argument selects loading a pre-trained model (0) or retraining (1).
if __name__ == '__main__':
    type = sys.argv[1]
    time_window = int(sys.argv[2])
    train_again = int(sys.argv[3])
    train_data_file = './normalized_training_' + str(time_window) + '.csv'
    test_data_file = './normalized_testing_' + str(time_window) + '.csv'
    result_file = './' + type + '-' + str(time_window) + '-output.csv'
    model_file = './' + type + '-' + str(time_window) + '-model.h5'
    # Features start at column 4 (after label, timestamp, NOAA AR, HARP num).
    start_feature = 4
    n_features, thresh = get_n_features_thresh(type, time_window)
    mask_value = 0
    series_len = 20
    epochs = 20
    batch_size = 256
    # NOTE(review): nclass is never used below.
    nclass = 2
    if train_again == 1:
        # Train
        print('loading training data...')
        X_train, y_train, nb_train = load_data(datafile=train_data_file,
                                               series_len=series_len,
                                               start_feature=start_feature,
                                               n_features=n_features,
                                               mask_value=mask_value,
                                               type=type,
                                               time_window=time_window)
        # Balance the loss between the (rare) positive and negative classes.
        class_weights = class_weight.compute_class_weight('balanced', classes=np.unique(y_train), y=y_train)
        class_weight_ = {0: class_weights[0], 1: class_weights[1]}
        print('done loading training data...')
        if type == 'gru':
            model = gru(n_features, series_len)
        elif type == 'lstm':
            model = lstm(n_features, series_len)
        print('training the model, wait until it is finished...')
        model.compile(loss='binary_crossentropy',
                      optimizer='RMSprop',
                      metrics=['accuracy'])
        history = model.fit(X_train,
                            y_train,
                            epochs=epochs,
                            batch_size=batch_size,
                            verbose=False,
                            shuffle=True,
                            class_weight=class_weight_)
        print('finished...')
        model.save(model_file)
    else:
        print('loading model...')
        model = load_model(model_file)
        print('done loading...')
    # Test
    print('loading testing data')
    X_test, y_test, nb_test = load_data(datafile=test_data_file,
                                        series_len=series_len,
                                        start_feature=start_feature,
                                        n_features=n_features,
                                        mask_value=mask_value,
                                        type=type,
                                        time_window=time_window)
    print('done loading testing data...')
    print('predicting testing data...')
    # `prob` is consumed by output_result (historically as a module global).
    prob = model.predict(X_test,
                         batch_size=batch_size,
                         verbose=False,
                         steps=None)
    print('done predicting...')
    print('writing prediction results into file...')
    output_result(test_data_file=test_data_file,
                  result_file=result_file,
                  type=type,
                  time_window=time_window,
                  start_feature=start_feature,
                  n_features=n_features,
                  thresh=thresh)
    print('done...')
<file_sep>/Collection of CMEs/README.txt
Download the database of 129 M- and X-class flares that are associated with CMEs and 610 M- and X-class flares that are not associated with CMEs described in Section 2 of the paper.<file_sep>/requirements.txt
keras==2.11.0
matplotlib==3.5.2
numpy==1.21.6
pandas==1.4.4
scikit-learn==1.0.2
scipy==1.9.1
tensorboard==2.11.2
tensorflow==2.11.0
protobuf==3.19.6
<file_sep>/CMEpredict/ReadMe.txt
In this zip, there are csv files named normalized_training_x, which contain training data samples (after normalization) used for predicting CMEs within the next x hours (x = 12, 24, 36, 48 or 60). In addition, there are csv files named normalized_testing_x, which contain testing data samples (after normalization) within the next x hours (x = 12, 24, 36, 48 or 60) of active region #12497 and #12529.
Each file has 22 columns.
The first column is titled Label. This column has 3 values: padding, N, and P. Padding means this is an auxiliary data sample used to construct time series for prediction. N means there is a >=M class flare within the next x hours but the flare is not associated with a CME. P means there is a >=M class flare within the next x hours and this flare is associated with a CME.
The second column is titled Timestamp. The third column and fourth column are titled NOAA active region number and HARP number, respectively. Starting from the fifth column, you can see physical parameters of data samples, which include 18 SHARP parameters: TOTUSJH, TOTPOT, TOTUSJZ, ABSNJZH, SAVNCPP, USFLUX, AREA_ACR, MEANPOT, R_VALUE, SHRGT45, MEANGAM, MEANJZH, MEANGBT, MEANGBZ, MEANJZD, MEANGBH, MEANSHR, MEANALP.
This zip also contains the source code of our program, called CMEpredict.py, which is used to predict labels of testing data samples.
The usage is given as follows:
python3 CMEpredict.py gru 12 0
The first argument "CMEpredict.py" is the Python program file name.
The second argument "gru" denotes that the program will make predictions using GRU.
An alternative option is "lstm" which uses LSTM. The usage is given as follows:
python3 CMEpredict.py lstm 12 0
The third argument "12" denotes that the program will predict CMEs within the next 12 hours. Other options are 24, 36, 48 or 60 hours.
The fourth argument "0" denotes that the program will load and use the pre-trained model, named gru-x-model.h5 or lstm-x-model.h5. If one would like to re-train the model, change "0" to "1".
The output obtained by executing the above command is stored in the file named gru-x-output.csv or lstm-x-output.csv in the zip. This output file is the same as the normalized_testing_x file except that it has one additional column (the first column) titled "Predicted Label," which contains labels predicted by our program. The value "padding" is removed from the output file.
Our program is run on Python 3.9.7, Keras 2.8.0, and TensorFlow 2.8.0.
| 570e20572bf5039178950eca512518da8a057ef7 | [
"Markdown",
"Python",
"Text"
] | 6 | Markdown | ccsc-tools/RNN-CME-prediction | 04be43f1ecb8fb4aea6e0cc8f0b1eb798aed2e2a | 21726e486bda6a90af48cd8fa667ce6ea9a13a2f |
refs/heads/master | <file_sep>package com.easy.xmltest;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
import java.util.ArrayList;
/**
* @Desc
* @Author lvyang
* @Date 2020/5/26
*/
public class MyXMLHandler extends DefaultHandler {
    private Book currBook;
    private String tagName;
    // Text of the element currently being parsed. SAX is allowed to deliver
    // one element's character data through several characters() callbacks,
    // so it must be accumulated rather than consumed in a single call.
    private final StringBuilder elementText = new StringBuilder();

    public ArrayList<Book> getBooks() {
        return books;
    }

    private ArrayList<Book> books;

    /**
     * Called once at the start of the document; prepares the result list.
     */
    @Override
    public void startDocument() throws SAXException {
        super.startDocument();
        books = new ArrayList<Book>();
    }

    /**
     * Called at the start of each element. A {@code book} element opens a new
     * Book and copies its {@code id} attribute.
     *
     * @param uri        namespace URI of the element
     * @param localName  local name (without prefix)
     * @param qName      qualified name (with prefix)
     * @param attributes the element's attributes
     */
    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
        super.startElement(uri, localName, qName, attributes);
        if (localName.equals("book")) {
            currBook = new Book();
            currBook.setId(attributes.getValue("id"));
        }
        this.tagName = localName;
        elementText.setLength(0);
    }

    /**
     * Accumulates character data for the current element.
     * Fix: the previous version wrote ch[start..start+length] straight into
     * the Book field, which silently keeps only the last chunk whenever the
     * parser splits an element's text across multiple callbacks.
     */
    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        super.characters(ch, start, length);
        if (tagName != null) {
            elementText.append(ch, start, length);
        }
    }

    /**
     * Called at the end of each element: stores the accumulated text into the
     * matching Book field, and appends the finished Book to the result list
     * when a {@code book} element closes.
     *
     * @param uri       namespace URI of the element
     * @param localName local name (without prefix)
     * @param qName     qualified name (with prefix)
     */
    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        super.endElement(uri, localName, qName);
        if (currBook != null && tagName != null) {
            String data = elementText.toString();
            if ("name".equals(tagName)) {
                currBook.setName(data);
            } else if ("author".equals(tagName)) {
                currBook.setAuthor(data);
            }
        }
        if ("book".equals(localName)) {
            books.add(currBook);
            currBook = null;
        }
        this.tagName = null;
    }

    /**
     * Called once at the end of the document.
     */
    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
    }
}
<file_sep>rootProject.name='XMLTest'
include ':app'
| 80e7cf301a6f4365bf36ab08396570e8af428af7 | [
"Java",
"Gradle"
] | 2 | Java | LVJYO/TestDemo | 9c055c67785f2075b60250d03cbb8f7504379b4e | 899173ef391b1a75c1916161053db51b45fb7b00 |
refs/heads/master | <file_sep>#! /usr/bin/python2.7
import SCPI
import time
import sys
from eng import EngNumber
class PA_control():
    # Python 2 helper around an SCPI power analyzer ("pa05"): power-cycle a
    # channel, switch it on/off, set its voltage, or read average current.
    def help(self):
        # Print the command-line usage summary.
        print "usage:"
        print "pp\t\tusage"
        print "pp 2\t\tpower cycle ch 2"
        print "pp 1off\t\tch 1 off"
        print "pp 1on\t\tch 1 on"
        print "pp 1v3.3\tch 1 voltage to 3.3V"
        print "pp 1c\t\taverage current ch 1"
        print "pp 1cc\t\tcontinuous average current ch 1"
    def connect(self):
        # Open the SCPI socket to the instrument named 'pa05'.
        self.s = SCPI.SCPI('pa05')
    def turn_on(self):
        self.s.setOutput(int(self.ch),1)
    def turn_off(self):
        self.s.setOutput(int(self.ch),0)
    def set_voltage(self, voltage):
        self.s.setVoltage(int(self.ch),float(voltage))
    def main(self, argv):
        # argv[1] is "<channel><command>", e.g. "1on", "2", "1v3.3", "1cc".
        if len(argv) <= 1:
            self.help()
            return -1
        self.connect()
        cmd = argv[1][1:]
        try:
            self.ch = int(argv[1][0])
        except:
            self.help()
            return -1
        if cmd == '':
            # Bare channel number: power-cycle (off, wait, on).
            self.turn_off()
            time.sleep(0.5)
            self.turn_on()
        elif cmd == 'on':
            self.turn_on()
        elif cmd == 'off':
            self.turn_off()
        elif cmd[0] == 'v':
            # Safety limit for the attached hardware.
            if float(cmd[1:]) > 3.6:
                print "voltage too high"
                return -1
            self.set_voltage(cmd[1:])
        elif cmd[0] == 'c':
            # 'c' prints one averaged reading; 'cc' loops until interrupted.
            # NOTE(review): the bare except doubles as the exit path for both
            # the missing-second-letter case and Ctrl-C on the loop.
            try:
                if cmd[1] == 'c':
                    while(True):
                        ret = self.s.getAvgCurrent(self.ch,195312*2,195312)
                        print str(EngNumber(ret)) + "A"
            except:
                ret = self.s.getAvgCurrent(self.ch,195312*2,195312)
                print str(EngNumber(ret)) + "A"
# Command-line entry point: forwards argv to PA_control.main().
if __name__ == '__main__':
    pa = PA_control()
    pa.main(sys.argv)
<file_sep>import socket
import time
import struct
# Based on mclib by <NAME> (http://github.com/tschmid/mclib)
class SCPI:
    # Thin SCPI-over-TCP client for a power analyzer (default port 5025).
    # All commands are sent as ASCII SCPI strings over a plain socket.
    PORT = 5025
    def __init__(self, host, port=PORT):
        self.host = host
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.connect((host, port))
    def reset(self):
        # reset and clear device
        self.s.send("*RST\n")
        self.s.send("*CLS\n")
    def setVoltage(self, channel, voltage):
        #set output voltage
        self.s.send("VOLTage %.2f,(@%d)\n"%(voltage,channel,))
    def setCurrent(self, channel, voltage):
        #set current
        self.s.send("CURR %.2f,(@%d)\n"%(voltage,channel,))
    def setOutput(self, channel, status):
        # Enable (truthy status) or disable the given output channel.
        if status:
            #enable the output
            self.s.send("OUTPut ON,(@" + str(channel) + ")\n")
        else:
            self.s.send("OUTPut OFF,(@" + str(channel) + ")\n")
    def startCurrentMeasurement(self, channel, samples, res):
        # Configure a current sweep (`samples` points at `res` Hz) and start
        # an array measurement; the reply must be read separately.
        self.s.send("FORM REAL\n")
        self.s.send("FORM:BORD NORM \n")
        self.s.send("SENS:SWE:TINT " + str(1/float(res)) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:POIN " + str(samples) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:CURR:RANG:AUTO ON, (@" + str(channel) + ")\n")
        self.s.send("SENS:CURR:CCOM OFF, (@" + str(channel) + ")\n")
        self.s.send("MEAS:ARR:CURR? (@" + str(channel) + ")\n")
        #self.s.send("FETC:ARR:CURR? (@" + str(channel) + ")\n")
    def startPowerMeasurement(self, channel, samples, res):
        # Same as startCurrentMeasurement but requests a power array.
        self.s.send("SENS:SWE:TINT " + str(1/float(res)) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:POIN " + str(samples) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:CURR:RANG:AUTO ON, (@" + str(channel) + ")\n")
        self.s.send("SENS:CURR:CCOM OFF, (@" + str(channel) + ")\n")
        self.s.send("MEAS:ARR:POW? (@" + str(channel) + ")\n")
    def getAvgCurrent(self, channel, samples, res):
        # Blocking averaged current measurement; returns amps as float.
        self.s.send("SENS:SWE:OFFS:POIN 0, (@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:TINT " + str(1/float(res)) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:POIN " + str(samples) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:CURR:RANG:AUTO ON, (@" + str(channel) + ")\n")
        self.s.send("MEAS:CURR? (@" + str(channel) + ")\n")
        self.s.settimeout(4)
        return float(self.s.recv(1024))
    def getAvgVoltage(self, channel, samples, res):
        # Blocking averaged voltage measurement; returns volts as float.
        self.s.send("SENS:SWE:OFFS:POIN 0, (@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:TINT " + str(1/float(res)) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:SWE:POIN " + str(samples) + ",(@" + str(channel) + ")\n")
        self.s.send("SENS:VOLT:RANG:AUTO ON, (@" + str(channel) + ")\n")
        self.s.send("MEAS:VOLT? (@" + str(channel) + ")\n")
        self.s.settimeout(4)
        return float(self.s.recv(4)

-    # 前置方法? no
<file_sep>#! /bin/bash
# bindkey: bind a tmux key (no prefix) to send a command to a given pane.
#   bindkey list             -> show current send-keys bindings
#   bindkey KEY PANE "CMD"   -> bind KEY to send CMD + ENTER to pane PANE
LIST="list"
if [ "$1" == "$LIST" ]; then
	tmux list-keys | grep send-keys
elif [ "$#" -ne 3 ]; then
	echo usage: bindkey KEY PANE "COMMAND"
	echo list: bindkey list
	echo example: bindkey F1 top-right \"C-c \' ls -a\'\"
else
	# eval so the quoted COMMAND argument expands into send-keys arguments.
	eval tmux bind-key -n $1 send-keys -t .$2 $3 ENTER
	echo bind key $1 with \"$3\" in pane \"$2\"
fi
<file_sep>#!/bin/bash
# Connect to a Bluetooth 6LoWPAN peer ($1 = BT address) and bring up bt0.
# Requires root: writes kernel debugfs/procfs switches and configures radvd.
if [ "$(id -u)" != "0" ]; then
   echo "This script must be run as root" 1>&2
   exit 1
fi
modprobe bluetooth-6lowpan
echo 1 > /proc/sys/net/ipv6/conf/all/forwarding
echo 1 > /sys/kernel/debug/bluetooth/6lowpan_enable
echo "connect "$1" 1" > /sys/kernel/debug/bluetooth/6lowpan_control &&
CONNECTED=false
# Poll up to 8 times (0.5 s apart) for the bt0 interface to appear.
for i in `seq 1 8` ; do
    FOUND=`grep bt0 /proc/net/dev`
    if [ -n "$FOUND" ] ; then
        ifconfig bt0 add 2001:db8::1/64
        /etc/init.d/radvd restart
        CONNECTED=true
    fi
    if [ "$CONNECTED" = true ] ; then
        break
    else
        printf "."
    fi
    /bin/sleep 0.5
done
if [ "$CONNECTED" = true ] ; then
    echo "Connected"
    echo `hcitool con`
    else
    echo "Could not connect"
fi
<file_sep>
#!/bin/bash
# Run `nrfjprog --recover` against the attached nRF52 100 times in a row.
# Fix: the previous header comment ("opens 4 terminal windows") described an
# unrelated script, and the loop used the obsolete $[ ] arithmetic syntax.
for i in $(seq 1 100); do
    nrfjprog --recover -f nrf52
done
<file_sep>#!/usr/bin/python
import sys, getopt
import subprocess
import glob
import re
def main(argv):
    # Python 2 front-end for JLinkExe: builds a script of J-Link commands
    # (connect, optional erase/RAM dump/softdevice, loadbin, reset, go)
    # and pipes it to JLinkExe via `echo -e`.
    hex_file = ''
    #flash = "\ndevice nrf52\nS\nSWD\n4000\n"#sleep 1000\n"
    flash = "\nconnect\nnrf52\nS\n4000\nr\n"#sleep 1000\n"
    #ip = "\nip 192.168.200.204\n"
    ip = "\nip 192.168.15.61\n"
    tun = "\nip tunnel:683545400\n"
    erase = False
    flash_sd = False
    addr_set = False
    opts, args = getopt.getopt(argv,"hea:s:",["erase", "ram", "remote","tunnel","address="])
    remote = False
    tunnel = False
    # First pass over the options: just detect remote/tunnel mode, because
    # local emulator discovery below must be skipped for those.
    for opt,arg in opts:
        if opt in ("--remote"):
            remote = True
        if opt in ("--tunnel"):
            tunnel = True
    sn = None
    if not remote and not tunnel:
        # Enumerate attached J-Links; if several, ask the user to pick one.
        cmd = "echo -e '\nShowEmuList\nexit\n' | JLinkExe"
        print cmd
        p1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell='/bin/sh')
        p1.wait()
        p1_out = p1.communicate()
        m_line = []
        for line in p1_out[0].splitlines():
            m = re.search("J-Link\[.\](.*)", line)
            if m:
                m_line.append(m.group(0))
        if len(m_line) == 0:
            print "No J-Link found"
            sys.exit(2)
        elif len(m_line) > 1:
            print "Please select J-Link"
            for l in m_line:
                print l
            sel = input("> ")
            sn = re.search("Serial number: (.........)", m_line[sel])
            sn = " -SelectEmuBySN " + sn.group(1)
    try:
        hex_file = args[0]
    except:
        hex_file = None
    # Second pass: translate each option into J-Link script fragments.
    for opt,arg in opts:
        if opt == '-h':
            print 'usage: jlink_flash.py [-e (--erase) -a (--address) 0 filename.hex]'
            sys.exit(2)
        elif opt in ("--remote"):
            flash = ip + flash
        elif opt in ("--tunnel"):
            flash = tun + flash
        elif opt in ("-e", "--erase"):
            flash = flash + "r\ng\nw4 4001e504 2\nw4 4001e50c 1\nr\ng\n"
            erase = True
        elif opt in ("--ram",):
            flash = flash + "r\ng\nmem32 40000900 24\nr\n"
        elif opt in ("-a", "--address"):
            try:
                addr = arg
                addr_set = True
            except:
                addr = None
                addr_set = False
        elif opt in ("-s", "--softdevice"):
            # Build a glob like 132*nrf51*... from the softdevice shorthand.
            s2 = s3 = s4 = '*'
            s1 = arg[:4]
            if len(arg) > 4:
                s2 = arg[4]
            if len(arg) > 5:
                s3 = arg[5]
            if len(arg) > 6:
                s4 = arg[6]
            string = "/home/devzone/softdevice/" + s1 + "*nrf51*" + s2 + "*" + s3 + "*" + s4 + "*.hex"
            files = glob.glob(string)
            if len(files) == 0:
                print "No softdevices maching the argument"
                sys.exit(2)
            elif len(files) > 1:
                print "Several softdevices maching the argument"
                for i in files:
                    print i
                sys.exit(2)
            else:
                flash = flash + "loadbin " + files[0] + " 0\nsleep 100\nr\n"
                flash_sd = True
    if hex_file == None:
        # No hex file argument: fall back to the single *.hex in the cwd.
        files = glob.glob("*.hex")
        if len(files) == 0:
            if not erase and not flash_sd:
                print "No hex-files in current folder"
                print 'usage: jlink_flash.py [-e (--erase) -a (--address) 0 filename.hex]'
                sys.exit(2)
        elif len(files) > 1:
            if not erase and not flash_sd:
                print "More than one hex-file in current folder"
                print 'usage: jlink_flash.py [-e (--erase) -a (--address) 0 filename.hex]'
                sys.exit(2)
        else:
            hex_file = files[0]
    if hex_file != None:
        if addr_set:
            flash = flash + "loadbin " + hex_file + " 0x" + addr +"\nsleep 100\nr\n"
        else:
            flash = flash + "loadbin " + hex_file + " 0\nsleep 100\nr\n"
    flash = flash + "r\ng\nexit\n"
    cmd = "echo -e '" + flash + "' | JLinkExe" + (sn if sn else '')
    print cmd
    pcmd = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell='/bin/sh')
    pcmd.wait()
    pout = pcmd.communicate()
    print pout[0]
# Command-line entry point; strips the program name off argv.
if __name__ == "__main__":
    main(sys.argv[1:])
<file_sep>from decimal import *
# Metric suffix -> scientific-notation exponent, used to parse input strings
# such as "2.2k" in EngNumber.__init__.
_suffix_lookup = {
    'p': 'e-12',
    'n': 'e-9',
    'u': 'e-6',
    'm': 'e-3',
    '': 'e0',
    'k': 'e3',
    'M': 'e6',
    'G': 'e9',
    'T': 'e12'
}
# Exponent string -> metric suffix for formatting. The keys are offset by
# -24 because EngNumber.__repr__ multiplies by Decimal('10e-25') first
# (e.g. a plain unit value ends up with exponent '-24' -> '' suffix).
_exponent_lookup_scaled = {
    '-36': 'p',
    '-33': 'n',
    '-30': 'u',
    '-27': 'm',
    '-24': '',
    '-21': 'k',
    '-18': 'M',
    '-15': 'G',
    '-12': 'T'
}
class EngNumber:
    """
    Used for easy manipulation of numbers which use engineering notation
    """

    def __init__(self, value, precision=2):
        """
        Initialize the class

        :param value: string, integer, or float representing the numeric value of the number
        :param precision: the precision past the decimal - default to 2
        :raises TypeError: if value is not a str, int or float
        """
        self.precision = precision

        if isinstance(value, str):
            # Translate a trailing metric suffix ('2.2k' -> '2.2e3').
            suffix_keys = [key for key in _suffix_lookup.keys() if key != '']

            for suffix in suffix_keys:
                if suffix in value:
                    value = value[:-1] + _suffix_lookup[suffix]
                    break

            self.number = Decimal(value)

        elif isinstance(value, int) or isinstance(value, float):
            self.number = Decimal(str(value))

        else:
            # Fix: an unsupported type previously left self.number unset,
            # which only surfaced later as a confusing AttributeError.
            raise TypeError('value must be a str, int or float, '
                            'got {}'.format(type(value).__name__))

    def __repr__(self):
        """
        Returns the string representation
        :return: a string representing the engineering number
        """
        # since Decimal class only really converts number that are very small
        # into engineering notation, then we will simply make all number a
        # small number and take advantage of Decimal class
        num_str = self.number * Decimal('10e-25')
        num_str = num_str.to_eng_string().lower()

        base, exponent = num_str.split('e')
        base = str(round(Decimal(base), self.precision))
        # Strip a trailing '.00' so whole numbers print without decimals.
        if '.00' in base:
            base = base[:-3]

        return base + _exponent_lookup_scaled[exponent]
<file_sep>alias ll="ls -lh --color --group-directories-first"
alias lla="ls -lah --color --group-directories-first"
# Navigation shortcuts.
alias ..="cd .."
# Misc admin/dev shortcuts (DHCP leases, Django manage.py, celery, venv).
alias lease="cat /var/lib/dhcp/dhcpd.leases"
alias mp="./manage.py"
alias rs="./manage.py runserver"
alias cel="celery worker -B -E -A"
alias ve=". env/bin/activate"
# apt shortcuts.
alias sai="sudo apt-get install"
alias sar="sudo apt-get remove"
# Recursive grep limited to a single file type.
alias greph="grep -r --include \*.h"
alias greppy="grep -r --include \*.py"
alias grepc="grep -r --include \*.c"
# SEGGER J-Link / debugging tools.
alias jl="JLinkExe"
alias jls="cat <(printf 'connect\n\nS\n\n') - | JLinkExe -selectemubysn 518002451"
alias ta="tmux attach -t"
alias logicex="sudo ./Logic -geometry 1900x1100+0+430"
alias debug="Ozone *.jdebug &"
# X display selection for SSH-forwarded sessions.
alias disp10="export DISPLAY=localhost:10"
alias disp11="export DISPLAY=localhost:11"
alias disp12="export DISPLAY=localhost:12"
alias disp0="export DISPLAY=:0"
# Multi-level "cd up" shortcuts.
alias ...="cd ../../../"
alias ....="cd ../../../../"
alias .....="cd ../../../../../"
alias ......="cd ../../../../../../"
alias .......="cd ../../../../../../../"
alias ........="cd ../../../../../../../../"
alias mj="make -j"
alias rr="nrfjprog --reset"
| 293acfa0bd40d5cdf240fd36dabb26ed59f147eb | [
"Python",
"Shell"
] | 8 | Python | stianrh/config | 99ff9af728d5e10be8b45920d947a6efe46b13da | 7026b058862c78d3069f5199391d1e16ba0a3276 |
refs/heads/master | <file_sep>require 'sinatra'
require 'json'
require 'user'
# Request-authorization hook shared by the routes below.
# Currently a stub: it only dumps the request params and writes a log entry.
def authenticate!
  # Verify whether the user is authorized (not implemented yet).
  p request.params
  logger.info "权限验证"
end
# Minimal JSON API application.
class App < Sinatra::Application
  # Runs before every route: log the Accept header and authorize the request.
  before do
    p request.accept
    authenticate!
  end
  get '/' do
    err = false
    # do some thing
    # p User[1].values
    halt 500, '错误信息!' if err
    User[1].values.to_json
    # {name: "张三", age: 28}.to_json
  end
  post '/api/add' do
    # SECURITY FIX: the request body was previously passed through `eval`,
    # which executes arbitrary Ruby sent by the client. Parse it as JSON
    # instead; symbolize_names keeps the former symbol-key access working.
    body = JSON.parse(request.body.read, symbolize_names: true)
    p body
    # do some thing
    # User.add
    {message: "add success"}.to_json
  end
  # curl -l -H "Content-type: application/json" -X POST -d '{"phone":"13521389587","password":"<PASSWORD>"}' http://localhost:9293/api/movies/add
end
<file_sep># Ruby-RESTful-API
## 基于 sinatra 的简单 ruby restful api
用到以下第三方组件:
* sinatra [使用手册](http://sinatrarb.com/intro-zh.html)
* sequel: 数据库ORM框架 [使用手册](http://sequel.jeremyevans.net/rdoc/files/README_rdoc.html)
* mysql2
* redis
## 目录结构
```
├── config
│ ├── application.yml # 各种配置数据
│ └── setup.rb # 初始化程序
├── config.ru # 启动脚本
├── controllers # 业务代码
│ ├── application_controller.rb
│ └── example_controller.rb # 业务模块
├── Gemfile
├── Gemfile.lock
├── logs # 日志文件夹
│ ├── db.log
│ └── service.log
├── main.rb # 路由已经业务逻辑
├── models
│ └── user.rb # 模型文件夹(数据库表对应模型)
└── README.md
```
``` ruby
# config.ru
# 配置业务模块访问前缀
map('/example'){ run ExampleController }
map('/'){ run ApplicationController }
```
## 启动说明
``` shell
$ bundle install
# * 启动命令
# -p :指定端口
# -w : 指定启动线程数
# -e : 运行环境(默认development)production test
# -d :守护进程模式
$ puma -p 9293 -w 4 -e test -d
# 测试请求
$ curl -l -H "Content-type: application/json" -X POST -d '{"phone":"13521389587","password":"<PASSWORD>"}' http://localhost:9293/example/add
```
<file_sep>class User < Sequel::Model(DB[:users])
end<file_sep>
require 'application_controller'
require 'user'
# 前缀 /example
class ExampleController < ApplicationController
helpers ExampleHelper
# 完整的 url 是 /example/all
get '/all' do
p '------ get /example/all ----'
# User[1].values.to_json
p getUser(1) # helpe 中的方法
{ isSuccess: true, message: "操作成功" }.to_json
end
# 完整的 url 是 /example/add
post '/add' do
p '======== post /example/add ==================='
body = eval(request.body.read)
p body
p body[:phone]
send_queue_message('add_queue', body)
{message: "add success"}.to_json
end
end<file_sep>module ExampleHelper
def getUser(id)
User[1].values.to_json
end
end
<file_sep>
require 'json'
require 'blank'
# Base controller: shared hooks, helpers and error handlers for all routes.
class ApplicationController < Sinatra::Application
  set :show_exceptions, :after_handler
  # Request-authorization hook; currently it only logs the incoming params.
  def authenticate!
    p request.params
    logger.info "参数为空" if request.params.blank?
    logger.info "权限验证"
  end
  # Runs before every request handled by this class and its subclasses.
  before do
    p request.accept
    authenticate!
  end
  get '/' do
    p '------ get / ----'
    { isSuccess: true, message: "操作成功" }.to_json
  end
  # Publish a message in RabbitMQ "work queue" mode.
  # queue_name: name of the (durable) queue
  # message: payload to publish
  def send_queue_message(queue_name, message)
    $rabbit.start
    channel = $rabbit.create_channel
    queue = channel.queue(queue_name, durable:true)
    queue.publish(message, persistent: true)
    logger.info "Sent message: '#{message}' to queue: #{queue_name}"
    $rabbit.close
  end
  # 404 handler.
  # NOTE(review): this returns a plain Hash, not a JSON string — presumably
  # `.to_json` was intended; confirm before relying on the response body.
  not_found do
    { message: "not_found" }
  end
  # Generic StandardError handler (show_exceptions is :after_handler).
  error do
    p 'Sorry there was a nasty error - '
    'Sorry there was a nasty error - ' + env['sinatra.error'].message
  end
  # Handler for HTTP status codes 400..510.
  error 400..510 do
    'Boom error - ' + env['sinatra.error'].message
  end
end
source 'https://gems.ruby-china.com'
gem 'sinatra', :github => "sinatra/sinatra"
gem 'json'
gem 'redis'
# gem 'httplog'
gem 'settingslogic'
gem 'sequel'
gem 'mysql2'
gem 'puma'
gem 'bunny'<file_sep># require_relative './config/setup'
# require './main'
# run App
require_relative './config/setup'
require 'sinatra'
# pull in the helpers and controllers
Dir.glob('./{helpers,controllers}/*.rb').each { |file| require file }
map('/example'){ run ExampleController }
map('/'){ run ApplicationController }
| f1f917a82bc9f074516a1febe66fb2cf3d759f86 | [
"Markdown",
"Ruby"
] | 8 | Ruby | Richardxu2014/ruby-restful-api | 781046949270ee671cd7ce4d2c4e24e8656db19e | 5dd19e8518d9237491f7c98e01cbfcd0295870b6 |
refs/heads/master | <repo_name>JonathanTanudjaja/ANN<file_sep>/training-v0.1.1.cpp
#include <iostream>
#include <vector>
#include <fstream>
// Weights of one neuron's outgoing connections to the next layer.
typedef std::vector<double> weights_vct;
// One weight vector per neuron: the routing table for a whole layer.
typedef std::vector<weights_vct> route_vct;
// A weighted link from one neuron to a neuron in the next layer.
class Connection
{
private:
    double weight;
public:
    Connection( double weight );
    void setWeight ( double weight );
    double getWeight ();
};

// Stores the initial weight of the link.
Connection::Connection( double weight )
{
    this->weight = weight;
}

// Fix: setWeight was declared but never defined, which would fail at link
// time for any caller; define it here.
void Connection::setWeight( double weight )
{
    this->weight = weight;
}

double Connection::getWeight()
{
    return weight;
}
// A single neuron: an activation state plus the weighted connections that
// feed the next layer.
class Neuron
{
private:
    std::vector<Connection> connections;
    double state;
public:
    Neuron( std::vector<double> weights );
    void feedFoward();
    double getState();
    void getConnection();
};

// Builds one Connection per outgoing weight.
// Fix: `state` was previously never initialised, so getState() returned an
// indeterminate value (undefined behaviour); the unused locals are dropped.
Neuron::Neuron( std::vector<double> weights )
{
    state = 0.0;
    unsigned numConnection = weights.size();
    for( unsigned connectionNum = 0 ; connectionNum < numConnection ; connectionNum++ )
    {
        connections.push_back( Connection( weights[connectionNum] ) );
    }
}

// Current activation value (0.0 until a feed-forward pass updates it).
// NOTE(review): feedFoward() is declared but not defined anywhere in this
// file; calling it would fail at link time.
double Neuron::getState()
{
    return state;
}

// Prints every outgoing connection weight to stdout (1-based numbering).
void Neuron::getConnection()
{
    unsigned numConnections = connections.size();
    for( unsigned connectionNum = 0 ; connectionNum < numConnections ; connectionNum++ )
    {
        std::cout << "Connection num-" << connectionNum+1 << " : " << connections[connectionNum].getWeight() << std::endl ;
    }
}
// A layer of neurons; constructed from one "route": a weight vector per neuron.
class Layer
{
private:
    std::vector<Neuron> neurons;
public:
    Layer( std::vector<weights_vct> route );
    void feedForward();
    void getNeuronStatus();
};
// Print each neuron's state and its outgoing connection weights.
void Layer::getNeuronStatus()
{
    unsigned neuronNum, numNeurons;
    numNeurons = neurons.size();
    for( neuronNum=0 ; neuronNum < numNeurons ; neuronNum++ )
    {
        std::cout << "Neuron num-" << neuronNum+1 << std::endl;
        std::cout << "State : " << neurons[neuronNum].getState() << std::endl ;
        neurons[neuronNum].getConnection();
    }
}
// Build one Neuron per entry of the route (each entry is that neuron's
// outgoing weight vector).
Layer::Layer( std::vector<weights_vct> route )
{
    unsigned neuronNum, numNeuron;
    numNeuron = route.size();
    for ( neuronNum=0 ; neuronNum < numNeuron ; neuronNum++ )
    {
        neurons.push_back( Neuron( route[neuronNum] ) );
    }
}
// The whole network: an ordered list of layers built from the parsed map.
class Net
{
private:
    std::vector<Layer> layers;
public:
    Net( std::vector<route_vct> map );
    void feedForward();
    void display();
};
// Build one Layer per map entry.
Net::Net( std::vector<route_vct> map )
{
    unsigned layerNum, numLayers;
    numLayers = map.size();
    for( layerNum=0 ; layerNum<numLayers ; layerNum++ )
    {
        layers.push_back( Layer( map[layerNum] ) );
    }
}
// Forward propagation — not implemented yet (intentionally empty in v0.1.1).
void Net::feedForward()
{
}
// Dump every layer (neuron states + connection weights) to stdout.
// FIX: removed four locals (numNeurons, neuronNum, numConnections,
// connectionNum) that were declared but never used.
void Net::display()
{
    unsigned numLayers, layerNum;
    numLayers = layers.size();
    for( layerNum = 0 ; layerNum < numLayers ; layerNum++ )
    {
        std::cout << "Layer num-" << layerNum+1 << std::endl;
        layers[layerNum].getNeuronStatus();
    }
}
// Entry point: parse the "map" file and print the resulting network.
//
// Expected file format (whitespace separated):
//   numLayers
//   then, per layer: numNeurons nextNumNeurons
//   followed by numNeurons * nextNumNeurons weights (row per neuron).
//
// NOTE(review): there is no check that the file opened successfully; a
// missing "map" file leaves the stream in a failed state and the loops
// run on garbage counters.
int main()
{
    std::vector<route_vct> mem_map;
    std::fstream fs;
    unsigned layerNum, neuronNum, numLayers , numNeurons , nextNumNeurons, connectionNum;
    double weight;
    fs.open( "map" , std::ios::in );
    fs >> numLayers;
    for( layerNum=0 ; layerNum < numLayers ; layerNum++ )
    {
        route_vct temp;
        mem_map.push_back(temp);
        fs >> numNeurons;
        fs >> nextNumNeurons;
        for( neuronNum = 0 ; neuronNum < numNeurons ; neuronNum++ )
        {
            // one weight vector per neuron of this layer
            weights_vct temp;
            mem_map.back().push_back(temp);
            for( connectionNum = 0 ; connectionNum<nextNumNeurons ; connectionNum++ )
            {
                fs >> weight ;
                mem_map.back().back().push_back(weight);
            }
        }
    }
    Net neuralNet (mem_map);
    neuralNet.display();
    return 0;
}
<file_sep>/README.md
# ANN-v0.1
first test learning ANN
use map to create neural network and save training data
*Change Log*
-v.0.1.1
- create a neural based on file map
| 3697ae6c4d82823a2ac758d8bb5ebb06fcc101bb | [
"Markdown",
"C++"
] | 2 | C++ | JonathanTanudjaja/ANN | f5c1323a214edb2a57489ff9b2c18da929120f89 | 44eaf411725431604e56f7e8cebd1b056c1679c1 |
refs/heads/main | <repo_name>abolfazlrastegar/laravel-sms<file_sep>/src/Providers/SmsServiceProvider.php
<?php
namespace Abolfazlrastegar\LaravelSms\Providers;
use Abolfazlrastegar\LaravelSms\Sms;
use Illuminate\Support\ServiceProvider;
/**
 * Laravel service provider for the laravel-sms package.
 *
 * Registers the `sms` container binding (resolving to a fresh Sms instance)
 * and publishes the package configuration to the host application.
 */
class SmsServiceProvider extends ServiceProvider
{
    /**
     * Bind 'sms' in the service container; a new Sms object per resolution.
     */
    public function register()
    {
        $this->app->bind('sms', function () {
            return new Sms();
        });
    }
    /**
     * Publish config/sms.php so the app can override it via
     * `php artisan vendor:publish --tag=config`.
     */
    public function boot()
    {
        $this->publishes([__DIR__ . '/../config/sms.php' => config_path('sms.php')], 'config');
    }
}
<file_sep>/src/Drivers/Message.php
<?php
namespace Abolfazlrastegar\LaravelSms\Drivers;
/**
 * Contract every SMS driver must implement.
 *
 * NOTE(review): Sms::voiceCall() forwards to a driver voiceCall() method that
 * is not part of this interface — only the Kavenegar driver defines it.
 */
interface Message
{
    /** Send one message to one number or a list of numbers. */
    public function sendMessages ($mobile, $message, $params);
    /** Send per-recipient messages (mobiles[i] receives message[i]). */
    public function sendMessageGroup($mobile = array(), $message = array(), $params = array());
    /** Send a templated verification code to a single number. */
    public function sendVerifyCode($mobile, $template, $params);
}
<file_sep>/src/Exception/createMessageErrorException.php
<?php
namespace Abolfazlrastegar\LaravelSms\Exception;
use Exception;
/**
 * Thrown when the requested SMS driver class cannot be resolved.
 */
class createMessageErrorException extends Exception
{
    /**
     * Build the "driver class missing" exception.
     *
     * @param string $sms       driver class short name that was requested
     * @param string $namespace namespace that was searched
     * @return static
     */
    public static function notClass ($sms, $namespace)
    {
        // FIX: message previously read "class X is not found Y"; add "in"
        // so the namespace is attached grammatically.
        return new static('class ' . $sms . ' is not found in ' . $namespace);
    }
}
<file_sep>/src/Drivers/Smsir.php
<?php
namespace Abolfazlrastegar\LaravelSms\Drivers;
use Illuminate\Support\Facades\Http;
/**
 * sms.ir driver (REST API v1).
 *
 * NOTE(review): setHeaders() returns a list of "Name: value" strings, but
 * Laravel's Http::withHeaders() expects an associative name => value array —
 * confirm these headers (incl. X-API-KEY) are actually applied.
 */
class Smsir implements Message
{
    /**
     * Send message one user or users
     * @param $mobile  string|array recipient number(s)
     * @param $message string text to send
     * @param $params  array  requires 'lineNumber'; optional 'SendDateTime'
     * @return mixed decoded JSON response body
     */
    public function sendMessages($mobile, $message, $params)
    {
        $data = [
            "lineNumber" => $params['lineNumber'],
            "messageText" => $message,
            "mobiles" => $mobile,
            "SendDateTime" => isset($params['SendDateTime']) ? $params['SendDateTime'] : null
        ];
        $result = Http::withHeaders($this->setHeaders())->post('https://api.sms.ir/v1/send/bulk', (object) $data);
        return json_decode($result->getBody()->getContents(), true);
    }
    /**
     * Send message group users ("like to like": mobiles[i] gets message[i])
     * @param $mobile  array recipient numbers
     * @param $message array per-recipient texts
     * @param $params  array requires 'lineNumber'; optional 'SendDateTime'
     * @return mixed decoded JSON response body
     */
    public function sendMessageGroup($mobile = array(), $message = array(), $params = array())
    {
        $data = [
            "lineNumber" => $params['lineNumber'],
            "messageText" => $message,
            "mobiles" => $mobile,
            "SendDateTime" => isset($params['SendDateTime']) ? $params['SendDateTime'] : null
        ];
        $result = Http::withHeaders($this->setHeaders())->post('https://api.sms.ir/v1/send/likeToLike', (object) $data);
        return json_decode($result->getBody()->getContents(), true);
    }
    /**
     * Send message verify code to user
     * @param $mobile   string recipient number
     * @param $template int|string sms.ir template id
     * @param $params   array template parameters ([['name'=>..,'value'=>..],..])
     * @return mixed decoded JSON response body
     */
    public function sendVerifyCode($mobile, $template, $params)
    {
        $data = [
            "mobile" => $mobile,
            "templateId" => $template,
            "parameters" => $params,
        ];
        $result = Http::withHeaders($this->setHeaders())->post('https://api.sms.ir/v1/send/verify', (object) $data);
        return json_decode($result->getBody()->getContents(), true);
    }
    /**
     * set headers request http (API key comes from config/sms.php)
     * @return string[]
     */
    private function setHeaders ()
    {
        return [
            'Accept: text/plain',
            'charset: utf-8',
            'Content-Type: application/json',
            'X-API-KEY:' . config('sms.drivers.Smsir.key')
        ];
    }
}
<file_sep>/src/Sms.php
<?php
namespace Abolfazlrastegar\LaravelSms;
use Abolfazlrastegar\LaravelSms\Exception\createMessageErrorException;
use phpDocumentor\Reflection\Types\This;
/**
 * Fluent facade over the SMS drivers.
 *
 * Usage: Sms::make('Kavenegar')->mobile(...)->message(...)->sendMessages();
 * The driver name must match a class in Abolfazlrastegar\LaravelSms\Drivers.
 */
class Sms
{
    /**
     * Driver class short name (e.g. 'Kavenegar', 'Smsir').
     * @var mixed|string
     */
    private $name_sms;
    /**
     * Recipient number(s).
     * @var string | array
     */
    private $mobile;
    /**
     * Provider template id for verify-code sends.
     * @var string
     */
    private $template;
    /**
     * Message text(s).
     * @var string | array
     */
    private $message;
    /**
     * Driver-specific options.
     * @var string | array
     */
    private $params = null;
    public function __construct($name_sms = '')
    {
        $this->name_sms = $name_sms;
    }
    /**
     * Static constructor for fluent chaining.
     * @param $name_sms string driver short name ('' to pick later)
     * @return static
     */
    public static function make ($name_sms = '')
    {
        return new static($name_sms);
    }
    /**
     * Set recipient number(s).
     * @param $mobile string|array
     * @return $this
     */
    public function mobile ($mobile)
    {
        $this->mobile = $mobile;
        return $this;
    }
    /**
     * Set the provider template id (verify-code flows).
     * @param $template
     * @return $this
     */
    public function template ($template)
    {
        $this->template = $template;
        return $this;
    }
    /**
     * Set the message text(s).
     * @param $message string|array
     * @return $this
     */
    public function message ($message)
    {
        $this->message = $message;
        return $this;
    }
    /**
     * Set driver-specific options (sender line, date, tokens, ...).
     * @param $params array
     * @return $this
     */
    public function params ($params)
    {
        $this->params = $params;
        return $this;
    }
    /**
     * Use the driver configured as `sms.default`.
     * @return $this
     */
    public function defaultSms ()
    {
        $this->name_sms = config('sms.default');
        return $this;
    }
    /**
     * Send a templated verification code via the selected driver.
     * @return mixed driver response
     * @throws createMessageErrorException when the driver class is missing
     */
    public function sendVerifyCode ()
    {
        $sms = $this->makeSms();
        return $sms->sendVerifyCode($this->mobile, $this->template, $this->params);
    }
    /**
     * Send the same message to one or more recipients.
     * @return mixed driver response
     * @throws createMessageErrorException when the driver class is missing
     */
    public function sendMessages ()
    {
        $sms = $this->makeSms();
        return $sms->sendMessages($this->mobile, $this->message, $this->params);
    }
    /**
     * Send per-recipient messages (mobiles[i] receives message[i]).
     * @return mixed driver response
     * @throws createMessageErrorException when the driver class is missing
     *
     */
    public function sendMessageGroup ()
    {
        $sms = $this->makeSms();
        return $sms->sendMessageGroup($this->mobile, $this->message, $this->params);
    }
    /**
     * Text-to-speech voice call. Only drivers that define voiceCall()
     * support this (Kavenegar — see Message interface note).
     * @return mixed driver response
     * @throws createMessageErrorException when the driver class is missing
     */
    public function voiceCall ()
    {
        $sms = $this->makeSms();
        return $sms->voiceCall($this->mobile, $this->message, $this->params);
    }
    /**
     * Resolve the driver class by name from the Drivers namespace.
     * @return mixed driver instance
     * @throws createMessageErrorException when the class does not exist
     */
    private function makeSms ()
    {
        $sms_class = 'Abolfazlrastegar\LaravelSms\Drivers\\' . $this->name_sms;
        if (class_exists($sms_class, true))
        {
            return new $sms_class;
        }
        throw createMessageErrorException::notClass($this->name_sms, 'Abolfazlrastegar\LaravelSms\Drivers');
    }
}
<file_sep>/src/Drivers/Melipayamak.php
<?php
namespace Abolfazlrastegar\LaravelSms\Drivers;
use Illuminate\Support\Facades\Http;
/**
 * Melipayamak driver — placeholder only.
 *
 * Every method is an unimplemented stub that returns null; selecting this
 * driver will not send anything.
 */
class Melipayamak implements Message
{
    /** Not implemented yet. */
    public function sendMessages ($mobile, $message, $params)
    {
        //
    }
    /** Not implemented yet. */
    public function sendMessageGroup($mobile = array(), $message = array(), $params = array())
    {
        // TODO: Implement groupSend() method.
    }
    /** Not implemented yet. */
    public function sendVerifyCode($mobile, $template, $params)
    {
        // TODO: Implement sendVerifyCode() method.
    }
}
<file_sep>/src/Drivers/Kavenegar.php
<?php
namespace Abolfazlrastegar\LaravelSms\Drivers;
use Illuminate\Support\Facades\Http;
/**
 * Kavenegar driver (REST API v1). All calls are made by interpolating the
 * parameters directly into the request URL.
 *
 * NOTE(review): none of the URL fragments are urlencode()d — messages with
 * spaces/&/# will produce malformed requests; confirm against the API docs.
 */
class Kavenegar implements Message
{
    /**
     * Send message one user or users
     * @param $mobile  string|array recipient number(s)
     * @param $message string text to send
     * @param $params  array optional: 'date', 'sender', 'type', 'localid'
     * @return mixed decoded JSON response body
     */
    public function sendMessages($mobile, $message, $params)
    {
        $receptor = $mobile;
        if (is_array($mobile))
        {
            $receptor = implode(',', $mobile);
        }
        $date = isset($params['date']) ? $params['date'] : null;
        $sender = isset($params['sender']) ? $params['sender'] : null;
        $type = isset($params['type']) ? $params['type'] : null;
        $localid = isset($params['localid']) ? $params['localid'] : null;
        return $this->requestHttp('https://api.kavenegar.com/v1/'. $this->setKey() .'/sms/send.json?receptor=' . $receptor . '&sender='. $sender . '&date='. $date . '&type='. $type . '&localid=' . $localid . '&message=' . $message);
    }
    /**
     * Send message group users (sendarray endpoint)
     * @param $mobile  array recipient numbers
     * @param $message array per-recipient texts
     * @param $params  array requires 'sender'; optional 'date', 'type', 'localid'
     * @return mixed decoded JSON response body
     *
     * NOTE(review): $mobile/$message/$params['sender'] are arrays here but are
     * concatenated straight into the URL, which stringifies them as "Array".
     * The sendarray endpoint expects JSON-array strings — verify this method
     * actually works before relying on it.
     */
    public function sendMessageGroup($mobile = array(), $message = array(), $params = array())
    {
        $date = isset($params['date']) ? $params['date'] : null;
        $type = isset($params['type']) ? $params['type'] : null;
        $localmessageids = isset($params['localid']) ? $params['localid'] : null;
        return $this->requestHttp('https://api.kavenegar.com/v1/'. $this->setKey() .'/sms/sendarray.json?receptor=' . $mobile . '&sender='. $params['sender'] . '&date='. $date . '&type='. $type . '&localmessageids=' . $localmessageids . '&message=' . $message);
    }
    /**
     * Send message verify code to user (lookup endpoint)
     * @param $mobile   string recipient number
     * @param $template string Kavenegar template name
     * @param $params   array 'token' required; optional 'token2', 'token3', 'type'
     * @return mixed decoded JSON response body
     *
     * NOTE(review): the token-count branches assume $params contains only
     * token keys; passing 'type' alongside a single token makes count() > 1
     * and references an undefined 'token2'.
     */
    public function sendVerifyCode($mobile, $template, $params)
    {
        $token = '&token=' . $params['token'];
        if (count($params) > 1) {
            if (count($params) > 2)
            {
                $token = '&token=' . $params['token'] . '&token2=' . $params['token2'] . '&token3=' . $params['token3'];
            }else
            {
                $token = '&token=' . $params['token'] . '&token2=' . $params['token2'];
            }
        }
        $type = isset($params['type']) ? $params['type'] : 'sms';
        return $this->requestHttp('https://api.kavenegar.com/v1/' . $this->setKey() . '/verify/lookup.json?receptor=' . $mobile . $token . '&type=' . $type . '&template=' . $template);
    }
    /**
     * Send message call for one user or users (text-to-speech)
     * @param $mobile  string|array recipient number(s)
     * @param $message string text to speak
     * @param $params  array optional: 'date', 'localid'
     * @return mixed decoded JSON response body
     */
    public function voiceCall ($mobile, $message, $params) {
        $receptor = $mobile;
        if (is_array($mobile))
        {
            $receptor = implode(',', $mobile);
        }
        $date = isset($params['date']) ? $params['date'] : null;
        $localid = isset($params['localid']) ? $params['localid'] : null;
        return $this->requestHttp('https://api.kavenegar.com/v1/' . $this->setKey() .'/call/maketts.json?receptor=' . $receptor. '&date='. $date . '&localid='. $localid .'&message=' . $message);
    }
    /**
     * Common request headers.
     * NOTE(review): list-style "Name: value" strings passed to
     * Http::withHeaders(), which expects name => value pairs — see Smsir.
     * @return string[]
     */
    private function setHeaders ()
    {
        return [
            'Accept: application/json',
            'charset: utf-8',
            'Content-Type: application/json',
        ];
    }
    /**
     * API key from config/sms.php.
     * @return \Illuminate\Config\Repository|\Illuminate\Contracts\Foundation\Application|mixed
     */
    private function setKey ()
    {
        return config('sms.drivers.Kavenegar.key');
    }
    /**
     * POST the prepared URL and decode the JSON body.
     * @param $url string fully-built endpoint URL
     * @return mixed
     *
     */
    private function requestHttp ($url)
    {
        $result = Http::withHeaders($this->setHeaders())->post($url);
        return json_decode($result->getBody()->getContents(), true);
    }
}
<file_sep>/README.md

<p align="center">
<a href="https://packagist.org/packages/abolfazlrastegar/laravel-sms"><img src="https://img.shields.io/packagist/dm/abolfazlrastegar/laravel-sms" alt="Total Downloads"></a>
<a href="https://packagist.org/packages/abolfazlrastegar/laravel-sms"><img src="https://img.shields.io/packagist/v/abolfazlrastegar/laravel-sms" alt="Latest Stable Version"></a>
<a href="https://packagist.org/packages/abolfazlrastegar/laravel-sms"><img src="https://img.shields.io/github/license/abolfazlrastegar/laravel-payments" alt="License"></a>
</p>
### Package Larave-sms
With this package you can use the capabilities of the sms.ir and Kavenegar SMS services.
### Install package laravel-sms
```bash
composer require abolfazlrastegar/laravel-sms
```
### Publish config
```bash
php artisan vendor:publish --provider="Abolfazlrastegar\LaravelSms\Providers\SmsServiceProvider" --tag="config"
```
### Docs drivers
<a href="https://apidocs.sms.ir/bulksmsv2.html">sms.ir</a>
<a href="https://kavenegar.com/rest.html#call-maketts">kavenegar</a>
[//]: # (<a href="https://www.melipayamak.com/api/">milepayamak</a>)
### Use method `sendVerifyCode`
```bash
// this model SMS system kavenegar
Sms::make('kavenegar')
->mobile('09105805770')
->template('454545')
->params(['token' => <PASSWORD>])
->sendVerifyCode();
```
### or
```bash
Sms::make()
->defaultSms()
->mobile('09105805770')
->template('454545')
->params(['token' => '<PASSWORD>', 'token2' => '<PASSWORD>', 'token3' => '<PASSWORD>'])
->sendVerifyCode();
```
### Use method `sendMessages`
```bash
Sms::make('kavenegar')
->mobile(['09105805772', '09105805772', '09105805772'])
->message('set message for send')
->params([
'date' => 'اختیاری',
'sender' => 'اختیاری',
'type' => 'اختیاری',
'localid' => 'اختیاری'
])
->sendMessages();
```
### or
```bash
Sms::make()
->defaultSms()
->mobile(['09105805772', '09105805772', '09105805772'])
->message('set message for send')
->params([
'date' => 'اختیاری',
'sender' => 'اختیاری',
'type' => 'اختیاری',
'localid' => 'اختیاری'
])
->sendMessages();
```
### Use method `sendMessageGroup`
```bash
Sms::make('kavenegar')
->mobile(['09105805772', '09105805772', '09105805772'])
->message(['set message for send1', 'set message for send2', 'set message for send3'])
->params([
'sender' => ['5455557', '987565423', '6322154'],
'date' => 'اختیاری',
'type' => 'اختیاری',
'localmessageids' => 'اختیاری',
])
->sendMessageGroup();
```
### or
```bash
Sms::make()
->defaultSms()
->mobile(['09105805772', '09105805772', '09105805772'])
->message(['set message for send1', 'set message for send2', 'set message for send3'])
->params([
'sender' => ['5455557', '987565423', '6322154'],
'date' => 'اختیاری',
'type' => 'اختیاری',
'localmessageids' => 'اختیاری',
])
->sendMessageGroup();
```
#
### Use method `voiceCall`
This method is currently supported only by the `kavenegar` driver.
```bash
Sms::make('kavenegar')
->message('set message for voice call')
->mobile(['09105805772', '09105805772', '09105805772'])
->voiceCall();
```
### or
```bash
Sms::make()
->defaultSms()
->message('set message for voice call')
->mobile(['09105805772', '09105805772', '09105805772'])
->voiceCall();
```
###
### Function Parameter
| Driver | Method | Parameter | Support |
|-------------|-------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
| kavenegar | params() | ['token' => 122254] // one parameter <br/>['token' => 122254, 'token2' => 54875, 'token3' => 54875] // multi parameter | Yes |
| Sms.ir | params() | ['name' => 'verify', 'value' => 45666] // send message code verify login <br/> [<br/>['name' => 'name_product', 'value' => 45666]<br/>['name' => 'price', 'value' => 5000]<br/>] // send message Factor buy product | Yes |
| Kavenegar | mobile() | '09105805770' // use for one user<br/> ['09105805770', '09105805770', '09105805770'] // Use for users <br/> | Yes |
| Sms.ir | mobile() | '09105805770' // use for one user<br/> ['09105805770', '09105805770', '09105805770'] // Use for users <br/> | Yes |
| Kavenegar | voiceCall() | | Yes |
| Sms.ir | voiceCall() | | No |
| Kavenegar | message() | 'set message for voice call'<br/> [<br/>'set message for send1',<br/> 'set message for send2',<br/> 'set message for send3'<br/>] | Yes |
| Sms.ir | message() | 'set message for user'<br/> [<br/>'set message for user1',<br/> 'set message for user2',<br/> 'set message for user3'<br/>] | Yes |
<file_sep>/tests/TestSms.php
<?php
namespace Tests;
use Abolfazlrastegar\LaravelSms\Sms;
use PHPUnit\Framework\TestCase;
/**
 * Live integration tests for the Kavenegar driver.
 *
 * These perform real HTTP calls, so they need network access and a valid
 * API key in config.
 *
 * FIX: every test previously asserted assertEquals($result, $result) — a
 * tautology that can never fail. Each now asserts the driver returned a
 * decoded (non-null) response body.
 */
class TestSms extends TestCase
{
    /** @test */
    public function testSendVerifyCode ()
    {
        $result = Sms::make('Kavenegar')
            ->mobile('09105805777')
            ->template(548762)
            ->params(['token' => 122254])
            ->sendVerifyCode();
        $this->assertNotNull($result);
    }
    /** @test */
    public function testSendMessageGroup ()
    {
        $result = Sms::make('Kavenegar')
            ->mobile(['09105805772', '09105805772', '09105805772'])
            ->message(['set message for send1', 'set message for send2', 'set message for send3'])
            ->params([
                'sender' => ['5455557', '987565423', '6322154'],
                'date' => 'اختیاری',
                'type' => 'اختیاری',
                'localmessageids' => 'اختیاری',
            ])
            ->sendMessageGroup();
        $this->assertNotNull($result);
    }
    /** @test */
    public function testSendMessages ()
    {
        $result = Sms::make('Kavenegar')
            ->mobile(['09105805772', '09105805772', '09105805772'])
            ->message('set message for send')
            ->params([
                'sender' => '6322154',
                'date' => 'اختیاری',
                'type' => 'اختیاری',
                'localmessageids' => 'اختیاری',
            ])
            ->sendMessages();
        $this->assertNotNull($result);
    }
    /** @test */
    public function testVoiceCall ()
    {
        $result = Sms::make('Kavenegar')
            ->message('سلام')
            ->mobile('09105805772')
            ->voiceCall();
        $this->assertNotNull($result);
    }
}
| 63dacf3f9f52610ab65444160cbe5ef191a0a9fa | [
"Markdown",
"PHP"
] | 9 | PHP | abolfazlrastegar/laravel-sms | 1eaacf9d163c61be6c5f7fc284a3236d882b8c87 | de6e55b1b0b14c45d082f9be4a362ed86676bb3b |
refs/heads/master | <repo_name>davidferland/ScreenScraper<file_sep>/Program.cs
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net;
using System.Runtime.Serialization.Json;
using System.Threading.Tasks;
using HtmlAgilityPack;
using System.Linq;
using System.IO;
using ScreenScraper.Model;
namespace screenScraper
{
    // Scrapes product listings from onthewateroutfitters.com: collects product
    // links from the collection pages, then visits each product page and pulls
    // title, description and image metadata.
    //
    // NOTE(review): collectionPageCount is hard-coded to 1, so only the first
    // collection page is scraped. DownloadImage is defined but never called.
    class Program
    {
        static void Main(string[] args)
        {
            // Bridge sync Main to the async workflow.
            MainAsync(args).ConfigureAwait(false).GetAwaiter().GetResult();
        }
        async static Task MainAsync(string[] args)
        {
            // {0} is the 1-based collection page number.
            var collectionPage = new string("https://www.onthewateroutfitters.com/collection/page{0}.html");
            int collectionPageCount = 1;
            HttpClient client = new HttpClient();
            // title -> href; SortedList also deduplicates by title.
            SortedList<string, string> productList = new SortedList<string, string>();
            for (int pageNumber = 1; pageNumber <= collectionPageCount; pageNumber++)
            {
                var response = await client.GetAsync(string.Format(collectionPage, pageNumber));
                var pageContents = await response.Content.ReadAsStringAsync();
                HtmlDocument pageDocument = new HtmlDocument();
                pageDocument.LoadHtml(pageContents);
                // Anchors whose markup carries a title attribute are product links.
                var productlinks = pageDocument.DocumentNode.SelectNodes("//a")
                    .Where(url => url.InnerHtml.Contains("title"))
                    .ToList();
                foreach (var link in productlinks)
                {
                    if (!productList.ContainsKey(link.Attributes["title"].Value))
                    {
                        productList.Add(link.Attributes["title"].Value,link.Attributes["href"].Value);
                    }
                }
            }
            List<Product> products = new List<Product>();
            foreach (var productPage in productList)
            {
                var response = await client.GetAsync(productPage.Value);
                var pageContents = await response.Content.ReadAsStringAsync();
                HtmlDocument pageDocument = new HtmlDocument();
                pageDocument.LoadHtml(pageContents);
                var product = new Product();
                product.Title = pageDocument.DocumentNode.SelectNodes("//h1")
                    .First()
                    .InnerText;
                // page info active, get the HTML describing the product
                if (pageDocument.DocumentNode.SelectNodes("//div[@class='page info active']/ul") != null)
                {
                    product.Description = pageDocument.DocumentNode.SelectNodes("//div[@class='page info active']/ul")
                        .First()
                        .InnerHtml;
                }
                // Images
                var productImages = pageDocument.DocumentNode.SelectNodes("//div[@class='images']/a")
                    .ToList();
                product.Images = new List<Image>();
                //Console.WriteLine("Title: " + product.Title);
                //Console.WriteLine("Description: " + product.Description);
                foreach (var img in productImages)
                {
                    Console.WriteLine("=======================================================");
                    Console.WriteLine(img.InnerHtml);
                    // NOTE(review): "//img" on a child node still searches the WHOLE
                    // document in HtmlAgilityPack; ".//img" was probably intended,
                    // so every iteration likely returns the document's first <img>.
                    var firstImage = img.SelectNodes("//img").First();
                    var image = new Image();
                    image.ImageId = img.Attributes["data-image-id"].Value;
                    image.Title = firstImage.Attributes["alt"].Value;
                    image.Src = firstImage.Attributes["src"].Value; ;
                    image.Alt = firstImage.Attributes["alt"].Value; ;
                    product.Images.Add(image);
                    Console.WriteLine("id: "+image.ImageId);
                    Console.WriteLine("title: "+image.Title);
                    Console.WriteLine("alt: "+image.Alt);
                }
                // <label for="product_configure_option_color"
                // Colors
                // <label for="product_configure_option_size"
                // Sizes
                // Categories
            }
            Console.ReadLine();
        }
        // Helper for saving an image locally; currently unused by MainAsync.
        private static void DownloadImage(string folderImagesPath, Uri url, WebClient webClient)
        {
            try
            {
                webClient.DownloadFile(url, Path.Combine(folderImagesPath, Path.GetFileName(url.ToString())));
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
    }
}
<file_sep>/Model/Product.cs
namespace ScreenScraper.Model
{
    // Scraped product record. Collections are not initialized here; callers
    // (see Program.MainAsync) assign them before use.
    public class Product
    {
        public long Id { get; set; }
        public string Title { get; set; }
        // Raw inner HTML of the product description list.
        public string Description { get; set; }
        public System.Collections.Generic.List<string> Colors { get; set; }
        public System.Collections.Generic.List<string> Size { get; set; }
        public System.Collections.Generic.List<string> Categories { get; set; }
        public System.Collections.Generic.List<Image> Images { get; set; }
        public string Availability { get; set; }
    }
} | 25512ebbc88c991f8378de9c51974c9c85ea589c | [
"C#"
] | 2 | C# | davidferland/ScreenScraper | f117d7698ab789e16266314c934adce869583b1c | 1d41d60063ede95d40907c9c9e52a5467d8ae6d3 |
refs/heads/master | <repo_name>YordanPetkov/TicTacToe-Game<file_sep>/Tic Tac Toe GAME/data/TicTacToeGamev2.0.cpp
#include<iostream>
#include<string>
#include<string.h>
#include<windows.h>
using namespace std;
// Per-player records; slots 1 and 2 are used, slot 0 is padding.
string nickname[3],levelpl[3],rankpl[3];
// plt[1]/plt[2]: each player's board symbol (assigned elsewhere — not visible
// in this part of the file); swapped between rounds by checkgame().
char plt[3];
// cell[1..3][1..3]: board (0 empty, 1 player 1, 2 player 2); win[]: round wins;
// flag/flag1: first-run markers; x1,y1: last human move; xbot,ybot: last bot
// move; players: 1- or 2-player mode; quit: set when a player stops playing.
int cell[5][5],win[3],flag=1,flag1=1,x1,y1,xbot,ybot,players,quit=0;
void swapplayers()
{
string c;
int d;
c=nickname[1];
nickname[1]=nickname[2];
nickname[2]=c;
c=levelpl[1];
levelpl[1]=levelpl[2];
levelpl[2]=c;
c=rankpl[1];
rankpl[1]=rankpl[2];
rankpl[2]=c;
d=win[1];
win[1]=win[2];
win[2]=d;
}
// Draw the 3x3 board. Cell codes: 1 -> player 1's symbol, 2 -> player 2's
// symbol, 0 -> blank.
void visualScore()
{
    for(int i=1;i<=3;i++)
    {
        cout<<"                                  |";
        for(int j=1;j<=3;j++)
        {
            if(cell[i][j]==1){cout<<plt[1];}
            if(cell[i][j]==2){cout<<plt[2];}
            if(cell[i][j]==0){cout<<" ";}
            cout<<"|";
        }
        cout<<endl;
        cout<<"                                  -------"<<endl;
    }
}
// Clear the console and redraw the full single-player screen: banner,
// level/rank legend, the board (visualScore) and the current player's info,
// finishing with the coordinate prompt.
void visualGame1pl(string Level , string Rank , string Nickname ,char symbol)
{
    system("CLS");
    cout<<"     =============================================================================="<<endl;
    cout<<"                          TicTacToe Game <TTT> v2.0 2016"<<endl;
    cout<<"     =============================================================================="<<endl;
    cout<<"                                    Have Fun !"<<endl;
    cout<<" Available levels : 5                                      Available ranks : 5"<<endl;
    cout<<"---------------------                                      -------------------"<<endl;
    cout<<"Level 1: Easy                                              Rank 1 : ~-Newbie-~"<<endl;
    cout<<"Level 2: Normal                                            Rank 2 : ~Amateur!~"<<endl;
    cout<<"Level 3: Hard                                              Rank 3 : ~Hardcore~"<<endl;
    cout<<"Level 4: Expert                 Version : TTT Game v2.0    Rank 4 : Elite Pack"<<endl;
    cout<<"Level 5: Professional           Author  : Yordan Petkov    Rank 5 : TTT Master"<<endl;
    cout<<"---------------------                -------               -------------------"<<endl;
    visualScore();
    cout<<"                                  Game Score "<<endl;
    cout<<"      G A M E               -Level : "<<Level<<"                  V E R S I O N"<<endl;
    cout<<"  P R O G R A M M I N G     -Rank : "<<Rank<<"                      2 . 0"<<endl;
    cout<<"        B Y                 -Ninkname : "<<Nickname<<"       "<<endl;
    cout<<"                            -SYMBOL : "<<symbol<<endl;
    cout<<"                       "<<endl;
    cout<<"   S h a d o w              Include coordinates : ";
}
void space(int nick1,int nick2)
{
int n;
n=70-nick1-nick2;
for(int i=1;i<=n;i++)
cout<<" ";
}
// Clear the console and redraw the two-player screen: banner, both players'
// nicknames/wins/ranks side by side (aligned via space()), the board and the
// active player's info, finishing with the coordinate prompt.
void visualGame2pl(string Level , string Rank , string Nickname ,char symbol)
{
    system("CLS");
    cout<<"     =============================================================================="<<endl;
    cout<<"                          TicTacToe Game <TTT> v2.0 2016"<<endl;
    cout<<"     =============================================================================="<<endl;
    cout<<"                                    Have Fun !"<<endl;
    cout<<" "<<nickname[1];
    space(nickname[1].size(),nickname[2].size());
    cout<<nickname[2]<<endl;
    cout<<"---------------------                                      -------------------"<<endl;
    cout<<"  WINS : "<<win[1]<<"                                                    WINS : "<<win[2]<<endl;
    cout<<"                                             "<<endl;
    cout<<"  Rank : "<<rankpl[1]<<"                                      Rank :"<<rankpl[2]<<endl;
    cout<<"                                Version : TTT Game v2.0    "<<endl;
    cout<<"  SYMBOL : "<<plt[1]<<"                     Author  : Yordan Petkov    SYMBOL : "<<plt[2]<<endl;
    cout<<"---------------------                -------               -------------------"<<endl;
    visualScore();
    cout<<"                                  Game Score "<<endl;
    cout<<"      G A M E               -Level : "<<Level<<"                  V E R S I O N"<<endl;
    cout<<"  P R O G R A M M I N G     -Rank : "<<Rank<<"                      2 . 0"<<endl;
    cout<<"        B Y                 -Ninkname : "<<Nickname<<"       "<<endl;
    cout<<"                            -SYMBOL : "<<symbol<<endl;
    cout<<"                       "<<endl;
    cout<<"   S h a d o w              Include coordinates : ";
}
// Reset the playable 3x3 region of the board to "empty" before a new round.
// FIX: removed a redundant `cell[1][1]=0;` that ran on every inner iteration
// (it re-cleared the same cell nine times).
void cleargame()
{
    for(int z1=1;z1<=3;z1++)
        for(int z2=1;z2<=3;z2++)
        {
            cell[z1][z2]=0;
        }
}
// End-of-round bookkeeping for two-player mode.
// Returns 1 or 2 when that player has reached 5 round wins (match over),
// otherwise 0. Side effects: swaps the two players' board symbols for the
// next round and promotes each player's level/rank label by win count.
int checkgame()
{
    char c;
    if(win[1]>4)return 1;
    if(win[2]>4)return 2;
    // alternate symbols between rounds
    c=plt[1];
    plt[1]=plt[2];
    plt[2]=c;
    // promotion table: wins 1..4 -> Normal/Hard/Expert/Professional
    if(win[1]==1){levelpl[1]="2 <Normal>";rankpl[1]="~Amateur!~";}
    if(win[2]==1){levelpl[2]="2 <Normal>";rankpl[2]="~Amateur!~";}
    if(win[1]==2){levelpl[1]="3 <Hard>";rankpl[1]="~Hardcore~";}
    if(win[2]==2){levelpl[2]="3 <Hard>";rankpl[2]="~Hardcore~";}
    if(win[1]==3){levelpl[1]="4 <Expert>";rankpl[1]="Elite Pack";}
    if(win[2]==3){levelpl[2]="4 <Expert>";rankpl[2]="Elite Pack";}
    if(win[1]==4){levelpl[1]="5 <Professional>";rankpl[1]="TTT Master";}
    if(win[2]==4){levelpl[2]="5 <Professional>";rankpl[2]="TTT Master";}
    return 0;
}
// Single-player variant of checkgame(): returns 1 when the human has
// reached 5 wins, otherwise promotes their level/rank label and returns 0.
// (No symbol swap in this mode.)
int checkgame1pl()
{
    if(win[1]>4)return 1;
    if(win[1]==1){levelpl[1]="2 <Normal>";rankpl[1]="~Amateur!~";}
    if(win[1]==2){levelpl[1]="3 <Hard>";rankpl[1]="~Hardcore~";}
    if(win[1]==3){levelpl[1]="4 <Expert>";rankpl[1]="Elite Pack";}
    if(win[1]==4){levelpl[1]="5 <Professional>";rankpl[1]="TTT Master";}
    return 0;
}
// True when `player` (1 or 2) owns a full row, column or diagonal of the
// 3x3 board stored in the global `cell` array.
bool Win(int player)
{
    for (int k = 1; k <= 3; k++)
    {
        // row k
        if (cell[k][1] == player && cell[k][2] == player && cell[k][3] == player) return true;
        // column k
        if (cell[1][k] == player && cell[2][k] == player && cell[3][k] == player) return true;
    }
    // main diagonal
    if (cell[1][1] == player && cell[2][2] == player && cell[3][3] == player) return true;
    // anti-diagonal
    if (cell[3][1] == player && cell[2][2] == player && cell[1][3] == player) return true;
    return false;
}
// Final screen for single-player mode: clears the console, awards the
// special "UNBELIVEBLE MASTER" rank when the human (player 1) won, prints
// the statistics and the restart/quit instructions.
void gameover1pl(int winner)
{
    system("CLS");
    if(winner==1)rankpl[1]="UNBELIVEBLE MASTER";
    cout<<"     =============================================================================="<<endl;
    cout<<"                          TicTacToe Game <TTT> v2.0 2016"<<endl;
    cout<<"     =============================================================================="<<endl;
    cout<<endl;
    cout<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                                  GAME OVER"<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                              GAME STATISTIC"<<endl;
    cout<<"                       Nickname : "<<nickname[1]<<endl;
    cout<<"                       Level :"<<levelpl[1]<<endl;
    cout<<"                       Rank : "<<rankpl[1]<<endl;
    cout<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                       THE WINNER IS -----> "<<nickname[winner]<<endl;
    cout<<endl;
    cout<<"              THANKS FOR PLAYING TicTacToe GAME v2.0 2016"<<endl;
    cout<<"                               AUTHOR"<<endl;
    cout<<"                            Yordan Petkov"<<endl;
    cout<<"                               Shadow"<<endl;
    cout<<endl;cout<<endl;
    cout<<endl;cout<<endl;
    cout<<"                 To restart the game write : 'restart'"<<endl;
    cout<<"                 To quit the game write : 'quit'"<<endl;
}
// Final screen for two-player mode: statistics for both players, the winner
// line and the restart/quit instructions.
// NOTE(review): unlike gameover1pl() this does NOT call system("CLS") first,
// so the text is appended below the last game screen — confirm whether the
// missing clear is intentional.
void gameover(int winner)
{
    cout<<"     =============================================================================="<<endl;
    cout<<"                          TicTacToe Game <TTT> v2.0 2016"<<endl;
    cout<<"     =============================================================================="<<endl;
    cout<<endl;
    cout<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                                  GAME OVER"<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                              GAME STATISTIC"<<endl;
    cout<<"                       Nickname : "<<nickname[1]<<endl;
    cout<<"                       Level :"<<levelpl[1]<<endl;
    cout<<"                       Rank : "<<rankpl[1]<<endl;
    cout<<endl;
    cout<<"                       Nickname : "<<nickname[2]<<endl;
    cout<<"                       Level :"<<levelpl[2]<<endl;
    cout<<"                       Rank : "<<rankpl[2]<<endl;
    cout<<endl;
    cout<<endl;
    cout<<"                       THE WINNER IS -----> "<<nickname[winner]<<endl;
    cout<<endl;
    cout<<"              THANKS FOR PLAYING TicTacToe GAME v2.0 2016"<<endl;
    cout<<"                               AUTHOR"<<endl;
    cout<<"                            Yordan Petkov"<<endl;
    cout<<"                               Shadow"<<endl;
    cout<<endl;cout<<endl;
    cout<<endl;cout<<endl;
    cout<<"                 To restart the game write : 'restart'"<<endl;
    cout<<"                 To quit the game write : 'quit'"<<endl;
}
// Prompt the user after a game / round.
//   flag==1: full-game prompt  — accepts "restart" (true) or "quit" (false).
//   flag==2: end-of-round prompt — accepts "play" (true) or "quit" (false).
// Any other word re-prompts recursively.
// FIX: the recursive re-prompt calls discarded their result and let control
// fall off the end of a bool function (undefined behavior); they now return
// the recursive result. An unknown flag value defensively returns false.
bool restart(int flag)
{
    if(flag==1)
    {
        string restartg;
        cout<<"                              ";
        cin>>restartg;
        if(restartg=="restart"){system("CLS");return 1;}
        if(restartg=="quit"){return 0;}
        return restart(1);
    }
    if(flag==2)
    {
        cout<<endl;cout<<endl;
        cout<<"                 Level completed , to continue write 'play' "<<endl;
        cout<<"                 to quit write 'quit' "<<endl;
        string restartg;
        cout<<"                              ";
        cin>>restartg;
        if(restartg=="play"){system("CLS");return 1;}
        if(restartg=="quit"){return 0;}
        return restart(2);
    }
    return false;
}
// Read and apply one move for player `pl`: redraw the screen, read 1-based
// board coordinates, re-prompt (recursively) on invalid or occupied cells,
// then mark the cell and remember the move in the globals x1/y1.
// FIX: after each recursive re-prompt the function previously fell through
// and kept executing with the rejected coordinates — indexing cell[x][y]
// out of bounds for x<1/x>3 (undefined behavior) and overwriting the move
// made inside the recursion. Each re-prompt now returns immediately.
void gameofplayer1(int pl)
{
    if(players==1)visualGame1pl(levelpl[pl],rankpl[pl],nickname[pl],plt[pl]);
    if(players==2)visualGame2pl(levelpl[pl],rankpl[pl],nickname[pl],plt[pl]);
    int x,y;
    cin>>x>>y;
    if(x<1 || y<1){cout<<"The coordinates must be bigger than 0"<<endl;gameofplayer1(pl);return;}
    if(x>3 || y>3){cout<<"The coordinates must be smaller than 4"<<endl;gameofplayer1(pl);return;}
    if(cell[x][y]==1){cout<<"There is X"<<endl;gameofplayer1(pl);return;}
    if(cell[x][y]==2){cout<<"There is O"<<endl;gameofplayer1(pl);return;}
    if(cell[x][y]==0)cell[x][y]=pl;
    x1=x;
    y1=y;
}
// Two-player game loop: on first entry (flag==1) asks both nicknames, then
// alternates up to 9 turns between the players. After a round it updates
// win counters, asks whether to continue, and either recurses into the next
// round (after clearing the board and swapping players) or shows the final
// game-over screen once someone reaches 5 wins (checkgame()).
void twoplayers()
{
    int check;
    players=2;
    string nick2;
    system("CLS");
    if(flag==1)
    {
        cout<<"        W E L L C O M E  T o  T i c T a c T o e  G A M E  v 2 . 0 "<<endl;
        cout<<"                        author : Yordan Petkov"<<endl;cout<<endl;cout<<endl;cout<<endl;
        cout<<"               Player 1 write your Nickname : ";cin>>nickname[1];
        cout<<"               Player 2 write your Nickname : ";cin>>nick2;
        nickname[2]=nick2;
    }
    int pl=1;
    for(int turn=1;turn<=9;turn++)
    {
        // odd turns -> player 1, even turns -> player 2
        if(turn%2==0)pl=2;
        if(turn%2==1)pl=1;
        visualGame2pl(levelpl[pl],rankpl[pl],nickname[pl],plt[pl]);
        gameofplayer1(pl);
        system("CLS");
        // redraw from the *other* player's perspective before win checks
        if(pl==1)visualGame2pl(levelpl[2],rankpl[2],nickname[2],plt[2]);
        if(pl==2)visualGame2pl(levelpl[1],rankpl[1],nickname[1],plt[1]);
        if(Win(1)){cout<<endl;win[1]++;cout<<endl;cout<<"                             "<<nickname[1]<<" WIN"<<endl;if(!restart(2))quit=1;;break;}
        if(Win(2)){cout<<endl;win[2]++;cout<<endl;cout<<"                             "<<nickname[2]<<" WIN"<<endl;if(!restart(2))quit=1;;break;}
    }
    if(quit==0)
    {
        check=checkgame();
        if(check==1 || check==2)gameover(check);
        else {flag=2;cleargame();swapplayers();twoplayers();}
    }
}
// Easiest bot: take the first empty cell in row-major order.
// Returns 0 in all cases.
// FIX: when the board was full, control fell off the end of a non-void
// function (undefined behavior); a final return 0 covers that path.
int botEasy()
{
    for(int i=1;i<=3;i++)
        for(int j=1;j<=3;j++)
            if(cell[i][j]==0){cell[i][j]=2;return 0;}
    return 0;
}
// "Normal" difficulty bot; `t` is the bot's move number within the round
// (it moves second, on even turns). The bot plays symbol code 2.
//   t==1: mirror the human's opening (or take a corner vs a centre opening).
//   t==2: extend from the bot's first move along a row/column or diagonal.
//   t==3: win if possible, else block the human, else first empty cell.
//   t==4: first empty cell.
// Side effects: writes the chosen cell and (t==1) records it in xbot/ybot.
// FIXES: (a) `xbot+ybot%2==0` parsed as xbot+(ybot%2) because % binds
// tighter than + — parenthesized to the intended parity test, matching the
// explicit `(x1+y1)%2` form used in botHard(); (b) added a final return so
// no path falls off the end of a non-void function (undefined behavior).
int botNormal(int t)
{
    if(t==1)
    {
        if(x1!=2&&y1!=2)
        {
            // human took a corner: take the opposite corner
            xbot=4-x1;
            ybot=4-y1;
            cell[xbot][ybot]=2;
            return 0;
        }
        else
        {
            if(x1!=2 || y1!=2)
            {
                // human took an edge: take the opposite edge
                if(x1==2)
                {
                    ybot=4-y1;
                    xbot=2;
                    cell[xbot][ybot]=2;
                    return 0;
                }
                if(y1==2)
                {
                    xbot=4-x1;
                    ybot=2;
                    cell[xbot][ybot]=2;
                    return 0;
                }
            }
            if(x1==2 && y1==2)
            {
                // human took the centre: take a corner
                xbot=1;
                ybot=1;
                cell[xbot][ybot]=2;
                return 0;
            }
        }
    }
    if(t==2)
    {
        if((xbot+ybot)%2==0)  // FIX: was xbot+ybot%2==0
        {
            // bot sits on a corner/centre: extend along its row or column
            if(cell[xbot][2]==0){cell[xbot][2]=2;return 0;}
            if(cell[2][ybot]==0){cell[2][ybot]=2;return 0;}
        }
        else
        {
            // bot sits on an edge: extend along a diagonal through it
            if(xbot!=2)
            {
                if(cell[xbot][xbot]==0){cell[xbot][xbot]=2;return 0;}
                if(cell[xbot][4-xbot]==0){cell[xbot][4-xbot]=2;return 0;}
            }
            else
            {
                if(cell[ybot][ybot]==0){cell[ybot][ybot]=2;return 0;}
                if(cell[4-ybot][ybot]==0){cell[4-ybot][ybot]=2;return 0;}
            }
        }
    }
    if(t==3)
    {
        // take a winning cell if one exists (probe, test, undo)
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        // otherwise block the human's winning cell
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        // otherwise any empty cell
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    if(t==4)
    {
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    return 0;  // FIX: guarantee a return value on every path
}
// Hard-difficulty bot move.  Same globals and conventions as botNormal:
// t is the bot's move number (1..4), cell[][] the board, (x1,y1) the
// human's last move, (xbot,ybot) the bot's first move.  Returns 0 once a
// cell has been claimed.
int botHard(int t)
{
    if(t==1)
    {
        // First bot move: build next to the human's opening cell.
        if((x1+y1)%2==0)
        {
            // Human opened on corner/centre: take a cell in its row/column.
            if(cell[x1][2]==0){cell[x1][2]=2;return 0;}
            if(cell[2][y1]==0){cell[2][y1]=2;return 0;}
        }
        else
        {
            // Fix: the original branched on `xbot!=2`, but on the bot's
            // FIRST move xbot is stale (left over from a previous game);
            // the decision must depend on the human's move x1, mirroring
            // the xbot-based logic used at t==2.
            if(x1!=2)
            {
                if(cell[x1][x1]==0){cell[x1][x1]=2;return 0;}
                // Fix: the original checked cell[x1][4-xbot] but then set
                // cell[x1][4-x1] — a check/assign mismatch from copy-paste;
                // both must use 4-x1.
                if(cell[x1][4-x1]==0){cell[x1][4-x1]=2;return 0;}
            }
            else
            {
                if(cell[y1][y1]==0){cell[y1][y1]=2;return 0;}
                if(cell[4-y1][y1]==0){cell[4-y1][y1]=2;return 0;}
            }
        }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    if(t==2)
    {
        // Fix: `xbot+ybot%2==0` parsed as `xbot+(ybot%2)` (never 0 for
        // coordinates 1..3); intent is the parity of the sum, as written
        // correctly at t==1 above.
        if((xbot+ybot)%2==0)
        {
            if(cell[xbot][2]==0){cell[xbot][2]=2;return 0;}
            if(cell[2][ybot]==0){cell[2][ybot]=2;return 0;}
        }
        else
        {
            if(xbot!=2)
            {
                if(cell[xbot][xbot]==0){cell[xbot][xbot]=2;return 0;}
                if(cell[xbot][4-xbot]==0){cell[xbot][4-xbot]=2;return 0;}
            }
            else
            {
                if(cell[ybot][ybot]==0){cell[ybot][ybot]=2;return 0;}
                if(cell[4-ybot][ybot]==0){cell[4-ybot][ybot]=2;return 0;}
            }
        }
        // Block an imminent opponent win.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
    }
    if(t==3)
    {
        // Block an opponent win, else take the first free cell.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    if(t==4)
    {
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    // Fix: avoid falling off the end of a non-void function (UB).
    return 0;
}
// Expert-difficulty bot move.  Same conventions as botNormal/botHard.
// Move 1 mirrors the opponent's opening; moves 2-4 always check for an
// immediate win, then a necessary block, before developing/taking cells.
int botExpert(int t)
{
    if(t==1)
    {
        // Opponent opened on a corner: take the opposite corner.
        if(x1!=2&&y1!=2)
        {
            xbot=4-x1;
            ybot=4-y1;
            cell[xbot][ybot]=2;
            return 0;
        }
        else
        {
            // Opponent opened on an edge cell: mirror across the centre.
            if(x1!=2 || y1!=2)
            {
                if(x1==2)
                {
                    ybot=4-y1;
                    xbot=2;
                    cell[xbot][ybot]=2;
                    return 0;
                }
                if(y1==2)
                {
                    xbot=4-x1;
                    ybot=2;
                    cell[xbot][ybot]=2;
                    return 0;
                }
            }
            // Opponent opened on the centre: answer with a corner.
            if(x1==2 && y1==2)
            {
                xbot=1;
                ybot=1;
                cell[xbot][ybot]=2;
                return 0;
            }
        }
    }
    if(t==2)
    {
        // Win immediately if possible.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        // Block an imminent opponent win.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        // Fix: `xbot+ybot%2==0` parsed as `xbot+(ybot%2)` and was never 0
        // for coordinates 1..3; the intent (as in botHard t==1) is the
        // parity of the sum.
        if((xbot+ybot)%2==0)
        {
            if(cell[xbot][2]==0){cell[xbot][2]=2;return 0;}
            if(cell[2][ybot]==0){cell[2][ybot]=2;return 0;}
        }
        else
        {
            if(xbot!=2)
            {
                if(cell[xbot][xbot]==0){cell[xbot][xbot]=2;return 0;}
                if(cell[xbot][4-xbot]==0){cell[xbot][4-xbot]=2;return 0;}
            }
            else
            {
                if(cell[ybot][ybot]==0){cell[ybot][ybot]=2;return 0;}
                if(cell[4-ybot][ybot]==0){cell[4-ybot][ybot]=2;return 0;}
            }
        }
    }
    if(t==3)
    {
        // Win, else block, else take the first free cell.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    if(t==4)
    {
        // Same win / block / take-anything ladder as move 3.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
    }
    // Fix: avoid falling off the end of a non-void function (UB).
    return 0;
}
// protip records which opening book the bot chose on its first move
// (1: human opened corner, 2: human opened centre, 3: human opened edge);
// (x11,y11) caches the human's FIRST move so later moves can reason about
// the whole opening, while (x1,y1) always holds the human's latest move.
int protip,x11,y11;
// Professional-difficulty bot move.  t is the bot's move number (1..4);
// bot plays mark 2, human mark 1, on the global cell[][] board.
// Plays a small opening book keyed on the human's first move, then falls
// back to win-if-possible / block-if-necessary logic.
int botProfessional (int t)
{
    if(t==1)
    {
        // Remember the human's opening move for later turns.
        x11=x1;
        y11=y1;
        if((x1+y1)%2==0 && x1!=2 && y1!=2)
        {
            // Human opened on a corner -> bot takes the centre.
            protip=1;
            xbot=2;
            ybot=2;
            cell[2][2]=2;
        }
        else
        if(x1==2 && y1==2)
        {
            // Human opened on the centre -> bot takes a corner.
            protip=2;
            xbot=1;
            ybot=1;
            cell[1][1]=2;
        }
        else
        {
            // Human opened on an edge -> bot takes the centre.
            protip=3;
            xbot=2;
            ybot=2;
            cell[2][2]=2;
        }
    }
    if(t==2)
    {
        if(protip==1)
        {
            // Block an imminent human win first.
            for(int i=1;i<=3;i++)
                for(int j=1;j<=3;j++)
                {
                    if(cell[i][j]==0)
                    {
                        cell[i][j]=1;
                        if(Win(1)){cell[i][j]=2;return 0;}
                        cell[i][j]=0;
                    }
                }
            if((x1+y1+x11+y11)%2!=0){cell[4-x11][4-y11]=2;return 0;}
            // NOTE(review): the `return 0;` after the else below is a
            // SEPARATE statement, not part of the else branch; harmless
            // here (both paths return 0) but easy to misread.
            else cell[2][1]=2;return 0;
        }
        if(protip==2)
        {
            for(int i=1;i<=3;i++)
                for(int j=1;j<=3;j++)
                {
                    if(cell[i][j]==0)
                    {
                        cell[i][j]=1;
                        if(Win(1)){cell[i][j]=2;return 0;}
                        cell[i][j]=0;
                    }
                }
            // Extend from the bot's first corner along the column.
            cell[4-xbot][ybot]=2;
            return 0;
        }
        if(protip==3)
        {
            for(int i=1;i<=3;i++)
                for(int j=1;j<=3;j++)
                {
                    if(cell[i][j]==0)
                    {
                        cell[i][j]=1;
                        if(Win(1)){cell[i][j]=2;return 0;}
                        cell[i][j]=0;
                    }
                }
            // Respond based on where the human's first and second edge
            // moves sit relative to each other.
            if((x1+y1)%2!=0&&(x11+y11)%2!=0)
            {
                if(y1!=y11&&x1!=x11)
                {
                    if(cell[x1][y11]==0){cell[x1][y11]=2;return 0;}
                    if(cell[x11][y1]==0){cell[x11][y1]=2;return 0;}
                }
                else
                {
                    cell[y1][x1]=2;
                    return 0;
                }
            }
            else
            {
                if(x11==2){cell[x1][y11]=2;return 0;}
                if(y11==2){cell[x11][y1]=2;return 0;}
            }
        }
    }
    if(t==3)
    {
        // Take a winning move if one exists.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        // Otherwise block the human.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        if(protip==1)
        {
            // If the human holds any edge cell, take the corner opposite
            // the human's opening corner; otherwise pick a corner that
            // forks around the human's latest edge move.
            for(int i=1;i<=3;i++)
                for(int j=1;j<=3;j++)
                {
                    if((i+j)%2!=0 && cell[i][j]==2 ){cell[4-x11][4-y11]=2;return 0;}
                }
            if(x1==1&&y1==2){if(cell[3][1]==0){cell[3][1]=2;return 0;}if(cell[3][3]==0){cell[3][3]=2;return 0;}}
            if(x1==2&&y1==1){if(cell[1][3]==0){cell[1][3]=2;return 0;}if(cell[3][3]==0){cell[3][3]=2;return 0;}}
            if(x1==2&&y1==3){if(cell[1][1]==0){cell[1][1]=2;return 0;}if(cell[3][1]==0){cell[3][1]=2;return 0;}}
            if(x1==3&&y1==2){if(cell[1][1]==0){cell[1][1]=2;return 0;}if(cell[1][3]==0){cell[1][3]=2;return 0;}}
        }
        if(protip==2)
        {
            cell[3][1]=2;
            return 0;
        }
        if(protip==3)
        {
            // Prefer the corner adjacent to the human's occupied edge when
            // all corners are still free; otherwise any free corner.
            if(cell[1][1]==0 && cell[1][3]==0 && cell[3][1]==0 && cell[3][3]==0)
            {
                if(cell[1][2]==2 || cell[2][1]==2){cell[1][1]=2;return 0;}
                if(cell[2][3]==2 || cell[3][2]==2){cell[3][3]=2;return 0;}
            }
            if(cell[1][1]==0){cell[1][1]=2;return 0;}
            if(cell[1][3]==0){cell[1][3]=2;return 0;}
            if(cell[3][1]==0){cell[3][1]=2;return 0;}
            if(cell[3][3]==0){cell[3][3]=2;return 0;}
        }
    }
    if(t==4)
    {
        // Win, else block, else take any free cell.
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=2;
                    if(Win(2))return 0;
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0)
                {
                    cell[i][j]=1;
                    if(Win(1)){cell[i][j]=2;return 0;}
                    cell[i][j]=0;
                }
            }
        for(int i=1;i<=3;i++)
            for(int j=1;j<=3;j++)
            {
                if(cell[i][j]==0){cell[i][j]=2;return 0;}
            }
    }
}
// Dispatch one bot move according to the difficulty label stored in
// levelpl[1].  botturn is the bot's move number (1..4) in the current
// game; Easy ignores it.  Unknown labels do nothing.
void gameofbot(int botturn)
{
    if(levelpl[1]=="1 <Easy>") botEasy();
    else if(levelpl[1]=="2 <Normal>") botNormal(botturn);
    else if(levelpl[1]=="3 <Hard>") botHard(botturn);
    else if(levelpl[1]=="4 <Expert>") botExpert(botturn);
    else if(levelpl[1]=="5 <Professional>") botProfessional(botturn);
}
// Single-player (human vs. bot) game loop.  Uses globals: flag1 marks
// whether this is the first game (prompt for nickname) or a rematch,
// nickname/levelpl/rankpl/plt/win hold per-player state, quit is set when
// the player declines a restart.  Recurses into itself for a rematch.
void oneplayer()
{
    int check;
    players=1;
    system("CLS");
    if(flag1==1)
    {
        // First game only: show the banner and ask for the player's name.
        cout<<" W E L L C O M E T o T i c T a c T o e G A M E v 2 . 0 "<<endl;
        cout<<" author : <NAME>"<<endl;cout<<endl;cout<<endl;cout<<endl;
        cout<<" Player 1 write your Nickname : ";cin>>nickname[1];
        nickname[2]="BOT";
    }
    // NOTE(review): pl is assigned but never used in this function.
    int pl=1;
    // Up to 9 turns: odd turns are the human, even turns the bot
    // (turn/2 gives the bot's own move number 1..4).
    for(int turn=1;turn<=9;turn++)
    {
        if(turn%2==0)gameofbot(turn/2);
        if(turn%2==1)gameofplayer1(1);
        system("CLS");
        visualGame1pl(levelpl[1],rankpl[1],nickname[1],plt[1]);
        if(Win(1)){cout<<endl;win[1]++;cout<<endl;cout<<" "<<nickname[1]<<" WIN"<<endl;if(!restart(2))quit=1;;break;}
        if(Win(2)){cout<<endl;win[2]++;cout<<endl;cout<<" "<<nickname[2]<<" WIN"<<endl;if(!restart(2))quit=1;if(quit==0)gameover1pl(2);break ;}
    }
    if(quit==0)
    {
        // Nobody quit: decide between game-over (match decided) and a
        // rematch (recursive call with flag1==2 to skip the name prompt).
        if(win[2]==0)
        {
            check=checkgame1pl();
            if(check==1){gameover1pl(1);}
            else {flag1=2;cleargame();oneplayer();}
        }
        cleargame();
    }
}
// Entry point: initialises both players' default level/rank/score/mark,
// shows the menu and dispatches to the chosen game mode.
//
// Fix: the original called main() recursively both for invalid menu input
// and for the "play again" path.  Calling main() from within the program
// is ill-formed in C++ ([basic.start.main]) and grew the stack on every
// replay; the recursion is replaced by an equivalent loop (each pass
// re-runs the same initialisation the recursive call would have).
int main()
{
    string type;
    bool again=true;
    while(again)
    {
        again=false;
        levelpl[1]="1 <Easy>";
        levelpl[2]="1 <Easy>";
        rankpl[1]="~-Newbie-~";
        rankpl[2]="~-Newbie-~";
        win[1]=0;
        win[2]=0;
        plt[1]='X';
        plt[2]='O';
        cout<<" W E L L C O M E T o T i c T a c T o e G A M E v 2 . 0 "<<endl;
        cout<<" author : <NAME>"<<endl;cout<<endl;cout<<endl;cout<<endl;
        cout<<" Choose type of Game"<<endl;
        cout<<" For two players write : 'two' "<<endl;
        cout<<" For one player write : 'one' "<<endl;
        cout<<" -";
        cin>>type;
        if(type=="two")twoplayers();
        else if(type=="one")oneplayer();
        else {system("CLS");again=true;continue;} // invalid input: redraw menu
        // Offer a fresh match unless the player quit mid-game.
        if(quit==0)if(restart(1))again=true;
    }
    return 0;
}
| 8d98230d0d1d5dabf17c0526960bb1e920e784ac | [
"C++"
] | 1 | C++ | YordanPetkov/TicTacToe-Game | 171df7fa8d5dfaf5cdc99c4efebd21e33cf0ca13 | d82a62b7fdf0656a691fdf8299fae8594ec13e9a |
refs/heads/main | <file_sep>package com.example.timesnewsapp;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
/**
 * Detail screen for a single news article.
 *
 * The launching activity passes the article's fields as String extras on
 * the Intent; {@code onCreate} simply unpacks them into fields.  Nothing
 * in this class binds the values to views yet — presumably done elsewhere
 * (NOTE(review): confirm the layout actually displays these fields).
 */
public class NewsDetailActivity extends AppCompatActivity {
    // Article data handed over from the list screen via Intent extras.
    String title,desc,content,imageURL,url;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_news_detail);
        // Unpack the article passed by the caller; a missing extra yields null.
        title=getIntent().getStringExtra("title");
        desc=getIntent().getStringExtra("desc");
        content=getIntent().getStringExtra("content");
        imageURL=getIntent().getStringExtra("imageURL");
        url=getIntent().getStringExtra("url");
    }
}
include ':app'
| b5c4f7cd938aa3260bd9924874be6b038e1b6819 | [
"Java",
"Gradle"
] | 2 | Java | SanskarGupta10/Hello | f5fb2b93c4a71a4b337d245ee32aad0146b851ed | 028a88a4932f8d1a1fb237eb41a07f81c6c0a897 |
refs/heads/master | <repo_name>Ubaid-Ali/exp-tracker-tscript<file_sep>/public/firebase-messaging-sw.js
// Firebase Cloud Messaging service worker: load the Firebase SDK scripts,
// initialise the app with this project's credentials, then activate the
// messaging service so background push notifications can be delivered.
importScripts('https://www.gstatic.com/firebasejs/8.1.1/firebase-app.js');
importScripts('https://www.gstatic.com/firebasejs/8.1.1/firebase-messaging.js');

// Project credentials (public web config, safe to ship to clients).
const firebaseConfig = {
  apiKey: "<KEY>",
  authDomain: "fir-messaging-e60d7.firebaseapp.com",
  databaseURL: "https://fir-messaging-e60d7.firebaseio.com",
  projectId: "fir-messaging-e60d7",
  storageBucket: "fir-messaging-e60d7.appspot.com",
  messagingSenderId: "420592940129",
  appId: "1:420592940129:web:7b84a8d2953a83abd26620"
};

firebase.initializeApp(firebaseConfig);
firebase.messaging();
| e30fbf3431c2e81ac7d75b6512fe8608ae8b7296 | [
"JavaScript"
] | 1 | JavaScript | Ubaid-Ali/exp-tracker-tscript | 0a2b3a789cbd1c9d37ec3327f4c93ec971efc81f | 4d7f90a95d5123ae8d74c51839e527ed8a8f322f |
refs/heads/master | <file_sep>"""Convert LED strips AVI video to JSON files."""
import json
import glob
import imageio
from .color import Color
if __name__ == "__main__":
    # For each strip, write LED video pixel data to JSON file.
    # Frames are PNGs named videos/render/render_%05d.png; each of the 10
    # strips samples one vertical column (the centre of its tenth of the
    # frame width), one pixel per LED.
    frame_paths = sorted(list(glob.glob("videos/render/*.png")))
    nbr_strips = 10
    # strips[strip][frame] is the list of per-LED Color values.
    # (Annotation fixed: elements are Color objects, not ints.)
    strips: list[list[list[Color]]] = [[] for _ in range(nbr_strips)]
    for frame_nbr in range(len(frame_paths)):
        frame = imageio.imread(f"videos/render/render_{str(frame_nbr).zfill(5)}.png")
        # Print progress
        if frame_nbr % 1000 == 0 and frame_nbr > 0:
            print(f"{frame_nbr}/{len(frame_paths)}")
        for strip_nbr in range(nbr_strips):
            # Get the x column of pixels in the video for the strip
            column_width = frame.shape[1] / nbr_strips
            x = int(column_width * (strip_nbr + 1) - column_width / 2)
            # Convert frame to list of pixels.
            # NOTE(review): red is read from channel 1 and green from
            # channel 0 — presumably an intentional GRB swap for the LED
            # hardware; confirm against the Color class in .color.
            frame_pixels: list[Color] = [
                Color(
                    red=int(frame[y, x, 1]),
                    green=int(frame[y, x, 0]),
                    blue=int(frame[y, x, 2]),
                )
                for y in range(len(frame))
            ]
            # Add frame pixels to the video
            strips[strip_nbr].append(frame_pixels)
    # Write LED video pixel data to JSON file (one JSON array per line,
    # one line per frame).
    # NOTE(review): json.dumps(frame) assumes Color serialises as JSON
    # (e.g. an int subclass) — confirm in .color.
    for strip_nbr, strip in enumerate(strips):
        with open(
            file=f"final_lights/strip_{strip_nbr+1}.txt", mode="w", encoding="utf-8"
        ) as f:
            for frame in strip:
                f.write(json.dumps(frame) + "\n")
<file_sep>import time
from neopixel import *
import numpy as np
import math
from scipy import ndimage
import scipy.misc
import random
#import cv2
def tick(np_strip, bpm, res, bar_length, direction, mirror):
    """Advance the pixel buffer by one frame of tempo-locked scrolling.

    Shifts ``np_strip`` so that one ``bar_length`` of cells travels past in
    three beats at ``bpm`` (time_per_beat below is three beats' duration).
    The fractional part of the per-frame shift is carried between calls in
    the global ``remainder`` so the average speed stays exact.

    direction -1 scrolls toward index 0, anything else toward the end;
    with ``mirror`` the cells falling off one end re-enter at the other,
    otherwise the vacated cells are blacked out.

    Returns the (mutated in place) ``np_strip``.
    NOTE(review): the ``res`` parameter is unused here.
    """
    global remainder, fps
    time_per_beat = 3*60.0/bpm
    updates_per_beat = time_per_beat / (1.0/fps)
    # Cells to shift this frame, including the fraction left over last time.
    skip = bar_length / updates_per_beat + remainder
    remainder = skip - int(skip)
    skip = int(skip)
    if skip == 0:
        # Less than one whole cell per frame: caller must raise `res`.
        print('Increase resolution')
    else:
        if direction == -1:
            temp = np.copy(np_strip[:skip, :])
            np_strip[:-skip, :] = np_strip[skip:, :]
            if mirror:
                np_strip[-skip:, :] = temp
            else:
                np_strip[-skip:, :] = np.zeros((skip, 3))
        else:
            temp = np.copy(np_strip[-skip:, :])
            np_strip[skip:, :] = np_strip[:-skip, :]
            if mirror:
                np_strip[:skip, :] = temp
            else:
                np_strip[:skip, :] = np.zeros((skip, 3))
    return np_strip
def colorWipe():
    """Switch every LED on the global strip off (all pixels to black)."""
    global strip, LED_COUNT, start_time
    pixel_count = strip.numPixels()
    for pixel in range(pixel_count):
        strip.setPixelColor(pixel, Color(0, 0, 0))
    strip.show()
def applyNumpyColors(strip, frame):
    """Push one rendered frame (numPixels x 3 float array) to the strip.

    Channel order passed to Color() is frame[:,1], frame[:,0], frame[:,2]
    — presumably mapping the RGB buffer onto the library's colour-argument
    order for this hardware (NOTE(review): confirm against the strip type).

    Fix: the original re-clipped the ENTIRE frame once per pixel inside
    the loop; clipping once up front is equivalent and O(n) instead of
    O(n^2).
    """
    frame = np.clip(frame, 0, 255)
    for i in range(strip.numPixels()):
        strip.setPixelColor(i, Color(int(round(frame[i,1])), int(round(frame[i,0])), int(round(frame[i,2]))))
    strip.show()
def screen_blend(a, b):
    """'Screen' blend of two arrays of 0-255 colour values.

    Computes 255 * (1 - (1 - a/255) * (1 - b/255)): blending with black
    leaves the other input unchanged, blending with white saturates to 255.
    """
    unit = np.ones(a.shape)
    inverse_product = (unit - a / 255) * (unit - b / 255)
    return 255 * (unit - inverse_product)
def constant_color(duration, fadein, fadeout, color):
    """Render a solid ``color`` over duration=[start, end] seconds.

    Each frame is screen-blended onto the global ``renders`` timeline, so
    overlapping effects combine.  ``fadein``/``fadeout`` are linear ramps
    in seconds at the start/end of the interval.
    """
    global LED_COUNT, renders, remainder, fps
    # Add each frame to renders
    i = int(duration[0]*fps)
    last_percentage = -1
    while i < int(duration[1]*fps):
        # Progress logging (percentage of this effect's interval).
        if not(int(100*i/int(duration[1]*fps)) == last_percentage):
            last_percentage = int(100*(i - int(duration[0]*fps))/(int(duration[1]*fps) - int(duration[0]*fps)))
            print(str(duration[0]) + ' - ' + str(duration[1]) + '\tConstant color:\t' + str(last_percentage) + '%')
        short_strip = np.ones((LED_COUNT, 3))*color
        # Fade in
        if i - int(duration[0]*fps) < fadein*fps:
            j = i - int(duration[0]*fps)
            fade_percentage = j / (fadein*fps)
            short_strip = short_strip*fade_percentage
        # Fade out
        if i - int(duration[0]*fps) > int(duration[1]*fps) - int(duration[0]*fps) - fadeout*fps:
            j = i - int(duration[0]*fps) - (int(duration[1]*fps) - int(duration[0]*fps) - int(fadeout*fps))
            fade_percentage = 1 - j / (fadeout*fps)
            short_strip = short_strip*fade_percentage
        # If overlap with previous render, screen blend. Else add new frame
        renders[i] = screen_blend(renders[i], short_strip)
        i += 1
def noise(duration, fadein, fadeout, intensity):
    """Darken already-rendered frames with random flicker.

    For each frame in duration=[start, end] a random attenuation is drawn
    (exponentially biased toward strong dips), scaled by ``intensity`` and
    the fade ramps, then multiplied into the global ``renders`` timeline.
    Unlike the colour effects this MODIFIES existing content rather than
    blending new content in.
    """
    global LED_COUNT, renders, remainder, fps
    # Apply noise to selected frames
    i = int(duration[0]*fps)
    while i < int(duration[1]*fps):
        x = random.random()
        # Mostly-small dips with occasional deep ones, clipped to [0, 1].
        noise = np.clip(math.exp(-10*x) - 0.2*x + 0.2, 0, 1)*intensity
        # Fade in
        if i - int(duration[0]*fps) < fadein*fps:
            j = i - int(duration[0]*fps)
            fade_percentage = j / (fadein*fps)
            noise = noise*fade_percentage
        # Fade out
        if i - int(duration[0]*fps) > int(duration[1]*fps) - int(duration[0]*fps) - fadeout*fps:
            j = i - int(duration[0]*fps) - (int(duration[1]*fps) - int(duration[0]*fps) - int(fadeout*fps))
            fade_percentage = 1 - j / (fadeout*fps)
            noise = noise*fade_percentage
        renders[i] = renders[i]*(1-noise)
        i += 1
def strobe(duration, fadein, fadeout, bpm, intensity_interval):
    """Strobe already-rendered frames between two brightness levels.

    Toggles between intensity_interval[0] and intensity_interval[1] once
    per beat at ``bpm``, multiplying the result into the global ``renders``
    timeline (modifies existing content, like noise()).
    """
    global LED_COUNT, renders, remainder, fps
    # Add each frame to renders
    i = int(duration[0]*fps)
    last_div = -1
    strobe = intensity_interval[0]
    while i < int(duration[1]*fps):
        time_per_beat = 60.0/bpm
        skip = time_per_beat / (1.0/fps)
        # Flip the strobe level each time a new beat boundary is crossed.
        if int((i-duration[0]*fps) / skip) > last_div:
            last_div = int((i-duration[0]*fps) / skip)
            if strobe == intensity_interval[0]: strobe = intensity_interval[1]
            else: strobe = intensity_interval[0]
        # Fade in
        if i - int(duration[0]*fps) < fadein*fps:
            j = i - int(duration[0]*fps)
            fade_percentage = j / (fadein*fps)
            strobe = strobe*fade_percentage
        # Fade out
        if i - int(duration[0]*fps) > int(duration[1]*fps) - int(duration[0]*fps) - fadeout*fps:
            j = i - int(duration[0]*fps) - (int(duration[1]*fps) - int(duration[0]*fps) - int(fadeout*fps))
            fade_percentage = 1 - j / (fadeout*fps)
            strobe = strobe*fade_percentage
        renders[i] = renders[i]*strobe
        i += 1
def multiple_colors(duration, fadein, fadeout, bpm, filter_length, res, direction, color_list):
    """Scroll a repeating gradient of ``color_list`` along the strip.

    Builds an oversampled buffer (``res`` sub-cells per LED) with colour
    peaks every ``filter_length`` cells, smooths it with a triangular
    convolution filter, then scrolls it via tick() at the given ``bpm``
    and screen-blends each frame onto the global ``renders`` timeline.
    """
    global LED_COUNT, renders, remainder, fps
    # NOTE(review): start_time is assigned but never used.
    start_time = time.time()
    # Construct np_strip, with evenly distributed colors
    length = math.ceil(LED_COUNT/filter_length)*filter_length + 2*filter_length
    # Grow the buffer until a whole number of colour cycles fits.
    while not((length/filter_length + 1) % len(color_list) == 1):
        length += filter_length
    long_strip = np.zeros((int(length*res), 3))
    for i in range(int(len(long_strip)/(filter_length*res)) + 1):
        if i == 0: idx = 0
        else: idx = i*filter_length*res - 1
        long_strip[idx] = color_list[i%len(color_list)]
    # Construct convolution filter (triangular ramp up then down).
    filter = np.linspace(0, 1, num=filter_length*res)
    filter = np.append(filter[:-1], filter[::-1])
    # Convolution (each channel separately, reassembled column-wise).
    long_strip = np.append(np.convolve(long_strip[:, 0], filter, 'same'),
                           [np.convolve(long_strip[:, 1], filter, 'same'),
                            np.convolve(long_strip[:, 2], filter, 'same')]).reshape(len(long_strip), 3, order='F')
    long_strip = long_strip[:-1]
    # Skip to correct time (so the scroll phase matches the timeline offset).
    for i in range(int((duration[0]*fps) % (60.0/bpm*3*fps*len(color_list)))):
        long_strip = tick(long_strip, bpm, res, filter_length*res, direction, mirror=True)
    # Add each frame to renders
    i = int(duration[0]*fps)
    last_percentage = -1
    while i < int(duration[1]*fps):
        if not(int(100*i/int(duration[1]*fps)) == last_percentage):
            last_percentage = int(100*(i - int(duration[0]*fps))/(int(duration[1]*fps) - int(duration[0]*fps)))
            print(str(duration[0]) + ' - ' + str(duration[1]) + '\tMultiple colors:\t' + str(last_percentage) + '%')
        long_strip = tick(long_strip, bpm, res, filter_length*res, direction, mirror=True)
        #short_strip = skimage.transform.resize(long_strip, [LED_COUNT,3])
        #print(short_strip.shape)
        # Downsample the oversampled buffer back to one value per LED.
        short_strip = long_strip[::res]
        short_strip = short_strip[:LED_COUNT]
        # Fade in
        if i - int(duration[0]*fps) < fadein*fps:
            j = i - int(duration[0]*fps)
            fade_percentage = j / (fadein*fps)
            short_strip = short_strip*fade_percentage
        # Fade out
        if i - int(duration[0]*fps) > int(duration[1]*fps) - int(duration[0]*fps) - fadeout*fps:
            j = i - int(duration[0]*fps) - (int(duration[1]*fps) - int(duration[0]*fps) - int(fadeout*fps))
            fade_percentage = 1 - j / (fadeout*fps)
            short_strip = short_strip*fade_percentage
        renders[i] = screen_blend(renders[i], short_strip)
        i += 1
def particles(duration, bpm, tail_length, birth_rate, birth_random, res, direction, color_list):
    """Render moving particles with fading two-colour tails.

    Spawns particles at one end of the strip roughly ``birth_rate`` times
    per second (randomised by +/- ``birth_random``), scrolls them with
    tick() at ``bpm``, and draws each as a ``tail_length``-cell gradient
    between color_list[1] (head side) and color_list[0].  Keeps running
    past duration[1] until every live particle has scrolled off, then
    screen-blends each frame onto the global ``renders`` timeline.
    """
    global LED_COUNT, renders, remainder, fps
    # Initialize first particle (a marker value in channel 0).
    master_particles = np.zeros((LED_COUNT*res + tail_length*res,3))
    if direction == 1:
        master_particles[0,0] = 1
    else:
        master_particles[-1,0] = 1
    # Render each frame
    spawn_new_particle = int(duration[0]*fps)+1
    i = int(duration[0]*fps)
    last_percentage = -1
    while i < int(duration[1]*fps) or not(np.sum(master_particles) == 0):
        if not(int(100*i/int(duration[1]*fps)) == last_percentage):
            last_percentage = int(100*(i - int(duration[0]*fps))/(int(duration[1]*fps) - int(duration[0]*fps)))
            print(str(duration[0]) + ' - ' + str(duration[1]) + '\tParticles:\t' + str(last_percentage) + '%')
        # Spawn another particle?
        if i % spawn_new_particle == 0 and i < int(duration[1]*fps):
            spawn_new_particle += random.randint(fps/birth_rate-(fps/birth_rate)*birth_random, fps/birth_rate+(fps/birth_rate)*birth_random)
            if direction == 1:
                master_particles[0,0] = 1
            else:
                master_particles[-1,0] = 1
        # Construct master particles and tail particles
        master_particles = tick(master_particles, bpm, res, LED_COUNT*res, direction, mirror=False)
        tail_particles = np.zeros((LED_COUNT*res + tail_length*res,3))
        for j in np.argwhere(master_particles > 0):
            j = j[0]
            temp_tail_particles = np.zeros((LED_COUNT*res + tail_length*res,3))
            # Clamp the tail where it would run off the buffer edge.
            actual_tail = 0
            if direction == 1:
                if j - tail_length*res < 0: actual_tail = j
                else: actual_tail = tail_length*res
            else:
                if j + tail_length*res > len(tail_particles): actual_tail = len(tail_particles) - j
                else: actual_tail = tail_length*res
            # Linear alpha ramps give the tail its fade and colour gradient.
            alpha = np.linspace(0, master_particles[j,0], actual_tail).reshape(actual_tail,1)
            alpha_inv = np.linspace(master_particles[j,0], 0, actual_tail).reshape(actual_tail,1)
            if direction == 1:
                temp_tail_particles[j-actual_tail:j, :] = alpha*(alpha*np.ones((actual_tail,1))*color_list[0] + alpha_inv*np.ones((actual_tail,1))*color_list[1])
            else:
                temp_tail_particles[j:j+actual_tail, :] = alpha_inv*(alpha_inv*np.ones((actual_tail,1))*color_list[0] + alpha*np.ones((actual_tail,1))*color_list[1])
            tail_particles = screen_blend(tail_particles, temp_tail_particles)
        # Normalize values above 255
        tail_particles = np.clip(tail_particles,0,255)
        #tail_particles = np.resize(tail_particles, (LED_COUNT,3))
        # Downsample to one value per LED and crop to the visible strip.
        tail_particles = tail_particles[::res]
        if direction == 1:
            tail_particles = tail_particles[:LED_COUNT]
        else:
            tail_particles = tail_particles[tail_length:LED_COUNT+tail_length]
        renders[i] = screen_blend(renders[i], tail_particles)
        i += 1
def ElasticEaseInOut(p):
    """Elastic ease-in-out curve for p in [0, 1].

    Oscillates with exponentially growing amplitude toward the midpoint,
    then mirrors the motion (decaying oscillation) toward 1.
    """
    k = 9 * math.pi * math.pi
    if p < 0.5:
        return 0.5 * math.sin(k * (2 * p)) * math.pow(2, 5 * ((2 * p) - 1))
    return 0.5 * (math.sin(-k * ((2 * p - 1) + 1)) * math.pow(2, -5 * (2 * p - 1)) + 2)
def march(duration, fadein, fadeout, bpm, res, length_interval, opacity_interval):
    """Mask already-rendered frames with a beat-synced 'marching' bar.

    Each beat an elastic-eased front sweeps between length_interval[0] and
    length_interval[1] (as fractions of the strip); pixels in front of it
    are scaled by opacity_interval[1], pixels behind by
    opacity_interval[0].  The mask is multiplied into the global
    ``renders`` timeline (modifies existing content).
    """
    global LED_COUNT, renders, remainder, fps
    # Add each frame to renders
    i = int(duration[0]*fps)
    last_percentage = -1
    on = True
    last_div = -1
    on = False
    while i < int(duration[1]*fps):
        if not(int(100*i/int(duration[1]*fps)) == last_percentage):
            last_percentage = int(100*(i - int(duration[0]*fps))/(int(duration[1]*fps) - int(duration[0]*fps)))
            print(str(duration[0]) + ' - ' + str(duration[1]) + '\tMarch:\t' + str(last_percentage) + '%')
        time_per_beat = 60.0/bpm
        skip = time_per_beat / (1.0/fps)
        # Phase within the current beat, eased elastically.
        t = ((i/fps - duration[0]) % time_per_beat)/time_per_beat
        #v = easeInOut(t, 0.0, 1.0, time_per_beat)
        v = ElasticEaseInOut(t)
        # Reverse the sweep direction every beat.
        if int((i-duration[0]*fps) / skip) > last_div:
            last_div = int((i-duration[0]*fps) / skip)
            on = not(on)
        if on:
            v = 1.0 - v
        v += 0.05
        # Convert the eased phase to a pixel index in the oversampled strip.
        v = int(round(LED_COUNT*res* (v*(length_interval[1] - length_interval[0]) + length_interval[0])))
        long_strip = np.ones((LED_COUNT*res, 3))*opacity_interval[1]
        long_strip[v:] = [opacity_interval[0], opacity_interval[0], opacity_interval[0]]
        short_strip = long_strip[::res]
        short_strip = short_strip[:LED_COUNT]
        # Fade in
        if i - int(duration[0]*fps) < fadein*fps:
            j = i - int(duration[0]*fps)
            fade_percentage = j / (fadein*fps)
            short_strip = short_strip*fade_percentage
        # Fade out
        if i - int(duration[0]*fps) > int(duration[1]*fps) - int(duration[0]*fps) - fadeout*fps:
            j = i - int(duration[0]*fps) - (int(duration[1]*fps) - int(duration[0]*fps) - int(fadeout*fps))
            fade_percentage = 1 - j / (fadeout*fps)
            short_strip = short_strip*fade_percentage
        renders[i] = renders[i]*short_strip
        i += 1
def animate(strip, renders, x, interval):
    """Play back renders[interval[0]*fps : interval[1]*fps] in real time.

    Each frame is divided by ``x`` (global brightness divisor) before
    being pushed to the strip; sleeps between frames to hold the frame
    rate, compensating for accumulated clock drift.
    """
    # NOTE(review): remainder_time is assigned but never used.
    remainder_time = 0
    start_time = time.time()
    i = int(interval[0]*fps)
    # Clamp the end of playback to the available frames.
    if len(renders) < int(interval[1]*fps):
        end = len(renders)
    else:
        end = int(interval[1]*fps)
    while i < end:
        iter_time = time.time()
        applyNumpyColors(strip, renders[i]/x)
        # Drift between the wall clock and the frame schedule.
        # NOTE(review): uses a hard-coded 60.0 instead of the global fps.
        error = time.time() + interval[0] - start_time - i/60.0
        #print('Actual time: ' + str(time.time() + interval[0] - start_time) + '\tError: ' + str(error))
        # Calculate time to wait until next iteration
        wait = 1.0/fps - (time.time() - iter_time) - error
        remainder_time = 0
        if wait > 0:
            time.sleep(wait)
        i += 1
if __name__ == '__main__':
    # NOTE(review): `global` at module level is a no-op; kept as written.
    global LED_COUNT, renders, remainder, fps
    # LED strip configuration:
    LED_COUNT = 144 # Number of LED pixels.
    LED_PIN = 18 # GPIO pin connected to the pixels (18 uses PWM!).
    #LED_PIN = 10 # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
    LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
    LED_DMA = 10 # DMA channel to use for generating signal (try 10)
    LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
    LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
    LED_CHANNEL = 0 # set to '1' for GPIOs 13, 19, 41, 45 or 53
    strip = Adafruit_NeoPixel(LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, LED_CHANNEL)
    strip.begin()
    fps = 60.0
    # Pre-allocate up to 1000 seconds of frames; effects write into this
    # shared timeline and animate() plays it back.
    renders = np.zeros((int(1000*fps), LED_COUNT, 3))
    remainder = 0
    try:
        print('Rendering animations...')
        # x is the global brightness divisor used by animate();
        # bt3/bt4 are the durations of three and four beats at 124 BPM.
        x = 25
        bt3 = 3*60.0/124
        bt4 = 4*60.0/124
        # Effect Demo
        # NOTE(review): the commented demo presets below reference an
        # undefined name `bt` and would NameError if uncommented as-is.
        #constant_color(duration=[0.0,4.0], fadein=0, fadeout=0, color=[0,0,5])
        #constant_color(duration=[4.0,8.0], fadein=0, fadeout=0, color=[0,0,5])
        #multiple_colors(duration=[12.0,16.5], fadein=0, fadeout=1, bpm=120*4, filter_length=20, res=30, direction=1, color_list=[[0,0,5], [5,0,0]])
        #noise(duration=[4.0,8.0], fadein=0, fadeout=0, intensity=0.8)
        #constant_color(duration=[8.0,12.0], fadein=0, fadeout=0, color=[5,5,5])
        #strobe(duration=[8.0,12.0], fadein=0, fadeout=0, bpm=120*16, intensity_interval=[0.0, 1.0])
        #multiple_colors(duration=[12.0,16.5], fadein=0, fadeout=1, bpm=120*4, filter_length=20, res=30, direction=1, color_list=[[0,0,5], [5,0,0]])
        #multiple_colors(duration=[15.5,20.0], fadein=1, fadeout=0, bpm=120*4, filter_length=20, res=30, direction=1, color_list=[[0,0,5], [0,5,0]])
        #multiple_colors(duration=[20.0,24.0], fadein=0, fadeout=0, bpm=120*4, filter_length=20, res=30, direction=1, color_list=[[0,5,5], [0,0,5]])
        #march(duration=[20.0, 24.0], fadein=0, fadeout=0, bpm=120*2, res=10, length_interval=[0.2, 0.5], opacity_interval=[0.2, 1.0])
        #particles(duration=[24.0,32.0], bpm=120, tail_length=20, birth_rate=4, birth_random=0.8, res=10, direction=1, color_list=[[0,0,5], [0,5,5]])
        #particles(duration=[28.0,32.0], bpm=120, tail_length=20, birth_rate=4, birth_random=0.8, res=10, direction=-1, color_list=[[5,0,0], [5,5,0]])
        # Any Given Place Test
        #multiple_colors(duration=[bt*0, bt*8], fadein=0, fadeout=0, bpm=141, filter_length=20, res=50, direction=1, color_list=[[0,0,5], [0,0,0]])
        #multiple_colors(duration=[bt*4, bt*8], fadein=12.2, fadeout=0, bpm=141*16, filter_length=20, res=30, direction=1, color_list=[[10,10,10], [0,0,0]])
        #multiple_colors(duration=[bt*9, bt*13+0.3], fadein=0, fadeout=0.6, bpm=141*4, filter_length=100, res=30, direction=1, color_list=[[10,9,6], [5,3,1]])
        #multiple_colors(duration=[bt*13-0.3, bt*17], fadein=0.6, fadeout=0, bpm=141*4, filter_length=100, res=30, direction=1, color_list=[[10,6,9], [5,1,4]])
        #multiple_colors(duration=[bt*17, bt*25], fadein=0, fadeout=0.0, bpm=141*4, filter_length=100, res=30, direction=1, color_list=[[10,10,10], [4,4,4]])
        #strobe(duration=[bt*17,bt*25], fadein=0, fadeout=0, bpm=141*16, intensity_interval=[0.3, 1.0])
        # Pixeldye: the full show timeline, cued in multiples of bt3/bt4.
        particles(duration=[bt3*0, bt3*24], bpm=124, tail_length=30, birth_rate=2, birth_random=0, res=10, direction=1, color_list=[[200,200,200], [0,110,200]])
        multiple_colors(duration=[bt3*24-0.2, bt3*32+0.5], fadein=0.4, fadeout=1, bpm=124, filter_length=100, res=10, direction=1, color_list=[[200,0,0], [40,0,0]])
        multiple_colors(duration=[bt3*32-0.5, bt3*72+1], fadein=1, fadeout=0, bpm=124/2, filter_length=100, res=10, direction=1, color_list=[[0,80,120], [0,0,30]])
        multiple_colors(duration=[bt3*52, bt3*72+2], fadein=bt3*20, fadeout=2, bpm=124*9, filter_length=50, res=10, direction=1, color_list=[[190,220,255], [0,0,0]])
        constant_color(duration=[bt3*72, bt3*81], fadein=0, fadeout=0, color=[1,5,10])
        noise(duration=[bt3*72, bt3*81], fadein=0, fadeout=0, intensity=0.8)
        constant_color(duration=[bt3*81, bt3*82], fadein=0, fadeout=1.4, color=[255,190,130])
        multiple_colors(duration=[bt3*82, bt3*86+0.1], fadein=0, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,30,30], [120,10,10]])
        multiple_colors(duration=[bt3*86-0.1, bt3*90+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[170,100,255], [50,0,125]])
        multiple_colors(duration=[bt3*90-0.1, bt3*94+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[220,100,255], [100,0,125]])
        multiple_colors(duration=[bt3*94-0.1, bt3*96+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,100,220], [125,0,100]])
        multiple_colors(duration=[bt3*96-0.1, bt3*97+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,100,160], [125,0,60]])
        multiple_colors(duration=[bt3*97-0.1, bt3*98], fadein=0.2, fadeout=0, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,140,170], [125,55,85]])
        constant_color(duration=[bt3*98,bt3*106], fadein=0, fadeout=0, color=[160,220,255])
        strobe(duration=[bt3*98,bt3*106], fadein=0, fadeout=0, bpm=124*12, intensity_interval=[0.0, 1.0])
        multiple_colors(duration=[bt3*106, bt3*108+0.1], fadein=0, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[170,100,255], [50,0,125]])
        multiple_colors(duration=[bt3*108-0.1, bt3*109+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,100,220], [125,0,100]])
        multiple_colors(duration=[bt3*109-0.1, bt3*110+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,140,160], [125,55,85]])
        multiple_colors(duration=[bt3*110-0.1, bt3*114+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[170,100,255], [50,0,125]])
        multiple_colors(duration=[bt3*114-0.1, bt3*117+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,100,220], [125,0,100]])
        multiple_colors(duration=[bt3*117-0.1, bt3*118+0.1], fadein=0.2, fadeout=0.2, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[255,140,160], [125,55,85]])
        multiple_colors(duration=[bt3*118-0.1, bt3*120], fadein=0.2, fadeout=0, bpm=124*6, filter_length=200, res=10, direction=1, color_list=[[170,100,255], [50,0,125]])
        strobe(duration=[bt3*106,bt3*120], fadein=0, fadeout=0, bpm=124*12, intensity_interval=[0.2, 1.0])
        constant_color(duration=[bt3*120, bt3*120+4], fadein=0, fadeout=4, color=[200,100,255])
        multiple_colors(duration=[bt3*124, bt3*186], fadein=bt3*4, fadeout=0, bpm=124/2, filter_length=100, res=10, direction=1, color_list=[[0,80,120], [0,0,30]])
        multiple_colors(duration=[bt3*156, bt3*186], fadein=bt3*30, fadeout=0, bpm=124*4, filter_length=50, res=10, direction=1, color_list=[[40,100,120], [5,15,30]])
        multiple_colors(duration=[bt3*186, bt3*194], fadein=0, fadeout=0, bpm=124, filter_length=100, res=10, direction=1, color_list=[[200,0,0], [40,0,0]])
        multiple_colors(duration=[bt3*194, bt3*195], fadein=0, fadeout=0, bpm=124, filter_length=100, res=10, direction=1, color_list=[[0,200,0], [0,40,0]])
        multiple_colors(duration=[bt3*195, bt3*195+bt4], fadein=0, fadeout=0, bpm=124, filter_length=100, res=10, direction=1, color_list=[[200,200,0], [40,40,0]])
        multiple_colors(duration=[bt3*195+bt4, bt4+bt3*201], fadein=0, fadeout=0, bpm=124, filter_length=100, res=10, direction=1, color_list=[[200,0,0], [40,0,0]])
        multiple_colors(duration=[bt3*201+bt4, bt4+bt3*202], fadein=0, fadeout=0, bpm=124, filter_length=100, res=10, direction=1, color_list=[[200,200,0], [40,40,0]])
        multiple_colors(duration=[bt4+bt3*202, bt4+bt3*230+1], fadein=0, fadeout=0, bpm=124/2, filter_length=100, res=10, direction=1, color_list=[[0,40,120], [0,20,50]])
        # raw_input => this script targets Python 2.
        raw_input("Press Enter to start...")
        print('Starting animations...')
        animate(strip, renders, x, interval=[bt3*0,bt3*300])
    except KeyboardInterrupt:
        # Ctrl-C: blank the strip before exiting.
        colorWipe()
<file_sep>import skvideo.io
import time
from neopixel import *
import numpy as np
import math
from scipy import ndimage
import scipy.misc
import random
from multiprocessing import Process
import socket
def applyNumpyColors(strip, frame):
    """Push one frame (numPixels x 3 array) to the strip.

    Channels are passed as frame[:,1], frame[:,0], frame[:,2] — presumably
    remapping the buffer's channel order for this hardware (NOTE(review):
    confirm against the strip's colour order).
    """
    for i in range(strip.numPixels()):
        #frame = np.clip(frame,0,230)
        strip.setPixelColor(i, Color(int(round(frame[i,1])), int(round(frame[i,0])), int(round(frame[i,2]))))
    strip.show()
def colorWipe(strip):
    """Switch every LED on the given strip off (all pixels to black)."""
    pixel_count = strip.numPixels()
    for pixel in range(pixel_count):
        strip.setPixelColor(pixel, Color(0, 0, 0))
    strip.show()
def animate(strip, renders, start_time, interval):
    """Play back all frames of ``renders`` at 60 fps, dropping late frames.

    ``start_time`` is a shared wall-clock origin so multiple processes /
    machines stay in sync: any frame whose scheduled time has already
    passed is skipped instead of displayed late.
    NOTE(review): the ``interval`` parameter is accepted but never used.
    """
    for i in range(len(renders)):
        current_time = time.time() - start_time
        goal_time = i/60.0
        # Behind schedule: drop this frame to catch up.
        if goal_time < current_time:
            continue
        start_iter_time = time.time()
        applyNumpyColors(strip, renders[i])
        wait = 1/60.0 - (time.time() - start_iter_time)
        if wait > 0:
            time.sleep(wait)
if __name__ == '__main__':
    # LED strip configuration:
    LED_COUNT = 144 # Number of LED pixels.
    LED_PIN = 12 # GPIO pin connected to the pixels (18 uses PWM!).
    #LED_PIN = 10 # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
    LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
    LED_DMA = 10 # DMA channel to use for generating signal (try 10)
    LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
    LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
    LED_CHANNEL = 0 # set to '1' for GPIOs 13, 19, 41, 45 or 53
    # Three strips on two PWM channels (GPIO 12 on channel 0; 13/19 on 1).
    strip1 = Adafruit_NeoPixel(144, 12, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 0)
    strip2 = Adafruit_NeoPixel(288, 13, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 1)
    strip3 = Adafruit_NeoPixel(288, 19, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 1)
    strip1.begin()
    strip2.begin()
    strip3.begin()
    fps = 60.0
    # Read video to numpy array: each video frame's first pixel column
    # becomes one LED frame (up to 22000 frames of 300 pixels).
    renders = np.zeros((22000,300,3))
    videogen = skvideo.io.vreader('cloudless_lights_3.avi')
    i = 0
    for frame in videogen:
        print(i)
        renders[i] = frame[:,0,:]
        i += 1
    #renders = cv2.resize(img, dsize(10000,288,3), interpolation=cv2.INTER_NEAREST)
    try:
        # UDP broadcast socket used to tell peer machines the shared start
        # time so their playback lines up with ours.
        server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        server.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        server.bind(("", 44444))
        # raw_input => this script targets Python 2.
        raw_input('Press Enter to start')
        print('Starting animations...')
        start_time = time.time()
        server.sendto(b"start_animations_now" + str(start_time), ('<broadcast>', 37020))
        # One process per strip; strip3 is currently disabled.
        p1 = Process(target=animate, args=(strip1, renders, start_time, [0,8000]))
        p2 = Process(target=animate, args=(strip2, renders, start_time, [0,8000]))
        #p3 = Process(target=animate, args=(strip3, renders, start_time, [0,8000]))
        p1.start()
        p2.start()
        #p3.start()
        p1.join()
        p2.join()
        #p3.join()
    # NOTE(review): bare except swallows ALL errors (including Ctrl-C and
    # programming bugs) and just blanks the strips.
    except:
        colorWipe(strip1)
        colorWipe(strip2)
        #colorWipe(strip3)
name = "ana-lights"
version = "0.1.0"
description = ""
authors = ["Calle <<EMAIL>>"]
[tool.poetry.dependencies]
python = "^3.9"
ntplib = "^0.4.0"
imageio = "^2.21.0"
[tool.poetry.dev-dependencies]
black = "^22.1.0"
isort = "^5.10.1"
pylint = "^2.12.2"
pylama = "^8.3.7"
bandit = "^1.7.3"
mypy = "^0.931"
flake8 = "^4.0.1"
flake8-annotations = "^2.7.0"
mccabe = "0.6.1"
types-python-dateutil = "^2.8.9"
mss = "^6.1.0"
scikit-learn = "^1.0.2"
python-nmap = "^0.7.1"
python-dateutil = "^2.8.2"
scikit-video = "^1.1.11"
PyAutoGUI = "^0.9.53"
pynput = "^1.7.6"
opencv-python = "^4.5.5"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.black]
line-length = 90
[tool.coverage.report]
skip_empty = true
show_missing = true<file_sep>[flake8]
max-line-length = 90
exclude =
__pycache__,
.venv,
.git,
.github,
build,
dist
*pb2.py
ignore = W503, W504, ANN101, ANN102, ANN002, ANN003<file_sep>import time
from neopixel import *
import numpy as np
import math
import random
import socket
import json
import numpy as np
import time
import zlib
def applyNumpyColors(strip, frame):
    # Push one rendered frame onto the LED strip.
    # frame is indexed as frame[i][0][channel], so it is assumed to have
    # shape (num_pixels, 1, 3) — TODO confirm against the sender.
    # Channels are reordered (1, 2, 0) before being packed with Color();
    # presumably this matches the strip's hardware color order — verify.
    for i in range(strip.numPixels()):
        strip.setPixelColor(i, Color(int(frame[i][0][1]), int(frame[i][0][2]), int(frame[i][0][0])))
    strip.show()
def colorWipe(strip):
    """Turn every pixel on `strip` off (black) and latch the change."""
    off = Color(0, 0, 0)
    pixel_count = strip.numPixels()
    for index in range(pixel_count):
        strip.setPixelColor(index, off)
    strip.show()
def intToBytes(n):
    """Pack the low 32 bits of `n` into a 4-byte big-endian bytearray."""
    return bytearray(
        ((n >> 24) & 0xFF, (n >> 16) & 0xFF, (n >> 8) & 0xFF, n & 0xFF)
    )
def bytesToInt(b):
    """Decode a 4-byte big-endian sequence into an unsigned integer."""
    high, mid_hi, mid_lo, low = b[0], b[1], b[2], b[3]
    return (high << 24) | (mid_hi << 16) | (mid_lo << 8) | low
def recv_all(conn, size):
    """Receive exactly `size` bytes from `conn`.

    socket.recv may return fewer bytes than requested, so we keep
    reading until the full payload has arrived.

    Args:
        conn: A connected socket-like object exposing recv(bufsize).
        size: Exact number of bytes to read.

    Returns:
        The received bytes, exactly `size` long.

    Raises:
        ConnectionError: If the peer closes the connection before `size`
            bytes arrived (recv returned b'').  The original loop spun
            forever in that case, busy-calling recv on a dead socket.
    """
    data = b''
    while len(data) < size:
        chunk = conn.recv(size - len(data))
        if not chunk:
            raise ConnectionError(
                'connection closed before %d bytes were received' % size
            )
        data += chunk
    return data
if __name__ == '__main__':
# LED strip configuration:
LED_COUNT = 144 # Number of LED pixels.
LED_PIN = 17 # GPIO pin connected to the pixels (18 uses PWM!).
#LED_PIN = 10 # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
LED_DMA = 10 # DMA channel to use for generating signal (try 10)
LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
LED_CHANNEL = 0 # set to '1' for GPIOs 13, 19, 41, 45 or 53
strip1 = Adafruit_NeoPixel(144, 12, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 0)
strip2 = Adafruit_NeoPixel(288, 13, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 1)
strip3 = Adafruit_NeoPixel(288, 19, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, 1)
strip1.begin()
strip2.begin()
strip3.begin()
print('ready')
server = socket.socket()
server.bind(('192.168.1.100', 44446))
server.listen(1)
conn, client_address = server.accept()
while True:
try:
data = conn.recv(4)
data_size = bytesToInt(data)
data = recv_all(conn, data_size)
data = zlib.decompress(data)
frame = json.loads(data.decode())
applyNumpyColors(strip1, frame)
applyNumpyColors(strip2, frame)
applyNumpyColors(strip3, frame)
conn.sendall(intToBytes(1))
except:
colorWipe(strip1)
colorWipe(strip2)
colorWipe(strip3)
exit()
<file_sep>"""Global variables."""
from ..enums import Command
# pylint: disable=global-statement
command: Command
x: int
y: int
stream_window: dict[str, int]
def initialize() -> None:
"""Hej."""
global command, x, y, stream_window
command = Command.STOP
x = 0
y = 0
stream_window = {}
<file_sep>"""Hej."""
import socket
from dataclasses import dataclass
import json
import nmap
from ..enums import Port
@dataclass
class RaspberryPI:
"""Hej."""
ip: str
client: socket.socket
def connect_pies(port: Port, found_pies: list[str]) -> list[RaspberryPI]:
    """Connect to all raspberry pies.

    Opens a TCP connection to each IP in ``found_pies`` on ``port``.  If
    any single connection fails, every socket opened so far is closed and
    the loop stops early; callers detect failure by comparing the length
    of the returned list with ``len(found_pies)``.
    """
    pies: list[RaspberryPI] = []
    for found_pie in found_pies:
        try:
            pi = socket.socket()
            pi.settimeout(5)  # 5 second timeout (old comment wrongly said 10)
            pi.connect((found_pie, port.value))
            pies.append(
                RaspberryPI(
                    ip=found_pie,
                    client=pi,
                ),
            )
        except Exception:  # pylint: disable=broad-except
            # Clean up every socket opened so far; the short list returned
            # below signals the failure to the caller.
            for pie in pies:
                pie.client.close()
            break
    return pies
def read_saved_pies() -> list[str]:
"""Hej."""
with open("mapping/pi_ips.json", mode="r", encoding="utf-8") as f:
pi_ips = json.load(f)
return pi_ips
def write_new_pies() -> list[str]:
"""Hej."""
nbr_pies = int(input("Number of RPis: "))
pi_ips = [
"192.168.1.20" + input(f"{i+1} RPi IP: 192.168.1.20") for i in range(nbr_pies)
]
# Write raspberry pi IPs to file
with open("mapping/pi_ips.json", mode="w", encoding="utf-8") as f:
f.write(json.dumps(pi_ips))
return pi_ips
def scan_pies_on_network() -> list[dict[str, str]]:
"""Scan all raspberry pies on the network."""
nm = nmap.PortScanner()
while True:
nm.scan(hosts="192.168.1.0/24", arguments="-sP")
host_list = nm.all_hosts()
found_pies: list[dict[str, str]] = []
for host in host_list:
print(nm[host])
if "Raspberry" in json.dumps(nm[host]):
found_pies.append(
{
"ip": host,
"mac": nm[host]["addresses"]["mac"],
}
)
if len(found_pies) == 0:
print("---------------")
print("No raspberry pies found on network. Scanning again...")
else:
print("---------------")
print("Found raspberry pies:", found_pies)
print("1 - Finish scan")
print("2 - Scan again")
action = input("Select an action to perform: ")
if action == "1":
# Write raspberry pi IPs to file
with open("mapping/pi_ips.json", mode="w", encoding="utf-8") as f:
f.write(json.dumps(found_pies))
return found_pies
<file_sep>"""Thread listening for commands from the laptop."""
import time
import socket
import threading
import json
from typing import List
import numpy as np
from . import global_vars
from ..led_strip import LEDStrip
from ...color import Color
from ...enums import Command, Port
def get_command(lock: threading.Lock, laptop: socket.socket) -> Command:
    """Read and parse the next command string sent by the laptop.

    Blocks on ``laptop.recv``.  If the received string is not a valid
    ``Command`` value, the shared command is reset to STOP (under the
    lock) and the ``ValueError`` is re-raised to the caller.
    """
    try:
        command_recv = Command(laptop.recv(1024).decode("utf-8"))
        print("Received from laptop:", command_recv)
    except ValueError as e:
        # "HEJ2" looks like a leftover debug marker for locating this
        # handler in the logs — TODO consider a clearer message.
        print(e, "HEJ2")
        with lock:
            global_vars.command = Command.STOP
        raise e
    return command_recv
def start(
lock: threading.Lock,
barrier: threading.Barrier,
command_recv: Command,
laptop: socket.socket,
) -> None:
"""Hej."""
if command_recv == Command.START:
song_start_temp = float(laptop.recv(1024).decode("utf-8"))
laptop.send(Command.READY.value.encode("utf-8"))
start_time_temp = float(laptop.recv(1024).decode("utf-8"))
with lock:
global_vars.song_start = song_start_temp
global_vars.start_time = start_time_temp
if global_vars.command in (
Command.STOP,
Command.PAUSE,
Command.READY,
):
global_vars.command = Command.START
barrier.wait()
else:
global_vars.command = Command.START
def stop_pause_resume(
    lock: threading.Lock,
    barrier: threading.Barrier,
    command_recv: Command,
) -> None:
    """Apply a STOP/PAUSE/RESUME command received from the laptop.

    STOP and PAUSE are stored as-is in the shared state for the lights
    thread to pick up.  RESUME is translated back into START and the
    barrier releases the waiting lights thread.
    """
    if command_recv is Command.RESUME:
        with lock:
            global_vars.command = Command.START
        barrier.wait()
    elif command_recv in (Command.STOP, Command.PAUSE):
        with lock:
            global_vars.command = command_recv
def mapping(
lock: threading.Lock,
barrier: threading.Barrier,
command_recv: Command,
laptop: socket.socket,
strip: LEDStrip,
) -> None:
"""Hej."""
if command_recv == Command.MAP:
with lock:
global_vars.command = Command.STOP
select = get_command(lock, laptop)
if select == Command.MAP_SELECT:
strip.render_color(red=10, green=10, blue=10)
with lock:
global_vars.command = Command.MAP_SELECT
barrier.wait()
# Get position from laptop
position = laptop.recv(1024).decode("utf-8")
# Write position to file
with open("mapping/pi_position.json", mode="w", encoding="utf-8") as f:
f.write(json.dumps({"position": position}))
# Load video at position
load_video(lock, position, strip)
with lock:
global_vars.command = Command.READY
else:
raise ValueError(f"Did not receive {Command.MAP_SELECT} from laptop.")
def load_video_from_saved_position(
lock: threading.Lock,
command_recv: Command,
strip: LEDStrip,
) -> None:
"""Hej."""
if command_recv == Command.LOAD and len(global_vars.video) == 0:
with lock:
global_vars.command = Command.STOP
# Read position from file
with open("mapping/pi_position.json", mode="r", encoding="utf-8") as f:
position = json.load(f)
# Load video at position
load_video(lock, position["position"], strip)
with lock:
global_vars.command = Command.READY
def load_video(lock: threading.Lock, position: str, strip: LEDStrip) -> None:
"""Hej."""
# Set strip status to yellow
strip.status(red=10, green=10, blue=0)
# Remove old video from RAM
with lock:
del global_vars.video
# Read number of lines (frames) in video file
nbr_lines = 0
with open(file=f"final_lights/strip_{position}.txt", mode="r", encoding="utf-8") as f:
for i, _ in enumerate(f):
# Skip every other frame to only get 30 fps
if i % 2 == 0:
continue
nbr_lines += 1
print("Will read", nbr_lines, "lines from video file")
# Initialize video list
video: List[List[int]] = [[]] * nbr_lines
# Load video from file
count = 0
with open(file=f"final_lights/strip_{position}.txt", mode="r", encoding="utf-8") as f:
for i, line in enumerate(f): # type: ignore
# Skip every other frame to only get 30 fps
if i % 2 == 0:
continue
# Render load progress on strip
if count % int(nbr_lines / 100) == 0:
percent = count / nbr_lines
progress_pixels = np.zeros(strip.led_count)
progress_pixels[: int(percent * int(strip.led_count / 2))] = 1
progress_pixels[
int(strip.led_count / 2) : int(strip.led_count / 2) # noqa
+ int(percent * int(strip.led_count / 2))
] = 1
strip.render(
pixels=[
Color(red=0, green=0, blue=int(10 * val))
for val in progress_pixels[::-1]
]
)
video[count] = json.loads(line) # type: ignore
count += 1
# Update global video variable
with lock:
global_vars.video = video
# Set strip status to green
strip.status(red=10, green=0, blue=0)
def stream(
lock: threading.Lock,
barrier: threading.Barrier,
command_recv: Command,
) -> None:
"""Hej."""
if command_recv == Command.STREAM:
with lock:
if global_vars.command in (
Command.STOP,
Command.PAUSE,
Command.READY,
):
global_vars.command = Command.STREAM
barrier.wait()
else:
global_vars.command = Command.STREAM
def command_thread(
lock: threading.Lock,
barrier: threading.Barrier,
strip: LEDStrip,
) -> None:
"""Thread listening for commands from the laptop."""
# Yellow status
strip.status(red=10, green=10, blue=0)
time.sleep(1)
# Create server and wait for laptop to connect
server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("0.0.0.0", Port.COMMAND.value)) # nosec
server.listen(1)
# Green status
strip.status(red=10, green=0, blue=0)
print("Ready")
try:
laptop, _ = server.accept()
# Update globals
with lock:
global_vars.laptop_ip = laptop.getpeername()[0]
global_vars.command = Command.STOP
while True:
command_recv = get_command(lock, laptop)
start(lock, barrier, command_recv, laptop)
stop_pause_resume(lock, barrier, command_recv)
mapping(lock, barrier, command_recv, laptop, strip)
load_video_from_saved_position(lock, command_recv, strip)
stream(lock, barrier, command_recv)
except Exception as e: # pylint: disable=broad-except
print(type(e), e, "HEJ0")
server.close()
laptop.close()
<file_sep>"""Laptop client."""
import threading
from ..enums import Command, Port
from .stream import stream_thread, set_stream_window
from .raspberry_pies import read_saved_pies, write_new_pies, connect_pies
from .commands import start, send_command, map_positions
from . import global_vars
if __name__ == "__main__": # noqa
global_vars.initialize()
lock = threading.Lock()
while True:
found_pies = read_saved_pies()
print("---------------")
print("Saved RPi IPs:", found_pies)
print("1 - Connect to saved RPi IPs")
print("2 - Write new RPi IPs")
action = input("Select an action to perform: ")
if action == "2":
found_pies = write_new_pies()
pies_command = connect_pies(Port.COMMAND, found_pies)
pies_stream = connect_pies(Port.STREAM, found_pies)
if len(pies_command) != len(found_pies) or len(pies_stream) != len(found_pies):
print("Could not connect to all RPis. Please re-scan for new RPi IPs.")
continue
break
print("---------------")
print("Connected to RPi IPs:", found_pies)
if action == "1":
send_command(lock, pies_command, Command.LOAD)
if action == "2":
map_positions(lock, pies_command)
while True:
print("---------------")
print("1 - Start")
print("2 - Stop")
print("3 - Pause")
print("4 - Resume")
print("5 - Mapping")
print("6 - Stream")
print("7 - Stream window")
action = input("Select an action to perform: ")
if action == "1":
start(lock, pies_command)
if action == "2":
send_command(lock, pies_command, Command.STOP)
if action == "3":
send_command(lock, pies_command, Command.PAUSE)
if action == "4":
send_command(lock, pies_command, Command.RESUME)
if action == "5":
map_positions(lock, pies_command)
if action == "6":
send_command(lock, pies_command, Command.STREAM)
threading.Thread(target=stream_thread, args=(lock, pies_stream)).start()
if action == "7":
set_stream_window(lock)
<file_sep>[MESSAGES CONTROL]
disable=duplicate-code, invalid-name, too-few-public-methods, too-many-arguments
ignored-classes=scoped_session
[FORMAT]
max-line-length = 90
[DESIGN]
max-args=7
[MASTER]
extension-pkg-whitelist=pydantic, cv2
ignore=
__pycache__,
.venv,
.git,
.github,
build,
dist
ignore-patterns=^.*pb2.py<file_sep>imageio==2.21.0; python_version >= "3.7"
ntplib==0.4.0
numpy==1.22.2; python_version >= "3.8"
pillow==9.0.1; python_version >= "3.7"
<file_sep>"""Stream pixels to all raspberry pies."""
import json
import threading
import numpy as np
import mss
import cv2
from pynput import mouse
from . import global_vars
from .commands import RaspberryPI
from ..color import Color
from ..enums import Command, LEDSettings
def stream_thread(lock: threading.Lock, pies: list[RaspberryPI]) -> None:
"""Stream pixels to all raspberry pies."""
# Read raspberry pi IP positions from JSON file
with open("mapping/ip_positions.json", mode="r", encoding="utf-8") as f:
ip_positions = json.load(f)
# Initialize screen capture
sct = mss.mss()
# Read stream window size from JSON file
with open("mapping/stream_window.json", mode="r", encoding="utf-8") as f:
with lock:
global_vars.stream_window = json.load(f)
while True:
with lock:
if global_vars.command != Command.STREAM:
break
# Screen capture, and resize to height is same as number of leds
with lock:
img = np.asarray(sct.grab(global_vars.stream_window))[::-1, :, :3] * 0.1
img = cv2.resize(
img, dsize=(img.shape[1], LEDSettings.COUNT), interpolation=cv2.INTER_NEAREST
)
# Send pixels to all raspberry pies
for pi in pies:
pixels = []
for i in range(len(img)):
# Get the x column of pixels in the image for the strip
with lock:
x = int(
global_vars.stream_window["width"]
/ len(pies)
* (int(ip_positions[pi.ip]) - 1)
)
# Append pixel to pixels
pixels.append(
Color(
red=int(img[i, x, 1]),
green=int(img[i, x, 2]),
blue=int(img[i, x, 0]),
)
)
# Send pixels to raspberry pi
pi.client.send(json.dumps(pixels, ensure_ascii=False).encode("utf-8"))
# Wait for ok to continue
for pi in pies:
command = Command(pi.client.recv(1024).decode("utf-8"))
if command != Command.NEXT:
raise ValueError(
f"Raspberry PI sent '{command}' instead of '{Command.NEXT.value}'"
)
def on_click(x: float, y: float, button: int, pressed: bool) -> bool:
"""Hej."""
if button == mouse.Button.left:
if pressed:
print(f"Mouse pressed {int(x)}, {int(y)}")
global_vars.x = int(x)
global_vars.y = int(y)
return False # Return False to stop mouse listener.
return True
def set_stream_window(lock: threading.Lock) -> None:
"""Hej."""
print("---------------")
window = {}
# Mouse click 1
with mouse.Listener(on_click=on_click) as listener:
listener.join()
x1 = global_vars.x
y1 = global_vars.y
# Mouse click 2
with mouse.Listener(on_click=on_click) as listener:
listener.join()
x2 = global_vars.x
y2 = global_vars.y
# Add window to dict
window["top"] = min(y2, y1)
window["left"] = min(x2, x1)
window["width"] = abs(x2 - x1)
window["height"] = abs(y2 - y1)
print("Updated stream window size to:", window)
# Write window dict to JSON file
with open("mapping/stream_window.json", mode="w", encoding="utf-8") as f:
f.write(json.dumps(window))
with lock:
global_vars.stream_window = window
<file_sep>"""Global variables."""
from typing import List
from ...enums import Command, LEDSettings
# pylint: disable=global-statement
# pylint: disable=line-too-long
command: Command
song_start: float
start_time: float
laptop_ip: str
offset: float
pixels_stream: list[int]
video: List[List[int]]
fps: int
def initialize() -> None:
"""Hej."""
global command, song_start, start_time, laptop_ip, offset, pixels_stream, video, fps # noqa
command = Command.STOP
song_start = 0.0
start_time = 0.0
laptop_ip = ""
offset = 0.0
pixels_stream = [0] * LEDSettings.COUNT
video = []
fps = 0
<file_sep>"""Common enums."""
from enum import Enum
SONGS = [
["Cloudless skies", "00:00:00.00"],
["Pixeldye", "00:04:15.827"],
["Lost yourself", "00:12:16.818"],
["Close to you", "00:17:18.082"],
["Lumiére", "00:21:47.593"],
["Regn", "00:26:23.286"],
["Starlight", "00:30:34.589"],
["You're somewhere", "00:35:30.639"],
]
class Command(Enum):
"""Available commands."""
STREAM = "stream"
RESUME = "resume"
PAUSE = "pause"
STOP = "stop"
START = "start"
MAP = "map"
MAP_SELECT = "map_select"
LOAD = "load"
READY = "ready"
NEXT = "next"
class Port(Enum):
"""Port numbers."""
COMMAND = 9100
STREAM = 9200
class LEDSettings:
"""Hej."""
COUNT = 288 # Number of LED pixels.
PIN = 13 # GPIO pin of the leds (18 uses PWM, 10 uses SPI /dev/spidev0.0)
FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
DMA = 10 # DMA channel to use for generating signal (try 10)
BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
INVERT = False # True to invert signal (when using NPN transistor level shift)
CHANNEL = 1 # set to '1' for GPIOs 13, 19, 41, 45 or 53
<file_sep>.DEFAULT_GOAL: all
# Export raspberry pi requirements
.PHONY: rpi-requirements
rpi-requirements:
poetry export -f requirements.txt --output requirements.txt --without-hashes
# Install raspberry pi python requirements
.PHONY: rpi-install
rpi-install:
sudo pip install -r requirements.txt
# Run ntp server
.PHONY: ntp
ntp:
docker run --name=ntp --restart=always --detach --publish=123:123/udp cturra/ntp
# Stop and remove ntp server
.PHONY: ntp-stop
ntp-stop:
docker stop ntp
docker rm ntp
# Run laptop code
.PHONY: laptop
laptop:
sudo poetry run python -m ana_lights.client.client
# Run raspberry pi code
.PHONY: rpi
rpi:
sudo /usr/bin/python -m ana_lights.server.server
# Convert video to text files
.PHONY: convert-video
convert-video:
poetry run python -m ana_lights.convert_video
# Send final_lights folder to a raspberry pi ip
.PHONY: send-lights
send-lights:
scp -r final_lights/ pi@$(ip):~/ana-lights/
# Send code folder to a raspberry pi ip
.PHONY: send-code
send-code:
sshpass -p "raspberry" scp -r ana_lights/client/ pi@$(ip):~/ana-lights/ana_lights/
sshpass -p "raspberry" scp -r ana_lights/server/ pi@$(ip):~/ana-lights/ana_lights/
sshpass -p "raspberry" scp -r ana_lights/convert_video.py pi@$(ip):~/ana-lights/ana_lights/convert_video.py
sshpass -p "raspberry" scp -r ana_lights/color.py pi@$(ip):~/ana-lights/ana_lights/color.py
sshpass -p "raspberry" scp -r ana_lights/enums.py pi@$(ip):~/ana-lights/ana_lights/enums.py<file_sep>"""Raspberry pi server."""
import threading
from .led_strip import LEDStrip
from .threads.time import time_thread
from .threads.lights import lights_thread
from .threads.command import command_thread
from .threads.stream import stream_thread
from .threads import global_vars
from ..enums import LEDSettings
if __name__ == "__main__":
global_vars.initialize()
lock = threading.Lock()
barrier = threading.Barrier(2)
global_vars.offset = 0
strip = LEDStrip(
led_count=LEDSettings.COUNT,
pin=LEDSettings.PIN,
freq_hz=LEDSettings.FREQ_HZ,
dma=LEDSettings.DMA,
invert=LEDSettings.INVERT,
brightness=LEDSettings.BRIGHTNESS,
channel=LEDSettings.CHANNEL,
)
strip.black()
strip.status(red=0, green=0, blue=10)
threading.Thread(target=time_thread, args=(lock,)).start()
threading.Thread(target=lights_thread, args=(lock, barrier, strip)).start()
while True:
t1 = threading.Thread(target=stream_thread, args=(lock,))
t2 = threading.Thread(target=command_thread, args=(lock, barrier, strip))
t1.start()
t2.start()
t1.join()
t2.join()
print("Restarting stream and command thread")
<file_sep>"""Thread displaying pixels on the LED strip."""
import time
import threading
from ..led_strip import LEDStrip
from ...enums import Command
from . import global_vars
# pylint: disable=broad-except
FPS = 30
def lights_thread( # noqa
lock: threading.Lock,
barrier: threading.Barrier,
strip: LEDStrip,
) -> None:
"""Thread displaying pixels on the LED strip."""
barrier.wait()
fps = 0
while True:
# t = time.time()
with lock:
get_command = global_vars.command
if get_command == Command.START:
try:
with lock:
true_index = int(
abs(
(
get_laptop_time()
- global_vars.start_time
+ global_vars.song_start
)
* FPS
)
)
strip.render(global_vars.video[true_index])
except Exception as e:
with lock:
print(e)
global_vars.command = Command.STOP
elif get_command == Command.STOP:
print(fps, "fps")
strip.black()
barrier.wait()
elif get_command == Command.READY:
strip.black()
strip.status(red=10, green=0, blue=0)
barrier.wait()
elif get_command == Command.PAUSE:
barrier.wait()
# if get_command == Command.MAP_SELECT:
# strip.render_color(red=10, green=10, blue=10)
if get_command == Command.STREAM:
with lock:
if global_vars.pixels_stream is None:
continue
strip.render(global_vars.pixels_stream)
# global_vars.fps = int(1 / (time.time() - t))
def get_laptop_time() -> float:
"""Get the estimated laptop time."""
return time.time() + global_vars.offset
<file_sep>"""LED Strip wrapper class."""
from ..color import Color
from ..rpi_ws281x.python.neopixel import Adafruit_NeoPixel
class LEDStrip:
"""LED Strip wrapper class."""
strip: Adafruit_NeoPixel
led_count: int
def __init__(
self,
led_count: int,
pin: int,
freq_hz: int,
dma: int,
brightness: int,
invert: bool,
channel: int,
) -> None: # noqa
"""Initialize the LEDStrip."""
self.strip = Adafruit_NeoPixel(
num=led_count,
pin=pin,
freq_hz=freq_hz,
dma=dma,
invert=invert,
brightness=brightness,
channel=channel,
)
self.strip.begin()
self.led_count = led_count
def render(self, pixels: list[int]) -> None:
"""Render <pixels> on to the LED <strip>.
Args:
pixels: List of 24-bit int pixels to render on the strip.
"""
for i in range(self.strip.numPixels()):
self.strip.setPixelColor(
n=i, color=int(pixels[(len(pixels) - i) % len(pixels)])
)
self.strip.show()
def render_color(self, red: int, green: int, blue: int) -> None:
"""Render <pixels> on to the LED <strip>.
Args:
red: Red channel color value to display.
green: Green channel color value to display.
blue: Blue channel color value to display.
"""
for i in range(self.strip.numPixels()):
self.strip.setPixelColor(n=i, color=Color(red, green, blue))
self.strip.show()
def black(self) -> None:
"""Turn off the LED <strip>."""
self.render_color(red=0, green=0, blue=0)
def status(self, red: int, green: int, blue: int) -> None:
"""Display status via a few pixels on the strip with an RGB color.
Args:
red: Red channel color value to display.
green: Green channel color value to display.
blue: Blue channel color value to display.
"""
self.black()
for i in range(10):
self.strip.setPixelColor(
n=int(i * self.strip.numPixels() / 10),
color=Color(red=red, green=green, blue=blue),
)
self.strip.show()
<file_sep>"""Color convertion function for LED strip."""
def Color(red: int, green: int, blue: int, white: int = 0) -> int:
    """Pack RGB(W) components into a single integer pixel value.

    Byte layout, most- to least-significant: white, red, green, blue.
    Each component should be a value 0-255 where 0 is the lowest
    intensity and 255 is the highest intensity.
    """
    value = white
    for component in (red, green, blue):
        value = (value << 8) | component
    return value
<file_sep>"""Thread updating the time offset to the laptop."""
import threading
import time
from typing import List
from statistics import median
import ntplib
from . import global_vars
# pylint: disable=broad-except
def time_thread(lock: threading.Lock) -> None:
"""Thread updating the time offset to the laptop."""
ntp_client = ntplib.NTPClient()
ntp_offsets: List[float] = []
while True:
# print("FPS:", global_vars.fps)
try:
if global_vars.laptop_ip is not None:
response: ntplib.NTPStats = ntp_client.request(
host=global_vars.laptop_ip, version=4
)
# If there are more than 20 offset samples, pop the first sample
if len(ntp_offsets) > 20:
ntp_offsets.pop(0)
# Add ntp offset between rpi and laptop time to list
ntp_offsets.append(response.offset)
# Take median of all ntp offsets as the final
# between rpi and laptop time
offset = median(ntp_offsets)
with lock:
global_vars.offset = offset
except Exception as e:
print(type(e), e, "HEJ3")
time.sleep(3)
<file_sep>"""Thread receiving streamed pixels from the laptop."""
import json
import socket
import threading
from ...enums import Command, Port
from . import global_vars
# pylint: disable=broad-except
# pylint: disable=global-statement
def stream_thread(lock: threading.Lock) -> None:
"""Thread receiving streamed pixels from the laptop."""
print("Starting stream thread...")
server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(("0.0.0.0", Port.STREAM.value)) # nosec
server.listen(1)
laptop, _ = server.accept()
while True:
try:
data = laptop.recv(4096)
data_decoded = data.decode("utf-8")
while data_decoded[-1] != "]":
data = laptop.recv(4096)
data_decoded += data.decode("utf-8")
laptop.send(Command.NEXT.value.encode("utf-8"))
except Exception as e:
print(e, "HEJ1")
laptop.close()
server.close()
break
with lock:
global_vars.pixels_stream = json.loads(data_decoded)
<file_sep>[mypy]
ignore_missing_imports = True
exclude=["__pycache__", ".venv", ".git", ".github", "build", "dist", "tests"]
[mypy-cs_data_pipeline.cloud.bigquery_stream.raw_schema_pb2]
ignore_errors = True<file_sep>"""Hej."""
import time
import json
import threading
import dateutil.parser
import numpy as np
from sklearn.linear_model import LinearRegression
from ..enums import SONGS, Command
from .raspberry_pies import RaspberryPI
from . import global_vars
def send_command(lock: threading.Lock, pies: list[RaspberryPI], command: Command) -> None:
    """Broadcast ``command`` to every connected raspberry pi.

    Also records the command in the shared global state under the lock.
    """
    with lock:
        global_vars.command = command
    payload = str(command.value).encode("utf-8")
    for raspberry in pies:
        raspberry.client.send(payload)
def map_positions(lock: threading.Lock, pies: list[RaspberryPI]) -> None:
"""Send a map position for each raspberry pi, based on its ip address."""
# Set all raspberry pies in map mode
send_command(lock, pies, Command.MAP)
# Assign a position for each raspberry pi's IP
ip_positions = {}
for pi in pies:
pi.client.send(Command.MAP_SELECT.value.encode("utf-8"))
position = input("Select rpi position: ")
pi.client.send(position.encode("utf-8"))
ip_positions[pi.ip] = position
# Write ip map to file
with open("mapping/ip_positions.json", mode="w", encoding="utf-8") as f:
f.write(json.dumps(ip_positions))
def wait_pies_ready(pies: list[RaspberryPI]) -> None:
    """Block until every raspberry pi has reported READY.

    Reads one reply from each pi in order.

    Raises:
        ValueError: If any pi replies with anything other than READY.
    """
    for pi in pies:
        command = Command(pi.client.recv(1024).decode("utf-8"))
        if command != Command.READY:
            raise ValueError(
                f"Raspberry PI sent '{command}' instead of '{Command.READY.value}'"
            )
        print("Raspberry pi ready to start", pi.ip)
def start(lock: threading.Lock, pies: list[RaspberryPI]) -> None:
"""Start raspberry pies by selecting which song or where to play from."""
send_command(lock, pies, Command.START)
print("---------------")
for i, song in enumerate(SONGS):
print(f"{i+1} - {song[0]} ({song[1]})")
text_input = input("Enter song or custom timecode to start from: ")
if len(text_input) == 0:
text_input = "1"
if text_input.isdigit():
song = SONGS[int(text_input) - 1]
song_start = total_seconds(song[1])
else:
song_start = total_seconds(text_input)
# Send song start time to raspberry pies
for pi in pies:
pi.client.send(str(song_start).encode("utf-8"))
# Wait for ready responses from RPi's
wait_pies_ready(pies)
# Ready
times = []
clicks = 4
for i in range(clicks):
input("Press enter to start: " + str(clicks - i))
times.append(time.time())
model = LinearRegression().fit(np.arange(clicks).reshape(-1, 1), times)
start_time = model.predict([[clicks - 1]])[0]
for pi in pies:
pi.client.send(str(start_time).encode("utf-8"))
def total_seconds(timestamp: str) -> float:
    """Convert a time-of-day string (e.g. "00:04:15.827") to total seconds.

    Parsing is delegated to ``dateutil.parser.parse``, so other common
    formats it accepts will work too; only the time component of the
    parsed result is used (any date part is discarded).
    """
    time_ = dateutil.parser.parse(timestamp).time()
    return float(
        time_.hour * 60 * 60
        + time_.minute * 60
        + time_.second
        + time_.microsecond / 1000000
    )
<file_sep>import socket
import numpy as np
import skvideo.io
import time
from neopixel import *
import math
from scipy import ndimage
import scipy.misc
import random
def bytesToInt(b):
    """Decode the first four bytes of ``b`` as a big-endian unsigned int."""
    raw = bytearray(b)
    high_word = (raw[0] << 8) | raw[1]
    low_word = (raw[2] << 8) | raw[3]
    return (high_word << 16) | low_word
def applyNumpyColors(strip, frame):
    """Push one video frame onto the LED strip.

    Args:
        strip: Adafruit_NeoPixel strip to write to.
        frame: numpy array indexed as frame[i, channel], so assumed shape
            (num_pixels, 3).  Channels are reordered (1, 0, 2) before
            packing — presumably to match the strip's hardware color
            order; TODO confirm on the actual hardware.
    """
    # Clip once up front.  The original called np.clip on the WHOLE frame
    # inside the per-pixel loop, i.e. O(pixels * frame_size) work per
    # frame.  clip is idempotent, so hoisting it preserves the results
    # exactly while doing the work once.
    frame = np.clip(frame, 0, 255)
    for i in range(strip.numPixels()):
        strip.setPixelColor(
            i,
            Color(int(round(frame[i, 1])), int(round(frame[i, 0])), int(round(frame[i, 2]))),
        )
    strip.show()
def colorWipe():
global strip, LED_COUNT, start_time
for i in range(strip.numPixels()):
strip.setPixelColor(i, Color(0,0,0))
strip.show()
if __name__ == '__main__':
global LED_COUNT, renders, remainder, fps
# LED strip configuration:
LED_COUNT = 144 # Number of LED pixels.
LED_PIN = 18 # GPIO pin connected to the pixels (18 uses PWM!).
#LED_PIN = 10 # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
LED_DMA = 10 # DMA channel to use for generating signal (try 10)
LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
LED_CHANNEL = 0 # set to '1' for GPIOs 13, 19, 41, 45 or 53
strip = Adafruit_NeoPixel(LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, LED_CHANNEL)
strip.begin()
print('Waiting for client...')
serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
serversocket.bind(('192.168.1.33', 8089))
serversocket.listen(5)
conn, address = serversocket.accept()
try:
print('Starting strip stream...')
while True:
data = conn.recv(4)
length = bytesToInt(data)
data = conn.recv(length)
while len(data) < length:
data += conn.recv(length - len(data))
np_strip = np.fromstring(data, dtype=np.float64).reshape((144,3))
applyNumpyColors(strip, np_strip)
except KeyboardInterrupt:
colorWipe()
| ea09a11ce3b7dc771ed8eaba7a8b0fc582c1b57f | [
"TOML",
"Makefile",
"INI",
"Python",
"Text"
] | 25 | Python | callenilsson/ana-lights | 0a1423c51a4e10a72e90ac8d8b46d76c26812ad3 | a5880be4c2909c1782d6bc69426d11adccf57db5 |
refs/heads/master | <repo_name>1173710224/whatMeals<file_sep>/whatMeals2/Meal.py
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 2 10:31:37 2019
@author: C-82
"""
import numpy as np
import time
import json
'''
List是要进行查找的所有文件名的一个数组
'''
def Find(List):
    """Pick one meal at random from ``List`` and persist its new state.

    Each entry is a "name_place" string (possibly newline-terminated).
    The entry with the highest random expectation wins: meals eaten in
    the last 5 days score 0, otherwise the score is uniform in
    [0, servings_left).  The winner's remaining-serving count is
    decremented (and refilled via Check when it hits zero), its
    last-eaten timestamp is refreshed, and the state is written to disk.

    Args:
        List: Iterable of "name_place" strings.

    Returns:
        [name, place] of the chosen meal, or [''] when no meal is
        eligible (previously this case crashed with AttributeError,
        because the placeholder was the Meal *class* itself).
    """
    best_expectation = 0
    best_string = ''
    best_meal = None  # was `OBJ = Meal` — the class, not an instance
    for string in List:
        candidate = Meal(string)
        expectation = candidate.Expectation()
        if expectation > best_expectation:
            best_expectation = expectation
            best_string = string
            best_meal = candidate
    if best_meal is None:
        # No eligible meal: return without touching any state.
        return best_string.split("_")
    best_meal.left_eaten_num = best_meal.left_eaten_num - 1
    best_meal.Check()
    best_meal.last_eaten_time = time.time()
    best_meal.ChanData()
    best_string = best_string.replace("\n", "")
    return best_string.split("_")
def Reset():
    """Rewrite the on-disk JSON state file of every meal on every menu.

    Reads the four menu files (breakfast, lunch, dinner, night snack) in
    the same order as before and calls ChanData on a Meal built from each
    line.  Replaces four copies of identical open/readlines/loop/close
    code with one loop using context managers, so the file handles are
    closed even if a Meal constructor raises.
    """
    menu_files = ["1早饭.txt", "2午饭.txt", "3晚饭.txt", "4夜宵.txt"]
    for menu_file in menu_files:
        with open(menu_file, "r") as menu:
            for name_place in menu.readlines():
                Meal(name_place).ChanData()
class Meal:
    """A single meal option whose selection state is persisted as JSON.

    State file layout: ``data//<name>_<place>.json`` with keys "name",
    "place", "time" (last eaten, epoch seconds) and "num" (servings left
    before Check refills the 5-serving counter).
    """

    def __init__(self, name_place):
        """Initialize from a "name_place" string and load saved state.

        The state file must already exist (see CreFile); loading happens
        eagerly, as in the original design.

        Note: the original called ``self.GetData()`` while the loader was
        (mis)named ``GetDate``, so every construction raised
        AttributeError.  The loader is now named GetData (with a GetDate
        alias kept for backward compatibility).
        """
        List = name_place.split("_")
        self.last_eaten_time = 0.0
        self.left_eaten_num = 5
        self.name = List[0]
        self.place = List[1]
        self.name_place = name_place
        self.GetData()

    def Expectation(self):
        """Return a random selection weight for this meal.

        Meals eaten within the last 5 days score 0; otherwise a uniform
        random value in [0, left_eaten_num), so meals with more servings
        left are more likely to win.
        """
        if (time.time() - self.last_eaten_time) < 24 * 3600 * 5:
            return 0
        return np.random.uniform(0, float(self.left_eaten_num))

    def Check(self):
        """Refill the serving counter once it reaches zero."""
        if self.left_eaten_num == 0:
            self.left_eaten_num = 5

    def CreFile(self):
        """Create this meal's state file with the current state.

        The original opened with mode 'r+' (which fails when the file
        does not exist yet — the one case a "create" helper must handle)
        and wrote the literal '1', which the loader could never parse.
        Writing a full JSON state via ChanData fixes both.
        """
        self.ChanData()

    def ChanData(self):
        """Write the current state to the meal's JSON file."""
        self.name_place = self.name_place.replace('\n', '')
        full_path = 'data' + '//' + self.name_place + '.json'
        write_dict = {
            "name": self.name,
            "place": self.place,
            "time": self.last_eaten_time,
            "num": self.left_eaten_num,
        }
        with open(full_path, 'w') as file:
            # json.dumps, not str(): str() produces single-quoted Python
            # repr, which is invalid JSON and broke GetData's round trip.
            file.write(json.dumps(write_dict))

    def GetData(self):
        """Load state from the meal's JSON file."""
        self.name_place = self.name_place.replace('\n', '')
        full_path = 'data' + '//' + self.name_place + '.json'
        with open(full_path, 'r') as file:
            # A single json.load suffices; the original chained json.load
            # with json.loads, which raises TypeError on a dict.
            state = json.load(file)
        self.name = state["name"]
        self.place = state["place"]
        self.last_eaten_time = state["time"]
        self.left_eaten_num = state["num"]

    # Backward-compatible alias for the original (misspelled) method name.
    GetDate = GetData
<file_sep>/whatMeals2/whatMeals2.py
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 22 09:03:45 2019
@author: C-82
"""
import Meal

# Option number -> (label used in the output message, menu file to search).
MENU_FILES = {
    1: ("breakfast", "1早饭.txt"),
    2: ("lunch", "2午饭.txt"),
    3: ("dinner", "3晚饭.txt"),
    4: ("night", "4夜宵.txt"),
}

print("\n\n\nSir,do you want to know which meal of today?")
choice = input("1 releates to breakfast\n2 releates to lunch\n3 releates to dinner\n4 releates to night\nand that 0 means all of them:\n")
try:
    key_word = int(choice)
except ValueError:
    # BUGFIX: the original crashed on non-numeric input; treat it like any
    # other unknown option (the original printed nothing for those).
    key_word = -1

if key_word in MENU_FILES:
    label, filename = MENU_FILES[key_word]
    # Open only the file actually needed and close it deterministically
    # (the original opened all four up front and leaked them on any error).
    with open(filename, "r") as menu:
        print("It's really good,today your %s is:" % label)
        print(Meal.Find(menu.readlines()))
| 6c2a3d0eec85afcdf7c63b53234e6ef610c0af53 | [
"Python"
] | 2 | Python | 1173710224/whatMeals | 307ec875eccd96f6ced98623c17e68281692aa97 | 0e9d16f10bf103ba81af48032a93c0e138d41d86 |
refs/heads/master | <repo_name>mycloud447/issues<file_sep>/issues/cores.txt
#!/usr/bin/bash
PATH="/sbin:/bin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:/opt/sfw/bin:/usr/ucb:\
/usr/local/soe/sbin:/usr/local/soe/bin:/usr/local/Utility/bin:/usr/sfw/sbin:\
/usr/sfw/bin:/opt/OV/contrib:/usr/contrib/bin:/usr/cluster/bin:/opt/VRTS/bin"
# VERSION="1.1"
# Collecting host CPU information:
# Writen by MyCloud Tue Sep 27 15:22:27 EST 2016
#
# Refactored: the four near-identical report paths now share helper
# functions.  BUGFIX: the Sun-Fire platform test used "-eq" (arithmetic
# comparison) on `uname -i` strings; both sides evaluate to 0 so the
# branch ALWAYS matched — it now uses a string comparison.

CPUINFO=/var/tmp/T7_cpuinfo.log
ERRLOG=/var/tmp/T7_error.log

# Parse `kstat -m cpu_info` into the counters used by every report variant.
gather_cpu_stats() {
    /usr/bin/kstat -m cpu_info | egrep "chip_id|core_id|module: cpu_info" > $CPUINFO
    nproc=`grep chip_id $CPUINFO | awk '{ print $2 }' | sort -u | wc -l | tr -d ' '`
    ncore=`grep core_id $CPUINFO | awk '{ print $2 }' | sort -u | wc -l | tr -d ' '`
    vproc=`grep 'module: cpu_info' $CPUINFO | awk '{ print $4 }' | sort -u | wc -l | tr -d ' '`
    if [ $ncore -eq 0 ]; then nstrandspercore=0; else nstrandspercore=$(($vproc/$ncore)); fi
    if [ $nproc -eq 0 ]; then ncoresperproc=0; else ncoresperproc=$(($ncore/$nproc)); fi
    speedinmhz=`/usr/bin/kstat -m cpu_info | grep clock_MHz | awk '{ print $2 }' | sort -u`
    speedinghz=`echo "scale=2; $speedinmhz/1000" | bc`
    HOSTNAME=`/usr/bin/hostname`
}

print_model() {
    echo "Server Model : `/usr/bin/kstat -m cpu_info | grep implementation | sed 's/implementation//g' | awk '{print $1}' | sort -u`"
}

print_header() {
    echo "----------------------- $HOSTNAME ---------------------"
}

# Per-host processor/core/thread counters (no header, no model line).
print_counts() {
    echo "Total number of physical processors assigned to $HOSTNAME : $nproc"
    echo "Number of virtual processors assigned to $HOSTNAME: $vproc"
    echo "Total number of cores assigned to $HOSTNAME: $ncore"
    echo "Number of cores per physical processor assigned to $HOSTNAME: $ncoresperproc"
    echo "Number of hardware threads (strands or vCPUs) per core: $nstrandspercore"
    echo "Processor speed: $speedinmhz MHz ($speedinghz GHz)"
}

# Derive and print the socket -> core -> vCPU mapping from $CPUINFO.
print_vcpu_map() {
    echo -e "\n** Socket-Core-vCPU mapping **"
    let linenum=2
    if grep "core_id" $CPUINFO > /dev/null; then
        i=1
        while [ $i -le ${nproc} ]; do
            chipid=`sed -n ${linenum}p $CPUINFO | awk '{ print $2 }'`
            echo -e "\nPhysical Processor $i (chip id: $chipid):"
            i=$[$i+1]
            j=1
            while [ $j -le ${ncoresperproc} ]; do
                let linenum=($linenum + 1)
                coreid=`sed -n ${linenum}p $CPUINFO | awk '{ print $2 }'`
                echo -e "\tCore $j (core id: $coreid):"
                let linenum=($linenum - 2)
                vcpustart=`sed -n ${linenum}p $CPUINFO | awk '{ print $4 }'`
                let linenum=(3 * $nstrandspercore + $linenum - 3)
                vcpuend=`sed -n ${linenum}p $CPUINFO | awk '{ print $4 }'`
                echo -e "\t\tvCPU ids: $vcpustart - $vcpuend"
                let linenum=($linenum + 4)
                j=$[$j+1]
            done
        done
    else
        # No core_id rows (older CPUs): list the sockets only.
        i=1
        while [ $i -le ${nproc} ]; do
            chipid=`sed -n ${linenum}p $CPUINFO | awk '{ print $2 }'`
            echo -e "\nPhysical Processor $i (chip id: $chipid):"
            i=$[$i+1]
            linenum=$[$linenum+2]
        done
    fi
}

print_psrinfo() {
    echo -e "\npsrinfo"
    echo "-------"
    echo
    /usr/sbin/psrinfo -pv 2> $ERRLOG
    rm -f $ERRLOG
}

# List local zones when the zone scheduler is running.
print_zones() {
    ps -ef | grep -i zsched | grep -v grep > /dev/null 2>&1
    if [ $? == 0 ]; then
        echo -e "\n--------- zones information ----------"
        /usr/sbin/zoneadm list -cv
    fi
}

if [ `uname -s` != "SunOS" ]; then
    echo "This script is compatible for Solaris OS, it seems not Oracle OS."
    exit 0
fi
if [ `whoami` != root ]; then
    echo -e "\nYou are not root user\n"
    exit 1
fi

gather_cpu_stats
osrel=`uname -r`

if { [ "$osrel" == "5.10" ] || [ "$osrel" == "5.11" ]; } && ps -ef | grep -i ldmd | grep -v grep > /dev/null 2>&1; then
    # ----- LDOM-capable host (ldmd running): full T-series report -----
    T_Cores=`/usr/sbin/ldm list-devices -a core | awk '{print $1}' | sort -nr | head -1`
    Total_Cores=`echo "scale=2; $T_Cores+1" | bc`
    Total_Threads=`echo "scale=2; $Total_Cores*8" | bc`
    print_model
    echo "Total number of cores : $Total_Cores"
    echo -e "Total number of Threads : $Total_Threads \n\n"
    print_header
    print_counts
    print_vcpu_map
    echo -e "\n\nCDOM's, LDOM's Memorey and cores used and their core id's"
    echo "---------------------------------------------------------"
    /usr/sbin/ldm list -l -o core,memory
    echo -e "\n\nFree cores details"
    echo "------------------"
    echo "Free cores available :`/usr/sbin/ldm list-devices -a core | awk '{print $2}' | grep 100 | wc -l`"
    /usr/sbin/ldm list-devices -S core
    echo -e "\n\nHost information :"
    echo "------------------"
    /usr/sbin/virtinfo -a
    echo -e "\n\nProcessor information with psrinfo command"
    echo "-------------------------------------------"
    /usr/sbin/psrinfo -pv 2> $ERRLOG
    rm -f $ERRLOG
    echo
    print_zones
elif { [ "$osrel" == "5.10" ] || [ "$osrel" == "5.11" ]; } && { [ "`uname -i`" == "SUNW,Sun-Fire-15000" ] || [ "`uname -i`" == "SUNW,Sun-Fire-10000" ]; }; then
    # ----- High-end domained host (no LDOMs): socket list via psrinfo -----
    print_header
    print_model
    print_counts
    echo -e "\n** Socket-Core mapping **\n"
    psrinfo -pv | grep physical > /var/tmp/T7_psrinfo.log 2> $ERRLOG
    i=1
    while read line; do
        [ -z "$line" ] && continue
        echo "Processor ($i) : $line"
        let i=i+1
    done < /var/tmp/T7_psrinfo.log
    rm -f /var/tmp/T7_psrinfo.log $ERRLOG
    print_zones
else
    # ----- Any other Solaris host (incl. pre-5.10 releases) -----
    print_header
    print_model
    print_counts
    print_vcpu_map
    print_psrinfo
    echo
    print_zones
fi
rm -f $CPUINFO
<file_sep>/issues/vxvm_size.txt
#!/bin/bash
# scale=2 display value with 2 decimal.
# Interactive size-lookup menu for objects in diskgroup $DGS; loops until the
# user enters "q".  (BUGFIX: the original re-invoked foo recursively after
# every answer, growing the call stack without bound; it now iterates.)
# scale=2 display value with 2 decimal.
foo ()
{
while true
do
echo
echo -en "\033[32mEnter a number \033[33m1=Subdisk 2=Plex 3=Volume 4=Disk q=Quit\033[0m :\033[0m"
read num
if [ "$num" = "q" ]; then
exit 0
elif [ "$num" = "1" ]; then
# --- subdisk size (sectors -> GB: /2/1024/1024) ---
echo -en "\033[32mEnter the Subdisk Name :\33[0m"
read subdisk
if /usr/sbin/vxprint -sb |grep ENABLED |awk ' { print $2 } ' |grep -w "$subdisk" > /dev/null; then
echo -e "\033[32mThe Subdisk $subdisk size is :\033[0m \033[45m$(for i in `/usr/sbin/vxprint -g $DGS -Qqs $subdisk |awk ' { print $5 }' `; do echo "scale=2;$i/2/1024/1024" |bc;done) GB\033[0m"
else
echo "---------------------------------------------------------------------------------"
echo -e "\033[31mSub-disk $subdisk is not part of $DGS or incorrect sub-diskname\033[0m"
echo "---------------------------------------------------------------------------------"
fi
elif [ "$num" = "2" ]; then
# --- plex size ---
echo -en "\033[35mEnter the Plex Name :\033[0m"
read plex
if /usr/sbin/vxprint -p |grep ACTIVE |awk ' { print $2 } ' |grep -w "$plex" > /dev/null; then
echo -e "\033[32mThe plex $plex size is :\033[0m \033[45m$(for i in `/usr/sbin/vxprint -g $DGS -Qqp $plex |awk ' { print $5 }' `; do echo "scale=2;$i/2/1024/1024" |bc;done) GB\033[0m"
else
echo "-----------------------------------------------------------------------"
echo -e "\033[31mPlex $plex is not part of $DGS or incorrect Plex name\033[0m"
echo "-----------------------------------------------------------------------"
fi
elif [ "$num" = "3" ]; then
# --- volume size ---
echo -en "\033[35mEnter the Volume Name :\033[0m"
read volume
if /usr/sbin/vxprint -v |grep ENABLED |awk ' { print $2 } ' |grep -w "$volume" > /dev/null; then
echo -e "\033[32mThe volume $volume size is :\033[0m \033[45m$(for i in `/usr/sbin/vxprint -g $DGS -Qqv $volume |awk ' { print $5 }' `; do echo "scale=2;$i/2/1024/1024" |bc;done) GB\033[0m"
else
echo "-----------------------------------------------------------------------------"
echo -e "\033[31mVolume $volume is not part of $DGS or incorrect Volume name\033[0m"
echo "-----------------------------------------------------------------------------"
fi
elif [ "$num" = "4" ]; then
# --- whole-disk size (public region length) ---
echo -en "\033[35mEnter the Veritas Disk Name :\033[0m"
read DISK
if /usr/sbin/vxdisk list |grep $DGS|grep $DISK > /dev/null; then
GADSK=`/usr/sbin/vxdisk list |grep "$DGS" |awk ' { print $1 } '|grep "$DISK" `
GDSK=`/usr/sbin/vxdisk list $GADSK |grep public |awk ' { print $4 } '|cut -f2 -d'=' `
echo -e "\033[32mThe size of the disk $DISK :\033[0m \033[45m"$(echo "scale=2;$GDSK/2/1024/1024"|bc)" GB\033[0m"
else
echo "-----------------------------------------------------------------------------"
echo -e "\033[31mThe entered disk is not part of $DGS or incorrect disk name\033[0m"
echo "-----------------------------------------------------------------------------"
fi
else
echo "-------------------------------------------------"
echo -e "\033[31mPlease enter the correct option\033[0m"
echo "-------------------------------------------------"
fi
done
}
#Script begins
# Prompt for a diskgroup; if it is imported on this host, print its total and
# free space, then enter the interactive size-lookup menu (foo).
echo -en "\033[32mEnter the diskgroup name :\033[0m"
read DGS
# Is $DGS among the diskgroups imported on this node?
/usr/sbin/vxdg list |grep -v ID |awk ' { print $1 } ' |grep -w $DGS > /dev/null
if [ `echo $?` -eq 0 ]
then
# Sum of %publen over all disks; sectors -> GB via /2/1024/1024.
DGSPACE=`/usr/sbin/vxprint -g $DGS -dF "%publen" | awk 'BEGIN {s = 0} {s += $1} END {print s}' `
echo "Diskgroup $DGS size is = "$(echo "scale=2;$DGSPACE/2/1024/1024"|bc)" GB"
# Unallocated space reported by `vxdg free`; sectors -> MB via /2/1024.
DGFREE=`/usr/sbin/vxdg -g $DGS free | awk ' { print $5 }' |grep -v LENGTH |awk 'BEGIN {s = 0} {s += $1} END {print s}' `
echo -e "\033[32mFree space/Unallocated space in diskgroup $DGS is :\033[0m \033[45m "$(echo "scale=2;$DGFREE/2/1024"|bc)" MB\033[0m"
foo
else
echo "--------------------------------------------------------------"
echo -e "\033[31mDiskgroup $DGS is not imported in `uname -n`\033[0m"
echo "--------------------------------------------------------------"
fi
<file_sep>/issues/I_O_fence_reconfig_manually_&_DG_split_requirements.txt
1. While doing a disk group split, a VERITAS Storage Foundation Standard license will not work; an Enterprise license is required
2. While doing quorum migration manually we need to follow below steps.
On all the cluster nodes
$ hastop -all
$ /etc/init.d/vxfen stop
$ /etc/init.d/odm stop
$ /sbin/vcsmmconfig -U
$ gabconfig -U
$ lltconfig -U
$ cat /etc/vxfendg ======> update with new VMAX fen diskgroup on all the nodes
$ /etc/init.d/vxfen start ======> once it's done, check /etc/vxfentab; it will be populated with the new I/O fence disks.
$ /sbin/vxfenconfig -c
$ lltconfig -c
$ gabconfig -c
$ /sbin/vcsmmconfig -c
$ /etc/init.d/odm start
$ hastart
<file_sep>/issues/I_O_fence_reconfig_online_(minimum_requirements_SF5.0_with_mp3.txt
$ /opt/VRTSspt/VRTSexplorer/VRTSexplorer -dbac –vxfen
Enable password-less ssh access from <host1> to all nodes
ssh-keygen -t rsa
cp -p id_rsa.pub authorized_keys
cp -p /etc/ssh/sshd_config /etc/ssh/sshd_config.`date +%d%m%Y`
cp -p /usr/local/soe/conf/soelogins.data /usr/local/soe/conf/soelogins.data.`date +%d%m%Y`
Modify /etc/ssh/sshd_config ( #PermitRootLogin no to #PermitRootLogin yes)
Modify /usr/local/soe/conf/soelogins.data (root:1) to allow root 1 direct login
svcs -a|grep -i ssh
svcadm refresh svc:/network/ssh:default
/etc/init.d/sshd restart
/sbin/vxfenadm -g all -f /etc/vxfentab | tee vxfenadm_-g_all.pre
/sbin/vxfenadm -r all -f /etc/vxfentab | tee vxfenadm_-r_all.pre
/etc/vx/bin/vxdisksetup -i <vmax_id>
/etc/vx/bin/vxdisksetup -i <vmax_id>
/etc/vx/bin/vxdisksetup -i <vmax_id>
vxdg init vmax_<cluster name>_fencdg fen001_<cluster name>_dm=<vmax_id>
vxdg -g vmax_<cluster name>_fencdg adddisk fen002_<cluster name>_dm=<vmax_id>
vxdg -g vmax_<cluster name>_fencdg adddisk fen003_<cluster name>_dm=<vmax_id>
vxdg -g vmax_<cluster name>_fencdg set coordinator=on
vxdg deport vmax_<cluster name>_fencdg
vxdg -t import vmax_<cluster name>_fencdg
vxdg deport vmax_<cluster name>_fencdg
/opt/VRTSvcs/vxfen/bin/vxfenswap -g vmax_<cluster name>_fencdg
cp /var/VRTSvcs/log/vxfen/vxfenswap.log.* .
cp -p /etc/vxfendg vxfendg.post
cp -p /etc/vxfenmode vxfenmode.post
cp -p /etc/vxfentab vxfentab.post
Destroy original fencing diskgroup
----------------------------------
vxdg -t import <cluster name>_fencdg
vxdg -o coordinator destroy <cluster name>_fencdg
/etc/vx/bin/vxdiskunsetup -C <dmx id>
/etc/vx/bin/vxdiskunsetup -C <dmx id>
/etc/vx/bin/vxdiskunsetup -C <dmx id>
(OR)
vxdg –t import tpni_ps_enr_rac_fendg
vxdg list
vxdg -g tpni_ps_enr_rac_fencdg set coordinator=off
vxdg destroy tpni_ps_enr_rac_fencdg
| e0a8a66eeb77c1ab60285cf61d938b6182ba481d | [
"Text",
"Shell"
] | 4 | Shell | mycloud447/issues | c5f5f9328844f5c16f9f1dee7f06a84eddbb5134 | 1b51bdd05750628849a8b2760a6df4b37a03fd6d |
refs/heads/master | <repo_name>kwakeham/ant_profiles<file_sep>/ant_fec/ant_fec_local.h
/* Copyright (c) 2015 Nordic Semiconductor. All Rights Reserved.
*
* The information contained herein is property of Nordic Semiconductor ASA.
* Terms and conditions of usage are described in detail in NORDIC
* SEMICONDUCTOR STANDARD SOFTWARE LICENSE AGREEMENT.
*
* Licensees are granted free, non-transferable use of the information. NO
* WARRANTY of ANY KIND is provided. This heading must NOT be removed from
* the file.
*
*/
/* Copyright (c) 2015 Nordic Semiconductor. All Rights Reserved.
*
* The information contained herein is property of Nordic Semiconductor ASA.
* Terms and conditions of usage are described in detail in NORDIC
* SEMICONDUCTOR STANDARD SOFTWARE LICENSE AGREEMENT.
*
* Licensees are granted free, non-transferable use of the information. NO
* WARRANTY of ANY KIND is provided. This heading must NOT be removed from
* the file.
*
*/
#ifndef ANT_FEC_LOCAL_H__
#define ANT_FEC_LOCAL_H__
#include <stdint.h>
#include <stdbool.h>
#include "ant_fec.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* @addtogroup ant_fec
* @{
*/
/**@brief FEC display-side (RX) control block.
 *
 * NOTE(review): the original brief said "Bicycle Power Sensor RX control
 * block", apparently copy-pasted from the bicycle power profile; this
 * struct belongs to the FEC display profile (see ant_fec_disp_cb_t name).
 */
typedef struct
{
    uint16_t calib_timeout; ///< Presumably the remaining wait time for a calibration response — confirm against the profile code.
    enum
    {
        FEC_DISP_CALIB_NONE,      ///< Idle state.
        FEC_DISP_CALIB_REQUESTED, ///< Calibration requested.
    } calib_stat;               ///< Current calibration request state.
} ant_fec_disp_cb_t;
/**
* @}
*/
#ifdef __cplusplus
}
#endif
#endif // ANT_FEC_LOCAL_H__
<file_sep>/ant_fec/pages/ant_fec_common_data.c
/* Copyright (c) 2015 Nordic Semiconductor. All Rights Reserved.
*
* The information contained herein is property of Nordic Semiconductor ASA.
* Terms and conditions of usage are described in detail in NORDIC
* SEMICONDUCTOR STANDARD SOFTWARE LICENSE AGREEMENT.
*
* Licensees are granted free, non-transferable use of the information. NO
* WARRANTY of ANY KIND is provided. This heading must NOT be removed from
* the file.
*
*/
#include "sdk_common.h"
#if NRF_MODULE_ENABLED(ANT_FEC)
#include "ant_fec_common_data.h"
#include "ant_fec_utils.h"
#define NRF_LOG_MODULE_NAME ant_fec_common_data
#if ANT_FEC_COMMON_LOG_ENABLED
#define NRF_LOG_LEVEL ANT_FEC_COMMON_LOG_LEVEL
#define NRF_LOG_INFO_COLOR ANT_FEC_COMMON_INFO_COLOR
#else // ANT_FEC_COMMON_LOG_ENABLED
#define NRF_LOG_LEVEL 0
#endif // ANT_FEC_COMMON_LOG_ENABLED
#include "nrf_log.h"
NRF_LOG_MODULE_REGISTER();
/**@brief FEC common page data layout structure. */
typedef struct
{
    uint8_t reserved0[2];          ///< Bytes not owned by the common data.
    uint8_t instantaneous_cadence; ///< Instantaneous cadence in rpm; 0xFF = invalid (see cadence_data_log).
    uint8_t reserved1[4];          ///< Bytes not owned by the common data.
}ant_fec_cadence_data_layout_t;
/**@brief Trace the common (cadence) data.
 *
 * @param[in] p_common_data Pointer to the common data to log.
 */
static void cadence_data_log(ant_fec_common_data_t const * p_common_data)
{
    uint8_t const cadence = p_common_data->instantaneous_cadence;

    if (cadence != 0xFF)
    {
        NRF_LOG_INFO("instantaneous cadence: %u rpm\r\n\n", cadence);
    }
    else
    {
        /* 0xFF marks an invalid/unknown cadence reading. */
        NRF_LOG_INFO("instantaneous cadence: -- rpm\r\n\n");
    }
}
/**@brief Encode the common (cadence) data into an outgoing page buffer.
 *
 * Only the instantaneous-cadence byte of the layout is written; the
 * reserved bytes are left untouched.
 *
 * @param[out] p_page_buffer Destination page payload.
 * @param[in]  p_common_data Common data to serialize (also logged).
 */
void ant_fec_cadence_encode(uint8_t * p_page_buffer,
                            ant_fec_common_data_t const * p_common_data)
{
    ant_fec_cadence_data_layout_t * p_layout =
        (ant_fec_cadence_data_layout_t *)p_page_buffer;

    p_layout->instantaneous_cadence = p_common_data->instantaneous_cadence;

    cadence_data_log(p_common_data);
}
/**@brief Decode the common (cadence) byte of a received page.
 *
 * @param[in]  p_page_buffer Raw page payload.
 * @param[out] p_common_data Common data structure to fill in (also logged).
 */
void ant_fec_cadence_decode(uint8_t const * p_page_buffer,
                            ant_fec_common_data_t * p_common_data)
{
    ant_fec_cadence_data_layout_t const * p_layout =
        (ant_fec_cadence_data_layout_t const *)p_page_buffer;

    p_common_data->instantaneous_cadence = p_layout->instantaneous_cadence;

    cadence_data_log(p_common_data);
}
#endif // NRF_MODULE_ENABLED(ANT_FEC)
| 155144411e1db98ab8b13cbb57a9e659d99e591c | [
"C"
] | 2 | C | kwakeham/ant_profiles | c8c7baa0b828092c68f42e5e0b12997f8a84a081 | a220496f6cdc47e6f0ed133b7e5b64e3b7d5dc0e |
refs/heads/master | <repo_name>Taher-Ben-Abdallah/MERN-Project<file_sep>/client/src/reducers/auth.js
import { REGISTER_SUCCESS, REGISTER_FAIL,USER_LOADED,AUTH_ERROR, LOGIN_SUCCESS, LOGIN_FAIL,LOGOUT} from '../actions/types';
// Initial shape of the auth slice.  The token is rehydrated from
// localStorage so an existing session survives a page reload; `user`
// stays null until USER_LOADED succeeds.
const initialState ={
    token: localStorage.getItem('token'),
    isAuthenticated: null,
    loading: true,
    user: null
}
// Auth reducer: tracks the JWT token, the loaded user object and the
// loading flag.  Side effect: keeps localStorage's 'token' entry in sync
// with the store on every success/failure transition.
export default function (state = initialState, action) {
  const { type, payload } = action;

  switch (type) {
    case USER_LOADED:
      // Token was accepted by the API: attach the user object.
      return { ...state, isAuthenticated: true, loading: false, user: payload };

    case REGISTER_SUCCESS:
    case LOGIN_SUCCESS:
      // Persist the fresh token, then merge the payload (which contains it).
      localStorage.setItem('token', payload.token);
      return { ...state, ...payload, isAuthenticated: true, loading: false };

    case REGISTER_FAIL:
    case AUTH_ERROR:
    case LOGIN_FAIL:
    case LOGOUT:
      // Any auth failure or logout invalidates the stored token.
      localStorage.removeItem('token');
      return { ...state, token: null, isAuthenticated: false, loading: false };

    default:
      return state;
  }
}
// Redux action-type constants — the single source of truth shared by the
// action creators and the reducers.

// Alerts
export const SET_ALERT='SET_ALERT';
export const REMOVE_ALERT='REMOVE_ALERT';
// Registration
export const REGISTER_SUCCESS ='REGISTER_SUCCESS';
export const REGISTER_FAIL='REGISTER_FAIL';
// Session / authentication
export const USER_LOADED='USER_LOADED';
export const AUTH_ERROR='AUTH_ERROR';
export const LOGIN_SUCCESS='LOGIN_SUCCESS';
export const LOGIN_FAIL='LOGIN_FAIL';
export const LOGOUT='LOGOUT';
"JavaScript"
] | 2 | JavaScript | Taher-Ben-Abdallah/MERN-Project | 4c52a2e8de9554fe30a5036229e85fd9875d8f66 | ddaf4ffa2641df3fa18a3f0f88fc6bcc171dbd0e |
refs/heads/master | <repo_name>Ssergg/test_lumen<file_sep>/routes/web.php
<?php
/*
|--------------------------------------------------------------------------
| Application Routes
|--------------------------------------------------------------------------
|
| Here is where you can register all of the routes for an application.
| It is a breeze. Simply tell Lumen the URIs it should respond to
| and give it the Closure to call when that URI is requested.
|
*/
use Illuminate\Support\Facades\Route;
// Recently-viewed-products endpoints, all served by ProductsController.
$router->get('/products', 'ProductsController@show');             // list the user's recently viewed products
$router->post('/products/{productId}', 'ProductsController@store');    // add a product to the front of the list
$router->put('/products/{productId}', 'ProductsController@update');    // re-add (moves the product to the front)
$router->delete('/products/{productId}', 'ProductsController@delete'); // remove a product from the list
<file_sep>/app/Http/Controllers/ProductsController.php
<?php
namespace App\Http\Controllers;
use App\Services\RecentlyProducts;
class ProductsController extends Controller
{
    /**
     * Lazily created service instance shared by all actions of this request.
     *
     * @var RecentlyProducts|null
     */
    private $recentlyProducts;

    /**
     * Return the recently-viewed-products service (created on first use).
     *
     * The original instantiated a fresh RecentlyProducts inside every
     * action; the duplication is now factored out here.
     *
     * @return RecentlyProducts
     */
    private function service()
    {
        if ($this->recentlyProducts === null) {
            $this->recentlyProducts = new RecentlyProducts();
        }

        return $this->recentlyProducts;
    }

    /**
     * Show recently products list.
     *
     * @return array
     */
    public function show()
    {
        return $this->service()->show();
    }

    /**
     * Store recently product in list.
     *
     * @param int $productId
     * @return array
     */
    public function store($productId)
    {
        return $this->service()->store($productId);
    }

    /**
     * Update recently product in list.
     *
     * @param int $productId
     * @return array
     */
    public function update($productId)
    {
        return $this->service()->update($productId);
    }

    /**
     * Delete recently product from list.
     *
     * @param int $productId
     * @return array
     */
    public function delete($productId)
    {
        return $this->service()->delete($productId);
    }
}
<file_sep>/README.md
Recently-viewed products microservice based on a Redis cache
/**************************************************/
Run the command: composer install
Configure a local web server
Create a .env file using .env.example as a template
Use postman collection: Recently Viewed Products.postman_collection.json
<file_sep>/app/Services/RecentlyProducts.php
<?php
namespace App\Services;
use Illuminate\Support\Arr;
use Illuminate;
use Illuminate\Support\Facades\Cache;
use Illuminate\Support\Carbon;
class RecentlyProducts
{
    /** Fallback returned when the user has no recently viewed products. */
    const DEFAULT_PRODUCT_LIST = ['Default product list'];

    /** Entries older than this many days are dropped (also the cache TTL). */
    const STORAGE_DAYS = 10;

    /** Maximum number of products kept per user. */
    const MAX_NUMBER = 100;

    /**
     * Store a product at the front of the user's recently-viewed list.
     *
     * The list lives in Redis as [productId => lastViewedUnixTime, ...],
     * newest first.
     *
     * @param int $productId
     * @return array product ids, newest first
     */
    public function store($productId)
    {
        $product = [$productId => time()];

        $products = Cache::store('redis')->get($this->getUserKey());
        if (!is_null($products)) {
            // Re-viewing a product moves it to the front instead of duplicating it.
            Arr::pull($products, $productId);
        } else {
            $products = [];
        }

        $products = $this->checkList($product + $products);
        Cache::store('redis')->put($this->getUserKey(), $products, Carbon::now()->addDays(self::STORAGE_DAYS));

        return array_keys($products);
    }

    /**
     * Show the recently-viewed products list (or the default placeholder).
     *
     * @return array
     */
    public function show()
    {
        $products = Cache::store('redis')->get($this->getUserKey());
        if (!empty($products)) {
            return array_keys($this->checkList($products));
        }

        return $this->getDefaultList();
    }

    /**
     * Update a product in the list (same semantics as store: move to front).
     *
     * @param int $productId
     * @return array
     */
    public function update($productId)
    {
        return $this->store($productId);
    }

    /**
     * Delete a product from the recently-viewed list.
     *
     * @param int $productId
     * @return array product ids remaining in the list
     */
    public function delete($productId)
    {
        $products = Cache::store('redis')->get($this->getUserKey());

        // BUGFIX: the original fell through to array_keys($products) even when
        // the cache entry was missing ($products === null), which raises a
        // TypeError on PHP 8.
        if (empty($products)) {
            return [];
        }

        if (array_key_exists($productId, $products)) {
            // Preserve the remaining lifetime, anchored to the newest entry.
            $ttl = $products[array_key_first($products)] + self::STORAGE_DAYS * 24 * 3600 - time();
            Arr::pull($products, $productId);
            Cache::store('redis')->put($this->getUserKey(), $products, $ttl);
        }

        return array_keys($products);
    }

    /**
     * Return the authenticated user id.
     *
     * NOTE(review): stubbed to 1 — wire this to the real auth layer.
     *
     * @return int
     */
    protected function getAuthenticatedUser()
    {
        return 1;
    }

    /**
     * Return the default recently-viewed products list.
     *
     * @return array
     */
    protected function getDefaultList()
    {
        return self::DEFAULT_PRODUCT_LIST;
    }

    /**
     * Drop expired entries and trim the list to MAX_NUMBER items,
     * preserving keys and order.
     *
     * @param array $products [productId => lastViewedUnixTime]
     * @return array
     */
    protected function checkList($products)
    {
        return array_slice(array_filter($products, function ($expire) {
            return $expire > (time() - self::STORAGE_DAYS * 24 * 3600);
        }), 0, self::MAX_NUMBER, true);
    }

    /**
     * Cache key holding the current user's list.
     *
     * @return string
     */
    protected function getUserKey()
    {
        return 'user-' . $this->getAuthenticatedUser();
    }
}
| dcf956bed3098ace0bb807990e4e6dadeb6a962b | [
"Markdown",
"PHP"
] | 4 | PHP | Ssergg/test_lumen | ee2a8c543436872484349937d60bd3619332df41 | 10a951531fe8d65a81b81f0bf9b1f6f2beef7460 |
refs/heads/master | <file_sep>import time
import time
import subprocess
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

# BCM pin wired to the shutdown button (pull-down: reads HIGH while pressed).
button = 22
GPIO.setup(button, GPIO.IN, GPIO.PUD_DOWN)

try:
    while True:
        if GPIO.input(button) == GPIO.HIGH:
            print("shutdown now...")
            # Fire-and-forget: the OS shutdown will take this script down too.
            subprocess.Popen(['sudo', 'shutdown', 'now'])
            print("adios!")
        # Poll twice a second to keep CPU usage negligible.
        time.sleep(0.5)
except KeyboardInterrupt:
    pass
finally:
    # BUGFIX: GPIO.cleanup() sat after the infinite loop and could never run;
    # it now executes on Ctrl-C or any other exit.
    GPIO.cleanup()
<file_sep>import time
import time
import subprocess
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

# BCM pin wired to the test button (pull-down: reads HIGH while pressed).
button = 22
GPIO.setup(button, GPIO.IN, GPIO.PUD_DOWN)

try:
    while True:
        if GPIO.input(button) == GPIO.HIGH:
            print("connected")
        # Poll twice a second to keep CPU usage negligible.
        time.sleep(0.5)
except KeyboardInterrupt:
    pass
finally:
    # BUGFIX: GPIO.cleanup() sat after the infinite loop and could never run;
    # it now executes on Ctrl-C or any other exit.
    GPIO.cleanup()
| 3d3f4ccca76b4feda78d006dbcf2d7cc507f37ac | [
"Python"
] | 2 | Python | emrysr/rpi-shutdown | 1faf403864c048cd62f7584e130cf9b492bd5829 | 1886397683989e9f26e74e0c369c268f040a7251 |
refs/heads/master | <repo_name>jweg/CMakeTouch<file_sep>/CMakeTouch/PkgCmdID.cs
using System;
namespace jweg.CMakeTouch
{
    /// <summary>
    /// Command IDs for the CMakeTouch menu items.
    /// NOTE(review): these values presumably mirror the command table in the
    /// package's .vsct file — confirm before changing any of them.
    /// </summary>
    static class PkgCmdIDList
    {
        public const uint cmdidCMakeTouchProject = 0x100; // "Touch" command on project nodes (name suggests; confirm)
        public const uint cmdidCMakeTouchNode = 0x110;    // "Touch" command on item nodes (name suggests; confirm)
        public const uint cmdidCMakeTouchFolder = 0x120;  // "Touch" command on folder nodes (name suggests; confirm)
    };
}
<file_sep>/CMakeTouch/Guids.cs
using System;
namespace jweg.CMakeTouch
{
    /// <summary>
    /// Package and command-set GUIDs used by the CMakeTouch VSPackage.
    /// NOTE(review): presumably these must match the GUIDs declared in the
    /// .vsct file and the package attributes — confirm before editing.
    /// </summary>
    static class GuidList
    {
        public const string guidCMakeTouchPkgString = "826c0374-e5b5-428b-be3e-381384b6af43";    // package GUID
        public const string guidCMakeTouchCmdSetString = "b133d899-2616-4dde-99c5-0483d4763e9a"; // command-set GUID
        // Parsed once so call sites can use the Guid type directly.
        public static readonly Guid guidCMakeTouchCmdSet = new Guid(guidCMakeTouchCmdSetString);
    };
}
<file_sep>/CMakeTouch/CMakeTouchPackage.cs
using System;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.ComponentModel.Design;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Win32;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell;
using EnvDTE;
using EnvDTE80;
namespace jweg.CMakeTouch
{
/// <summary>
/// This is the class that implements the package exposed by this assembly.
///
/// The minimum requirement for a class to be considered a valid package for Visual Studio
/// is to implement the IVsPackage interface and register itself with the shell.
/// This package uses the helper classes defined inside the Managed Package Framework (MPF)
/// to do it: it derives from the Package class that provides the implementation of the
/// IVsPackage interface and uses the registration attributes defined in the framework to
/// register itself and its components with the shell.
/// </summary>
// This attribute tells the PkgDef creation utility (CreatePkgDef.exe) that this class is
// a package.
[PackageRegistration(UseManagedResourcesOnly = true)]
// This attribute is used to register the information needed to show this package
// in the Help/About dialog of Visual Studio.
[InstalledProductRegistration("#110", "#112", "1.0", IconResourceID = 400)]
// This attribute is needed to let the shell know that this package exposes some menus.
[ProvideMenuResource("Menus.ctmenu", 1)]
[Guid(GuidList.guidCMakeTouchPkgString)]
/// <summary>
/// Visual Studio package that "touches" (sets the last-write time of) files
/// selected in Solution Explorer — presumably to force CMake to regenerate,
/// given the project name; confirm against the project docs.
/// Three commands are registered: touch CMakeLists.txt in a project, touch
/// every file under a node, touch CMakeLists.txt under a folder.
/// </summary>
public sealed class CMakeTouchPackage : Package
{
    // Cached automation object; resolved lazily on first command invocation.
    private DTE2 _ide;

    public CMakeTouchPackage()
    {}

    /// <summary>
    /// Registers the three menu commands with the shell's menu command service.
    /// All three route to the same handler; behavior is switched on the
    /// command id inside MenuItemCallback.
    /// </summary>
    protected override void Initialize()
    {
        Debug.WriteLine (string.Format(CultureInfo.CurrentCulture, "Entering Initialize() of: {0}", this.ToString()));
        base.Initialize();

        OleMenuCommandService mcs = GetService(typeof(IMenuCommandService)) as OleMenuCommandService;
        if ( null != mcs )
        {
            MenuCommand menuItem = new MenuCommand(MenuItemCallback,
                new CommandID(GuidList.guidCMakeTouchCmdSet, (int)PkgCmdIDList.cmdidCMakeTouchProject));
            mcs.AddCommand( menuItem );

            menuItem = new MenuCommand(MenuItemCallback,
                new CommandID(GuidList.guidCMakeTouchCmdSet, (int)PkgCmdIDList.cmdidCMakeTouchNode));
            mcs.AddCommand(menuItem);

            menuItem = new MenuCommand(MenuItemCallback,
                new CommandID(GuidList.guidCMakeTouchCmdSet, (int)PkgCmdIDList.cmdidCMakeTouchFolder));
            mcs.AddCommand(menuItem);
        }
    }

    /// <summary>
    /// Sets the file's last-write time to now. Failures (read-only files,
    /// missing paths, permissions) are deliberately swallowed so one bad file
    /// does not abort the whole traversal.
    /// </summary>
    private void TouchFile(string fileName)
    {
        try
        {
            System.IO.File.SetLastWriteTimeUtc(fileName, DateTime.UtcNow);
        }
        catch (System.Exception)
        {}
    }

    /// <summary>
    /// Recursively walks a project item tree. Physical files are touched when
    /// <paramref name="touchAnyFile"/> is true or the file name ends with
    /// "CMakeLists.txt"; physical and virtual folders are recursed into.
    /// </summary>
    private void ProcessProjectItem(ProjectItem item, bool touchAnyFile)
    {
        if (item.Kind == EnvDTE.Constants.vsProjectItemKindPhysicalFile)
        {
            // NOTE(review): EnvDTE documents ProjectItem.FileNames as 1-based;
            // confirm that index 0 is intentional here.
            string fileName = item.get_FileNames(0);
            if (touchAnyFile || fileName.EndsWith("CMakeLists.txt"))
            {
                TouchFile(fileName);
            }
        }
        else if (item.Kind == EnvDTE.Constants.vsProjectItemKindPhysicalFolder ||
                 item.Kind == EnvDTE.Constants.vsProjectItemKindVirtualFolder)
        {
            // Recurse to children
            foreach (ProjectItem child in item.ProjectItems) ProcessProjectItem(child, touchAnyFile);
        }
    }

    /// <summary>
    /// Shared handler for all three commands. "Touch any file" applies only to
    /// the Node command; the other two restrict touching to CMakeLists.txt.
    /// Each current Solution Explorer selection is processed, whether it is a
    /// whole project or an individual project item.
    /// </summary>
    private void MenuItemCallback(object sender, EventArgs e)
    {
        MenuCommand command = sender as MenuCommand;
        bool touchAnyFile = PkgCmdIDList.cmdidCMakeTouchNode == command.CommandID.ID;

        if (null == _ide) _ide = (DTE2)GetService(typeof(DTE));
        IEnumerable<UIHierarchyItem> items = ((object[])_ide.ToolWindows.SolutionExplorer.SelectedItems).Cast<UIHierarchyItem>().ToList();
        foreach (UIHierarchyItem item in items)
        {
            Project project = item.Object as Project;
            if (null != project)
            {
                foreach (ProjectItem pItem in project.ProjectItems) ProcessProjectItem(pItem, touchAnyFile);
            }

            ProjectItem projItem = item.Object as ProjectItem;
            if (null != projItem)
            {
                ProcessProjectItem(projItem, touchAnyFile);
            }
        }
    }
}
}
| a1f3861bd513f1a7970d42f3bd6491f5e8219c3e | [
"C#"
] | 3 | C# | jweg/CMakeTouch | 06b219fd66e1e19613af35d6efe166130ac0b340 | 8a74ae3e19a3920f34251b2a6ef5a89edc714425 |
refs/heads/master | <repo_name>matay15/odevler<file_sep>/Odev3-3/Program.cs
using System;
namespace Odev3_3
{
/// <summary>
/// Console demo: reads two customers from stdin, then prints them back.
/// </summary>
class Program
{
    static void Main(string[] args)
    {
        // The original duplicated the prompt/read/separator sequence inline
        // for each customer; it is factored into ReadMusteri. Console output
        // order and wording are unchanged.
        Musteri musteri1 = ReadMusteri(12345);
        Musteri musteri2 = ReadMusteri(6789);

        Musteri[] musteriler = new Musteri[] { musteri1, musteri2 };
        foreach (Musteri musteri in musteriler)
        {
            Console.WriteLine(musteri.Adi);
            Console.WriteLine(musteri.Soyadi);
            Console.WriteLine(musteri.Id);
        }
    }

    /// <summary>Prompts for a customer's first and last name on the console.</summary>
    /// <param name="id">Fixed id assigned to the new customer.</param>
    /// <returns>The populated <see cref="Musteri"/> instance.</returns>
    private static Musteri ReadMusteri(int id)
    {
        Console.WriteLine("müşteri Adı Giriniz");
        Musteri musteri = new Musteri();
        musteri.Adi = Console.ReadLine();

        Console.WriteLine("Müşteri Soyadı giriniz");
        musteri.Soyadi = Console.ReadLine();
        musteri.Id = id;

        Console.WriteLine("---------------------------");
        return musteri;
    }
}
}
<file_sep>/Odev3-3/MusteriManager.cs
using System;
using System.Collections.Generic;
using System.Text;
namespace Odev3_3
{
/// <summary>Console-logging CRUD stub for customer records.</summary>
class MusteriManager
{
    /// <summary>Registers a customer and logs the action.</summary>
    public void Ekle(Musteri musteri)
    {
        // Fix: the original concatenated name/surname/id with no separators,
        // producing e.g. "Müşteri Sisteme EklendiAliVeli123".
        Console.WriteLine($"Müşteri Sisteme Eklendi: {musteri.Adi} {musteri.Soyadi} {musteri.Id}");
    }

    /// <summary>Removes a customer and logs the action.</summary>
    public void Sil(Musteri musteri)
    {
        Console.WriteLine("Müşteri Sistemden Silindi");
    }
}
}
| 7d490b9c2bd24e9ede8d68894f070f5b7bf076dc | [
"C#"
] | 2 | C# | matay15/odevler | 38375db686961efac9ea16d9734a31480fb5d457 | 33f0eef32ee03bf7d1f8ec9503cf7d63cdc641f7 |
refs/heads/master | <file_sep>package au.com.emc.cubing.stackmat;
import au.com.emc.cubing.stackmat.StackmatState;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
 * Observer interface for stackmat timer state transitions.
 */
public interface StackmatListener {

    /**
     * Invoked with the previous and the newly decoded timer state.
     *
     * @param oldState the state reported before this update
     * @param newState the state decoded from the latest timer message
     */
    public void stateUpdate(StackmatState oldState, StackmatState newState);
}
<file_sep>package au.com.emc.cubing.stackmat;
import java.util.List;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.TargetDataLine;
/**
 * One signed audio sample plus the time (seconds) at which it was captured.
 * A strictly positive amplitude decodes to bit 1, anything else to bit 0.
 */
class StackmatSample {

    // Raw signed amplitude as read from the audio line.
    private final int value;
    // Capture time in seconds, as accumulated by the reader.
    private final double time;

    public StackmatSample(int sampleValue, double sampleTime) {
        value = sampleValue;
        time = sampleTime;
    }

    /** @return the capture time of this sample, in seconds */
    public double getTime() {
        return time;
    }

    /** @return 1 for a strictly positive amplitude, otherwise 0 */
    public int bitValue() {
        if (value > 0) {
            return 1;
        }
        return 0;
    }

    /**
     * @param inverted when true, the decoded bit is flipped
     * @return the (optionally inverted) bit value of this sample
     */
    public int bitValue(boolean inverted) {
        int bit = bitValue();
        return inverted ? 1 - bit : bit;
    }
}
/**
 * Decodes the serial bitstream of a Generation 3 StackMat timer delivered
 * through an audio input: 1200 baud, 10-byte messages, with spacer bits
 * between bytes. The background loop reads raw samples from a
 * {@link TargetDataLine}, locates message boundaries by looking for long
 * idle runs, slices one message out, and forwards the parsed
 * {@code StackmatMessage} to {@code StackmatManager}.
 */
public class StackmatInterpreter {

    private final static Logger LOGGER = Logger.getLogger(StackmatInterpreter.class.getName());

    // Serial bit rate of the timer's data stream.
    private static final int BAUD = 1200;
    private static final int BITS_PER_BYTE = 8;
    // this is a Gen3 timer: 10 payload bytes per message
    private static final int BYTES_PER_MESSAGE = 10;
    // there is a spacer of 2 bits in between each byte of a stackmat message
    private static final int SPACER_BIT_SIZE = 2;
    // the start spacer bit is special (half size)
    private static final int startSpacer = SPACER_BIT_SIZE / 2;
    // the total number of bits in a stackmat message
    private static final int bitsPerMessage = startSpacer
            + BYTES_PER_MESSAGE * 8
            + SPACER_BIT_SIZE * 9;
    // Duration of one full message in seconds.
    private static final double messageLength = ((double) bitsPerMessage) / ((double) BAUD);
    // Minimum duration of a constant-bit run treated as the inter-message gap.
    private static final double syncThreshold = 0.5 * messageLength;

    private int samplingRate;
    private double samplesPerStackmatMessage;
    // NOTE(review): switchThreshold is stored and exposed via the accessors
    // below but is not consulted anywhere in this class's decoding logic.
    private int switchThreshold;
    private int theMixerNo;
    private AudioFormat format;
    public DataLine.Info info;
    private TargetDataLine line;
    // NOTE(review): `state` is never read or written in this class.
    private StackmatState state = null;
    // Cleared by stop(); checked by every loop in doInBackground().
    private boolean enabled = true;
    // Bit value observed between messages: -1 until auto-detected, then 0 or 1.
    // When 0, sample bits are read inverted (see parseMessageData).
    private int bitValueBetweenMessages = -1;

    private static Mixer.Info[] aInfos = AudioSystem.getMixerInfo();

    public StackmatInterpreter(int samplingRate, int mixerNumber, int switchThreshold) {
        initialize(samplingRate, mixerNumber, switchThreshold);
    }

    /**
     * Configures the 8-bit mono signed little-endian audio format and
     * remembers which mixer to open later (the line itself is opened lazily
     * by changeLine()).
     */
    private void initialize(int samplingRate, int mixerNum, int switchThreshold) {
        LOGGER.info("Initialising stackmat gen 3 interpreter");
        this.samplingRate = samplingRate;
        this.switchThreshold = switchThreshold;
        this.samplesPerStackmatMessage = this.samplingRate * messageLength;

        format = new AudioFormat(samplingRate
                , 8 // sampleSizeInBits
                , 1 // mono
                , true // signed
                , false // bigEndian
        );
        info = new DataLine.Info(TargetDataLine.class, format);

        // store mixer number
        theMixerNo = mixerNum;
    }

    /** Stops and closes the audio line, if open, and forgets it. */
    private void cleanup() {
        if (line != null) {
            line.stop();
            line.close();
        }
        line = null;
    }

    /** Signals the background loop to exit after its current iteration. */
    public void stop() {
        this.enabled = false;
    }

    /**
     * Maps a mixer display name to its index in the system mixer list.
     *
     * @return the mixer index, or -1 when no mixer has that name
     */
    protected static int resolveMixerIndex(String stackmatTimerInputDeviceName) {
        int retVal = -1;
        if (aInfos != null) {
            for (int i = 0; i < aInfos.length; i++) {
                if (stackmatTimerInputDeviceName.equals(aInfos[i].getName())) {
                    retVal = i;
                    break;
                }
            }
        }
        return retVal;
    }

    /**
     * Opens (or re-opens) the capture line on the given mixer index. An
     * out-of-range index just closes any current line. Waiters on this
     * object's monitor are notified either way.
     */
    protected void changeLine(int mixerNum) {
        if (mixerNum < 0 || mixerNum >= aInfos.length) {
            if (line != null) {
                cleanup();
            }
            return;
        }

        try {
            Mixer mixer = AudioSystem.getMixer(aInfos[mixerNum]);
            if (mixer.isLineSupported(info)) {
                if (line != null) {
                    cleanup();
                }
                line = (TargetDataLine) mixer.getLine(info);
                line.open(format);
                line.start();
            }
        } catch (LineUnavailableException e) {
            LOGGER.log(Level.WARNING, "Detected mixer line unavailable", e);
            cleanup();
        }
        synchronized (this) {
            notify();
        }
    }

    /**
     * Tops up the sample buffer to roughly four messages' worth of samples,
     * stamping each new sample with an ever-increasing time counter.
     *
     * @return the advanced time counter
     */
    private double readLine(List<StackmatSample> currentPeriod, double timeCounter) {
        int currentSample = 0;
        int totalSamplesNeeded = (int) this.samplesPerStackmatMessage * 4;
        int additionalSamplesNeeded = totalSamplesNeeded - currentPeriod.size();
        if (additionalSamplesNeeded > 0) {
            byte[] buffer = new byte[additionalSamplesNeeded];
            if (line.read(buffer, 0, buffer.length) > 0) {
                for (int c = 0; c < buffer.length; ++c) {
                    //little-endian encoding, bytes are in increasing order
                    currentSample = 0;
                    currentSample |= buffer[c]; //we don't mask with 255 so we don't lose the sign
                    currentPeriod.add(new StackmatSample(currentSample, timeCounter));
                    timeCounter += 1 / ((double) this.samplingRate);
                }
            }
        }
        return timeCounter;
    }

    /**
     * Scans for a run of samples all decoding to {@code testForBitValue}
     * lasting longer than syncThreshold — i.e. the idle gap between
     * messages — and returns the index of the first sample AFTER that run
     * (the start of the next message), or -1 if no such run is found.
     */
    private int syncMessageStartPoint(int testForBitValue, List<StackmatSample> sampleBuffer) {
        int syncIndex = -1;
        double startOnTime = -1;
        for (int idx = 0; idx < sampleBuffer.size(); ++idx) {
            StackmatSample sms = sampleBuffer.get(idx);
            if (sms.bitValue() == testForBitValue) {
                if (startOnTime < 0) {
                    startOnTime = sms.getTime();
                }
            } else if (startOnTime > 0) {
                if ((sms.getTime() - startOnTime) > syncThreshold) {
                    syncIndex = idx;
                    break;
                } else {
                    startOnTime = -1;
                }
            } else {
                startOnTime = -1;
            }
        }
        return syncIndex;
    }

    /**
     * Finds the last sample of the message that starts at index 0 of the
     * buffer: locate the next inter-message gap, then back up past it to the
     * final sample of the preceding message. Returns a negative value when no
     * following gap is present yet.
     */
    private int syncMessageEndPoint(List<StackmatSample> sampleBuffer) {
        int retVal = this.syncMessageStartPoint(this.bitValueBetweenMessages, sampleBuffer);
        if (retVal > 0) {
            --retVal;
            StackmatSample nextStart = sampleBuffer.get(retVal);
            while (sampleBuffer.get(retVal).bitValue() == nextStart.bitValue()) {
                --retVal;
            }
        }
        return retVal;
    }

    /**
     * Decodes one complete message's samples into a StackmatMessage. For each
     * of the 10 bytes, the mid-band sample of each bit is read (least
     * significant bit first, hence the StringBuilder reverse). Byte 0 is the
     * instruction, bytes 1-6 the time digits, byte 7 the checksum, bytes 8/9
     * the LF/CR trailer. Bits are inverted when the idle level was detected
     * as 0.
     */
    private StackmatMessage parseMessageData(List<StackmatSample> messageSamples) {
        double samplesPerBit = ((double) messageSamples.size()) / ((double) bitsPerMessage);
        int spacerSize = startSpacer;

        // work out what the points are we will use in the samples
        double currIndex = 0;
        int[][] sampleIdx = new int[BYTES_PER_MESSAGE][BITS_PER_BYTE];
        for (int loop = 0; loop < BYTES_PER_MESSAGE; ++loop) {
            // skip past spacer
            currIndex += spacerSize * samplesPerBit;

            // index for each of the bits in the message
            for (int loop2 = 0; loop2 < BITS_PER_BYTE; ++loop2) {
                // use the sample in the middle of the band as this will give the most reliable reading
                currIndex += (samplesPerBit / 2);
                sampleIdx[loop][loop2] = (int) currIndex;
                currIndex += (samplesPerBit / 2);
            }

            // spacers are a set size past the first
            spacerSize = SPACER_BIT_SIZE;
        }

        // now parse the message using the samples
        StackmatMessage smm = new StackmatMessage();
        for (int loop = 0; loop < BYTES_PER_MESSAGE; ++loop) {
            StringBuilder currByte = new StringBuilder();
            for (int loop2 = 0; loop2 < BITS_PER_BYTE; ++loop2) {
                currByte.append(
                        messageSamples.get(
                                sampleIdx[loop][loop2]
                        ).bitValue((this.bitValueBetweenMessages == 0))
                );
            }
            int foo = Integer.parseInt(currByte.reverse().toString(), 2);
            if (loop == 0) {
                smm.Instruction = (char) foo;
            } else if (loop < 7) {
                if (smm.Time == null) {
                    smm.Time = Character.toString((char) foo);
                } else {
                    smm.Time += (char) foo;
                }
            } else if (loop == 7) {
                smm.Checksum = foo;
            } else if (loop == 8) {
                smm.LF = (char) foo;
            } else {
                smm.CR = (char) foo;
            }
        }
        return smm;
    }

    /**
     * Main decode loop, intended to run on a background thread until stop()
     * is called. It (1) waits for an audio line, (2) auto-detects the idle
     * bit level by trying both polarities until a long constant run is found,
     * (3) discards samples preceding the sync point, (4) slices out one
     * message, parses it and hands it to StackmatManager, logging validation
     * errors. Per-iteration failures are logged and the loop continues; the
     * audio line is released on exit.
     */
    public void doInBackground() {
        LOGGER.info("Starting Stackmat background thread");
        double timeCounter = 0;
        List<StackmatSample> sampleBuffer = new ArrayList<StackmatSample>();
        timeCounter = 0;
        while (this.enabled) {
            // sleep until we have a viable input signal
            while (line == null) {
                try {
                    LOGGER.fine("Waiting for LINE");
                    changeLine(this.theMixerNo);
                    Thread.sleep(500);
                } catch (Exception ignore) {
                }
            }
            try {
                int syncIndex = -1;
                int thebitValueBetweenMessages = 1;
                while (this.enabled && (syncIndex < 0)) {
                    if (this.bitValueBetweenMessages < 0) {
                        // Polarity not known yet: alternate 1/0 each attempt.
                        thebitValueBetweenMessages = (thebitValueBetweenMessages == 0) ? 1 : 0;
                        LOGGER.fine("Attempting message synchronisation on bit value " + thebitValueBetweenMessages);
                    } else {
                        thebitValueBetweenMessages = this.bitValueBetweenMessages;
                    }
                    timeCounter = this.readLine(sampleBuffer, timeCounter);
                    syncIndex = this.syncMessageStartPoint(thebitValueBetweenMessages, sampleBuffer);

                    // if we could not locate a starting sync point then clear off buffer and try again
                    // the buffer is sized to hold multiple stackmat messages
                    if (syncIndex < 0) {
                        sampleBuffer.clear();
                    } else if (this.bitValueBetweenMessages < 0) {
                        LOGGER.fine("Achieved message synchronisation on bit value " + thebitValueBetweenMessages);
                        this.bitValueBetweenMessages = thebitValueBetweenMessages;
                    }
                }

                // only continue processing if we have not exited the sync loop
                // as a result of a cancel instruction
                if (this.enabled) {
                    // we are at a sync point; clear out all preceding data from the buffer
                    for (int idx = 0; idx < syncIndex; ++idx) {
                        sampleBuffer.remove(0);
                    }

                    // find the end of the current message beginning at the sync point
                    int endIndex = this.syncMessageEndPoint(sampleBuffer);
                    if (endIndex > 0) {
                        // now extract out the message payload
                        List<StackmatSample> messageSamples = new ArrayList<StackmatSample>();
                        for (int idx = 0; idx <= endIndex; ++idx) {
                            messageSamples.add(sampleBuffer.get(idx));;
                        }
                        sampleBuffer.removeAll(messageSamples);

                        StackmatMessage smm = this.parseMessageData(messageSamples);
                        if (smm != null) {
                            StackmatManager sm = StackmatManager.getInstance();
                            List<String> errList = sm.setState(smm);
                            if (!errList.isEmpty()) {
                                StringBuffer sb = new StringBuffer();
                                sb.append("Detected invalid message: ");
                                sb.append(System.lineSeparator());
                                for (String em : errList) {
                                    sb.append(" ");
                                    sb.append(em);
                                    sb.append(System.lineSeparator());
                                }
                                LOGGER.log(Level.WARNING, sb.toString());
                            }
                        }
                    }
                }
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, "Detected error processing stackmat signal: " + e.getMessage(), e);
            }
        }

        // release handles to audio input
        this.cleanup();
    }

    /** @return the configured switch threshold (currently unused in decoding). */
    public int getStackmatValue() {
        return switchThreshold;
    }

    /** Sets the switch threshold value. */
    public void setStackmatValue(int value) {
        this.switchThreshold = value;
    }
}
<file_sep># StackMatGen3TimerAPI
StackMat Generation 3 Timer API
<file_sep>package au.com.emc.cubing.stackmat;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author User
*/
/**
 * Demo entry point: wires a console reporter to the StackmatManager,
 * configures the audio input, starts decoding, then blocks until the user
 * presses Enter.
 */
public class Main {

    public static void main(String[] args) {
        StackmatManager smm = StackmatManager.getInstance();
        smm.register(new StackmatReporterConsole());
        smm.setSamplingRate(16000);
        smm.setMixerNumber(5);
        smm.setSwitchThreshold(100);
        smm.start();

        // Bug fix: System.console() returns null when the process has no
        // attached console (IDE run, redirected stdin), so the original
        // `System.console().readLine()` threw NullPointerException there.
        java.io.Console console = System.console();
        if (console != null) {
            console.readLine();
        } else {
            try (java.util.Scanner stdin = new java.util.Scanner(System.in)) {
                if (stdin.hasNextLine()) {
                    stdin.nextLine();
                }
            }
        }
    }
}
| c0b71975ee93d5240497cd0625521af4ed95b97c | [
"Markdown",
"Java"
] | 4 | Java | LucasAlfare/StackMatGen3TimerAPI | 145a5ce0659643d70a15db3034dcd4f59cbfdc04 | d2ed8de77e0949e9b8f5a0f130e29c58284bd4a3 |
refs/heads/main | <repo_name>cenyG/transfer-front-test<file_sep>/src/api/index.js
import axios from 'axios'
import config from '../config'
/**
 * Thin axios wrapper for the app's REST API.
 * Every request carries the bearer token persisted under
 * config.LOCAL_STORAGE_NAME in localStorage.
 */
export default class Api {
  constructor() {
    this.client = axios.create({ baseURL: '/api' })

    this.client.interceptors.request.use(
      axiosConfig => ({
        ...axiosConfig,
        headers: {
          // Fix: merge instead of replace. The original rebuilt `headers`
          // from scratch, silently dropping anything axios or the caller had
          // already set (e.g. Content-Type for JSON bodies). A leftover
          // debug console.log('intercept') was also removed.
          ...axiosConfig.headers,
          Authorization: `Bearer ${localStorage.getItem(config.LOCAL_STORAGE_NAME)}`,
        },
      }),
      e => Promise.reject(e)
    )
  }

  /** POST /auth/register — create a new user. */
  async register({ name, password }) {
    return this.client.post('/auth/register', { name, password })
  }

  /** POST /auth/login — resolves with the auth token in `data` on success. */
  async login({ name, password }) {
    return this.client.post('/auth/login', { name, password })
  }

  /** GET the authenticated user's own account. */
  async account() {
    return this.client.get('/account')
  }

  /** GET an account by id. */
  async getAccount(id) {
    return this.client.get(`/account/${id}`)
  }

  /** POST a transfer of `amount` from account `from` to account `to`. */
  async transfer(from, to, amount) {
    return this.client.post(`/account/transfer/${from}/${to}`, { amount })
  }
}
Transfer-test-front
---------------
### Created with Create React App
### Start:
```bash
yarn install
yarn start
```<file_sep>/src/config/index.js
// Application-wide configuration, resolved once at require time.
const config = {
    // localStorage key under which the auth token is stored after login
    // (Api sends it back as a Bearer Authorization header).
    LOCAL_STORAGE_NAME: 'appLocal',
    // Backend host/port, overridable via environment variables.
    // NOTE(review): neither value is referenced by the visible code — Api
    // hard-codes baseURL '/api'; possibly dead config.
    API_URL: process.env.API_URL || 'localhost',
    API_PORT: process.env.API_PORT || 8080
}

module.exports = config
import React from 'react'
import { useState } from 'react'
import { Button, Container, TextField } from '@material-ui/core'
import { makeStyles } from '@material-ui/core/styles'
import { useHistory } from 'react-router-dom'
import { useSnackbar } from 'notistack'
import Api from '../api'
import config from '../config'
// JSS styles for the login screen.
const useStyles = makeStyles(() => ({
    // Absolutely centers the form in the viewport.
    wrap: {
        position: 'absolute',
        left: '50%',
        top: '50%',
        transform: 'translate(-50%, -50%)'
    },
    // Vertical stack holding the two text fields and the button row.
    root: {
        display: 'flex',
        flexDirection: 'column',
        maxWidth: '250px',
        textAlign: 'center'
    },
    // Horizontal row for the action buttons.
    buttons: {
        display: 'flex',
        flexDirection: 'row',
        justifyContent: 'space-between',
        padding: '10px',
    },
    button: {
        width: 100
    },
    // NOTE(review): `success` and `errorMessage` are not referenced by the
    // LogIn component in this file — possibly dead styles.
    success: {
        display: 'flex',
        flexDirection: 'column',
        padding: '10px',
        alignItems: 'center',
        alignContent: 'center',
    },
    errorMessage: {
        color: 'red'
    }
}))
const LogIn = () => {
const classes = useStyles()
const history = useHistory()
const { enqueueSnackbar } = useSnackbar()
const api = new Api()
const [signUp, setSignUp] = useState(false)
const [name, setName] = useState('')
const [password, setPassword] = useState('')
const signInAction = () => {
api
.login({ name, password })
.then(({ data, status }) => {
if (status === 200) {
localStorage.setItem(config.LOCAL_STORAGE_NAME, data)
history.push('/home')
} else {
enqueueSnackbar(`Sign in problem: ${data}`, { variant: 'warning' })
}
})
.catch(err => {
enqueueSnackbar(`Sign in error: ${err.message}`, { variant: 'error' })
})
}
const registerAction = () => {
api
.register({ name, password })
.then(({ data, status }) => {
if (status === 201) {
enqueueSnackbar('Register success', { variant: 'success' })
} else {
enqueueSnackbar(`Register problem: ${data}`, { variant: 'warning' })
}
})
.catch(err => {
enqueueSnackbar(`Register error: ${err.message}`, { variant: 'error' })
})
}
return (
<Container className={classes.wrap}>
<Container className={classes.root}>
<TextField
id="standard-basic"
label="Username"
onChange={(event) => setName(event.target.value)}
value={name}
/>
<TextField
id="standard-basic"
label="Password"
onChange={(event) => setPassword(event.target.value)}
value={password}
/>
{
signUp ? (
<Container className={classes.buttons}>
<Button
className={classes.button}
variant="contained"
disableElevation
onClick={registerAction}
color="primary">Register</Button>
<Button
className={classes.button}
onClick={() => setSignUp(false)}>Sign In</Button>
</Container>
) : (
<Container className={classes.buttons}>
<Button variant="contained"
disableElevation
onClick={signInAction}
color="primary">Sign In</Button>
<Button onClick={() => setSignUp(true)}>Register</Button>
</Container>
)
}
</Container>
</Container>
)
}
export default LogIn<file_sep>/src/screens/User.js
import React from 'react'
import { useEffect, useState } from 'react'
import { Button, Container, TextField, Typography } from '@material-ui/core'
import { makeStyles } from '@material-ui/core/styles'
import { useSnackbar } from 'notistack'
import Api from '../api'
/**
 * Account dashboard: greets the authenticated user, shows their balance and
 * offers a transfer form (target account id + amount).
 */
const User = () => {
    const classes = useStyles()
    const { enqueueSnackbar } = useSnackbar()
    // NOTE(review): recreated on every render; could be hoisted/useMemo'd.
    const api = new Api()

    // Current account snapshot. `amount` arrives from the API and is kept
    // as-is (formatted with parseFloat at render time).
    const [userState, setUserState] = useState({
        id: 0,
        name: '',
        amount: ''
    })
    // Controlled form state; values come from number inputs.
    const [transferAmount, setTransferAmount] = useState(0)
    const [toAccountId, setToAccountId] = useState(0)

    // Load the user's own account once on mount.
    // NOTE(review): the empty deps array omits `api`/`enqueueSnackbar`;
    // intentional mount-only fetch, but exhaustive-deps lint will flag it.
    useEffect(() => {
        api
            .account()
            .then(({ data: { id, amount, name }, status }) => {
                if (status === 200) {
                    setUserState({
                        id,
                        name,
                        amount
                    })
                }
            })
            .catch((err) => {
                enqueueSnackbar(`Something went wrong: ${err.message}`, { variant: 'error' })
            })
    }, [])

    // Submit the transfer, then refresh the balance from the response.
    const transferAction = () => {
        api
            .transfer(userState.id, toAccountId, transferAmount)
            .then(({ data: { id, amount, name } }) => {
                enqueueSnackbar('Transfer success', { variant: 'success' })
                setUserState({
                    id,
                    name,
                    amount
                })
            })
            .catch((err) => {
                enqueueSnackbar(`Transfer error: ${err.message}`, { variant: 'error' })
            })
    }

    const handleAmountChange = (event) => { setTransferAmount(event.target.value) }
    const handleToAccountIdChange = (event) => { setToAccountId(event.target.value) }

    return (
        <Container className={classes.root}>
            <Container className={classes.root}>
                <Typography variant="h4">Hi, {userState.name}</Typography>
                <Typography variant="h5">You have, {parseFloat(userState.amount).toFixed(3)}$</Typography>
            </Container>

            <Typography variant="h5">Transfer</Typography>
            <Container className={classes.transferForm}>
                <TextField className={classes.innerContainer}
                    type="number"
                    value={toAccountId}
                    onChange={handleToAccountIdChange}
                    label="Account ID"/>
                <TextField className={classes.innerContainer}
                    type="number"
                    value={transferAmount}
                    onChange={handleAmountChange}
                    label="Amount"/>
                <Button className={classes.innerContainer} variant="contained" onClick={transferAction} color="primary">Submit</Button>
            </Container>
        </Container>
    )
}
export default User
// JSS styles for the account dashboard.
const useStyles = makeStyles(() => ({
    // Centered vertical stack used for both the greeting and the page shell.
    root: {
        display: 'flex',
        flexDirection: 'column',
        alignItems: 'center',
        alignContent: 'center',
        textAlign: 'center',
        padding: '50px'
    },
    greeting: {
        padding: '50px'
    },
    row: {
        display: 'flex',
        flexDirection: 'row',
        justifyContent: 'space-around',
        margin: '10px',
        padding: '10px',
    },
    // Column holding the transfer form's two fields and submit button.
    transferForm: {
        display: 'flex',
        flexDirection: 'column',
        marginTop: '20px',
        padding: '10px',
        maxWidth: '400px'
    },
    innerContainer: {
        padding: '10px'
    },
    // NOTE(review): `greeting`, `row`, `header` and `card` are not referenced
    // by the User component in this file — possibly dead styles.
    header: {
        marginBottom: '20px',
        fontSize: 20,
        fontWeight: 'bold'
    },
    card: {
        minWidth: '400px'
    }
}))
"JavaScript",
"Markdown"
] | 5 | JavaScript | cenyG/transfer-front-test | aa2c930304635e785975fc1fd5672f8d4b8f3b67 | 290f369ef17613ee0625cd46baccc7352c099b67 |
refs/heads/master | <repo_name>shinmiji/random-style<file_sep>/src/js/index.js
// Appends an ever-increasing counter to .panel every 100ms, each value with
// a random color and font size. ".start" begins the stream, ".stop" halts it.
(function(global, $, undefined){
    'use strict';

    var $panel = $('.panel');
    var number = 0;
    var action = null;

    $('.start').on('click', function(){
        randomStyleNumber();
    });

    $('.stop').on('click', function(){
        clearInterval(action);
    });

    function randomStyleNumber() {
        // Fix: repeated clicks on ".start" used to stack intervals that
        // ".stop" could never all cancel (only the latest id was kept).
        clearInterval(action);

        action = setInterval(function(){
            var $insertNumber = $('<span>'+number+'</span>');
            var color = "#" + (parseInt(Math.random() * 0xffffff)).toString(16);
            var fontSize = parseInt(Math.random()*40)+'px';

            $insertNumber.css({
                'color': color,
                'fontSize': fontSize,
                'display': 'inline-block'
            });

            $panel.append($insertNumber);
            number++;

            // Fix: the original called `this.window.scrollTo(...)` once after
            // starting the interval; under 'use strict' `this` is undefined
            // there, so every ".start" click threw a TypeError. Scrolling
            // inside the tick also keeps the newest number visible.
            global.scrollTo(0, global.document.body.scrollHeight);
        }, 100);
    }
})(window, window.jQuery);
<file_sep>/README.md
#random-style
## 글자 크기와 색상을 랜덤으로 적용
글자 크기와 색상을 랜덤으로 적용하여 숫자 출력.
append 함수 이용. | b963fc0978ba53662f7d6d7d73b1ced45cd83df1 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | shinmiji/random-style | 6d9cc47a30f9dfd27c665d8b4835c89dceba6c37 | cf55a5e2e5f02e730a4626641d6a5f54e0928fad |
refs/heads/master | <repo_name>mameAmy466/poo-en-php<file_sep>/universite1/controleur/Chambre.php
<?php
class Chambre
{
private $id_b;
private $nom;
public function __construct($nom="",$id_b=0){
$this->nom=$nom;
$this->id_b=$id_b;
}
/**
* Get the value of id_batim
*/
public function getId_b()
{
return $this->id_b;
}
/**
* Set the value of id_batim
*
* @return self
*/
public function setId_b($id_b)
{
$this->id_b= $id_b;
return $this;
}
/**
* Get the value of nom
*/
public function getNom()
{
return $this->nom;
}
/**
* Set the value of nom
*
* @return self
*/
public function setNom($nom)
{
$this->nom = $nom;
return $this;
}
}
?>
<file_sep>/universite1/vue/index.php
<!-- Landing page: sticky-footer flexbox layout with shared menu/footer includes. -->
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
    <style>
        /* Full-height page so #page-content can flex-grow and push the footer down. */
        html,
        body {
            height: 100%;
        }

        #page-content {
            flex: 1 0 auto;
        }

        #sticky-footer {
            flex-shrink: none;
        }

        /* Other Classes for Page Styling */
        body {
            background: #007bff;
            background: linear-gradient(to right, #13027014, #ffff);
        }
    </style>
</head>
<body class="d-flex flex-column">
<header>
    <?php
    // Shared site navigation.
    include "menu.php";
    ?>
</header>
<div id="page-content">
    <div class="container text-center">
        <div class="row justify-content-center">
            <div class="col-md-7">
                <h1 class="font-weight-light mt-4 text-dark"> Lorem ipsum dolor sit amet consectetur.</h1>
                <p class="lead text-dark-50">Lorem ipsum dolor sit amet consectetur adipisicing elit. Pariatur, aliquam? Commodi aperiam, autem dolores expedita pariatur quos est molestiae in, tenetur ex iure ullam optio! Eaque iusto laudantium nulla omnis.</p>
            </div>
        </div>
    </div>
</div>
<footer>
    <?php
    // Shared site footer.
    include "footer.php";
    ?>
</footer>
</body>
</html>
<?php
/**
 * Non-scholarship student: an etudiant who lives at their own address.
 */
class nonbourcier extends etudiant
{
    // Home address of the student.
    private $adress;

    public function __construct($mat="",$nom="",$prenom="",$tel=0,$email="",$date_de_nss=0,$adress="")
    {
        // Bug fix: etudiant::__construct() takes (..., $tel, $dateNs, $email),
        // but the original forwarded ($tel, $email, $date_de_nss), so the
        // e-mail was stored as the birth date and vice versa. This class's own
        // signature is unchanged; only the parent-call mapping is corrected.
        parent::__construct($mat, $nom, $prenom, $tel, $date_de_nss, $email);
        $this->adress = $adress;
    }

    /** @return string the student's home address */
    public function getAdress()
    {
        return $this->adress;
    }

    /**
     * @param string $adress
     * @return self
     */
    public function setAdress($adress)
    {
        $this->adress = $adress;

        return $this;
    }
}
?><file_sep>/universite1/controleur/Etudiant.php
<?php
/**
 * Base type for every student: identity, contact details and birth date.
 */
abstract class etudiant
{
    // Fix: setId()/getId() used $this->id without a declaration, which is a
    // dynamic property (deprecated since PHP 8.2); declared explicitly.
    private $id;
    // Matricule (registration number).
    private $mat;
    private $nom;
    private $prenom;
    private $tel;
    // Date of birth.
    private $dateNs;
    private $email;

    public function __construct($mat="",$nom="",$prenom="",$tel=0,$dateNs=0,$email="")
    {
        $this->mat=$mat;
        $this->nom=$nom;
        $this->prenom=$prenom;
        $this->tel=$tel;
        $this->dateNs=$dateNs;
        $this->email=$email;
    }

    public function getNom()
    {
        return $this->nom;
    }

    /** @return self */
    public function setNom($nom)
    {
        $this->nom = $nom;
        return $this;
    }

    public function getPrenom()
    {
        return $this->prenom;
    }

    /** @return self */
    public function setPrenom($prenom)
    {
        $this->prenom = $prenom;
        return $this;
    }

    public function getTel()
    {
        return $this->tel;
    }

    /** @return self */
    public function setTel($tel)
    {
        $this->tel = $tel;
        return $this;
    }

    public function getDateNs()
    {
        return $this->dateNs;
    }

    /** @return self */
    public function setDateNs($dateNs)
    {
        $this->dateNs = $dateNs;
        return $this;
    }

    public function getEmail()
    {
        return $this->email;
    }

    /** @return self */
    public function setEmail($email)
    {
        $this->email = $email;
        return $this;
    }

    public function getMat()
    {
        return $this->mat;
    }

    /** @return self */
    public function setMat($mat)
    {
        $this->mat = $mat;
        return $this;
    }

    public function getId()
    {
        return $this->id;
    }

    /** @return self */
    public function setId($id_etu)
    {
        $this->id = $id_etu;
        return $this;
    }
}
?><file_sep>/universite1/vue/style.js
// Toggles the inline visibility of one of three known sections by index:
// 0 -> #bloc, 1 -> #adress, 2 -> #ch.
// Note: an element hidden only via stylesheet has an empty inline
// style.display, so its first toggle sets "none" (same as the original).
function changement(param)
{
    var ids = ["bloc", "adress", "ch"];
    var tag = document.getElementById(ids[param]);

    // Fix: an out-of-range index or missing element made the original throw
    // a TypeError when reading `tag.style`.
    if (!tag) {
        return;
    }

    tag.style.display = (tag.style.display === "none") ? "block" : "none";
}
// DataTables setup for #example: custom layout, French UI strings, fixed
// page size of 10, and the last column (row actions) excluded from sorting.
$(document).ready(function() {
    $('#example').DataTable({dom: "<'row'<'col-sm-4'f><'col-sm-offset-2 col-sm-6'B>>" +
        "<'row'<'col-sm-12'tr>>" +
        "<'row'<'col-xs-12 col-sm-7 col-sm-offset-5 text-right'p>>",
        // Disable sorting on the last column.
        "aoColumnDefs": [{
            'bSortable': false,
            'aTargets': [-1]
        }],
        // French translations of the DataTables chrome.
        // NOTE(review): "Dérnier" is misspelled ("Dernier") — user-visible
        // string, left untouched here.
        "oLanguage": {
            "oPaginate": {
                "sFirst": "Premier",
                "sLast": "Dérnier",
                "sNext": "Suivant",
                "sPrevious": "Précedent",
            },
            "sSearch": "Recherche:",
            "sEmptyTable": "Aucune donnée disponible",
            "sInfo": "affichage de _START_ à _END_ sur _TOTAL_ éléments",
            "sInfoEmpty": "Aucune donnée disponible",
            "sInfoFiltered": "(Recherché sur _MAX_ éléments au total)",
            "infoPostFix": "",
            "thousands": ",",
            "sLengthMenu": "Afficher par _MENU_ éléments",
            "loadingRecords": "Chargement...",
            "processing": "procéssus...",
            "sZeroRecords": "Aucun résultat trouvé",
        },
        "iDisplayLength": 10,
        "lengthChange": false,
        "info": false,
        responsive: false
    });
} );

// Sidebar accordion: opening one dropdown collapses all the others.
$(".sidebar-dropdown > a").click(function() {
    $(".sidebar-submenu").slideUp(200);
    if (
        $(this)
            .parent()
            .hasClass("active")
    ) {
        $(".sidebar-dropdown").removeClass("active");
        $(this)
            .parent()
            .removeClass("active");
    } else {
        $(".sidebar-dropdown").removeClass("active");
        $(this)
            .next(".sidebar-submenu")
            .slideDown(200);
        $(this)
            .parent()
            .addClass("active");
    }
});

// Collapse / expand the whole sidebar via the "toggled" wrapper class.
$("#close-sidebar").click(function() {
    $(".page-wrapper").removeClass("toggled");
});

$("#show-sidebar").click(function() {
    $(".page-wrapper").addClass("toggled");
});
<file_sep>/universite1/controleur/loger.php
<?php
/**
 * Scholarship student housed on campus: a bourcier with an assigned room.
 */
class loger extends bourcier
{
    // Id of the assigned room.
    private $id_ch;
    // Id of the room's building. Fix: the accessors below used $this->id_b
    // without any declaration (dynamic property).
    private $id_b;

    public function __construct($mat="",$nom="",$prenom="",$tel=0,$email="",$date_de_nss=0,$id_type=0,$id_ch=0)
    {
        // Bug fixes vs. the original:
        //  - the method was spelled "__contruct", so PHP never ran it as a
        //    constructor and every loger was built with default field values;
        //  - the room id was assigned to a typo'd property ($this->idch),
        //    leaving getId_ch() reading an unset $this->id_ch.
        // NOTE(review): the argument order forwarded to bourcier::__construct()
        // is kept as written, but etudiant::__construct() expects
        // (..., $dateNs, $email) — verify bourcier forwards these correctly
        // (the sibling class nonbourcier had them swapped).
        parent::__construct($mat,$nom,$prenom,$tel,$email,$date_de_nss,$id_type);
        $this->id_ch = $id_ch;
    }

    /**
     * Get the id of the assigned room.
     */
    public function getId_ch()
    {
        return $this->id_ch;
    }

    /**
     * Set the id of the assigned room.
     *
     * @return self
     */
    public function setId_ch($id_ch)
    {
        $this->id_ch = $id_ch;

        return $this;
    }

    /**
     * Get the id of the room's building.
     */
    public function getId_b()
    {
        return $this->id_b;
    }

    /**
     * Set the id of the room's building.
     *
     * @return self
     */
    public function setId_b($id_b)
    {
        $this->id_b = $id_b;

        return $this;
    }
}
?><file_sep>/universite1/vue/formulaire_ajout_chambre_batiment.php
<?php
include "../controleur/connection.php";
$conn=conn();
$req=$conn->prepare('SELECT * FROM batiment');
$req->execute();
$linge=$req->fetchAll();
function chargerMaClasse($classe) {
require "../controleur/".$classe.".php";
}
spl_autoload_register('chargerMaClasse');
$conn->setAttribute(PDO::ATTR_ERRMODE,PDO::ERRMODE_WARNING);
$sR= new ServicebatimentChambre($conn);
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
<title>Document</title>
</head>
<body>
<?php
include "menu.php";
?>
<div class="cotainer-flud ">
<div class="row bg-light justify-content-center" >
<h3 class="text-center">Ajouter des Chambre et Batiment</h3>
</div>
</div>
<hr>
<div class="cotainer">
<div class="row justify-content-center">
<div class="col-md-5">
<div class="form">
<h5 class="text-center">Ajouter un batiment</h5>
<form action="#" method="POST">
<label for="email" class="col-md-4" >Numero Batiment</label>
<div class="form-group">
<input type="text" class="form-control" name="btim">
</div>
<label for="email" class="col-md-4" >Nom Batiment</label>
<div class="form-group">
<input type="text" class="form-control" name="nom">
</div>
<div class="col-md-6 offset-md-4">
<button type="submit" class="btn btn-success" name="valu">
Validation
</button>
</div>
</form>
</div>
</div>
<div class="col-md-5">
<h5 class="text-center">Ajouter un chambre</h5>
<form action="#" method="POST">
<label for="email" class="col-md-4">Nom</label>
<div class="form-group">
<input type="text" class="form-control" name="nomc">
</div>
<label for="email" class="col-md-4">Nom Batiment</label>
<div class="form-group">
<select class="browser-default custom-select" name="batim">
<option selected>nom batiment</option>
<?php
foreach ($linge as $key => $val) {
?>
<option value="<?=$val['id']?>"><?=$val['nom']?></option>
<?php
}
?>
</select>
</div>
<div class="col-md-6 offset-md-4">
<button type="submit" class="btn btn-success" name="val">
Validation
</button>
</div>
</form>
</div>
</div>
</div>
<hr>
<div class="cotainer">
<div class="row justify-content-center">
<div class="col-md-5">
<h2 class="text-center text-primary">Liste des Batiments</h2>
<table class="table table-striped" cellspacing="0" width="100%">
<thead>
<tr>
<th>N+</th>
<th>Numero</th>
<th>Nom</th>
</tr>
</thead>
<tbody>
<?php
$sR->getBatiment();
?>
</tbody>
</table>
</div>
<div class="col-md-5">
<h2 class="text-center text-primary">Liste des Batiments</h2>
<table class="table table-striped" cellspacing="0" width="100%">
<thead>
<tr>
<th>N+</th>
<th>Nom</th>
<th>Nom Batiment</th>
</tr>
</thead>
<tbody>
<?php
$sR->getChambre();
?>
</tbody>
</table>
</div>
</div>
</div>
<footer>
<?php
include "footer.php";
?>
</footer>
<?PHP
if (isset($_POST['valu'])) {
$mat=$_POST['btim'];
$nom=$_POST['nom'];
$bt= new Batimant($mat,$nom);
$sR->addBatiment($bt);
}
if (isset($_POST['val'])) {
$nom=$_POST['nomc'];
$mat=$_POST['batim'];
$ch= new Chambre($nom,$mat);
$sR->addChambre($ch);
}
?>
</body>
</html><file_sep>/universite1/controleur/Batimant.php
<?php
class Batimant
{
    /**
     * Database id (batiment.id); null until setId() is called.
     * FIX: previously used by getId()/setId() without being declared
     * (a dynamic property, deprecated in PHP 8.2+).
     * @var int|null
     */
    private $id;
    /** @var string Building number (column numerobt, unique in the schema). */
    private $numbatiment;
    /** @var string Building name. */
    private $nom;

    /**
     * @param string $numbatiment building number
     * @param string $nom         building name
     */
    public function __construct($numbatiment = "", $nom = "")
    {
        $this->numbatiment = $numbatiment;
        $this->nom = $nom;
    }

    /**
     * Get the value of id
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Set the value of id
     *
     * @return self
     */
    public function setId($id)
    {
        $this->id = $id;
        return $this;
    }

    /**
     * Get the value of numbatiment
     */
    public function getNumbatiment()
    {
        return $this->numbatiment;
    }

    /**
     * Set the value of numbatiment
     *
     * @return self
     */
    public function setNumbatiment($numbatiment)
    {
        $this->numbatiment = $numbatiment;
        return $this;
    }

    /**
     * Get the value of nom
     */
    public function getNom()
    {
        return $this->nom;
    }

    /**
     * Set the value of nom
     *
     * @return self
     */
    public function setNom($nom)
    {
        $this->nom = $nom;
        return $this;
    }
}
?>
<file_sep>/universite1/vue/formulairInscription.php
<?php
// Student registration page: preload the scholarship types and the rooms
// so the two <select> lists in the form below can offer them.
include "../controleur/connection.php";
$conn = conn();
$req = $conn->prepare('SELECT * FROM typeBourse');
$req->execute();
$linge = $req->fetchAll();
$req1 = $conn->prepare('SELECT * FROM chambre');
$req1->execute();
$linge2 = $req1->fetchAll();
?>
<!------ <link href="//maxcdn.bootstrapcdn.com/bootstrap/4.1.1/css/bootstrap.min.css" rel="stylesheet" id="bootstrap-css">
<link href="style.css" rel="stylesheet" id="bootstrap-css">
<script src="//maxcdn.bootstrapcdn.com/bootstrap/4.1.1/js/bootstrap.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
Include the above in your HEAD tag ---------->
<!doctype html>
<html lang="en">
<head>
    <!-- Required meta tags -->
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
    <!--
    <link rel="dns-prefetch" href="https://fonts.gstatic.com">
    <link href="https://fonts.googleapis.com/css?family=Raleway:300,400,600" rel="stylesheet" type="text/css">
    <link rel="icon" href="Favicon.png">-->
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
    <link rel="stylesheet" href="css/style1.css">
    <link rel="stylesheet" href="css/style.css">
    <title>formulaire d'inscrition</title>
</head>
<body>
<header>
    <?php include "menu.php"; ?>

    <div class="main">
        <div class="container">
            <div class="signup-content">
                <div class="signup-img">
                    <img src="images/Sonatel-Academy1.jpg" alt="">
                    <div class="signup-img-content">
                        <h2>Formulair D'inscrition</h2>
                        <p>Inscrire l'etudiant!</p>
                    </div>
                </div>
                <div class="signup-form">
                    <form name="my-form" class="register-form" action="#" method="POST">
                        <div class="form-row">
                            <div class="form-group">
                                <div class="form-input">
                                    <label for="first_name" class="required">matricul</label>
                                    <input type="text" id="name" class="form-control" name="mat">
                                </div>
                                <div class="form-input">
                                    <label for="last_name" class="required">nom</label>
                                    <input type="text" id="full_name" class="form-control" name="nom">
                                </div>
                                <div class="form-input">
                                    <label for="company" class="required">prenom</label>
                                    <input type="text" id="user_name" class="form-control" name="prenom">
                                </div>
                                <div class="form-input">
                                    <label for="telephone" class="required">telephone</label>
                                    <input type="number" id="phone_number" class="form-control" name="tel">
                                </div>
                                <div class="form-input">
                                    <label for="date-ns" class="required">date de naissance</label>
                                    <input type="date" id="permanent_address" class="form-control" name="dateNs">
                                </div>
                                <div class="form-input">
                                    <label for="email" class="required">E-Mail Address</label>
                                    <input type="text" id="email_address" class="form-control" name="email">
                                </div>
                                <!-- Scholarship / non-scholarship switch; changement() lives in style.js. -->
                                <div class="form-radio-group">
                                    <div class="form-radio-item">
                                        <input type="radio" name="typeBoursier" value="Boursier" id="boursier" onchange="changement(0);">
                                        <label for="cash">Boursier</label>
                                        <span class="check"></span>
                                    </div>
                                    <div class="form-radio-item">
                                        <input type="radio" name="typeBoursier" value="Non_Boursier" id="non_boursier" onchange="changement(1);">
                                        <label for="cash"> NON_Boursier</label>
                                        <span class="check"></span>
                                    </div>
                                </div>
                                <!-- Only relevant for non-scholarship students (toggled by JS). -->
                                <div class="form-input" id="adress" style="display:none;">
                                    <label for=" Adresse" class="required"> Adresse</label>
                                    <input type="text" id="address" class="form-control" name="Adresse">
                                </div>
                                <!-- Scholarship-only fields (toggled by JS). -->
                                <div id="bloc" style="display:none;">
                                    <hr>
                                    <div class="form-input" id="typebs">
                                        <select class="browser-default custom-select" name="type" id="tpbourse">
                                            <option selected>Type Bourse</option>
                                            <?php
                                            foreach ($linge as $key => $val) {
                                                $id_type[] = $val['id'];
                                            ?>
                                                <option value="<?= $val['id'] ?>"><?= $val['lib'] ?></option>
                                            <?php
                                            }
                                            ?>
                                        </select>
                                    </div>
                                    <div class="form-radio-group logerdiv" id="logerdiv">
                                        <div class="form-radio-item">
                                            <input class="[ btn-group ]" type="radio" name="loger" value="loger" id="loger" onchange="changement(2);">
                                            <label for="cash"> Loger</label>
                                            <span class="check"></span>
                                        </div>
                                        <div class="form-radio-item">
                                            <input class="" type="radio" name="loger" value="nonloger" id="non_loger">
                                            <label for="cash"> Non_Loger</label>
                                            <span class="check"></span>
                                        </div>
                                    </div>
                                    <div class="form-input" id="ch" style="display:none;">
                                        <select class="browser-default custom-select" name="ch">
                                            <option selected>chambre</option>
                                            <?php foreach ($linge2 as $key => $val) { ?>
                                                <option value="<?= $val['id'] ?>"><?= $val['nom'] ?></option>
                                            <?php } ?>
                                        </select>
                                    </div>
                                </div>
                                <hr>
                                <div>
                                    <div class="form-submit">
                                        <button type="submit" class="submit btn-success" name="valu">
                                            Validation
                                        </button>
                                    </div>
                                </div>
                            </div>
                        </div>
                    </form>
                </div>
            </div>
        </div>
    </div>
    <!-- FIX: removed a stray </main> that had no matching <main>. -->
</header>

<footer>
    <?php include "footer.php"; ?>
</footer>

<?php
// Autoload controller classes from ../controleur/<Class>.php.
function chargerMaClasse($classe)
{
    require "../controleur/" . $classe . ".php";
}
spl_autoload_register('chargerMaClasse');
$conn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_WARNING);
$etuSR = new EtudiantService($conn);

if (isset($_POST['valu'])) {
    $mat = $_POST['mat'];
    $nom = $_POST['nom'];
    $prenom = $_POST['prenom'];
    $tel = $_POST['tel'];
    $email = $_POST['email'];
    $dateNs = $_POST['dateNs'];
    // FIX: guard optional fields — an unchecked radio is absent from $_POST
    // and previously raised "undefined index" notices.
    $adress = isset($_POST['Adresse']) ? $_POST['Adresse'] : '';
    $id_type = isset($_POST['type']) ? $_POST['type'] : 0;
    $id_ch = isset($_POST['ch']) ? $_POST['ch'] : 0;
    if (isset($_POST['typeBoursier']) && $_POST['typeBoursier'] == "Boursier") {
        $etudie = new bourcier($mat, $nom, $prenom, $tel, $email, $dateNs, $id_type);
        // FIX: insert the student (and its boursier row) BEFORE the loger
        // row — addLoger keys on MAX(id) of etudient and loger.id_b has a
        // foreign key onto boursier, so the old order (addLoger first)
        // attached the room to the previous student or failed the FK.
        $etuSR->addEtudiant($etudie, $id_type);
        $loge = isset($_POST['loger']) ? $_POST['loger'] : '';
        if ($loge == "loger") {
            $l = new loger($mat, $nom, $prenom, $tel, $email, $dateNs, $id_type, $id_ch);
            $etuSR->addLoger($l, $id_ch);
        }
    } else {
        $etudie = new nonbourcier($mat, $nom, $prenom, $tel, $email, $dateNs, $adress);
        $etuSR->addEtudiant($etudie, $id_type);
    }
}
?>
<script src="style.js"></script>
<script src="https://code.jquery.com/jquery-3.3.1.slim.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.3/umd/popper.min.js"></script>
<script src="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/js/bootstrap.min.js"></script>
</body>
</html><file_sep>/universite1/vue/tableaux2.php
<?php
// Listing page for non-scholarship students (nomBoursier join).
include "../controleur/connection.php";
$conn = conn();

// Autoload controller classes from ../controleur/<Class>.php.
function chargerMaClasse($classe)
{
    require "../controleur/" . $classe . ".php";
}
spl_autoload_register('chargerMaClasse');
$conn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_WARNING);
$etuSR = new EtudiantService($conn);
?>
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <title>Document</title>
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
    <!-- FIX: the DataTables stylesheet URL ended in ".min.css.css" (broken link). -->
    <link rel="stylesheet" href="https://cdn.datatables.net/1.10.19/css/dataTables.bootstrap4.min.css">
    <script src="https://code.jquery.com/jquery-3.3.1.js"></script>
    <script src="https://cdn.datatables.net/1.10.19/js/jquery.dataTables.min.js"></script>
    <script src="https://cdn.datatables.net/1.10.19/js/dataTables.bootstrap4.min.js"></script>
</head>
<body>
<?php include "menu.php"; ?>
<table id="example" class="table table-striped table-bordered" style="width:100%">
    <thead>
    <tr>
        <th>Matricule</th>
        <th>Name</th>
        <th>Prenom</th>
        <th>Telephone</th>
        <th>Date de naissance</th>
        <th>Email</th>
        <!-- FIX: getListEtudiantnonboursier() emits 7 <td>s per row
             (including adress); the header only had 6 columns, which
             breaks DataTables. -->
        <th>Adresse</th>
    </tr>
    </thead>
    <tbody>
    <?php $etuSR->getListEtudiantnonboursier(); ?>
    </tbody>
</table>
<footer>
    <?php include "footer.php"; ?>
</footer>
<script src="style.js"></script>
</body>
</html><file_sep>/universite1/controleur/Servicetypebourso.php
<?php
class Servicetypebourso
{
    /** @var PDO Database handle. */
    private $db;
    /** @var Typebourso[] In-memory list filled by findtab(). */
    private $tabtypeboursier;

    public function __construct($db)
    {
        $this->setDb($db);
        $this->tabtypeboursier = [];
    }

    /**
     * Persist a scholarship type as a typeBourse row.
     */
    public function addTypebourse(Typebourso $tpb)
    {
        $req = $this->db->prepare('INSERT INTO typeBourse SET lib=:lib,montant=:montant');
        $req->bindValue(':lib', $tpb->getLib(), PDO::PARAM_STR);
        $req->bindValue(':montant', $tpb->getMontant(), PDO::PARAM_INT);
        $req->execute();
        $req->closeCursor();
    }

    /**
     * Echo every typeBourse row as <tr> table rows (view helper for the
     * listing table in addtype.php).
     */
    public function find()
    {
        $req = $this->db->query('SELECT * FROM typeBourse');
        while ($tab = $req->fetch()) {
            ?>
            <tr>
                <td><?= $tab['lib'] ?></td>
                <td><?= $tab['montant'] ?></td>
            </tr>
            <?php
        }
    }

    /** @return self */
    public function setDb($db)
    {
        $this->db = $db;
        return $this;
    }

    /**
     * Accumulate a scholarship type in the in-memory list.
     * FIX: the type hint referenced the non-existent class "Typeboursier";
     * the actual class in this codebase is Typebourso, so any call to this
     * method fataled on the type check.
     */
    public function findtab(Typebourso $typeboursier)
    {
        $this->tabtypeboursier[] = $typeboursier;
    }
}
?><file_sep>/universite1/controleur/bourcier.php
<?php
class bourcier extends etudiant
{
    /** @var int Scholarship type id (typeBourse.id). */
    private $id_type;

    /**
     * Build a scholarship student.
     *
     * FIX: the method was misspelled "__contructe", so PHP never ran it as
     * a constructor and $id_type was never initialised (the parent
     * etudiant constructor was used instead, silently dropping $id_type).
     *
     * @param string $mat         registration number
     * @param string $nom         last name
     * @param string $prenom      first name
     * @param int    $tel         phone number
     * @param string $email       e-mail address
     * @param mixed  $date_de_nss date of birth
     * @param int    $id_type     scholarship type id (typeBourse.id)
     */
    public function __construct($mat = "", $nom = "", $prenom = "", $tel = 0, $email = "", $date_de_nss = 0, $id_type = 0)
    {
        parent::__construct($mat, $nom, $prenom, $tel, $email, $date_de_nss);
        $this->id_type = $id_type;
    }

    /**
     * Get the value of id_type
     */
    public function getid_type()
    {
        return $this->id_type;
    }

    /**
     * Set the value of id_type
     *
     * @return self
     */
    public function setid_type($id_type)
    {
        $this->id_type = $id_type;
        return $this;
    }
}
?><file_sep>/universite1/controleur/ServicebatimentChambre.php
<?php
// Data-access service for buildings (batiment) and rooms (chambre).
// The get* methods echo HTML <tr> rows directly and are meant to be called
// inside a <tbody> in the views.
class ServicebatimentChambre
{
    // PDO handle injected through the constructor.
    private $bd;
    public function __construct($bd)
    {
        $this->setBd($bd);
    }
    // Insert one building row (columns numerobt, nom).
    public function addBatiment(Batimant $bt)
    {
        $req=$this->bd->prepare('INSERT INTO batiment SET numerobt=:numerobt,nom=:nom');
        $req->bindValue(':numerobt',$bt->getNumbatiment(),PDO::PARAM_STR);
        $req->bindValue(':nom',$bt->getNom(),PDO::PARAM_STR);
        $req->execute();
        $req->closeCursor();
    }
    // Insert one room row attached to a building (foreign key id_b).
    public function addChambre(Chambre $ch)
    {
        $req=$this->bd->prepare('INSERT INTO chambre SET nom=:nom, id_b=:id_b');
        $req->bindValue(':nom',$ch->getNom(),PDO::PARAM_STR);
        $req->bindValue(':id_b',$ch->getId_b(),PDO::PARAM_INT);
        //var_dump($ch->getId_b());
        // die();
        $req->execute();
        $req->closeCursor();
    }
    // Echo every building as a <tr> with id / numerobt / nom cells.
    public function getBatiment() {
        $req = $this->bd->query('SELECT * FROM batiment');
        while ($tab = $req->fetch()) {
            ?>
            <tr>
                <td><?=$tab['id']?></td>
                <td><?=$tab['numerobt']?></td>
                <td><?=$tab['nom']?></td>
            </tr>
            <?php
        }
        $req->closeCursor();
    }
    // Echo every room joined with its building name (aliased "nomb") as a
    // <tr> with id / room name / building name cells.
    public function getChambre() {
        $req = $this->bd->query('SELECT batiment.nom AS nomb,chambre.id,chambre.nom FROM chambre,batiment WHERE chambre.id_b= batiment.id');
        while ($tab = $req->fetch()) {
            ?>
            <tr>
                <td><?=$tab['id']?></td>
                <td><?=$tab['nom']?></td>
                <td><?=$tab['nomb']?></td>
            </tr>
            <?php
        }
        $req->closeCursor();
    }
    // Setter for the PDO handle; returns $this for chaining.
    public function setBd($bd)
    {
        $this->bd = $bd;
        return $this;
    }
}
?><file_sep>/universite1/controleur/connection.php
<?php
/**
 * Open a PDO connection to the "universite" MySQL database (UTF-8).
 *
 * Terminates the script with an error message when the connection fails.
 * NOTE(review): credentials are hard-coded in source — consider moving
 * them to configuration.
 *
 * @return PDO
 */
function conn()
{
    $dsn = 'mysql:host=localhost;dbname=universite;charset=utf8';
    try {
        $bdd = new PDO($dsn, 'root', 'bqdqr1280');
    } catch (Exception $e) {
        die("Erreur:" . $e->getMessage());
    }
    return $bdd;
}
?><file_sep>/universite1/controleur/cnn.php
<?php
// Throw-away connectivity smoke test: opens a PDO handle and prints a
// greeting. NOTE(review): dbname is "universit" here while connection.php
// uses "universite" — looks like a typo; confirm before relying on it.
// NOTE(review): the credential below appears redacted in this dump.
$con = new PDO ('mysql:host=localhost;dbname=universit;charset=utf8','root','b<PASSWORD>1280');
// PDO throws on failure, so $con is always truthy when this line is reached.
if ($con) {
    echo "Hello word!!!";
}
?><file_sep>/universite1/controleur/Typebourso.php
<?php
class Typebourso
{
    // private $id;
    /** @var string Scholarship label (column lib). */
    private $lib;
    /** @var int Scholarship amount (column montant). */
    private $montant;

    /**
     * FIX: the constructor took ($id, $lib, $montant) while its body only
     * assigned lib and montant, and the only visible caller (addtype.php)
     * builds it as new Typebourso($lib, $mont) — every argument was shifted
     * and montant always stayed 0. The unused leading $id parameter is
     * dropped so the caller's argument order is honoured.
     *
     * @param string $lib     label of the scholarship type
     * @param int    $montant amount of the scholarship
     */
    public function __construct($lib = "", $montant = 0)
    {
        $this->lib = $lib;
        $this->montant = $montant;
    }

    public function getLib()
    {
        return $this->lib;
    }

    /** @return self */
    public function setLib($lib)
    {
        $this->lib = $lib;
        return $this;
    }

    public function getMontant()
    {
        return $this->montant;
    }

    /**
     * FIX: the parameter was named $id while the body assigned the
     * undefined variable $montant, so the setter never worked.
     *
     * @return self
     */
    public function setMontant($montant)
    {
        $this->montant = $montant;
        return $this;
    }

    /**
     * Get the value of id
     */
    public function getId()
    {
        return $this->id;
    }

    /**
     * Set the value of id
     *
     * @return self
     */
    public function setId($id)
    {
        $this->id = $id;
        return $this;
    }
}
?><file_sep>/universite1/controleur/EtudiantService.php
<?php
class EtudiantService {

    /** @var PDO Database handle. */
    private $_bdd;

    public function __construct($bdd) {
        $this->setDb($bdd);
    }

    /**
     * Insert a student row, then its boursier / nomBoursier satellite row.
     *
     * The satellite row is keyed on MAX(id) of etudient, which is racy
     * under concurrent inserts; PDO::lastInsertId() would be safer — TODO.
     *
     * FIX: the subtype was chosen with get_class() == 'bourcier', which
     * mis-classified loger instances (loger extends bourcier) into the
     * nomBoursier branch; instanceof handles subclasses correctly.
     *
     * @param etudiant $etudiant student to persist (bourcier or nonbourcier)
     * @param int      $id_type  scholarship type id (ignored for non-boursiers)
     */
    public function addEtudiant(etudiant $etudiant, $id_type) {
        $req = $this->_bdd->prepare('INSERT INTO etudient
                                        SET
                                        mat = :mat,
                                        nom = :nom,
                                        prenom = :prenom,
                                        tel = :tel,
                                        date_de_nss = :date_de_nss,
                                        email = :email
                                    ');
        $req->bindValue(':mat', $etudiant->getMat(), PDO::PARAM_STR);
        $req->bindValue(':nom', $etudiant->getNom(), PDO::PARAM_STR);
        $req->bindValue(':prenom', $etudiant->getPrenom(), PDO::PARAM_STR);
        $req->bindValue(':tel', $etudiant->getTel(), PDO::PARAM_INT);
        $req->bindValue(':email', $etudiant->getEmail(), PDO::PARAM_STR);
        $req->bindValue(':date_de_nss', $etudiant->getDate_de_nss());
        $req->execute();
        $req->closeCursor();

        // Fetch the id of the row we just created (see race caveat above).
        $id = 0;
        $req = $this->_bdd->query('SELECT MAX(id) as id FROM etudient');
        while ($datas = $req->fetch()) {
            $id = $datas['id'];
            break;
        }

        if ($etudiant instanceof bourcier) {
            $req = $this->_bdd->prepare('INSERT INTO boursier
                                            SET
                                            id_etu = :id_etu,
                                            id_type = :id_type
                                        ');
            $req->bindValue(':id_etu', $id, PDO::PARAM_INT);
            $req->bindValue(':id_type', $id_type, PDO::PARAM_INT);
            $req->execute();
            $req->closeCursor();
        } else {
            $req = $this->_bdd->prepare('INSERT INTO nomBoursier
                                            SET
                                            id_etu = :id_etu,
                                            adress = :adress
                                        ');
            $req->bindValue(':id_etu', $id);
            $req->bindValue(':adress', $etudiant->getAdress(), PDO::PARAM_STR);
            $req->execute();
            $req->closeCursor();
        }
    }

    /**
     * Attach the most recently inserted student to room $id_ch.
     * Uses MAX(id) of etudient like addEtudiant (same race caveat), so it
     * must be called right after addEtudiant() for the same student.
     */
    public function addLoger(loger $l, $id_ch)
    {
        $req = $this->_bdd->query('SELECT MAX(id) as id FROM etudient');
        while ($datas = $req->fetch()) {
            $id = $datas['id'];
            break;
        }
        $req = $this->_bdd->prepare('INSERT INTO loger
                                        SET
                                        id_cha = :id_cha,
                                        id_b = :id_b
                                    ');
        $req->bindValue(':id_cha', $id_ch, PDO::PARAM_INT);
        $req->bindValue(':id_b', $id, PDO::PARAM_INT);
        $req->execute();
        $req->closeCursor();
    }

    /**
     * Delete a student row.
     * FIX: the table in this schema is "etudient" (sic), not "etudiant".
     * NOTE(review): the base etudiant class exposes no getId() in this
     * dump — confirm how callers obtain the id before using this method.
     */
    public function deleteEtudiant(etudiant $etudiant) {
        $this->_bdd->exec('DELETE FROM etudient WHERE id = ' . $etudiant->getId());
    }

    /**
     * Load one student by primary key.
     * FIXES: table name corrected to "etudient"; closeCursor() was
     * unreachable after the return.
     * NOTE(review): EtudiantTable is not defined anywhere in this dump —
     * confirm the class exists before relying on this method.
     */
    public function getEtudiant($id) {
        $id = (int) $id;
        $req = $this->_bdd->query('SELECT * FROM etudient WHERE id = ' . $id);
        $datas = $req->fetch(PDO::FETCH_ASSOC);
        $req->closeCursor();
        return new EtudiantTable($datas);
    }

    /**
     * Return the id of the first scholarship student found.
     * FIX: the original SQL referenced "boursier" without listing it in
     * FROM ('SELECT * FROM etudiant WHERE etudient.id=boursier.id_etu'),
     * which could only fail. NOTE(review): despite its name the method
     * never filtered on mat with its parameter — behaviour preserved;
     * confirm the intended contract.
     */
    public function getEtudiantbymat($id) {
        $req = $this->_bdd->query('SELECT etudient.id AS id FROM etudient INNER JOIN boursier ON etudient.id = boursier.id_etu');
        while ($datas = $req->fetch()) {
            $id = $datas['id'];
            break;
        }
        $req->closeCursor();
        return $id;
    }

    /**
     * Echo every student as <tr> rows. Also returns an (always empty)
     * array, kept for backward compatibility with any caller expecting it.
     * FIX: closeCursor() was unreachable after the return.
     */
    public function getListEtudiant() {
        $etudie = [];
        $req = $this->_bdd->query('SELECT * FROM etudient ORDER BY id');
        while ($tab = $req->fetch(PDO::FETCH_ASSOC)) {
            ?>
            <tr>
                <td><?= $tab['mat'] ?></td>
                <td><?= $tab['nom'] ?></td>
                <td><?= $tab['prenom'] ?></td>
                <td><?= $tab['tel'] ?></td>
                <td><?= $tab['date_de_nss'] ?></td>
                <td><?= $tab['email'] ?></td>
            </tr>
            <?php
        }
        $req->closeCursor();
        return $etudie;
    }

    /**
     * Echo every scholarship student (etudient joined with boursier) as
     * <tr> rows.
     */
    public function getListEtudiantboursier() {
        $req = $this->_bdd->query('SELECT * FROM etudient,boursier WHERE etudient.id=boursier.id_etu');
        while ($tab = $req->fetch(PDO::FETCH_ASSOC)) {
            ?>
            <tr>
                <td><?= $tab['mat'] ?></td>
                <td><?= $tab['nom'] ?></td>
                <td><?= $tab['prenom'] ?></td>
                <td><?= $tab['tel'] ?></td>
                <td><?= $tab['date_de_nss'] ?></td>
                <td><?= $tab['email'] ?></td>
            </tr>
            <?php
        }
        $req->closeCursor();
    }

    /**
     * Echo every non-scholarship student (etudient joined with nomBoursier)
     * as <tr> rows — 7 cells per row, including the address.
     */
    public function getListEtudiantnonboursier() {
        $req = $this->_bdd->query('SELECT nomBoursier.adress AS adress,etudient.mat
                                ,etudient.nom,etudient.prenom,etudient.tel,etudient.date_de_nss,email
                                FROM etudient,nomBoursier WHERE etudient.id=nomBoursier.id_etu');
        while ($tab = $req->fetch(PDO::FETCH_ASSOC)) {
            ?>
            <tr>
                <td><?= $tab['mat'] ?></td>
                <td><?= $tab['nom'] ?></td>
                <td><?= $tab['prenom'] ?></td>
                <td><?= $tab['tel'] ?></td>
                <td><?= $tab['date_de_nss'] ?></td>
                <td><?= $tab['email'] ?></td>
                <td><?= $tab['adress'] ?></td>
            </tr>
            <?php
        }
        $req->closeCursor();
    }

    /**
     * Update a student row.
     * FIXES: table name corrected to "etudient"; :email was bound but
     * missing from the SET clause (execute failed with an unknown
     * placeholder); string columns were bound as PDO::PARAM_INT.
     */
    public function updateEtudiant(Etudiant $etudiant) {
        $req = $this->_bdd->prepare('UPDATE etudient
                                        SET nom = :nom,
                                        prenom = :prenom,
                                        tel = :tel,
                                        date_de_nss = :date_de_nss,
                                        email = :email
                                        WHERE id = :id
                                    ');
        $req->bindValue(':nom', $etudiant->getNom(), PDO::PARAM_STR);
        $req->bindValue(':prenom', $etudiant->getPrenom(), PDO::PARAM_STR);
        $req->bindValue(':tel', $etudiant->getTel(), PDO::PARAM_INT);
        $req->bindValue(':date_de_nss', $etudiant->getDate_de_nss());
        $req->bindValue(':email', $etudiant->getEmail(), PDO::PARAM_STR);
        $req->bindValue(':id', $etudiant->getId(), PDO::PARAM_INT);
        $req->execute();
        $req->closeCursor();
    }

    /**
     * Setter for the PDO handle.
     */
    public function setDb(PDO $bdd) {
        $this->_bdd = $bdd;
    }
}<file_sep>/universite1/controleur/etudiant.php
<?php
/**
 * Abstract base class for every student type (bourcier, nonbourcier, …).
 * Carries the identity fields shared by all students; subclasses add the
 * scholarship- or housing-specific state.
 */
abstract class etudiant
{
    /** @var string Registration number (unique). */
    private $mat;
    /** @var string Last name. */
    private $nom;
    /** @var string First name. */
    private $prenom;
    /** @var int Phone number. */
    private $tel;
    /** @var mixed Date of birth. */
    private $date_de_nss;
    /** @var string E-mail address. */
    private $email;

    public function __construct($mat = "", $nom = "", $prenom = "", $tel = 0, $email = "", $date_de_nss = 0)
    {
        $this->mat = $mat;
        $this->nom = $nom;
        $this->prenom = $prenom;
        $this->tel = $tel;
        $this->email = $email;
        $this->date_de_nss = $date_de_nss;
    }

    // --- mat -------------------------------------------------------------
    public function getMat()
    {
        return $this->mat;
    }

    /** @return self */
    public function setMat($mat)
    {
        $this->mat = $mat;
        return $this;
    }

    // --- nom -------------------------------------------------------------
    public function getNom()
    {
        return $this->nom;
    }

    /** @return self */
    public function setNom($nom)
    {
        $this->nom = $nom;
        return $this;
    }

    // --- prenom ----------------------------------------------------------
    public function getPrenom()
    {
        return $this->prenom;
    }

    /** @return self */
    public function setPrenom($prenom)
    {
        $this->prenom = $prenom;
        return $this;
    }

    // --- tel -------------------------------------------------------------
    public function getTel()
    {
        return $this->tel;
    }

    /** @return self */
    public function setTel($tel)
    {
        $this->tel = $tel;
        return $this;
    }

    // --- email -----------------------------------------------------------
    public function getEmail()
    {
        return $this->email;
    }

    /** @return self */
    public function setEmail($email)
    {
        $this->email = $email;
        return $this;
    }

    // --- date_de_nss -----------------------------------------------------
    public function getDate_de_nss()
    {
        return $this->date_de_nss;
    }

    /** @return self */
    public function setDate_de_nss($date_de_nss)
    {
        $this->date_de_nss = $date_de_nss;
        return $this;
    }
}
?><file_sep>/universite1/base/universite.sql
-- phpMyAdmin SQL Dump
-- version 4.6.6deb5
-- https://www.phpmyadmin.net/
--
-- Client : localhost:3306
-- Généré le : Lun 01 Juillet 2019 à 09:32
-- Version du serveur : 5.7.26-0ubuntu0.18.04.1
-- Version de PHP : 7.3.5-1+ubuntu18.04.1+deb.sury.org+1
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Base de données : `universite`
--
-- --------------------------------------------------------
--
-- Structure de la table `batiment`
--
CREATE TABLE `batiment` (
`id` int(11) NOT NULL,
`numerobt` varchar(20) NOT NULL,
`nom` varchar(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `batiment`
--
INSERT INTO `batiment` (`id`, `numerobt`, `nom`) VALUES
(1, '12A', 'mame_amy'),
(2, '12m', 'batiment12'),
(3, '12mp', 'batiment13'),
(4, 'num12', 'batiment14');
-- --------------------------------------------------------
--
-- Structure de la table `boursier`
--
CREATE TABLE `boursier` (
`id` int(11) NOT NULL,
`id_etu` int(11) NOT NULL,
`id_type` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `boursier`
--
INSERT INTO `boursier` (`id`, `id_etu`, `id_type`) VALUES
(2, 28, 1),
(3, 29, 1),
(4, 30, 1),
(5, 30, 1),
(7, 33, 1),
(8, 33, 1),
(1, 3, 2),
(6, 32, 2),
(9, 34, 2),
(10, 34, 2),
(11, 36, 2);
-- --------------------------------------------------------
--
-- Structure de la table `chambre`
--
CREATE TABLE `chambre` (
`id` int(11) NOT NULL,
`nom` varchar(30) NOT NULL,
`id_b` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `chambre`
--
INSERT INTO `chambre` (`id`, `nom`, `id_b`) VALUES
(1, 'chambreA', 1),
(2, 'chambreB', 1),
(3, 'chambreC', 1),
(4, 'chambreD', 1),
(5, 'chambreE', 2),
(10, 'chambreZ', 3);
-- --------------------------------------------------------
--
-- Structure de la table `etudient`
--
CREATE TABLE `etudient` (
`id` int(11) NOT NULL,
`mat` varchar(11) NOT NULL,
`nom` varchar(30) NOT NULL,
`prenom` varchar(35) NOT NULL,
`tel` int(11) NOT NULL,
`email` varchar(30) NOT NULL,
`date_de_nss` date NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `etudient`
--
INSERT INTO `etudient` (`id`, `mat`, `nom`, `prenom`, `tel`, `email`, `date_de_nss`) VALUES
(1, 'kl1280', 'gueye', 'ahmado', 776981298, '<EMAIL>', '2019-06-14'),
(2, '125kl', 'diop', 'junior', 784513580, '<EMAIL>', '2019-06-19'),
(3, 'rt125', 'fall', 'mame fatou', 774512803, '<EMAIL>', '2019-06-11'),
(4, 'loijk', 'gueye', 'tamara', 774444444, '<EMAIL>', '2004-04-04'),
(5, 'cgvhj', 'dfghjk', '(tguhjk', 875412, '<EMAIL>', '2019-06-13'),
(7, 'gbc', 'gbvnbn', 'dhivjpvk', 41326546, '<EMAIL>', '2019-06-06'),
(8, 'jkfkb; ', 'jfnl,', 'jnfs', 5641654, '<EMAIL>', '2019-06-05'),
(9, 'nvnl', 'diop', 'ahmadou', 216564, '<EMAIL>', '2019-06-19'),
(10, 'gh1280', 'sylla', 'rama', 777613530, '<EMAIL>', '1992-07-08'),
(11, 'DE12', 'diop', 'dame', 774667801, '<EMAIL>', '1981-06-07'),
(12, 'gt1254', 'lame', 'soda', 774512036, '<EMAIL>', '1996-06-12'),
(14, 'khlll', 'ehjfkje', 'jhfbjek,', 8562, '<EMAIL>', '1990-06-06'),
(15, 'sdfghj', 'sfdgfhjklk', 'qsdfghjklkm', 74556523, '<EMAIL>', '2019-06-05'),
(16, 'hud', 'shdsj', 'jjsd', 6549, '<EMAIL>', '2000-06-14'),
(17, 'cvhvb', 'vbn,;', 'hbij', 44659, '<EMAIL>', '1993-06-19'),
(18, 'xfxcvkj', 'xcvcvb', 'gfcgbk', 16552122, '<EMAIL>', '2000-06-21'),
(20, 'hss', 'sylla', 'rama', 777613530, '<EMAIL>', '1992-07-08'),
(21, 'KL0280', 'tall', 'ma ball', 774521208, '<EMAIL>', '2001-06-30'),
(22, 'sjasja', 'hdzjdizj', 'ssnjsj', 564648, '<EMAIL>', '2019-06-12'),
(23, 'xc', 'fall', 'abdou khadir', 776990399, '<EMAIL>', '2006-06-11'),
(24, 'ft', 'ba', 'ahmado', 774552155, '<EMAIL>', '2001-06-12'),
(25, 'bvj', 'b nj,', 'bbj', 8541, '<EMAIL>', '1999-06-15'),
(26, 'klml', 'gueye', 'ahmado', 774552108, '<EMAIL>', '2019-06-12'),
(27, ' xxbb', 'bbh', 'jwh', 774667906, '<EMAIL>', '2019-06-14'),
(28, 'bdbbb', 'bhb', 'mame amy', 1254876, '<EMAIL>', '2019-06-08'),
(29, 'dxfg', 'dffugui', 'gfygyyuho', 51564987, '<EMAIL>', '2019-06-12'),
(30, 'FDXC', 'mame', 'mame amy', 41595, '<EMAIL>', '2019-06-19'),
(32, 'fcg', 'gueye', 'tamara', 47456, '<EMAIL>', '2019-06-18'),
(33, 'jho', 'dgvhn;', 'h,;ùm', 6897, '', '2019-06-11'),
(34, 'ma12', 'mame', 'mame amy', 774667906, '<EMAIL>', '1994-06-11'),
(36, 'po45', 'poye', 'abdolaye', 774512865, '<EMAIL>', '1984-06-14'),
(37, 'ma52', 'ghfj', 'vvhvjn,;', 8946, '<EMAIL>', '2019-06-07'),
(40, 'jdklk', 'tyfgshj', 'mam', 45, '<EMAIL>', '2019-06-07'),
(41, 'dbfb', 'diop', 'ahmado', 1280, '<EMAIL>', '2019-06-07'),
(42, 'hl1250', 'gueye', 'tariame', 1502, '<EMAIL>', '2019-06-05');
-- --------------------------------------------------------
--
-- Structure de la table `loger`
--
CREATE TABLE `loger` (
`id` int(11) NOT NULL,
`id_cha` int(11) NOT NULL,
`id_b` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `loger`
--
INSERT INTO `loger` (`id`, `id_cha`, `id_b`) VALUES
(8, 1, 34),
(6, 2, 33),
(7, 2, 34),
(5, 3, 33),
(4, 4, 33);
-- --------------------------------------------------------
--
-- Structure de la table `nomBoursier`
--
CREATE TABLE `nomBoursier` (
`id` int(11) NOT NULL,
`id_etu` int(11) NOT NULL,
`adress` varchar(30) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `nomBoursier`
--
INSERT INTO `nomBoursier` (`id`, `id_etu`, `adress`) VALUES
(1, 1, 'guediawaye'),
(2, 3, 'guediawaye'),
(3, 9, 'touba'),
(4, 22, ''),
(5, 23, ''),
(6, 24, ''),
(7, 24, 'guediawaye'),
(8, 25, ''),
(9, 36, ''),
(10, 36, ''),
(11, 37, 'guediawaye'),
(12, 37, ''),
(13, 37, ''),
(14, 40, 'guediawaye'),
(15, 41, 'guediawaye'),
(16, 42, 'guediawaye');
-- --------------------------------------------------------
--
-- Structure de la table `typeBourse`
--
CREATE TABLE `typeBourse` (
`id` int(11) NOT NULL,
`lib` varchar(40) NOT NULL,
`montant` int(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Contenu de la table `typeBourse`
--
INSERT INTO `typeBourse` (`id`, `lib`, `montant`) VALUES
(1, 'demie_bourse', 20000),
(2, 'bourse entiere', 40000),
(3, 'bourse d\'excellence', 60000);
--
-- Index pour les tables exportées
--
--
-- Index pour la table `batiment`
--
ALTER TABLE `batiment`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `numerobt` (`numerobt`);
--
-- Index pour la table `boursier`
--
ALTER TABLE `boursier`
ADD PRIMARY KEY (`id`,`id_etu`),
ADD KEY `id_type` (`id_type`),
ADD KEY `id_etu` (`id_etu`);
--
-- Index pour la table `chambre`
--
ALTER TABLE `chambre`
ADD PRIMARY KEY (`id`),
ADD KEY `id_b` (`id_b`);
--
-- Index pour la table `etudient`
--
ALTER TABLE `etudient`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `mat` (`mat`);
--
-- Index pour la table `loger`
--
ALTER TABLE `loger`
ADD PRIMARY KEY (`id`,`id_b`),
ADD KEY `id_cha` (`id_cha`),
ADD KEY `id_b` (`id_b`);
--
-- Index pour la table `nomBoursier`
--
ALTER TABLE `nomBoursier`
ADD PRIMARY KEY (`id`,`id_etu`),
ADD KEY `id_etu` (`id_etu`);
--
-- Index pour la table `typeBourse`
--
ALTER TABLE `typeBourse`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT pour les tables exportées
--
--
-- AUTO_INCREMENT pour la table `batiment`
--
ALTER TABLE `batiment`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT pour la table `boursier`
--
ALTER TABLE `boursier`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;
--
-- AUTO_INCREMENT pour la table `chambre`
--
ALTER TABLE `chambre`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11;
--
-- AUTO_INCREMENT pour la table `etudient`
--
ALTER TABLE `etudient`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=43;
--
-- AUTO_INCREMENT pour la table `loger`
--
ALTER TABLE `loger`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT pour la table `nomBoursier`
--
ALTER TABLE `nomBoursier`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=17;
--
-- AUTO_INCREMENT pour la table `typeBourse`
--
ALTER TABLE `typeBourse`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- Contraintes pour les tables exportées
--
--
-- Contraintes pour la table `boursier`
--
ALTER TABLE `boursier`
ADD CONSTRAINT `boursier_ibfk_1` FOREIGN KEY (`id_etu`) REFERENCES `etudient` (`id`),
ADD CONSTRAINT `boursier_ibfk_2` FOREIGN KEY (`id_type`) REFERENCES `typeBourse` (`id`);
--
-- Contraintes pour la table `chambre`
--
ALTER TABLE `chambre`
ADD CONSTRAINT `chambre_ibfk_1` FOREIGN KEY (`id_b`) REFERENCES `batiment` (`id`);
--
-- Contraintes pour la table `loger`
--
ALTER TABLE `loger`
ADD CONSTRAINT `loger_ibfk_1` FOREIGN KEY (`id_b`) REFERENCES `boursier` (`id_etu`),
ADD CONSTRAINT `loger_ibfk_2` FOREIGN KEY (`id_cha`) REFERENCES `chambre` (`id`);
--
-- Contraintes pour la table `nomBoursier`
--
ALTER TABLE `nomBoursier`
ADD CONSTRAINT `nomBoursier_ibfk_1` FOREIGN KEY (`id_etu`) REFERENCES `etudient` (`id`);
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/universite1/vue/addtype.php
<?php
include "../controleur/connection.php";
$conn=conn();
function chargerMaClasse($classe) {
require "../controleur/".$classe.".php";
}
spl_autoload_register('chargerMaClasse');
$srt= new Servicetypebourso($conn);
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
<title>Document</title>
</head>
<body>
<?php
include "menu.php";
?>
<div class="cotainer-flud p-3 mb-2 bg-light text-dark">
<h2 class="text-center"> l'insertion des type de bourse</h2>
</div>
<div class="cotainer">
<div class="row justify-content-center">
<div class="col-md-6">
<h2 class="text-center text-primary">!Ajoute Bourse</h2>
<div class="form">
<form action="#" method="POST">
<div class="form-group row">
<label for="mat" class="col-md-4 col-form-label text-md-right" >Libellé</label>
<div class="col-md-6"><input type="text" class="form-control" name="lib"></div>
</div>
<div class="form-group row">
<label for="mat" class="col-md-4 col-form-label text-md-right" >Montant</label>
<div class="col-md-6"><input type="number" class="form-control" name="mont"></div>
</div>
<div class="col-md-6 offset-md-4">
<button type="submit" class="btn btn-success" name="btn"> Validation</button>
</div>
</form>
</div>
</div>
<div class="col-md-6">
<h2 class="text-center text-primary">Liste Des Bourse</h2>
<table class="table table-striped" cellspacing="0" width="100%">
<thead>
<tr>
<th>Lib</th>
<th>Montant</th>
</tr>
</thead>
<tbody>
<?php
$srt->find();
?>
</tbody>
</table>
</div>
</div>
</div>
<footer>
<?php
include "footer.php";
?>
</footer>
<?php
if (isset($_POST['btn'])) {
$lib=$_POST['lib'];
$mont=$_POST['mont'];
$conn->setAttribute(PDO::ATTR_ERRMODE,PDO::ERRMODE_WARNING);
$typb=new Typebourso($lib,$mont);
$srt->addTypebourse($typb);
}
?>
</body>
</html> | 5d351758c77b479154c47f6fc37743d518fe6fd6 | [
"JavaScript",
"SQL",
"PHP"
] | 20 | PHP | mameAmy466/poo-en-php | 3d793ce8d10c7bdd79007de7562025cc79b6c347 | 83cdcea6e1c416386b4de533b6a7370d3364a875 |
refs/heads/master | <repo_name>pedropablorj/jsAlgorithms<file_sep>/recursiveReverse.js
var str = "qwertyuiopasdfghjkl";
/**
 * Recursively reverse a string (or an array of single characters).
 *
 * Accepts either a string or a character array; internally the recursion
 * always works on an array. Fixes the empty-input case, which previously
 * returned undefined (shift() on an empty array) instead of "".
 *
 * @param {string|string[]} str - value to reverse
 * @returns {string} the characters of str in reverse order ("" for empty input)
 */
function reverse(str){
    var arrStr = (typeof str === "string") ? str.split('') : str;
    // Base case: nothing left to reverse.
    if(arrStr.length === 0){
        return "";
    }
    var c = arrStr.shift();
    if(arrStr.length === 0){
        return c;
    }
    // Reverse the remainder, then append the removed first character.
    return reverse(arrStr) + c;
}
}
//Input
console.log(str);
//Output
console.log(reverse(str));
<file_sep>/binarySearch.js
var arr = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,50];
var searchTerm = 3;
/**
 * Binary search over an ascending sorted array.
 *
 * @param {number[]} arr - sorted values to search
 * @param {number} searchTerm - value to look for
 * @returns {boolean} true when searchTerm is present, false otherwise
 */
function binarySearch(arr, searchTerm){
    var lo = 0;
    var hi = arr.length - 1;
    while (lo <= hi) {
        // Midpoint of the remaining window.
        var mid = lo + Math.floor((hi - lo) / 2);
        var value = arr[mid];
        if (value === searchTerm) {
            return true;
        }
        if (value < searchTerm) {
            lo = mid + 1;   // discard the left half
        } else {
            hi = mid - 1;   // discard the right half
        }
    }
    return false;
}
console.log(binarySearch(arr, searchTerm));
<file_sep>/mergeSort.js
var arr = [1,0,9,7,9,87,54,2,7,8,432,664,3,2,6879,3,236,790];
/**
 * Merge sort: split the array in half, sort each half recursively, then
 * merge the two sorted halves.
 *
 * @param {number[]} arr - values to sort (the input is not mutated)
 * @returns {number[]} the values in ascending order
 */
function sort(arr){
    if (arr.length <= 1) {
        return arr;
    }
    var mid = Math.floor(arr.length / 2);
    var left = sort(arr.slice(0, mid));
    var right = sort(arr.slice(mid, arr.length));
    return merge(left, right);
}

/**
 * Merge two sorted arrays into one sorted array.
 * When the heads are equal, one element is taken from each side.
 */
function merge(arrOne, arrTwo){
    if (arrOne.length === 0) {
        return arrTwo;
    }
    if (arrTwo.length === 0) {
        return arrOne;
    }
    var merged = [];
    var i = 0;
    var j = 0;
    while (i < arrOne.length && j < arrTwo.length) {
        if (arrOne[i] < arrTwo[j]) {
            merged.push(arrOne[i++]);
        } else if (arrOne[i] > arrTwo[j]) {
            merged.push(arrTwo[j++]);
        } else {
            // Equal heads: consume one element from each side.
            merged.push(arrOne[i++]);
            merged.push(arrTwo[j++]);
        }
    }
    // Append whichever side still has elements left.
    while (i < arrOne.length) merged.push(arrOne[i++]);
    while (j < arrTwo.length) merged.push(arrTwo[j++]);
    return merged;
}
//Input
console.log(arr);
//Output
console.log(sort(arr));
<file_sep>/bubbleSort.js
var arr = [8,9,3,234,667,7,4,33,6,8,5,3,22,4,9,7,6,5,45,2,97663];
/**
 * Bubble sort (ascending, in place). Each recursive pass bubbles the
 * largest remaining element to the end of the unsorted region; `i` counts
 * the completed passes.
 *
 * @param {Array} arr - values to sort (mutated in place)
 * @param {number} [i] - internal pass counter; omit on the initial call
 * @returns {Array|boolean} the sorted array, or false when arr is not an array
 */
function sort(arr, i){
    i = (typeof i === "undefined") ? 0 : i;
    var tmp;
    if(!Array.isArray(arr)){
        console.log("error");
        return false;
    }
    // `>=` (rather than `===`) also terminates for empty arrays: the original
    // recursed forever on [] because i could never reach length - 1 === -1.
    if(i >= arr.length - 1){
        return arr;
    }
    // One bubbling pass over the still-unsorted prefix.
    for (var j = 1; j < arr.length - i; j++){
        if(compare(arr[j-1], arr[j])){
            // Swap out-of-order neighbours.
            tmp = arr[j];
            arr[j] = arr[j-1];
            arr[j-1] = tmp;
        }
    }
    return sort(arr, ++i);
}

/** @returns {boolean} true when a must be ordered after b (a > b). */
function compare(a, b){
    return a > b;
}
//Input
console.log(arr);
//Output
console.log(sort(arr));
<file_sep>/binarySearchIndex.js
//Variation of binarySearch returning the index of the term found at the main array
var arr = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,50];
var searchTerm = 10;
/**
 * Binary search over an ascending sorted array, returning the index of the
 * matching element (or -1 when absent). The optional upIndex/downIndex
 * bounds default to the whole array.
 *
 * @param {number[]} arr - sorted values to search
 * @param {number} searchTerm - value to look for
 * @param {number} [upIndex] - inclusive upper bound (defaults to arr.length - 1)
 * @param {number} [downIndex] - inclusive lower bound (defaults to 0)
 * @returns {number} index of searchTerm in arr, or -1 if not found
 */
function binarySearch(arr, searchTerm, upIndex, downIndex){
    var hi = (typeof upIndex === "undefined") ? arr.length - 1 : upIndex;
    var lo = (typeof downIndex === "undefined") ? 0 : downIndex;
    while (lo <= hi) {
        // Upper-middle pivot, matching the original recursive formulation.
        var pivot = (hi === lo) ? hi : hi - Math.floor((hi - lo) / 2);
        if (arr[pivot] === searchTerm) {
            return pivot;
        }
        if (arr[pivot] > searchTerm) {
            hi = pivot - 1;   // continue in the lower half
        } else {
            lo = pivot + 1;   // continue in the upper half
        }
    }
    return -1;
}
console.log(binarySearch(arr, searchTerm));
<file_sep>/quickSort3.js
var arr = [1,2,56,3,3,4,7,9,6,4,3,5,7,86,4,4,5,23,4,4];
/**
 * Quicksort with three-way partitioning: elements less than, equal to, and
 * greater than the pivot go into separate arrays, and the outer two are
 * sorted recursively.
 *
 * Fix: Math.random(arr.length - 1) ignores its argument and returns a value
 * in [0, 1), so Math.floor(...) was always 0 and the pivot was always
 * arr[0] (worst-case O(n^2) on sorted input). A uniformly random pivot
 * index is used instead.
 *
 * @param {number[]} arr - values to sort (the input is not mutated)
 * @returns {number[]} a new array sorted ascending
 */
function quickSort (arr){
    if(arr.length < 2){
        return arr;
    }
    // Uniformly random pivot value.
    var p = arr[Math.floor(Math.random() * arr.length)];
    var c = [];   // elements equal to the pivot
    var l = [];   // elements less than the pivot
    var r = [];   // elements greater than the pivot
    for(var i = 0; i < arr.length; i++){
        if(arr[i] === p){
            c.push(arr[i]);
        }else if(arr[i] < p){
            l.push(arr[i]);
        }else{
            r.push(arr[i]);
        }
    }
    return quickSort(l).concat(c, quickSort(r));
}
console.log("Input:", arr);
console.log("Output:",quickSort(arr));
<file_sep>/quickSort.js
var arr = [8,9,3,23,6,7,4,33,6,8,5,3,22,4,9,7,6,5,5,2];
/**
 * In-place quicksort (Lomuto-style partition with a random pivot) over
 * arr[l..r].
 *
 * Fixes:
 *  - `j` was assigned without `var`, making it an accidental global; the
 *    recursive call between `if (j < r)` and `if (j > l)` clobbered it,
 *    which could leave subranges unsorted (and throws in strict mode).
 *  - The pivot index was drawn from [l, l + r) and clamped back to l when
 *    out of range; it is now drawn uniformly from [l, r].
 *  - Empty input no longer grows to [undefined] via a swap at index 0.
 *
 * @param {number[]} arr - values to sort (mutated in place)
 * @param {number} [l] - inclusive left bound (defaults to 0)
 * @param {number} [r] - inclusive right bound (defaults to arr.length - 1)
 * @returns {number[]} the same array, sorted ascending
 */
function quickSort(arr, l , r){
    l = (typeof l === "undefined") ? 0 : l;
    r = (typeof r === "undefined") ? arr.length - 1 : r;
    // Nothing to do for empty or single-element ranges.
    if (r <= l) {
        return arr;
    }
    var j = l;  // boundary of the "<= pivot" region (was an accidental global)
    // Uniformly random pivot index within [l, r].
    var pivot = l + Math.floor(Math.random() * (r - l + 1));
    swap(arr, l, pivot);        // park the pivot value at the left edge
    for(var i = l + 1; i <= r; i++){
        if(arr[i] <= arr[l]){
            j++;
            if(j !== i){
                swap(arr, j, i);
            }
        }
    }
    swap(arr, j, l);            // move the pivot to its final position j
    if(j < r){
        arr = quickSort(arr, j + 1, r);
    }
    if(j > l){
        arr = quickSort(arr, l, j - 1);
    }
    return arr;
}

/** Swap arr[iOne] and arr[iTwo] in place and return arr. */
function swap(arr, iOne, iTwo){
    var tmp = arr[iOne];
    arr[iOne] = arr[iTwo];
    arr[iTwo] = tmp;
    return arr;
}
console.log("Input: ",arr);
console.log("Output: ",quickSort(arr));
<file_sep>/selectionSort.js
var arr = [9,2,4,6,1,3,6,8,9,3,234,667,7,4,33,6,8,5,3,22,4,9,7,6,5,45,2,97663];
/**
 * Selection sort in DESCENDING order: on each pass the largest remaining
 * element is swapped to the front of the unsorted region.
 *
 * @param {Array} arr - values to sort (mutated in place)
 * @param {number} [index] - starting offset of the unsorted region (default 0)
 * @returns {Array|boolean} the array sorted descending, or false when arr
 *                          is not an array
 */
function selectionSort(arr, index){
    var start = (typeof index !== "undefined") ? index : 0;
    if(!Array.isArray(arr)){
        console.log("error");
        return false;
    }
    for(var pos = start; pos < arr.length; pos++){
        // Locate the largest element in arr[pos..end].
        var best = pos;
        for(var scan = pos; scan < arr.length; scan++){
            if(arr[best] < arr[scan]){
                best = scan;
            }
        }
        // Swap it to the front of the unsorted region.
        var held = arr[best];
        arr[best] = arr[pos];
        arr[pos] = held;
    }
    return arr;
}
console.log(selectionSort(arr));
<file_sep>/README.md
# jsAlgorithms
Basic sort and search algorithms made with JavaScript
| 3ffe4eb266a2ce5c7f43f9b3c7c0513d923ae8ee | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | pedropablorj/jsAlgorithms | b15cc62d7d4992ab520ee64aa4fa824cf531234f | d13c9783368f7e3978c6a42ed644908ed58d3644 |
refs/heads/master | <file_sep>/*
Upload Data to IoT Server ThingSpeak (https://thingspeak.com/):
Support Devices: LG01
Example sketch showing how to get data from remote LoRa node,
Then send the value to IoT Server
It is designed to work with the other sketch dht11_client.
modified 24 11 2016
by <NAME> <<EMAIL>>
Dragino Technology Co., Limited
*/
#include <SPI.h>
#include <RH_RF95.h>
#include <Console.h>
#include "ThingSpeak.h"
#include "YunClient.h"
YunClient client;
RH_RF95 rf95;
//If you use Dragino IoT Mesh Firmware, uncomment below lines.
//For product: LG01.
#define BAUDRATE 115200
unsigned long myChannelNumber = 662568;
const char * myWriteAPIKey = "<KEY>";
const char * myReadAPIKey = "<KEY>";
uint16_t crcdata = 0;
uint16_t recCRCData = 0;
float frequency = 868.0;
// Arduino setup for the LG01 gateway sketch: bring up the Yun bridge and
// ThingSpeak client, then configure the RF95 LoRa radio (868 MHz, 13 dBm).
// NOTE(review): because the failure println is commented out, the
// `if (!rf95.init())` guards only an empty statement -- an init failure is
// silently ignored; confirm that is intended.
void setup()
{
  Bridge.begin(BAUDRATE);
  //Console.begin();// Don't use Console here, since it is conflict with the ThinkSpeak library.
  ThingSpeak.begin(client);
  if (!rf95.init())
    //Console.println("init failed");
    ;
  // Setup ISM frequency
  rf95.setFrequency(frequency);
  // Setup Power,dBm
  rf95.setTxPower(13);
  //Console.println("Start Listening ");
}
// Gateway main loop: wait up to 2 s for a LoRa packet, ACK it back to the
// node, extract the three sensor bytes that follow the 3-byte node ID
// (bytes 3..5), and upload them as ThingSpeak fields 1-3.
// NOTE(review): the ID check accepts a packet when ANY of the first three
// bytes equals 1 (||); matching the node ID {1,1,1} presumably requires
// all three (&&) -- confirm against the node sketch.
void loop()
{
  if (rf95.waitAvailableTimeout(2000))// Listen Data from LoRa Node
  {
    uint8_t buf[RH_RF95_MAX_MESSAGE_LEN];//receive data buffer
    uint8_t len = sizeof(buf);//data buffer length
    if (rf95.recv(buf, &len))//Check if there is incoming data
    {
      //Console.println("Get Data from LoRa Node");
      if(buf[0] == 1||buf[1] == 1||buf[2] ==1) //Check if the ID match the LoRa Node ID
      {
        // Echo the node ID back in the first three bytes of the ACK.
        uint8_t data[] = " Server ACK";//Reply
        data[0] = buf[0];
        data[1] = buf[1];
        data[2] = buf[2];
        rf95.send(data, sizeof(data));// Send Reply to LoRa Node
        rf95.waitPacketSent();
        int newData[4] = {0, 0, 0, 0}; //Store Sensor Data here
        // Sensor payload starts after the 3-byte node ID.
        for (int i = 0; i < 3; i++)
        {
          newData[i] = buf[i + 3];
        }
        int h = newData[0];
        int t = newData[1];
        int b = newData[2];
        ThingSpeak.setField(1,h); //
        ThingSpeak.setField(2,t);
        ThingSpeak.setField(3,b);
        ThingSpeak.writeFields(myChannelNumber, myWriteAPIKey); // Send Data to IoT Server.
      }
    }
  }
  else
  {
    //Console.println("recv failed");
    ;
  }
}
<file_sep>#include <SPI.h>
#include <RH_RF95.h>
#include <SD.h>
#include <OneWire.h>
#include <DallasTemperature.h>
#define ONE_WIRE_BUS A3
OneWire oneWire(ONE_WIRE_BUS);
DallasTemperature sensors(&oneWire);
#define SensorPin A1 //pH meter Analog output to Arduino Analog Input 1
#define Turbidity_dpin A0
#define pH_dpin A1
#define DO_dpin A2
#define Temperature_dpin A3
RH_RF95 rf95;
const uint8_t pinTurbidity = A0;
const uint8_t pinPH = A1;
const uint8_t pinDO = A2;
const uint8_t pinTemperature = A3;
uint16_t datatemperature = 0;
int dataturbidity = 0;
unsigned long int avgValue; //Store the average value of the sensor feedback
float b;
int tuf[10],temp;
byte bGlobalErr;
float Temperature_dat[4];// Store Sensor Data
float Turbidity_dat[3];
float pH_dat[4];
char node_id[3] = {1, 1, 1}; //LoRa End Node ID
float frequency = 868.0;
unsigned int count = 1;
void setup()
{
sensors.begin();
Serial.begin(9600);
if (!rf95.init())
Serial.println("init failed");
// Setup ISM frequency
rf95.setFrequency(frequency);
// Setup Power,dBm
rf95.setTxPower(13);
rf95.setSyncWord(0x34);
Serial.println("LoRa End Node Example --");
Serial.println(" Temperature and Turbidity Sensor\n");
Serial.print("LoRa End Node ID: ");
for (int i = 0; i < 3; i++)
{
Serial.print(node_id[i], HEX);
}
Serial.println();
}
void loop()
{
int i;
for (i = 0; i < 5; i++){
sensors.requestTemperatures();
Temperature_dat[i]= sensors.getTempCByIndex(0);}
int Turbidity_or;
for(int i=0;i<10;i++) //Get 10 sample value from the sensor for smooth the value
{
tuf[i]=analogRead(SensorPin);
delay(10);
}
for(int i=0;i<9;i++) //sort the analog from small to large
{
for(int j=i+1;j<10;j++)
{
if(tuf[i]>tuf[j])
{
temp=tuf[i];
tuf[i]=tuf[j];
tuf[j]=temp;
}
}
}
avgValue=0;
for(int i=2;i<8;i++) //take the average value of 6 center sample
avgValue+=tuf[i];
float phValue=(float)avgValue*5.0/1024/6; //convert the analog into millivolt
pH_dat[0]=3.5*phValue;
delay(100);
Turbidity_or = analogRead(Turbidity_dpin); //Get A0 Status
float voltage = Turbidity_or * (5.0 / 1024.0);
Turbidity_dat[1]= ((-1120.4*voltage*voltage)+(5742.3*voltage)-(4352.9));
delay(100);
Serial.print("########### ");
Serial.print("COUNT=");
Serial.print(count);
Serial.println(" ###########");
count++;
char data[50] = {0} ;
int dataLength = 6; // Payload Length
// Use data[0], data[1],data[2] as Node ID
data[0] = node_id[0] ;
data[1] = node_id[1] ;
data[2] = node_id[2] ;
data[3] = Temperature_dat[0];
// data[4] = Temperature_dat[1];//Get Temperature Integer Part
data[4] = Turbidity_dat[1];//Get Temperature Decimal Part
data[5] = pH_dat[0];
//Get Temperature Decimal Part
rf95.send(data, sizeof(data));
Serial.print(" Current Temperature = ");
Serial.print(data[3], DEC);//Show humidity
// Serial.print(".");
//Serial.print(data[4], DEC);//Show humidity
Serial.print(" C ");
delay(300);
Serial.print(" Current Turbidity = ");
Serial.print(data[4], DEC);//Show humidity
Serial.print(" NTU");
delay(300);
Serial.print(" pH = ");
Serial.print(data[5], DEC);//Show humidity
// Serial.print(data[7], DEC);//Show humidity
}
<file_sep>#include <lmic.h>
#include <OneWire.h>
#include <DallasTemperature.h>
#include <hal/hal.h>
#include <SPI.h>
#include <SoftwareSerial.h>
#include <TinyGPS.h>
TinyGPS gps;
SoftwareSerial ss(12, 13); // Arduino RX, TX to conenct to GPS module.
#define ONE_WIRE_BUS 4
#define Turbidity_dpin A0
OneWire oneWire(ONE_WIRE_BUS);
DallasTemperature sensors(&oneWire);
float temperature;
int turbidity;
float turbidity1;
uint16_t temp;
int turb;
static void smartdelay(unsigned long ms);
unsigned int count = 1; //For times count
float flat, flon,falt;
static uint8_t mydata[19] = {0x05,0x67,0x00,0x00,0x02,0x67,0x00,0x00,0x03,0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
/* LoRaWAN NwkSKey, network session key
This is the default Semtech key, which is used by the prototype TTN
network initially.
ttn*/
static const PROGMEM u1_t NWKSKEY[16] = { };
/* LoRaWAN AppSKey, application session key
This is the default Semtech key, which is used by the prototype TTN
network initially.
ttn*/
static const u1_t PROGMEM APPSKEY[16] = { };
/*
LoRaWAN end-device address (DevAddr)
See http://thethingsnetwork.org/wiki/AddressSpace
ttn*/
static const u4_t DEVADDR = ;
/* These callbacks are only used in over-the-air activation, so they are
left empty here (we cannot leave them out completely unless
DISABLE_JOIN is set in config.h, otherwise the linker will complain).*/
void os_getArtEui (u1_t* buf) { }
void os_getDevEui (u1_t* buf) { }
void os_getDevKey (u1_t* buf) { }
static osjob_t initjob,sendjob,blinkjob;
/* Schedule TX every this many seconds (might become longer due to duty
cycle limitations).*/
const unsigned TX_INTERVAL = 10;
// Pin mapping
const lmic_pinmap lmic_pins = {
.nss = 10,
.rxtx = LMIC_UNUSED_PIN,
.rst = 9,
.dio = {2, 6, 7},
};
// LMIC send job: unless a TX/RX is already pending, refresh the GPS fix
// (GPSRead), the temperature/turbidity readings (Tem), print them, and
// queue `mydata` for uplink on port 1. The next transmission is scheduled
// from the EV_TXCOMPLETE handler in onEvent().
void do_send(osjob_t* j){
  // Check if there is not a current TX/RX job running
  if (LMIC.opmode & OP_TXRXPEND) {
    Serial.println("OP_TXRXPEND, not sending");
  } else {
    GPSRead();
    Tem();
    printdata();
    // Prepare upstream data transmission at the next possible time.
    // LMIC_setTxData2(1,datasend,sizeof(datasend)-1,0);
    LMIC_setTxData2(1, mydata, sizeof(mydata), 0);
    Serial.println("Packet queued");
    Serial.print("LMIC.freq:");
    Serial.println(LMIC.freq);
    Serial.println("Receive data:");
  }
  // Next TX is scheduled after TX_COMPLETE event.
}
void onEvent (ev_t ev) {
Serial.print(os_getTime());
Serial.print(": ");
Serial.println(ev);
switch(ev) {
case EV_SCAN_TIMEOUT:
Serial.println(F("EV_SCAN_TIMEOUT"));
break;
case EV_BEACON_FOUND:
Serial.println(F("EV_BEACON_FOUND"));
break;
case EV_BEACON_MISSED:
Serial.println(F("EV_BEACON_MISSED"));
break;
case EV_BEACON_TRACKED:
Serial.println(F("EV_BEACON_TRACKED"));
break;
case EV_JOINING:
Serial.println(F("EV_JOINING"));
break;
case EV_JOINED:
Serial.println(F("EV_JOINED"));
break;
case EV_RFU1:
Serial.println(F("EV_RFU1"));
break;
case EV_JOIN_FAILED:
Serial.println(F("EV_JOIN_FAILED"));
break;
case EV_REJOIN_FAILED:
Serial.println(F("EV_REJOIN_FAILED"));
break;
case EV_TXCOMPLETE:
Serial.println(F("EV_TXCOMPLETE (includes waiting for RX windows)"));
Serial.println(F(""));
if(LMIC.dataLen) {
// data received in rx slot after tx
Serial.print(F("Data Received: "));
Serial.write(LMIC.frame+LMIC.dataBeg, LMIC.dataLen);
Serial.println();
}
// Schedule next transmission
os_setTimedCallback(&sendjob, os_getTime()+sec2osticks(TX_INTERVAL), do_send);
break;
case EV_LOST_TSYNC:
Serial.println(F("EV_LOST_TSYNC"));
break;
case EV_RESET:
Serial.println(F("EV_RESET"));
break;
case EV_RXCOMPLETE:
// data received in ping slot
Serial.println(F("EV_RXCOMPLETE"));
break;
case EV_LINK_DEAD:
Serial.println(F("EV_LINK_DEAD"));
break;
case EV_LINK_ALIVE:
Serial.println(F("EV_LINK_ALIVE"));
break;
default:
Serial.println(F("Unknown event"));
break;
}
}
void setup() {
// initialize digital pin as an output.
Serial.begin(9600);
ss.begin(9600);
sensors.begin();
while(!Serial);
Serial.println(F("LoRa GPS Example---- "));
Serial.println(F("Connect to TTN"));
#ifdef VCC_ENABLE
// For Pinoccio Scout boards
pinMode(VCC_ENABLE, OUTPUT);
digitalWrite(VCC_ENABLE, HIGH);
delay(1000);
#endif
// LMIC init
os_init();
// Reset the MAC state. Session and pending data transfers will be discarded.
LMIC_reset();
/*LMIC_setClockError(MAX_CLOCK_ERROR * 1/100);
Set static session parameters. Instead of dynamically establishing a session
by joining the network, precomputed session parameters are be provided.*/
#ifdef PROGMEM
/* On AVR, these values are stored in flash and only copied to RAM
once. Copy them to a temporary buffer here, LMIC_setSession will
copy them into a buffer of its own again.*/
uint8_t appskey[sizeof(APPSKEY)];
uint8_t nwkskey[sizeof(NWKSKEY)];
memcpy_P(appskey, APPSKEY, sizeof(APPSKEY));
memcpy_P(nwkskey, NWKSKEY, sizeof(NWKSKEY));
LMIC_setSession (0x1, DEVADDR, nwkskey, appskey);
#else
// If not running an AVR with PROGMEM, just use the arrays directly
LMIC_setSession (0x1, DEVADDR, NWKSKEY, APPSKEY);
#endif
// Disable link check validation
LMIC_setLinkCheckMode(0);
// TTN uses SF9 for its RX2 window.
LMIC.dn2Dr = DR_SF9;
// Set data rate and transmit power (note: txpow seems to be ignored by the library)
LMIC_setDrTxpow(DR_SF7,14);
// Start job
do_send(&sendjob);
}
// Read the current TinyGPS fix into the globals flat/flon/falt, scale to
// fixed-point integers (lat/lon x10000, alt x100), and pack them big-endian
// into mydata[10..18] as three 24-bit signed values.
// NOTE(review): the three `x == GPS_INVALID_F_ANGLE ? ... : x, n;`
// statements have no effect -- the ternary/comma results are discarded, so
// invalid fixes are NOT sanitized before scaling. Presumably an assignment
// was intended; confirm.
void GPSRead()
{
  unsigned long age;
  gps.f_get_position(&flat, &flon, &age);
  falt=gps.f_altitude(); //get altitude
  flon == TinyGPS::GPS_INVALID_F_ANGLE ? 0.0 : flon, 6;//save six decimal places
  flat == TinyGPS::GPS_INVALID_F_ANGLE ? 0.0 : flat, 6;
  falt == TinyGPS::GPS_INVALID_F_ANGLE ? 0.0 : falt, 2;//save two decimal places
  // Fixed-point scaling before packing into the payload.
  int32_t lat = flat * 10000;
  int32_t lon = flon * 10000;
  int32_t alt = falt * 100;
  // Big-endian, 3 bytes per value.
  mydata[10] = lat >> 16;
  mydata[11] = lat >> 8;
  mydata[12] = lat;
  mydata[13] = lon >> 16;
  mydata[14] = lon >> 8;
  mydata[15] = lon;
  mydata[16] = alt >> 16;
  mydata[17] = alt >> 8;
  mydata[18] = alt;
}
// Sample the DS18B20 temperature (via DallasTemperature) and the analog
// turbidity sensor, convert the turbidity ADC reading to NTU with the
// vendor polynomial, then store both values x10 as big-endian 16-bit
// integers in the payload (mydata[2..3] and mydata[6..7]).
void Tem()
{
  sensors.requestTemperatures();
  temperature = sensors.getTempCByIndex(0); //Temperature detection
  turbidity1 = analogRead(Turbidity_dpin); //Get A0 Status
  // ADC counts -> volts (10-bit ADC, 5 V reference).
  float voltage = turbidity1 * (5.0 / 1024.0);
  // Vendor calibration polynomial: volts -> NTU.
  turbidity = ((-1120.4*voltage*voltage)+(5742.3*voltage)-(4352.9));
  // Scale by 10 to keep one decimal place in the integer payload.
  temp = temperature * 10;
  turb = turbidity * 10;
  mydata[2] = temp >> 8;
  mydata[3]= temp & 0xFF;
  mydata[6] = turb >> 8;
  mydata[7] = turb & 0xFF;
}
void printdata(){
Serial.print(F("########### "));
Serial.print(F("NO."));
Serial.print(count);
Serial.println(F(" ###########"));
count++;
Serial.println(F("The temperautre and turbidity :"));
Serial.print(F("["));
Serial.print(temperature);
Serial.print(F("℃"));
Serial.print(F(","));
Serial.print(turbidity);
Serial.print(F("NTU"));
Serial.print(F("]"));
Serial.println(F(""));
if(flon!=1000.000000)
{
Serial.println(F("The longtitude and latitude and altitude are:"));
Serial.print(F("["));
Serial.print(flon);
Serial.print(F(","));
Serial.print(flat);
Serial.print(F(","));
Serial.print(falt);
Serial.print(F("]"));
Serial.println(F(""));
}
smartdelay(1000);
}
// Delay for `ms` milliseconds while continuously feeding bytes from the
// GPS software-serial port into the TinyGPS parser, so NMEA sentences are
// not dropped during the wait.
static void smartdelay(unsigned long ms)
{
  unsigned long start = millis();
  do
  {
    while (ss.available())
    {
      gps.encode(ss.read());
    }
  } while (millis() - start < ms);
}
// Arduino main loop: hand control to the LMIC run loop, which dispatches
// scheduled jobs (including do_send) and radio events.
void loop() {
  os_runloop_once();
}
| deb6d4923fe708901669e384e4a19b4afef0b419 | [
"C++"
] | 3 | C++ | pranjal53/watermonitoring | 446c26f5b8103e94ffbaac34b7bd435bae4e60d3 | e1550ddf4b3378452702ed46a8bd39e62ff01690 |
refs/heads/master | <repo_name>PeterZhouSZ/TranslationSync<file_sep>/data/disco-bp/DLib/DGaussianMixtureModel.cpp
#include <DGaussianMixtureModel.h>
#include <DCluster.h>
#include <DMultiDMatrix.h>
#include <float.h>
#include <DMath.h>
using namespace std;
// Learn a Gaussian mixture model over 2-d data via EM, seeding the initial
// component means/covariances/weights from a k-means clustering of the data.
//
//   data            : one observation per row; 2 columns are assumed (the
//                     per-cluster scratch matrices below are allocated 2 wide)
//   mixture_count   : requested number of components; reduced when k-means
//                     produces fewer non-empty clusters
//   delta_thresh    : EM log-likelihood convergence threshold (see the
//                     other learn() overload)
//   max_iters       : EM iteration cap
//   replicate_count : number of k-means restarts
template<class T>
void DGaussianMixtureModel<T>::learn(const _DMatrix<T> &data, int mixture_count,
float delta_thresh, int max_iters, int replicate_count)
{
  KMeansCluster<T> km(mixture_count, replicate_count);
  _DMatrix<int> assignments = km.do_clustering(data);
  // cout << assignments << endl;
  // There are two special results of do_clustering that must be handled here:
  //
  // (a) it could be that do_clustering produced fewer than mixture_count
  // clusters, due to the make-up of the data.
  // (b) it could be that a cluster contains only one (unique) point, in
  // which case the estimate of the covariance matrix will be singular.
  //
  // handle case (a). This works because do_clustering() gives empty clusters
  // the highest cluster id's. case(b) is handled in other learn().
  mixture_count = km.get_actual_cluster_count();
  vector<DGaussianModel<T> > initial_models(mixture_count);
  vector<int> counts(mixture_count, 0);
  vector<float> initial_weights(mixture_count, 0);
  // Count how many points k-means assigned to each cluster.
  for(int i=0; i<data.rows(); i++)
    counts[assignments[0][i]]++;
  _DMatrixArray<T> datas(3,mixture_count);
  for(int i=0; i<mixture_count; i++)
  {
    datas.get(i) = _DMatrix<T>(counts[i], 2);
    // Initial component weight = fraction of points in the cluster.
    initial_weights[i] = counts[i] / float(data.rows());
  }
  // Scatter the data points into per-cluster matrices.
  vector<int> indices(mixture_count, 0);
  for(int i=0; i<data.rows(); i++)
  {
    int assign = assignments[0][i];
    (datas.get(assign))[indices[assign]][0] = data[i][0];
    (datas.get(assign))[indices[assign]][1] = data[i][1];
    indices[assign]++;
  }
  for(int i=0; i<mixture_count; i++)
  {
    // cout << "-------> " << i << datas.get(i) << endl;
    // cout<< "kkkkk " << i << endl;
    // cout<< datas.get(i) << endl;
    // handle case (b) above.
    /* if(counts[i] == 1)
    {
    counts[i]++;
    datas.get(i) = vert_concat(datas.get(i), datas.get(i));
    }
    */
    initial_models[i] = DGaussianModel<T>(datas.get(i));
    // Guard against degenerate covariance estimates: first try zeroing the
    // off-diagonal terms; if the covariance is still bad, fall back to a
    // fixed isotropic covariance (10 * identity).
    if(initial_models[i].is_covariance_bad() || isnan(initial_models[i].log_constant()))
    {
      _DMatrix<T> cov = initial_models[i].covariance();
      cov[0][1] = cov[1][0] = 0;
      // cout << "here" << endl;
      initial_models[i].set_covariance(cov);
      if(initial_models[i].is_covariance_bad() || isnan(initial_models[i].log_constant()))
        initial_models[i].set_covariance(_DMatrix<T>(2,2,_DMatrix<T>::identity) * 10.0);
    }
    // initial_models[i] = DGaussianModel<T>(datas.get(i) + _DMatrix<T>(counts[i], 2, _DMatrix<T>::random) * 5.0 - 2.5);
    // cout<< initial_models[i] << endl;
  }
  // Refine the k-means seed with EM (the overload below).
  learn(data, initial_models, initial_weights, delta_thresh, max_iters);
  return;
}
// EM refinement of a Gaussian mixture model from a given starting point.
// Alternates the E-step (posterior responsibility P[i][j] of component j for
// point i) with the M-step (re-estimate weights, means, covariances) until
// the total data log-likelihood changes by less than delta_thresh, or
// max_iters iterations are reached.  The final log-likelihood is stored in
// _model_likelihood.  The covariance update is hard-coded for 2-d data
// (2x2 matrices), matching the other learn() overload.
template<class T>
void DGaussianMixtureModel<T>::learn(const _DMatrix<T> &data, const std::vector<DGaussianModel<T> > &initial_model,
const std::vector<float> &initial_weights, float delta_thresh,
int max_iters)
{
  int mixture_count = initial_model.size();
  assert((int)initial_weights.size() == mixture_count);
  weights = initial_weights;
  models = initial_model;
  // G[i][j] holds the (non-log) likelihood of point i under component j.
  _DMatrix<float> G(data.rows(), mixture_count);
  float last_like = -1e50, this_like = 0;
  _DMatrix<T> s1(1,data.cols());
  T *s1_ptr = s1[0];
  // P[i][j] holds the E-step responsibilities.
  _DMatrix<float> P(data.rows(), mixture_count);
  _DMatrix<T> new_covars(2,2);
  T *new_covars_ptr = new_covars[0];
  int q=0;
  for(q=0; q<max_iters; q++)
  {
    // cout << "ITERATION " << q << endl;
    this_like = 0;
    // dp->begin(4);
    // Fill G with per-component per-point likelihoods (exponentiated from
    // the log-likelihoods the component models return).
    for(int j=0; j<mixture_count; j++)
    {
      std::vector<T> likelihoods;
      // cout<< "mixture: " << j << endl << "+++++++++++ cov" << endl;
      // cout<< models[j].covariance() << endl << "+++++++ inv cov " << models[j].inverse_covariance() << " +++++ log c " << models[j].log_constant() << " +++++ det " << models[j].determinant_of_covariance() << " ++++++ mean " << models[j].mean() << endl;
      models[j].get_data_likelihood(data, likelihoods);
      typename vector<T>::const_iterator likelihood_iter;
      float *G_ptr = G[0]+j;
      // FIXME
      int i=0;
      for(likelihood_iter = likelihoods.begin(); likelihood_iter != likelihoods.end(); ++likelihood_iter, G_ptr += G.cols(), i++)
      {
        *G_ptr = fast_exp(*likelihood_iter);
        // cout << "yyy " << i << " " << j << " " << *likelihood_iter << " " << fast_exp(*likelihood_iter) << " " << exp(*likelihood_iter) << endl;
      }
    }
    // dp->end(4);
    // dp->begin(5);
    // Total data log-likelihood under the current mixture.
    float *G_ptr = G[0];
    for(int i=0; i<data.rows(); i++)
    {
      float l = 0;
      for(int j=0; j<mixture_count; j++)
      {
        l += *(G_ptr++) * weights[j];
      }
      // cout << " === " << l << " " << log(l) << " " << fast_log(l) << endl;
      this_like += log(l);
    }
    // for(float iii = 1e-10; iii < 1e10; iii *= 2.0)
    // cout << " yyy " << iii << " " << log(iii) << " " << fast_log(iii) << endl;
    // cout << "ITERATION " << q << " " << this_like << " " << fabs(this_like - last_like) << " " << delta_thresh << endl;
    // if likelihood hasn't changed much this iteration, then break
    if(fabs(this_like - last_like) < delta_thresh)
      break;
    last_like = this_like;
    // dp->end(5);
    // dp->begin(6);
    // the e-step
    for(int i=0; i<data.rows(); i++)
    {
      float sum_j = 0;
      G_ptr = G[i];
      float *P_ptr = P[i];
      for(int j=0; j<mixture_count; j++)
      {
        float tmp = weights[j] * G_ptr[j];
        sum_j += tmp;
        P_ptr[j] = tmp;
      }
      // Normalize responsibilities; the +/-1e100 branches are sentinel
      // values for the degenerate case where all likelihoods underflowed.
      for(int j=0; j<mixture_count; j++)
      {
        if(sum_j != 0)
          P_ptr[j] /= sum_j;
        else if(P_ptr[j])
          P_ptr[j] = 1e100;
        else
          P_ptr[j] = -1e100;
        // cout<< "xxx " << i << " " << j << " " << P_ptr[j] << " " << weights[j] << " " << G_ptr[j] << " " << weights[j] * G_ptr[j] << endl;
      }
    }
    // dp->end(6);
    // dp->begin(7);
    // the m-step
    for(int j=0; j<mixture_count; j++)
    {
      // Responsibility-weighted sum of the data (s1) and total
      // responsibility mass (s) for component j.
      s1=0;
      float s=0;
      T *data_cp = data[0];
      float *P_ptr = P[0]+j;
      for(int i=0; i<data.rows(); i++, P_ptr += P.cols())
      {
        float p = *P_ptr;
        for(int l=0; l<data.cols(); l++)
          s1_ptr[l] += p * *(data_cp++);
        s += p;
      }
      // cout << "s = " << s << endl;
      weights[j] = s / data.rows();
      // not really clear what to do when s is 0. It should never happen, but
      // presumably could.
      models[j].set_mean(s1.transpose() / s);
      if(isnan(models[j].mean()[0][0]))
        models[j].set_mean(_DMatrix<T>(2,1,0.0));
      // cout<< "MEAN " << j << " " << models[j] << " " << s << endl;
      // Responsibility-weighted 2x2 covariance around the new mean.
      new_covars = 0;
      data_cp = data[0];
      float x_mean = (models[j].mean())[0][0];
      float y_mean = (models[j].mean())[0][1];
      P_ptr = P[0]+j;
      for(int i=0; i<data.rows(); i++, P_ptr += P.cols())
      {
        float x_diff = *(data_cp++) - x_mean;
        float y_diff = *(data_cp++) - y_mean;
        float p = *P_ptr;
        new_covars_ptr[0] += p * x_diff * x_diff;
        new_covars_ptr[1] += p * x_diff * y_diff;
        new_covars_ptr[3] += p * y_diff * y_diff;
      }
      new_covars[1][0] = new_covars[0][1];
      models[j].set_covariance(new_covars / s);
      // a negative determinant here is bad, since the covariance matrix should be
      // positive definite.
      //
      // We want to make sure that the smallest eigenvalue is not too small (e.g. < k).
      // In this context the smallest eigenvalue corresponds to the covariance of
      // the dimension with the lowest covariance when rotated to be an axis-oriented
      // gaussian. So k=1 is probably good.
      if(models[j].is_covariance_bad() || isnan(models[j].log_constant()))
      {
        _DMatrix<T> cov = models[j].covariance();
        cov[0][1] = cov[1][0] = 0;
        models[j].set_covariance(cov);
        if(models[j].is_covariance_bad() || isnan(models[j].log_constant()))
          models[j].set_covariance(_DMatrix<T>(2,2,_DMatrix<T>::identity) * 10.0);
      }
    }
    // dp->end(7);
  }
  // cout << "FINAL MODEL: " << endl;
  // for(int i=0; i<mixture_count; i++)
  // cout << i << " " << models[i] << " peak " << exp(models[i].log_constant()) * weights[i] << " weight " << weights[i] << endl;
  _model_likelihood = this_like;
}
// Total log-likelihood of the data under the current mixture; discards the
// per-point values computed by the two-argument overload.
template<class T>
T DGaussianMixtureModel<T>::get_data_likelihood(const _DMatrix<T> &data)
{
  std::vector<T> tmp;
  return get_data_likelihood(data, tmp);
}
// Per-point log-likelihoods under the mixture: for data row j,
//   likelihoods[j] = log( sum_i weights[i] * exp(loglike of row j under model i) ).
// Returns the sum of the per-point log-likelihoods (total data log-likelihood).
template<class T>
T DGaussianMixtureModel<T>::get_data_likelihood(const _DMatrix<T> &data, std::vector<T> &likelihoods)
{
  // std::cout << "--> in gdl data " << data << " weights " << weights[0] << " " << weights[1] << " " << weights[2] << std::endl;
  // std::cout << models[0] << " " << models[1] << " " << models[2] << std::endl;
  likelihoods = vector<T>(data.rows(), 0);
  // Accumulate the weighted component densities in probability space.
  for(int i=0; i<models.size(); i++)
  {
    vector<T> likes;
    models[i].get_data_likelihood(data, likes);
    // cout << " l of model " << i << " " << likes[0] << endl;
    for(int j=0; j<data.rows(); j++)
      likelihoods[j] += weights[i] * exp(likes[j]);
  }
  // Convert back to log space.
  for(int j=0; j<data.rows(); j++)
    likelihoods[j] = log(likelihoods[j]);
  return accumulate(likelihoods.begin(), likelihoods.end(), T(0.0));
}
#define DECLARE(x) \
template class DGaussianMixtureModel<x>;
DECLARE(double)
DECLARE(float)
<file_sep>/src/plot_critical.py
"""Plot a histogram of the numbers found on one line of a numeric text file.

Usage: python plot_critical.py <input-file> <0-based-line-index>
"""
import sys
import matplotlib
import matplotlib.pyplot as plt

with open(sys.argv[1], 'r') as fin:
    # Pick a single line of the file by 0-based index.
    line = fin.readlines()[int(sys.argv[2])]

# split() (no argument) tolerates runs of whitespace; the original
# split(' ') produced empty tokens on double spaces and crashed float().
numbers = [float(s) for s in line.strip().split()]

# `normed` was deprecated and then removed from matplotlib;
# `density=True` is the equivalent normalized histogram.
plt.hist(numbers, 10000, density=True, facecolor='green', alpha=0.75)
plt.show()
# plt.plot()
<file_sep>/data/disco-bp/DLib/DImageIO.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "stdio.h"
#include <fstream>
#include <iostream>
#include "DImage.h"
#include <assert.h>
using namespace std;
// Read a binary PPM ("P6") file into a 3-plane DImage.
// Comment lines ('#') before the dimension line are skipped.
// NOTE(review): maxval is parsed but otherwise ignored -- samples are
// assumed to be single bytes (maxval <= 255); also, a comment line between
// the dimensions and maxval would not be skipped. Confirm against the
// producers of these files.
DImage ReadPPMImage(const char *filename)
{
  FILE *fp = fopen(filename, "rb");
  char temp[1024];
  assert(fp);
  // Magic number must be "P6" (binary RGB).
  fgets(temp, 1024, fp);
  assert(temp[0] == 'P' && temp[1] == '6');
  int width, height, maxval;
  // Skip comment lines, then read "width height".
  do {
    fgets(temp, 1024, fp);
  } while(temp[0] == '#');
  sscanf(temp, "%d %d", &width, &height);
  fgets(temp, 1024, fp);
  sscanf(temp, "%d", &maxval);
  DImage image(3, height, width);
  // Pixel data is interleaved R,G,B, one byte per sample.
  for(int i=0; i<height; i++)
    for(int j=0; j<width; j++)
      for(int k=0; k<3; k++)
      {
        image[k][i][j] = fgetc(fp);
      }
  fclose(fp);
  return image;
}
// Read a binary PBM ("P4") bitmap into a 1-plane DImage, expanding each
// packed bit to a 0/255 byte. Rows are padded to a whole number of bytes,
// as the PBM format requires.
// NOTE(review): unlike ReadPPMImage, fopen's result is not checked here --
// a missing file would crash on fgets; confirm whether an assert(fp) was
// intended.
DImage ReadPBMImage(const char *filename)
{
  FILE *fp = fopen(filename, "rb");
  char temp[1024];
  // Magic number must be "P4" (binary bitmap).
  fgets(temp, 1024, fp);
  assert(temp[0] == 'P' && temp[1] == '4');
  int width, height;
  // Skip comments and blank lines, then read "width height".
  do {
    fgets(temp, 1024, fp);
  } while(temp[0] == '#' || temp[0]=='\n');
  sscanf(temp, "%d %d", &width, &height);
  DImage image(1, height, width);
  for(int i=0; i<height; i++)
    for(int j=0; j<ceil(width/8.0); j++)
    {
      int tmp = fgetc(fp);
      // Unpack the 8 bits of this byte, most-significant bit first,
      // discarding padding bits past the row width.
      for(int k=128, l=0; k>=1 && l<8; k=k>>1, l++)
      {
        if(j*8+l < width)
          image[0][i][j*8+l] = (tmp & k)?255:0;
      }
    }
  fclose(fp);
  return image;
}
// Write a 3-plane image as a binary PPM ("P6") file; a 1-plane image is
// delegated to the single-plane overload (written as gray RGB). Any other
// plane count fails the assert.
void WritePPMImage(const _DImage<unsigned char> &img, const char *filename)
{
  if(img.planes() == 1)
    return WritePPMImage(img[0], filename);
  assert(img.planes() == 3);
  FILE *fp = fopen(filename, "wb");
  // write magic number
  fprintf(fp, "P6\n");
  // write dimensions
  fprintf(fp, "%d %d\n", img.cols(), img.rows());
  // write max pixel value
  fprintf(fp, "255\n");
  // Interleave the three planes as R,G,B bytes.
  for(int i=0; i<img.rows(); i++)
    for(int j=0; j<img.cols(); j++)
      for(int k=0; k<3; k++)
        fputc(int(img[k][i][j]), fp);
  fclose(fp);
  return;
}
// Write a single-plane (grayscale) image as a binary PPM ("P6") file by
// emitting each sample three times (R = G = B), i.e. a gray RGB image.
void WritePPMImage(const _DPlane<unsigned char> &img, const char *filename)
{
  FILE *fp = fopen(filename, "wb");
  // write magic number
  fprintf(fp, "P6\n");
  // write dimensions
  fprintf(fp, "%d %d\n", img.cols(), img.rows());
  // write max pixel value
  fprintf(fp, "255\n");
  // Each gray sample becomes an identical R,G,B triple.
  for(int i=0; i<img.rows(); i++)
    for(int j=0; j<img.cols(); j++)
      for(int k=0; k<3; k++)
        fputc(int(img[i][j]), fp);
  fclose(fp);
  return;
}
#ifdef IMGIO_SUPPORT
#include <corona.h>
static _DImage<unsigned char> load_helper(corona::Image *image, bool want_alpha);
// Load an image file via the corona library, decoded as RGBA, and convert
// it to a 3-plane (or 4-plane, when want_alpha) DImage via load_helper.
// Throws a std::string (not a std::exception subclass -- callers must
// catch std::string) on failure to open/decode.
_DImage<unsigned char> LoadDImage(const char *fname, bool want_alpha)
{
  corona::Image* image = corona::OpenImage(fname, corona::PF_R8G8B8A8);
  if (!image)
    throw(std::string("couldn't open image " + std::string(fname)));
  return load_helper(image, want_alpha);
}
_DImage<unsigned char> LoadDImage(const void *buffer, int buf_size, bool want_alpha)
{
corona::File* file = corona::CreateMemoryFile(buffer, buf_size);
if (!file)
throw(std::string("couldn't create memory file "));
corona::Image* image = corona::OpenImage(file, corona::PF_R8G8B8A8);
if (!image)
throw(std::string("couldn't open image "));
_DImage<unsigned char> img = load_helper(image, want_alpha);
delete file;
return img;
}
// Copy an interleaved corona RGBA image into a planar _DImage with 3
// (RGB) or 4 (RGBA) planes, then free the corona image.  The input is
// always requested as PF_R8G8B8A8, so each pixel is 4 bytes: R,G,B,A.
static _DImage<unsigned char> load_helper(corona::Image *image, bool want_alpha)
{
  int width = image->getWidth();
  int height = image->getHeight();
  void* pixels = image->getPixels();
  int planes = want_alpha ? 4 : 3;

  _DImage<unsigned char> result(planes, height, width);

  // we're guaranteed that the first eight bits of every pixel is red,
  // the next eight bits is green, and so on...
  // out_cp4 is only initialized/used when want_alpha is true.
  unsigned char *out_cp1 = result[0][0], *out_cp2 = result[1][0], *out_cp3 = result[2][0], *out_cp4;;
  unsigned char *in_cp = (unsigned char *) pixels;
  if(want_alpha) out_cp4 = result[3][0];

  int sz = height * width;
  // De-interleave: walk the RGBA stream once, scattering channels into
  // their respective planes.
  for(int i=0; i < sz; i++)
  {
    *(out_cp1++) = *(in_cp++);
    *(out_cp2++) = *(in_cp++);
    *(out_cp3++) = *(in_cp++);
    if(want_alpha)
      *(out_cp4++) = *in_cp;
    in_cp++; // skip over alpha channel
  }
  delete image;
  return result;
}
// Save a 1-plane (grayscale, replicated to RGB) or 3-plane (RGB) image
// as a PNG file via the corona library.
// Throws a std::string for any other plane count.
void SaveDImage(const char *filename, const _DImage<unsigned char> &img)
{
  // Validate before allocating, so the unsupported-plane-count error
  // path no longer leaks the pixel buffer (previously `buf` was
  // allocated before this check and never freed on throw).
  if(img.planes() != 1 && img.planes() != 3)
    throw std::string("SaveDImage only supports images with 1 or 3 planes");

  char *buf = new char[img.rows() * img.cols() * 3];

  unsigned char *in_cp1, *in_cp2, *in_cp3;
  if(img.planes() == 1)
    in_cp1 = in_cp2 = in_cp3 = img[0][0];   // replicate the gray plane
  else
    in_cp1 = img[0][0], in_cp2 = img[1][0], in_cp3 = img[2][0];

  // Interleave the planar data into the R,G,B byte stream corona expects.
  unsigned char *out_cp = (unsigned char *) buf;
  for(int i=0; i < img.rows() * img.cols(); i++)
  {
    *(out_cp++) = *(in_cp1++);
    *(out_cp++) = *(in_cp2++);
    *(out_cp++) = *(in_cp3++);
  }

  corona::Image *image = corona::CreateImage(img.cols(), img.rows(), corona::PF_R8G8B8, buf);
  corona::SaveImage(filename, corona::FF_PNG, image);
  delete image;
  delete[] buf;
}
// Convenience overload: wrap a single plane in a 1-plane _DImage and
// delegate to the image overload above.
void SaveDImage(const char *filename, const _DPlane<unsigned char> &img)
{
  SaveDImage(filename, _DImage<unsigned char>(img));
}
#endif
<file_sep>/src/graph.py
import sys
import numpy
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
from settings import *
import shapely
from shapely.geometry import Polygon
from descartes import PolygonPatch
from utils import *

# Plot benchmark results stored under uniform_n1p2_<sigma>/graph<id>_final.
#   sys.argv[1]: noise level sigma (e.g. "0.1")
#   sys.argv[2]: graph id (1-4)
#   sys.argv[3]: mode -- 'graph*' = error curves, 'time*' = average
#                running time, anything else = exact-recovery rate
folder = 'uniform_n1p2_'+sys.argv[1]+'/graph'+sys.argv[2]+'_final'
print("drawing figure with data under %s" % folder)

min_TL2, median_TL2, max_TL2, min_CD, median_CD, max_CD, tmean_TL2, tmean_CD, \
    zp_TL2, zp_CD, ratios = process(folder)

if sys.argv[3].startswith('graph'):
    plots = {'min CD':min_CD, 'median CD':median_CD, 'max CD':max_CD,
             'min TranSync':min_TL2, 'median TranSync':median_TL2,
             'max TranSync':max_TL2}
    # Build the filled band between each method's min and max curves;
    # the polygon is closed by walking the max curve in reverse.
    down_CD = [(x, y) for (x, y) in zip(ratios, min_CD)]
    up_CD = [(x, y) for (x, y) in zip(ratios, max_CD)]
    up_CD.reverse()
    down_TL2 = [(x, y) for (x, y) in zip(ratios, min_TL2)]
    up_TL2 = [(x, y) for (x, y) in zip(ratios, max_TL2)]
    up_TL2.reverse()
    area_1 = Polygon(down_CD+up_CD)
    area_2 = Polygon(down_TL2+up_TL2)
    solution = area_1.intersection(area_2)  # overlap of the two bands

    fig, ax = plt.subplots()
    patch = PolygonPatch(area_1, facecolor=colors['min CD'],
                         edgecolor=colors['min CD'], alpha=1.0)
    ax.add_patch(patch)
    patch = PolygonPatch(area_2, facecolor=colors['min TranSync'],
                         edgecolor=colors['min TranSync'], alpha=1.0)
    ax.add_patch(patch)
    patch = PolygonPatch(solution, facecolor='r', edgecolor='r')
    ax.add_patch(patch)
    for label in ['min CD', 'median CD', 'max CD', 'min TranSync',
                  'median TranSync', 'max TranSync']:
        ax.plot(ratios, plots[label], color=colors[label], label=label,
                linestyle=linestyles[label], linewidth=linewidths[label],
                marker=markers[label])
    gs = setting_map[folder.split('/')[-1]]
    legend = ax.legend(loc=gs['loc_g'], shadow=True, fontsize=gs['fs_g'])
    # Raw strings keep the TeX escapes (\sigma, \log, \|) out of
    # Python's escape processing (avoids invalid-escape warnings; the
    # resulting label text is unchanged).
    plt.title(gs['title']+r' ($\sigma = %s$)' % str(sys.argv[1]), fontsize=40)
    ax.set_yscale('log')
    plt.xlabel('$p$', fontsize=25)
    plt.ylabel(r'$\log \|x^*-x^{gt}\|_{\infty}$', fontsize=25)
    plt.axis([0.01, 1.0, 0, gs['max_diff']])
    plt.savefig('graph'+str(gs['id'])+'s0'+sys.argv[1].split('.')[-1]+'.eps')
elif sys.argv[3].startswith('time'):
    fig, ax = plt.subplots()
    plots = {'CD' : tmean_CD, 'TranSync':tmean_TL2}
    for label in ['CD', 'TranSync']:
        ax.plot(ratios, plots[label], color=colors[label], label=label,
                linestyle=linestyles[label], linewidth=linewidths[label],
                marker=markers[label])
    gs = setting_map[folder.split('/')[-1]]
    legend = ax.legend(loc=gs['loc_t'], shadow=True, fontsize=gs['fs_t'])
    plt.title(gs['title']+r' ($\sigma = %s$)' % str(sys.argv[1]), fontsize=40)
    plt.xlabel('$p$', fontsize=25)
    plt.ylabel('Average Running Time', fontsize=25)
    plt.axis([0.01, 1.0, 0, gs['max_time']])
    plt.savefig('time'+str(gs['id'])+'s0'+sys.argv[1].split('.')[-1]+'.eps')
else:
    fig, ax = plt.subplots()
    plots = {'CD' : zp_CD, 'TranSync':zp_TL2}
    for label in ['CD', 'TranSync']:
        ax.plot(ratios, plots[label], color=colors[label], label=label,
                linestyle=linestyles[label], linewidth=linewidths[label],
                marker=markers[label])
    gs = setting_map[folder.split('/')[-1]]
    legend = ax.legend(loc=gs['loc_t'], shadow=True, fontsize=gs['fs_t'])
    plt.title(gs['title']+r' ($\sigma = %s$)' % str(sys.argv[1]), fontsize=40)
    plt.xlabel('$p$', fontsize=25)
    plt.ylabel('Success Rate of Exact Recovery', fontsize=15)
    plt.axis([0.01, 1.0, 0, 1.0])
    plt.savefig('zp'+str(gs['id'])+'.eps')
<file_sep>/src/main.cpp
#include <iostream>
#include <vector>
#include <cstring>
#include <fstream>
#include <cmath>
#include <cassert>
#include "graph.h"
#include <algorithm>
#include <time.h>
#include <omp.h>
#include "Truncated_L2.h"
#include "CoordinateDescent.h"
using namespace std;
// Entry point: builds a synthetic translation-synchronization problem
// from the command-line parameters, runs T=100 resampled trials in
// parallel, solving each trial with both TranSync (Truncated_L2) and
// coordinate descent, and writes per-trial outputs plus a summary file.
int main(int argc, char** argv){
    if (argc <= 12){
        cerr << "./TranSync [n] [edge_density] [bias] [inc] [noise_type] [noise_ratio] [max_iter] [decay] [a/mean] [b/std_dev] [sigma] [output_name] (graph_file)" << endl;
        return 0;
    }
    auto seed = time(NULL);
    srand(seed);
    cerr << "random seed=" << seed << endl;
    // NOTE(review): `params` is never freed; harmless here since the
    // process exits right after, but a smart pointer would be tidier.
    Params* params = new Params(argc, argv);
    cerr << params->n << endl;
    Graph graph(params);
    //graph.dump("graph4.meta");
    //return 0;
    Truncated_L2 TL(params);
    CoordinateDescent CD(params);
    string output_name(argv[12]);

    vector<double> result_TL2;
    vector<double> result_CD;
    vector<double> time_TL2;
    vector<double> time_CD;
    int T = 100;   // number of resampled trials
    int P = 100;   // noise ratio is reported as a percentage (out of P)
    // Pre-size the result vectors so the parallel loop below can write
    // by index without synchronization.
    for (int t = 0; t < T; t++){
        result_TL2.push_back(t);
        result_CD.push_back(t);
        time_TL2.push_back(t);
        time_CD.push_back(t);
    }
    int nr = round((params->noise_ratio)*P);
    string nr_str = to_string(nr);
    #pragma omp parallel for
    for (int t = 0; t < T; t++){
        string str = to_string(t);
        // Each trial operates on its own resampled copy of the graph.
        Graph graph_t = graph.copy();
        graph_t.resample();
        // Wall-clock timing via omp_get_wtime (negate-then-add idiom).
        double time2 = -omp_get_wtime();
        double r2 = CD.solve(graph_t, output_name+"/ratio"+nr_str+"_"+str+".CD");
        time2 += omp_get_wtime();
        double time1 = -omp_get_wtime();
        double r1 = TL.solve(graph_t, output_name+"/ratio"+nr_str+"_"+str+".TL2");
        time1 += omp_get_wtime();
        result_TL2[t] = r1;
        time_TL2[t] = time1;
        result_CD[t] = r2;
        time_CD[t] = time2;
    }
    // One summary line per trial: TL2 error, TL2 time, CD error, CD time.
    ofstream fout(output_name+"/ratio"+nr_str+"_summary");
    for (int t = 0; t < T; t++){
        fout << result_TL2[t] << " " << time_TL2[t] << " " << result_CD[t] << " " << time_CD[t] << endl;
    }
    //double zp_TL2 = zero_prob(result_TL2);
    //double zp_CD = zero_prob(result_CD);
    cerr << "noise_ratio=" << params->noise_ratio << ", TL2=(" << min(result_TL2) << "," << median(result_TL2) << "," << max(result_TL2) << ")";
    cerr << ", CD=(" << min(result_CD) << "," << median(result_CD) << "," << max(result_CD) << ")";
    cerr << ", time mean: TL2=" << mean(time_TL2) << ", CD=" << mean(time_CD) << endl;
    fout.close();
    return 0;
}
<file_sep>/src/run_graph.sh
#!/bin/bash
# Sweep noise ratios for the four synthetic graph types.
#   $1 = first ratio (percent), $2 = step; ratios run $1, $1+$2, ... up to 99.
# Each run writes to resampling/graphN_uniform_ratio<r>/ and is skipped
# if that output directory already exists (cheap resume support).

#graph type 1
for ratio in `seq $1 $2 99`; do
    if [ ! -d "resampling/graph1_uniform_ratio${ratio}" ]; then
        mkdir -p resampling/graph1_uniform_ratio${ratio}
        make synthetic n=2000 edge_density=0.2 bias=1.0 inc=0.0 \
            noise_type=2 noise_ratio=0.${ratio} decay=0.9 \
            output=resampling/graph1_uniform_ratio${ratio}
    fi
done

#graph type 2
for ratio in `seq $1 $2 99`; do
    if [ ! -d "resampling/graph2_uniform_ratio${ratio}" ]; then
        mkdir -p resampling/graph2_uniform_ratio${ratio}
        make synthetic n=2000 edge_density=0.2 bias=0.1 inc=0.3 \
            noise_type=2 noise_ratio=0.${ratio} decay=0.9 \
            output=resampling/graph2_uniform_ratio${ratio}
    fi
done

#graph type 3
for ratio in `seq $1 $2 99`; do
    if [ ! -d "resampling/graph3_uniform_ratio${ratio}" ]; then
        mkdir -p resampling/graph3_uniform_ratio${ratio}
        make synthetic n=20000 edge_density=0.002 bias=1.0 inc=0.0 \
            noise_type=2 noise_ratio=0.${ratio} decay=0.9 \
            output=resampling/graph3_uniform_ratio${ratio}
    fi
done

#graph type 4
for ratio in `seq $1 $2 99`; do
    if [ ! -d "resampling/graph4_uniform_ratio${ratio}" ]; then
        mkdir -p resampling/graph4_uniform_ratio${ratio}
        make synthetic n=20000 edge_density=0.2 bias=0.05 inc=0.15 \
            noise_type=2 noise_ratio=0.${ratio} decay=0.9 \
            output=resampling/graph4_uniform_ratio${ratio}
    fi
done
<file_sep>/src/util.h
#ifndef UTIL_H
#define UTIL_H
// Sum of absolute element-wise differences (L1 distance) between an
// estimate x and the ground truth gt, both of length n.
double l1_loss(int n, double* x, double* gt){
    double total = 0.0;
    for (int k = 0; k < n; ++k)
        total += fabs(x[k] - gt[k]);
    return total;
}
// Largest absolute element-wise difference (L-infinity distance)
// between an estimate x and the ground truth gt, both of length n.
double linf_loss(int n, double* x, double* gt){
    double worst = 0.0;
    for (int k = 0; k < n; ++k){
        double d = fabs(x[k] - gt[k]);
        if (d > worst)
            worst = d;
    }
    return worst;
}
// Arithmetic mean of v; returns 0.0 for an empty vector.
double mean(vector<double> v){
    if (v.empty())
        return 0.0;
    double total = 0.0;
    for (double x : v)
        total += x;
    return total / v.size();
}
// Median of v.  v is received by value, so sorting it in place does
// not disturb the caller's data.  For even-length input the average
// of the two middle elements is returned.
double median(vector<double> v){
    sort(v.begin(), v.end());
    size_t mid = v.size() / 2;
    if (v.size() % 2 == 1)
        return v[mid];
    return (v[mid - 1] + v[mid]) / 2.0;
}
// Largest element of v; returns the sentinel -1e100 when v is empty.
double max(vector<double> v){
    double best = -1e100;
    for (double x : v)
        if (x > best)
            best = x;
    return best;
}
// Smallest element of v; returns the sentinel 1e100 when v is empty.
double min(vector<double> v){
    double best = 1e100;
    for (double x : v)
        if (x < best)
            best = x;
    return best;
}
// Fraction of entries that are numerically zero (below 1e-5).
// Returns 0.0 for an empty vector.
double zero_prob(vector<double> v){
    if (v.empty())
        return 0.0;
    double zeros = 0.0;
    for (double x : v)
        if (x < 1e-5)
            zeros += 1.0;
    return zeros / v.size();
}
#endif
<file_sep>/src/settings.py
# Shared plotting configuration for the benchmark figures.
from matplotlib import rc
#rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
#rc('text', usetex=True)

# Per-curve styling keyed by legend label.
colors = {'min CD':'m', 'median CD':'k', 'max CD':'m',
          'min TranSync':'g', 'median TranSync':'b', 'max TranSync':'g', 'CD':'k',
          'TranSync':'b'}
linestyles = {'min CD':'-', 'median CD':'-', 'max CD':'-',
              'min TranSync':'-', 'median TranSync':'-', 'max TranSync':'-','CD':'-',
              'TranSync':'-'}
# Min/max envelope curves get a hairline width; median curves get full width.
linewidths = {'min CD':0.01, 'median CD':1, 'max CD':0.01,
              'min TranSync':0.01, 'median TranSync':1, 'max TranSync':0.01,'CD':1,
              'TranSync':1}
markers = {'min CD':'', 'median CD':'', 'max CD':'',
           'min TranSync':'', 'median TranSync':'', 'max TranSync':'','CD':'',
           'TranSync':''}

# Per-graph figure settings: legend location/font size for the error
# ('_g') and timing ('_t') plots, the figure title, and axis limits.
#graph1 = {'id':1, 'loc_g':(0.45, 0.45), 'loc_t':(0.3, 0.55), 'fs_g':19,
#          'fs_t':36, 'title':'Graph $G_{dr}$', 'max_diff':1, 'max_time':3}
#graph2 = {'id':2, 'loc_g':(0.57, 0.57), 'loc_t':(0.3, 0.55), 'fs_g':15,
#          'fs_t':36, 'title':'Graph $G_{di}$', 'max_diff':1, 'max_time':4}
#graph3 = {'id':3, 'loc_g':(0.03, 0.13), 'loc_t':(0.03, 0.23), 'fs_g':20,
#          'fs_t':25, 'title':'Graph $G_{sr}$', 'max_diff':2.0, 'max_time':18}
#graph4 = {'id':4, 'loc_g':(0.03, 0.13), 'loc_t':(0.03, 0.13), 'fs_g':24,
#          'fs_t':30, 'title':'Graph $G_{si}$', 'max_diff':2.8, 'max_time':15}
graph1 = {'id':1, 'loc_g':(0.45, 0.45), 'loc_t':(0.37, 0.55), 'fs_g':19,
          'fs_t':33, 'title':'Graph $G_{dr}$', 'max_diff':1, 'max_time':3}
graph2 = {'id':2, 'loc_g':(0.57, 0.57), 'loc_t':(0.3, 0.55), 'fs_g':15,
          'fs_t':36, 'title':'Graph $G_{di}$', 'max_diff':1, 'max_time':4}
graph3 = {'id':3, 'loc_g':(0.03, 0.13), 'loc_t':(0.03, 0.23), 'fs_g':20,
          'fs_t':25, 'title':'Graph $G_{sr}$', 'max_diff':2.0, 'max_time':18}
graph4 = {'id':4, 'loc_g':(0.03, 0.03), 'loc_t':(0.03, 0.13), 'fs_g':24,
          'fs_t':36, 'title':'Graph $G_{si}$', 'max_diff':2.8, 'max_time':15}

# Maps the data folder's basename to its figure settings.
setting_map = {'graph1':graph1,
               'graph1_final':graph1, 'graph2_final':graph2, 'graph3_final':graph3,
               'graph4_final':graph4}

#dash = {'TRWS':[4, 2, 1, 2], 'AD3':[1, 1, 1, 1], 'PSDD':[4, 2, 4, 2, 1, 2], 'MPLP':[1, 2, 1, 2], 'GDMM':[], 'Soft-BCFW':[4, 2, 4, 2], 'Soft-BCFW-acc':[2, 4, 2, 4], 'LPsparse':[4, 2, 1, 2, 1, 2], 'smoothMSD':[]}

eps=1e-5
title_fontsize=38
<file_sep>/data/disco-bp/DLib/DLabelComponents.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <DPlane.h>
#include <DLabelComponents.h>
//
// Label 8-connected plateaus
//
// Label the 8-connected equal-valued regions ("plateaus") of `in`
// using the classical two-pass algorithm:
//   pass 1 assigns provisional labels, recording label equivalences in
//   a union-find table; pass 2 compacts labels to a dense range and
//   gathers per-region statistics (count, bounding box, centroid).
// Pixels with value 0 form the background and always get label 0.
template<class T>
void DLabelComponents<T>::label_connected_components(const _DPlane<T> &in)
{
    int pixels = in.rows() * in.cols();
    // NOTE(review): variable-length array (a compiler extension); a
    // very large image could overflow the stack here.
    int equiv_table[pixels];
    memset(equiv_table,0,pixels*sizeof(int));

    _DPlane<int> label(in.rows(),in.cols());
    int nextlabel=1;
    // Pass 1: scan in raster order; only the four already-visited
    // neighbors (left, upper-left, upper, upper-right) can carry labels.
    for(int i=0; i<in.rows(); i++)
        for(int j=0; j<in.cols(); j++)
            if(in[i][j] == 0)
                label[i][j]=0;
            else
            {
                // Which neighbors exist for this pixel (border handling).
                bool lB=true, ulB=true, uB=true, urB=true;
                if(j == 0)
                    lB=ulB=false;
                else if(j == in.cols()-1)
                    urB=false;
                if(i == 0)
                    uB=ulB=urB=false;

                // check left, upper-left, upper, and upper-right neighbors
                // mark current pixel with the first label found
                // and update equivalence table
                double this_altitude=in[i][j];
                // std::cout << i << " " << j << std::endl;
                if(lB && in[i][j-1] == this_altitude) {
                    label[i][j]=label[i][j-1];
                    if(ulB && in[i-1][j-1] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j],(int)label[i-1][j-1]);
                    if(uB && in[i-1][j] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j], (int)label[i-1][j]);
                    if(urB && in[i-1][j+1] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j], (int)label[i-1][j+1]);
                }
                else if(ulB && in[i-1][j-1] == this_altitude) {
                    label[i][j]=label[i-1][j-1];
                    if(uB && in[i-1][j] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j], (int)label[i-1][j]);
                    if(urB && in[i-1][j+1] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j], (int)label[i-1][j+1]);
                }
                else if(uB && in[i-1][j] == this_altitude) {
                    label[i][j]=label[i-1][j];
                    if(urB && in[i-1][j+1] == this_altitude)
                        equiv_union(equiv_table, (int)label[i][j],(int)label[i-1][j+1]);
                }
                else if(urB && in[i-1][j+1] == this_altitude)
                    label[i][j]=label[i-1][j+1];
                else
                    label[i][j]=nextlabel++;   // no matching neighbor: new label
            }

    // Pass 2 setup: map each provisional label (via its union-find
    // root) to a dense label in [1, nextlow).
    int *labelmap = new int[nextlabel+1];
    int *lowestmap = new int[nextlabel];
    memset(lowestmap, 0, sizeof(int)*nextlabel);
    int nextlow=1;
    regions.push_back(DRegion());
    regions[0].pix_value = 0;   // region 0 is the background
    for(int i=1; i<nextlabel; i++) {
        int root=equiv_find(equiv_table,i);
        if(lowestmap[root])
            labelmap[i]=lowestmap[root];
        else
        {
            lowestmap[root]=nextlow;
            labelmap[i]=nextlow;
            nextlow++;
        }
        regions.push_back(DRegion());
    }

    // Rewrite provisional labels to their dense values and record each
    // region's pixel value.
    for(int i=0; i<in.rows(); i++)
        for(int j=0; j<in.cols();j++)
            if(label[i][j] > 0)
            {
                label[i][j]=labelmap[(int)label[i][j]];
                regions[(int)label[i][j]].pix_value = in[i][j];
            }

    int lastused=nextlow-1;
    // +1 for background
    region_count = lastused+1;

    // Accumulate per-region statistics: pixel count, bounding box, and
    // the centroid (summed here, normalized below).
    for(int i=0; i<in.rows(); i++)
        for(int j=0; j<in.cols();j++)
        {
            int this_label = (int)label[i][j];
            regions[this_label].count++;
            if(regions[this_label]._bounding_box.left() > j)
                regions[this_label]._bounding_box.left(j);
            if(regions[this_label]._bounding_box.top() > i)
                regions[this_label]._bounding_box.top(i);
            if(regions[this_label]._bounding_box.bottom() < i)
                regions[this_label]._bounding_box.bottom(i);
            if(regions[this_label]._bounding_box.right() < j)
                regions[this_label]._bounding_box.right(j);
            regions[this_label]._centroid = regions[this_label]._centroid + DPoint(i,j);
        }

    for(int i=0; i<(int)regions.size(); i++)
        if(regions[i].count)
            regions[i]._centroid = regions[i]._centroid / regions[i].count;

    delete[] labelmap;
    delete[] lowestmap;
    _label_plane = label;
}
// Explicit template instantiations for all supported pixel types.
#define DECLARE(x) \
template class DLabelComponents<x>;

DECLARE(double)
DECLARE(short)
DECLARE(int)
DECLARE(float)
DECLARE(char)
DECLARE(unsigned char)
<file_sep>/data/disco-bp/run_rotbp.sh
#!/bin/sh
# Make the bundled corona shared library visible to bin/bp-rot.
export LD_LIBRARY_PATH=./corona-1.0.2/src/.libs/:$LD_LIBRARY_PATH

# Discrete label space passed to bp-rot; both dimensions MUST be odd.
ROTBP_LABELSPACE='11 5' # MUST be odd

# Run bp-rot on the Acropolis dataset. The energy and labels are printed each iteration.
bin/bp-rot --nopreprop --threads 10 --labelspace ${ROTBP_LABELSPACE} --iters 100 \
    --noconf --twoconf --vanish data/acropolis/tilt_twist.txt --geoplanar data/acropolis/geoplanar.geotags.txt \
    data/acropolis/pairs.txt
<file_sep>/src/polygon.py
# Demo: build two families of parabolic "tubes" as shapely polygons,
# intersect them, and plot the tubes plus their intersection.
import matplotlib.pyplot as plt
import shapely
from shapely.geometry import Polygon
from descartes import PolygonPatch
import numpy as np

def create_tube(a, height):
    """Return a closed Polygon between y = a*x^2 + height and the same
    curve shifted down by 2, sampled at 300 points over x in [-4, 4]."""
    x_tube_up = np.linspace(-4, 4, 300)
    y_tube_up = a*x_tube_up**2 + height
    x_tube_down = np.flipud(x_tube_up) #flip for correct definition of polygon
    y_tube_down = np.flipud(y_tube_up - 2)
    points_x = list(x_tube_up) + list(x_tube_down)
    points_y = list(y_tube_up) + list(y_tube_down)
    return Polygon([(points_x[i], points_y[i]) for i in range(600)])

def plot_coords(ax, ob):
    """Scatter-plot the coordinates of a shapely linear ring/line."""
    x, y = ob.xy
    ax.plot(x, y, '+', color='grey')

# Union of three upward tubes and three downward tubes.
area_1 = Polygon() #First area, a MultiPolygon object
for h in [-5, 0, 5]:
    area_1 = area_1.union(create_tube(2, h))
area_2 = Polygon()
for h in [8, 13, 18]:
    area_2 = area_2.union(create_tube(-1, h))

solution = area_1.intersection(area_2) #What I was looking for

########## PLOT ##########
fig = plt.figure()
ax = fig.add_subplot(111)

for tube in area_1:
    plot_coords(ax, tube.exterior)
    patch = PolygonPatch(tube, facecolor='g', edgecolor='g', alpha=0.1)
    ax.add_patch(patch)
for tube in area_2:
    plot_coords(ax, tube.exterior)
    patch = PolygonPatch(tube, facecolor='m', edgecolor='m', alpha=0.1)
    ax.add_patch(patch)
for sol in solution:
    plot_coords(ax, sol.exterior)
    patch = PolygonPatch(sol, facecolor='r', edgecolor='r')
    ax.add_patch(patch)

plt.show()
<file_sep>/data/disco-bp/DLib/DBoxMinTransform.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <DBoxMinTransform.h>
#include <math.h>
using namespace std;
// Build the min "pyramid" for one 1-D signal: level 0 is a copy of the
// input; level n holds at position i the min over 2^n consecutive
// samples starting at i (clamped at the array end).  `inc` is the
// stride between consecutive logical samples in `delta` (1 for a row;
// the row pitch when walking a column).
template<class T>
void DBoxMinTransform<T>::del_prime_d(T *delta, int cols, int inc,
                                      int log_d, T **del_primes_2n)
{
    T *del_primes_2n_0 = del_primes_2n[0];
    if(inc == 1)
        memcpy(del_primes_2n_0, delta, sizeof(T) * cols);
    else
        for(int i=0, cp=0; i<cols; ++i, cp+=inc)
            del_primes_2n_0[i] = delta[cp];

    int n_minus1_size = 1;
    for(int n=1; n<=log_d; ++n, n_minus1_size*=2)
    {
        T *del_primes_2n_nminus1 = del_primes_2n[n-1];
        T *del_primes_2n_n = del_primes_2n[n];

        // Each window of size 2^n is the min of two windows of size
        // 2^(n-1) offset by 2^(n-1).
        for(int i=0; i<cols-n_minus1_size; ++i)
            del_primes_2n_n[i] = min(del_primes_2n_nminus1[i],
                                     del_primes_2n_nminus1[i + n_minus1_size]);
        // Tail positions whose second half-window falls off the end.
        for(int i=max(cols-n_minus1_size,0); i < cols; ++i)
            del_primes_2n_n[i] = (del_primes_2n_nminus1[i]);
    }
}
// Compute, in place, the min over a window of exactly d samples by
// decomposing d into powers of two and combining the corresponding
// pyramid levels built by del_prime_d.
template<class T>
void DBoxMinTransform<T>::del_prime_d2(T *delta, int cols, int d, int inc,
                                       int log_d, T **del_primes_2n)
{
    // NOTE(review): variable-length array (a compiler extension); very
    // long rows could overflow the stack.
    T result[cols];
    int n_size = 1;

    for(int i=0; i<cols; i++)
        result[i] = T(1e100);   // identity element for min

    int sofar = 0;
    for(int n=0; n<=log_d; ++n, n_size *= 2)
    {
        if((d & n_size))   // this power of two participates in d
        {
            T *del_primes_2n_n = del_primes_2n[n];

            // assert(cols-sofar >= 0);
            for(int i=0; i<cols-sofar; ++i)
                result[i] = min(result[i], del_primes_2n_n[i+sofar]);
            sofar += n_size;
        }
    }

    // Write the result back through the same stride used for reading.
    if(inc == 1)
        memcpy(delta, result, sizeof(T) * cols);
    else
        for(int i=0, cp=0; i<cols; ++i, cp+=inc)
            delta[cp] = result[i];
}
// Apply the box-min transform with window (d_row+1) x (d_col+1):
// first the row pass reuses the per-row pyramids built in the
// constructor, then the column pass builds a temporary pyramid for
// each column.  The result accumulates in (and is returned as) Delta1.
template<class T>
_DMatrix<T> DBoxMinTransform<T>::do_transform(int d_row, int d_col)
{
    int log_d = int(ceil(log2(double(d_col+1))));
    assert(log_d <= max_log_d);
    for(int i=0; i<Delta1.rows(); i++)
    {
        // del_prime_d(Delta1[i], Delta.cols(), d_col+1, 1, log_d);
        del_prime_d2(Delta1[i], Delta1.cols(), d_col+1, 1, log_d, del_primes_2n[i]);
    }

    log_d = int(ceil(log2(double(d_row+1))));
    assert(log_d <= max_log_d);
    // NOTE(review): variable-length array of scratch levels for the
    // column pass (compiler extension).
    T* tmp[log_d+1];
    for(int i=0; i<log_d+1; i++)
        tmp[i] = new T[Delta1.rows()];
    for(int i=0; i<Delta1.cols(); i++)
    {
        // Columns are walked with stride Delta1.cols().
        del_prime_d(Delta1[0]+i, Delta1.rows(), Delta1.cols(), log_d, tmp);
        del_prime_d2(Delta1[0]+i, Delta1.rows(), d_row+1, Delta1.cols(), log_d, tmp);
    }
    for(int i=0; i<log_d+1; i++)
        delete[] tmp[i];
    return Delta1;
}
// Copy the input matrix and precompute a min pyramid for every row,
// sized for the largest window the caller will request
// (max_rows x max_cols).
template<class T>
DBoxMinTransform<T>::DBoxMinTransform(const _DMatrix<T> &img, int max_rows, int max_cols)
{
    Delta1 = img;
    max_log_d = max(int(ceil(log2(double(max_rows+1)))),
                    int(ceil(log2(double(max_cols+1)))));

    // del_primes_2n[row][level] is an array of img.cols() window minima.
    del_primes_2n = new T**[img.rows()];
    for(int i=0; i<img.rows(); i++)
    {
        del_primes_2n[i] = new T*[max_log_d+1];
        for(int j=0; j<max_log_d+1; j++)
            del_primes_2n[i][j] = new T[img.cols()];
    }
    // std::cout << "------------------- " << del_primes_2n[84][2][0] << std::endl;
    for(int i=0; i<Delta1.rows(); i++)
        del_prime_d(Delta1[i], Delta1.cols(), 1, max_log_d, del_primes_2n[i]);
}
// Release the per-row pyramid storage allocated in the constructor.
template<class T>
DBoxMinTransform<T>::~DBoxMinTransform()
{
    if(del_primes_2n)
    {
        for(int j=0; j<Delta1.rows(); j++)
        {
            for(int i=0; i<max_log_d+1; i++)
                delete[] del_primes_2n[j][i];
            delete[] del_primes_2n[j];
        }
        delete[] del_primes_2n;
    }
}

// Explicit instantiations for the supported element types.
template class DBoxMinTransform<double>;
template class DBoxMinTransform<float>;
template class DBoxMinTransform<int>;
template class DBoxMinTransform<short>;
template class DBoxMinTransform<char>;
<file_sep>/data/disco-bp/DLib/DRect.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DRect.h"
// Smallest rectangle containing both r1 and r2.
template<class T>
_DRect<T> bounding_rectangle(const _DRect<T> &r1, const _DRect<T> &r2)
{
    return _DRect<T>(std::min(r1.top(), r2.top()),
                     std::min(r1.left(), r2.left()),
                     std::max(r1.bottom(), r2.bottom()),
                     std::max(r1.right(), r2.right()));
}
// Overlap of r1 and r2.  If they do not overlap the returned rectangle
// is degenerate (bottom < top and/or right < left).
template<class T>
_DRect<T> intersection(const _DRect<T> &r1, const _DRect<T> &r2)
{
    return _DRect<T>(std::max(r1.top(), r2.top()),
                     std::max(r1.left(), r2.left()),
                     std::min(r1.bottom(), r2.bottom()),
                     std::min(r1.right(), r2.right()));
}
// Area of the overlap of r1 and r2; clamped to 0 when they are disjoint
// (the degenerate intersection rectangle yields a negative area).
template<class T>
T area_of_intersection(const _DRect<T> &r1, const _DRect<T> &r2)
{
    T area = intersection(r1, r2).area();
    if(area < 0)
        area = 0;
    return area;
}
// Area covered by r1 or r2 (inclusion-exclusion).
template<class T>
T area_of_union(const _DRect<T> &r1, const _DRect<T> &r2)
{
    return r1.area() + r2.area() - area_of_intersection(r1, r2);
}
// Write a rectangle as "[top_left, bottom_right]" — the inverse of the
// operator>> below.
template<class T>
std::ostream & operator<<(std::ostream &os, const _DRect<T> &rect)
{
    os << "[" << rect.top_left() << ", " << rect.bottom_right() << "]";
    return os;
}
// Parse a rectangle written by operator<< ("[p1, p2]"), skipping
// spaces between tokens.  Malformed input trips the asserts.
template<class T>
std::istream & operator>>(std::istream &is, _DRect<T> &rect)
{
    char c;
    do { is >> c; } while(c == ' ');
    assert(c=='[');
    is >> rect._top_left;
    do { is >> c; } while(c == ' ');
    assert(c==',');
    is >> rect._bottom_right;
    do { is >> c; } while(c == ' ');
    assert(c==']');
    return is;
}
// Explicit instantiations for the supported coordinate types.
#define DECLARE(x) \
template _DRect<x> bounding_rectangle(const _DRect<x> &r1, const _DRect<x> &r2); \
template _DRect<x> intersection(const _DRect<x> &r1, const _DRect<x> &r2); \
template x area_of_union(const _DRect<x> &r1, const _DRect<x> &r2); \
template x area_of_intersection(const _DRect<x> &r1, const _DRect<x> &r2); \
template std::ostream & operator<<(std::ostream &os, const _DRect<x> &rect); \
template std::istream & operator>>(std::istream &is, _DRect<x> &rect);

DECLARE(double)
DECLARE(int)
DECLARE(float)
<file_sep>/data/disco-bp/DLib/DImage.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <DImage.h>
// Stream-extraction for DImage: reads a plane count, then that many
// planes via the plane extractor.
// NOTE(review): the std::cout trace looks like leftover debug output.
std::istream &operator>>(std::istream &is, DImage &matrix)
{
    int planes;
    is >> planes;
    std::cout << "reading " << planes << std::endl;
    matrix = DImage(planes);
    for(int i=0; i<planes; i++)
        is >> matrix[i];
    return is;
}
<file_sep>/data/disco-bp/DLib/DPlane.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DPlane.h"
#include "thin_lookup.h"
#include "DGaussianKernel.h"
//
#include <FrequencyTransforms.h>
using namespace std;
// Draw the outline of `rect` in the given color, clipping each edge
// pixel against the plane bounds.
template<class T>
void _DPlane<T>::draw(const DRect &rect, T color)
{
    // Left and right vertical edges.
    for(int i=rect.top(); i <= rect.bottom(); i++)
    {
        if(i >= 0 && i < _DMatrix<T>::rows() && rect.left() >= 0 && rect.left() < _DMatrix<T>::cols())
            (*this)[i][rect.left()] = color;
        if(i >= 0 && i < _DMatrix<T>::rows() && rect.right() >= 0 && rect.right() < _DMatrix<T>::cols())
            (*this)[i][rect.right()] = color;
    }

    // Top and bottom horizontal edges.
    for(int j=rect.left(); j <= rect.right(); j++)
    {
        if(rect.top() >= 0 && rect.top() < _DMatrix<T>::rows() && j >= 0 && j < _DMatrix<T>::cols())
            (*this)[rect.top()][j] = color;
        if(rect.bottom() >= 0 && rect.bottom() < _DMatrix<T>::rows() && j >= 0 && j < _DMatrix<T>::cols())
            (*this)[rect.bottom()][j] = color;
    }
}
// Draw a line from p1 toward p2 by stepping one pixel along the major
// axis per iteration (DDA-style), clipping against the plane bounds.
// NOTE(review): the loop runs `steps` times starting at p1, so the
// endpoint p2 itself is never plotted — confirm whether intentional.
template<class T>
void _DPlane<T>::draw(const DPoint &p1, const DPoint &p2, T color)
{
    float xi=1, yi=1;
    int steps;
    int xdiff = p2.col() - p1.col(), ydiff = p2.row() - p1.row();

    // Pick the major axis; the minor axis advances by a fractional step.
    if(abs(xdiff)>abs(ydiff))
    {
        yi=ydiff/(float)abs(xdiff);
        steps=abs(xdiff);
        if(xdiff<0)
            xi=-1;
    }
    else
    {
        xi=xdiff/(float)abs(ydiff);
        steps=abs(ydiff);
        if(ydiff<0)
            yi=-1;
    }

    float cur_x = static_cast< float >(p1.col()), cur_y = static_cast< float >(p1.row());
    for(int i=0; i<steps; i++, cur_x += xi, cur_y += yi)
    {
        DPoint p(int(rint(cur_y)), int(rint(cur_x)));
        if(_DMatrix<T>::is_valid_index(p))
            (*this)[p.row()][p.col()] = color;
    }
}
// Per-pixel difference along the down-right diagonal:
// result[i][j] = in[i+1][j+1] - in[i][j].
// NOTE(review): despite the name this is a diagonal (Roberts-cross
// style) difference, not a pure horizontal derivative — confirm intent
// against callers.  Output is one row and one column smaller.
template<class T>
_DPlane<T> _DPlane<T>::get_x_gradient() const
{
    _DPlane<T> result(_DMatrix<T>::rows()-1, _DMatrix<T>::cols()-1);
    result=0;
    for(int i=0; i<_DMatrix<T>::rows()-1; i++)
        for(int j=0; j<_DMatrix<T>::cols()-1; j++)
            result[i][j] = (*this)[i+1][j+1] - (*this)[i][j];
    return result;
}
// Per-pixel difference along the down-left diagonal:
// result[i][j] = in[i+1][j] - in[i][j+1].
// NOTE(review): the complementary diagonal to get_x_gradient (the
// other Roberts-cross component) — confirm intent against callers.
// Output is one row and one column smaller.
template<class T>
_DPlane<T> _DPlane<T>::get_y_gradient() const
{
    _DPlane<T> result(_DMatrix<T>::rows()-1, _DMatrix<T>::cols()-1);
    result=0;
    for(int i=0; i<_DMatrix<T>::rows()-1; i++)
        for(int j=0; j<_DMatrix<T>::cols()-1; j++)
            result[i][j] = (*this)[i+1][j] - (*this)[i][j+1];
    return result;
}
/*
// FIXME:
// Warning : cross_correlate and cross_correlate_fft give different results for
// elements near matrix borders!
#define CROSS_CORR(T) \
template<> \
_DPlane<T> _DPlane<T>::cross_correlate_fft(const _DPlane<T> &kernel, bool normalize_flag) const \
{ \
\
int new_rows = kernel.rows() + rows() - 1; \
int new_cols = kernel.cols() + cols() - 1; \
\
_DPlane<T> padded_kernel(new_rows, new_cols), padded_input(new_rows, new_cols);\
\
DiscreteRealFourierTransform<T> fft(DPoint(new_rows, new_cols), true);\
DiscreteInverseRealFourierTransform<T> ifft(DPoint(new_rows, new_cols), true);\
\
padded_kernel = 0;\
padded_kernel.set_submatrix(DPoint(0,0), kernel.rotate_180());\
\
padded_input = 0;\
padded_input.set_submatrix(DPoint(0,0), (*this));\
\
_DComplexMatrix<T> c1 = fft.do_transform(padded_input);\
_DComplexMatrix<T> c2 = fft.do_transform(padded_kernel);\
\
_DComplexMatrix<T> product = pointwise_multiply(c1, c2);\
\
return (ifft.do_transform(product)).extract(DRect(kernel.size()/2, kernel.size()/2 + _DMatrix<T>::size() - DPoint(1,1)));\
}
CROSS_CORR(float)
CROSS_CORR(double)
*/
// Generic fallback: FFT-based cross-correlation is only provided for
// floating-point planes (see the commented-out CROSS_CORR
// specializations above); all other element types throw.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate_fft(const _DPlane<T> &kernel, bool normalize_flag) const
{
    throw std::string("cross_correlate_fft only implemented for double and float types");
}
// Cross-correlate every row with a 1 x K kernel.  Border pixels are
// handled by clamping (replicating the first/last sample of the row).
// The interior is a tight kernel sweep; only the two border strips pay
// for the boundary checks.  Output has the same size as the input.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate_1d_rows(const _DPlane<T> &kernel, bool normalize) const
{
    _DPlane<T> result(_DMatrix<T>::rows(), _DMatrix<T>::cols());

    assert(_DMatrix<T>::cols() > kernel.cols());
    assert(kernel.rows() == 1);
    assert(!normalize);   // normalization not implemented

    T *result_cp = result[0];
    T *kernel_cp = kernel[0];
    int half_width = kernel.cols() / 2;

    for(int i=0; i<_DMatrix<T>::rows(); i++)
    {
        T *img_cp = (*this)[i];

        // Left border: indices below 0 clamp to sample 0.
        for(int j=0; j < half_width; j++)
        {
            T sum = 0;
            for(int l=-half_width; l<=half_width; l++)
            {
                T kernel_val = kernel_cp[half_width+l];
                if(j+l < 0)
                    sum += kernel_val * img_cp[0];
                else
                    sum += kernel_val * img_cp[j+l];
            }
            *(result_cp++) = sum;
        }

        // Interior: no boundary checks needed.
        for(int j=half_width; j<_DMatrix<T>::cols() - half_width; j++)
        {
            T sum = 0;
            // for(int l=-half_width; l<=half_width; l++)
            T *img_cp2=img_cp+j-half_width;
            for(int l=0; l<kernel.cols(); l++)
            {
                sum += kernel_cp[l] * img_cp2[l];
            }
            *(result_cp++) = sum;
        }

        // Right border: indices past the end clamp to the last sample.
        for(int j=_DMatrix<T>::cols() - half_width; j < _DMatrix<T>::cols(); j++)
        {
            T sum = 0;
            for(int l=-half_width; l<=half_width; l++)
            {
                T kernel_val = kernel_cp[half_width+l];
                if(j+l >= _DMatrix<T>::cols())
                    sum += kernel_val * img_cp[_DMatrix<T>::cols()-1];
                else
                    sum += kernel_val * img_cp[j+l];
            }
            *(result_cp++) = sum;
        }
    }
    return result;
}
// Cross-correlate every row with a 1 x K kernel, writing the result
// TRANSPOSED (output is cols x rows; row i of the input becomes column
// i of the output).  Applying this twice therefore filters rows then
// columns — see cross_correlate_separable.  Borders clamp to the
// first/last sample of the row.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate_1d_rows_sep(const _DPlane<T> &kernel, bool normalize) const
{
    // Transposed dimensions: (cols, rows).
    _DPlane<T> result(_DMatrix<T>::cols(), _DMatrix<T>::rows());

    assert(kernel.rows() == 1);
    assert(!normalize);   // normalization not implemented

    T *kernel_cp = kernel[0];
    int half_width = kernel.cols() / 2;
    int kernel_cols = kernel.cols();

    // optimized version assumes image is wider than kernel
    if(_DMatrix<T>::cols() > kernel.cols())
    {
        // profiler->begin(10);
        for(int i=0; i<_DMatrix<T>::rows(); i++)
        {
            // Walk down column i of the transposed result.
            T *result_cp = &(result[0][i]);
            T *img_cp = (*this)[i];

            // profiler->begin(12);
            // Left border: indices below 0 clamp to sample 0.
            for(int j=0; j < half_width; j++)
            {
                T sum = 0;
                for(int l=-half_width; l<=half_width; l++)
                {
                    T kernel_val = kernel_cp[half_width+l];
                    if(j+l < 0)
                        sum += kernel_val * img_cp[0];
                    else
                        sum += kernel_val * img_cp[j+l];
                }
                *(result_cp) = sum;
                result_cp += result.cols();
            }
            // profiler->end(12);

            // profiler->begin(13);
            // Interior: tight sweep, no boundary checks.
            int sz = _DMatrix<T>::cols() - half_width;
            for(int j=half_width; j<sz; j++)
            {
                T sum = 0;
                // for(int l=-half_width; l<=half_width; l++)
                T *img_cp2=img_cp+j-half_width;
                for(int l=0; l<kernel_cols; l++)
                {
                    sum += kernel_cp[l] * img_cp2[l];
                }
                *(result_cp) = sum;
                result_cp += result.cols();
            }
            // profiler->end(13);

            // profiler->begin(14);
            // Right border: indices past the end clamp to the last sample.
            sz = _DMatrix<T>::cols();
            for(int j=_DMatrix<T>::cols() - half_width; j < sz; j++)
            {
                T sum = 0;
                for(int l=-half_width; l<=half_width; l++)
                {
                    T kernel_val = kernel_cp[half_width+l];
                    if(j+l >= _DMatrix<T>::cols())
                        sum += kernel_val * img_cp[_DMatrix<T>::cols()-1];
                    else
                        sum += kernel_val * img_cp[j+l];
                }
                *(result_cp) = sum;
                result_cp += result.cols();
            }
            // profiler->end(14);
        }
        // profiler->end(10);
    }
    else // unoptimized version
    {
        // profiler->begin(11);
        // Kernel at least as wide as the image: every tap needs checks.
        for(int i=0; i<_DMatrix<T>::rows(); i++)
        {
            T *result_cp = &(result[0][i]);
            T *img_cp = (*this)[i];

            for(int j=0; j < _DMatrix<T>::cols(); j++)
            {
                T sum = 0;
                for(int l=-half_width; l<=half_width; l++)
                {
                    T kernel_val = kernel_cp[half_width+l];
                    if(j+l >= _DMatrix<T>::cols())
                        sum += kernel_val * img_cp[_DMatrix<T>::cols()-1];
                    else if(j+l < 0)
                        sum += kernel_val * img_cp[0];
                    else
                        sum += kernel_val * img_cp[j+l];
                }
                *(result_cp) = sum;
                result_cp += result.cols();
            }
        }
        // profiler->end(11);
    }
    return result;
}
// Return a copy of the plane with every row mirrored left-to-right.
template<class T>
_DPlane<T> _DPlane<T>::flip_horizontally() const
{
    _DPlane<T> mirrored(_DMatrix<T>::rows(), _DMatrix<T>::cols());
    const int last = mirrored.cols() - 1;
    for(int r = 0; r < mirrored.rows(); r++)
    {
        const T *src_row = (*this)[r];
        T *dst_row = mirrored[r];
        for(int c = 0; c <= last; c++)
            dst_row[last - c] = src_row[c];
    }
    return mirrored;
}
// Cross-correlate every column with a K x 1 kernel.  Borders clamp to
// the first/last sample of the column.  Column samples are reached by
// striding through memory in steps of cols().  Output has the same
// size as the input.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate_1d_cols(const _DPlane<T> &kernel, bool normalize) const
{
    _DPlane<T> result(_DMatrix<T>::rows(), _DMatrix<T>::cols());

    assert(_DPlane<T>::rows() > kernel.rows());
    assert(kernel.cols() == 1);
    assert(!normalize);   // normalization not implemented

    T *kernel_cp = kernel[0];
    int half_width = kernel.rows() / 2;

    for(int j=0; j<_DMatrix<T>::cols(); j++)
    {
        T *img_cp = &((*this)[0][j]);
        T *result_cp = &(result[0][j]);

        // Top border: indices above the image clamp to row 0.
        for(int i=0; i < half_width; i++)
        {
            T sum = 0;
            for(int l=-half_width; l<=half_width; l++)
            {
                T kernel_val = kernel_cp[half_width+l];
                if(i+l < 0)
                    sum += kernel_val * img_cp[0];
                else
                    sum += kernel_val * img_cp[(i+l)*_DMatrix<T>::cols()];
            }
            *result_cp = sum;
            result_cp += _DMatrix<T>::cols();
        }

        // Interior: tight strided sweep, no boundary checks.
        for(int i=half_width; i<_DMatrix<T>::rows() - half_width; i++)
        {
            T sum = 0;
            T *img_cp_cp = img_cp + _DMatrix<T>::cols() * (i-half_width);
            for(int l=0; l<kernel.rows(); l++)
            {
                sum += kernel_cp[l] * (*img_cp_cp);
                img_cp_cp += _DMatrix<T>::cols();
            }
            *result_cp = sum;
            result_cp += _DMatrix<T>::cols();
        }

        // Bottom border: indices past the image clamp to the last row.
        for(int i=_DMatrix<T>::rows() - half_width; i < _DMatrix<T>::rows(); i++)
        {
            T sum = 0;
            for(int l=-half_width; l<=half_width; l++)
            {
                T kernel_val = kernel_cp[half_width+l];
                if(i+l >= _DMatrix<T>::rows())
                    sum += kernel_val * img_cp[(_DMatrix<T>::rows()-1)*_DMatrix<T>::cols()];
                else
                    sum += kernel_val * img_cp[(i+l)*_DMatrix<T>::cols()];
            }
            *result_cp = sum;
            result_cp += _DMatrix<T>::cols();
        }
    }
    return result;
}
// Separable 2-D cross-correlation.  The _sep row filter returns its
// result transposed, so applying it twice filters rows then columns
// and restores the original orientation.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate_separable(const _DPlane<T> &row_kernel, const _DPlane<T> &col_kernel, bool normalize) const
{
    return (cross_correlate_1d_rows_sep(row_kernel, normalize)).cross_correlate_1d_rows_sep(col_kernel.transpose(), normalize);
}
// cross_correlate with a gaussian
template<class T>
_DPlane<T> _DPlane<T>::convolve_gaussian(const _DMatrix<T> &sigma, float sigma_width, bool box_approximate) const
{
bool rotate = true;
double angle=0;
_DPlane<T> in_im(*this);
T row_sigma = sigma[1][1], col_sigma = sigma[0][0];
if(sigma[0][1] == 0 && sigma[1][0] == 0)
rotate=false;
if(rotate)
{
_DMatrix<T> eig_vector;
_DMatrix<T> res = sigma.eigen(eig_vector);
if(eig_vector[0][1] ==0)
angle=0;
else
angle = -atan((double) eig_vector[1][1]/eig_vector[0][1]);
in_im = in_im.rotate_image(angle, 0);
col_sigma = res[0][1];
row_sigma = res[0][0];
}
int f_width = int(ceil(sqrt((double) row_sigma)*sigma_width))*2+1;
int f_height = int(ceil(sqrt((double) col_sigma)*sigma_width))*2+1;
if(f_width >= in_im.cols()-1)
f_width = in_im.cols()-1 - (in_im.cols()) % 2;
if(f_height >= in_im.rows()-1)
f_height = in_im.rows()-1 - (in_im.rows()) % 2;
// profiler->begin(9);
// _DPlane<T> result;
// if(!box_approximate)
_DPlane<T> result = in_im.cross_correlate_separable(_DGaussianKernel<T>(sqrt((double) row_sigma), f_width),
_DGaussianKernel<T>(sqrt((double) col_sigma), f_height).transpose(), false);
// else
// result = in_im.convolve_gaussian_approximate(sqrt(row_sigma), sqrt(col_sigma));
// profiler->end(9);
if(rotate)
{
_DPlane<T> I4 = result.rotate_image(-angle, 0);
int r1 = this->rows(), c1 = this->cols();
int r2 = I4.rows(), c2 = I4.cols();
int rr1 = ((r2-r1)/2);
int cc1 = ((c2-c1)/2);
result = I4.extract(DRect(rr1, cc1, rr1+r1-1, cc1+c1-1));
}
return result;
}
// 1-D box (moving-sum) filter along each row, with replicate-border
// handling.  IMPORTANT: the output is TRANSPOSED relative to the input
// (result is cols() x rows(); output column i holds filtered input row i),
// so two successive calls yield a full 2-D box filter -- see box_filter().
// When normalize is true, sums are divided by kernel_cols.
// NOTE(review): the window actually visited is 2*(kernel_cols/2)+1 taps,
// so this assumes kernel_cols is odd; for even widths the normalization
// divisor does not match the window size -- confirm callers pass odd
// widths.
template<class T>
_DPlane<T> _DPlane<T>::box_filter_rows_1D(int kernel_cols, bool normalize) const
{
  _DPlane<T> result(cols(), rows());
  int half_width = kernel_cols / 2;
  // optimized version assumes image is wider than kernel
  if(_DMatrix<T>::cols() > kernel_cols)
  {
    for(int i=0; i<_DMatrix<T>::rows(); i++)
    {
      // result_cp walks DOWN output column i (stride = result.cols())
      T *result_cp = &(result[0][i]);
      T *img_cp = (*this)[i];
      // left border: reads clamped to column 0
      for(int j=0; j < half_width; j++)
      {
        T sum = 0;
        for(int l=-half_width; l<=half_width; l++)
        {
          if(j+l < 0)
            sum += img_cp[0];
          else
            sum += img_cp[j+l];
        }
        *(result_cp) = sum;
        result_cp += result.cols();
      }
      // interior: sliding-window running sum, O(1) per output pixel
      T cum_sum=0;
      for(int l=0; l<kernel_cols; l++)
        cum_sum += img_cp[l];
      *(result_cp) = cum_sum;
      result_cp += result.cols();
      int sz = _DMatrix<T>::cols() - half_width;
      for(int j=half_width+1; j<sz; j++)
      {
        cum_sum = *(result_cp) = cum_sum - img_cp[j-half_width-1] + img_cp[j-half_width-1+kernel_cols];
        result_cp += result.cols();
      }
      // right border: reads clamped to the last column
      sz = _DMatrix<T>::cols();
      for(int j=_DMatrix<T>::cols() - half_width; j < sz; j++)
      {
        T sum = 0;
        for(int l=-half_width; l<=half_width; l++)
        {
          if(j+l >= _DMatrix<T>::cols())
            sum += img_cp[_DMatrix<T>::cols()-1];
          else
            sum += img_cp[j+l];
        }
        *(result_cp) = sum;
        result_cp += result.cols();
      }
    }
  }
  else // unoptimized version
  {
    for(int i=0; i<_DMatrix<T>::rows(); i++)
    {
      T *result_cp = &(result[0][i]);
      T *img_cp = (*this)[i];
      for(int j=0; j < _DMatrix<T>::cols(); j++)
      {
        T sum = 0;
        for(int l=-half_width; l<=half_width; l++)
        {
          if(j+l >= _DMatrix<T>::cols())
            sum += img_cp[_DMatrix<T>::cols()-1];
          else if(j+l < 0)
            sum += img_cp[0];
          else
            sum += img_cp[j+l];
        }
        *(result_cp) = sum;
        result_cp += result.cols();
      }
    }
  }
  return normalize ? result * (1.0 / T(kernel_cols)) : result;
}
// 2-D box filter via two 1-D passes.  box_filter_rows_1D filters along
// rows and returns a TRANSPOSED plane, so the second call covers the
// other axis and restores the original orientation.
template<class T>
_DPlane<T> _DPlane<T>::box_filter(int kernel_rows, int kernel_cols, bool normalize) const
{
  return box_filter_rows_1D(kernel_cols, normalize).box_filter_rows_1D(kernel_rows, normalize);
}
// Iterative n!.  The previous recursive version recursed without bound
// (stack overflow) for negative inputs; now any input < 2 yields 1.
// Note: plain int overflows for in > 12.
static int factorial(int in)
{
  int result = 1;
  for(int i = 2; i <= in; i++)
    result *= i;
  return result;
}
// Approximates Gaussian smoothing by `reps` repeated box filters per
// axis (central-limit argument); the box widths are derived so that the
// iterated box response matches the requested sigmas.
// NOTE(review): box_filter_rows_1D returns a transposed plane, so each
// iteration of the loops below filters the alternate axis -- verify this
// matches the intended reps-per-axis scheme.  Also still prints the
// chosen widths to stdout (debug output), and the widths are not forced
// to be odd.
template<class T>
_DPlane<T> _DPlane<T>::convolve_gaussian_approximate(T row_sigma, T col_sigma)
{
  // compute # of columns necessary for box filter;
  const int reps = 4;
  float kernel_cols = float( 0.005 ), kernel_rows = float( 0.005 );
  for(int i=0; i<=(reps-1)/2; i++)
    kernel_cols += (float) (pow(-1.0,i) * reps / float(factorial(i) * factorial(reps-i)) * pow(float(reps/2 - i), reps-1));
  kernel_cols *= float( row_sigma * sqrt(2 * M_PI) );
  for(int i=0; i<=(reps-1)/2; i++)
    kernel_rows += (float) (pow(-1.0,i) * reps / float(factorial(i) * factorial(reps-i)) * pow(float(reps/2 - i), reps-1));
  kernel_rows *= float( col_sigma * sqrt(2 * M_PI) );
  kernel_rows = ceil(kernel_rows);
  kernel_cols = ceil(kernel_cols);
  std::cout << kernel_cols << " " << kernel_rows << std::endl;
  _DPlane<T> result = (*this);
  for(int i=0; i<reps; i++)
    result = result.box_filter_rows_1D((int)kernel_cols, true);
  result = result.transpose();
  for(int i=0; i<reps; i++)
    result = result.box_filter_rows_1D((int)kernel_rows, true);
  return result.transpose();
}
// Full 2-D cross-correlation with replicate-border handling.  If
// `normalize` is true, each output value is divided by the kernel's sum.
template<class T>
_DPlane<T> _DPlane<T>::cross_correlate(const _DPlane<T> &kernel, bool normalize) const
{
  _DPlane<T> result(_DMatrix<T>::rows(), _DMatrix<T>::cols());
  result = 0;
  int half_height = kernel._DMatrix<T>::rows()/2;
  int half_width = kernel._DMatrix<T>::cols()/2;
  int low_half_height = -half_height, low_half_width = -half_width;
  int high_half_height = half_height, high_half_width = half_width;
  // For EVEN kernel dimensions, shrink the upper end of the window so
  // exactly rows()/cols() taps are visited.  BUGFIX: the previous code
  // tested the parity of half_height/half_width instead of the kernel
  // dimension itself, which skipped the last kernel row/col for odd
  // sizes 5, 9, 13, ... and indexed past the kernel for even sizes
  // 6, 10, 14, ...
  if(!(kernel._DMatrix<T>::rows() % 2))
    high_half_height--;
  if(!(kernel._DMatrix<T>::cols() % 2))
    high_half_width--;
  T kernel_norm;
  if(normalize)
    kernel_norm = kernel.sum();
  else
    kernel_norm = T(1.0);
  for(int i=0; i<_DMatrix<T>::rows(); i++)
    for(int j=0; j<_DMatrix<T>::cols(); j++)
    {
      T sum=0;
      for(int k=low_half_height; k<=high_half_height; k++)
      {
        T *kernel_cp, *img_cp;
        kernel_cp = kernel[k+half_height];
        // replicate rows beyond the top/bottom border
        if(i+k < 0)
          img_cp = (*this)[0];
        else if(i+k >= _DMatrix<T>::rows())
          img_cp = (*this)[_DMatrix<T>::rows()-1];
        else
          img_cp = (*this)[i+k];
        if(j-half_width < 0 || j + half_width >= _DMatrix<T>::cols())
        {
          // border column: clamp horizontal reads into range
          for(int l=low_half_width; l<=high_half_width; l++)
          {
            T kernel_val = kernel_cp[half_width+l];
            if(j+l < 0)
              sum += kernel_val * img_cp[0];
            else if(j+l >= _DMatrix<T>::cols())
              sum += kernel_val * img_cp[_DMatrix<T>::cols()-1];
            else
              sum += kernel_val * img_cp[j+l];
          }
        }
        else
        {
          // interior column: no bounds checks needed
          for(int l=low_half_width; l<=high_half_width; l++)
          {
            sum += kernel_cp[l+half_width] * img_cp[j+l];
          }
        }
      }
      result[i][j] = sum/kernel_norm;
    }
  return result;
}
// Plain decimation: keeps every row_factor-th row and col_factor-th
// column, starting from pixel (0, 0).
template<class T>
_DPlane<T> _DPlane<T>::subsample(int row_factor, int col_factor)
{
  int out_rows = (int) ceil((double) (_DMatrix<T>::rows()) / row_factor);
  int out_cols = (int) ceil((double) (_DMatrix<T>::cols()) / col_factor);
  _DPlane<T> result(out_rows, out_cols);
  T *dest = result[0];
  for(int i = 0; i < _DMatrix<T>::rows(); i += row_factor)
    for(int j = 0; j < _DMatrix<T>::cols(); j += col_factor)
      *dest++ = (*this)[i][j];
  return result;
}
// sub-samples, taking the *maximum* value within each factor x factor block
//
// currently only works for factor == 2
//
// just chops off odd row or column
//
// NOTE(review): despite the comment above, there are hand-unrolled fast
// paths for factor 2 and 3 AND a generic two-pass path for other factors;
// trailing rows/cols that do not fill a complete block are dropped.
template<class T>
_DPlane<T> _DPlane<T>::downsample_max(int factor) const
{
  // assert(factor == 2 || factor == 3);
  int r = (int) floor(float(_DMatrix<T>::rows()) / factor);
  int c = (int) floor(float(_DMatrix<T>::cols()) / factor);
  _DPlane<T> result(r, c);
  if(factor == 2)
  {
    // unrolled 2x2 max
    T *out_cp = result[0];
    for(int i=0; i < r; i++)
    {
      const T *in1_cp = (*this)[i*2];
      const T *in2_cp = (*this)[i*2 + 1];
      for(int j=0; j < c; j++, in1_cp+=2, in2_cp+=2, out_cp++)
        *out_cp = std::max( std::max(*in1_cp, *(in1_cp+1)), std::max(*in2_cp, *(in2_cp+1)) );
    }
  }
  else if(factor==3)
  {
    // unrolled 3x3 max
    T *out_cp = result[0];
    for(int i=0; i < r; i++)
    {
      const T *in1_cp = (*this)[i*3];
      const T *in2_cp = (*this)[i*3 + 1];
      const T *in3_cp = (*this)[i*3 + 2];
      for(int j=0; j < c; j++, in1_cp+=3, in2_cp+=3, in3_cp+=3, out_cp++)
        *out_cp = std::max( std::max( std::max( std::max(*in1_cp, *(in1_cp+1)), std::max(*(in1_cp+2), *(in2_cp))),
                                      std::max( std::max(*(in2_cp+1), *(in2_cp+2)), std::max(*in3_cp, *(in3_cp+1)))),
                            *(in3_cp+2));
    }
  }
  else
  {
    // generic factor: horizontal max pass into tmp, then vertical max pass
    int in_rows = int(_DMatrix<T>::rows() / factor)*factor, in_cols = int(_DMatrix<T>::cols() / factor)*factor;
    _DPlane<T> tmp(in_rows, c);
    for(int i=0; i<in_rows; i++)
    {
      T *out_cp = tmp[i];
      T *in_ptr = (*this)[i];
      for(int j=0; j<in_cols; ++out_cp)
      {
        // max over one horizontal run of `factor` pixels (j advances inside)
        T local_max = in_ptr[j];
        j++;
        for(int k=1; k<factor; k++, j++)
        {
          if(in_ptr[j] > local_max)
            local_max = in_ptr[j];
        }
        *out_cp = local_max;
      }
    }
    for(int j=0; j<c; j++)
    {
      T *out_cp = result[0]+j;
      T *in_ptr = tmp[0]+j;
      for(int i=0; i<in_rows; out_cp += c)
      {
        // max over one vertical run of `factor` pixels (i advances inside)
        T local_max = *in_ptr;
        i++;
        in_ptr+=c;
        for(int k=1; k<factor; k++, i++, in_ptr+=c)
        {
          if(*in_ptr > local_max)
            local_max = *in_ptr;
        }
        *out_cp = local_max;
      }
    }
  }
  return result;
}
// Fractional part of a value, always in [0, 1) -- even for negatives:
// float_part(-1.25) == 0.75.
inline
double float_part(double value)
{
  return value - floor(value);
}
// Shifts the image by a sub-pixel (row_offset, col_offset), both in
// [0, 1), using bilinear interpolation.  The result shrinks by one
// row/column for each nonzero offset.
// NOTE(review): when an offset is exactly 0, the [row_int+1]/[col_int+1]
// taps are still evaluated on the last row/column even though their
// weight is 0 -- that read indexes one past the valid range; confirm and
// guard if needed.
template<class T>
_DPlane<T> _DPlane<T>::bilinear_interpolate(T row_offset, T col_offset) const
{
  assert(row_offset >= 0 && col_offset >= 0);
  assert(row_offset < 1 && col_offset < 1);
  int new_rows = _DMatrix<T>::rows() - (int)ceil((double) row_offset);
  int new_cols = _DMatrix<T>::cols() - (int)ceil((double) col_offset);
  _DPlane<T> result(new_rows, new_cols);
  T row_pos, col_pos;
  row_pos = row_offset;
  for(int i=0; i<new_rows; i++, row_pos++)
  {
    col_pos = col_offset;
    for(int j=0; j<new_cols; j++, col_pos++)
    {
      // weights from the fractional position; taps from the 2x2 pixel
      // neighborhood around (row_pos, col_pos)
      double row_fraction = float_part(row_pos);
      double col_fraction = float_part(col_pos);
      int row_int = (int)floor((double) row_pos);
      int col_int = (int)floor((double) col_pos);
      result[i][j] =
        T((1.0-row_fraction) * (1.0-col_fraction) * (*this)[row_int][col_int] +
          (1.0-row_fraction) * (    col_fraction) * (*this)[row_int][col_int+1] +
          (    row_fraction) * (1.0-col_fraction) * (*this)[row_int+1][col_int] +
          (    row_fraction) * (    col_fraction) * (*this)[row_int+1][col_int+1]);
    }
  }
  return result;
}
// Builds the weight vector used when area-averaging a fractional span of
// source pixels: for the span [start_row, start_row + row_count), `ptr`
// receives one weight per overlapped source pixel (partial coverage for
// the first/last pixels, 1.0 for fully covered ones) and `sz` its
// length.  The caller owns the returned array (delete[]).
template<class T>
void _DPlane<T>::get_subsample_vector(float start_row, float row_count, int &sz, float *&ptr) const
{
  if(floor(start_row + row_count) == floor(start_row)) // start and end in same pixel
  {
    sz = 1;
    ptr = new float[sz];
    ptr[0] = row_count;
  }
  else
  {
    int i_part = int(start_row);
    double f_part = float_part(start_row);
    int ii=0;
    sz = int(ceil(start_row + row_count) - floor(start_row));
    ptr = new float[sz];
    if(f_part > 0)
      ptr[ii++] = static_cast< float >(1.0 - f_part); // first pixel
    // fully covered interior pixels
    for(int i=0; i<int(floor(start_row + row_count) - ceil(start_row)); i++)
      ptr[ii++] = 1.0;
    i_part = int(start_row + row_count);
    f_part = float_part(start_row + row_count);
    if(f_part > 0 && ii < sz)
      ptr[ii++] = static_cast< float >(f_part); // last pixel
    // consistency check: every slot must have been written exactly once
    // (dumps debug state to stdout before aborting)
    if(ii != sz)
    {
      cout << start_row << " " << row_count << " " << sz << endl;
      cout << "--- " << i_part << " " << f_part << " " << sz << endl;
      assert(0);
    }
  }
}
// Rescales by independent row/col factors: a row pass into result1, then
// a column pass into result2.  Downscaling (factor < 1) uses exact area
// averaging via get_subsample_vector; upscaling uses linear
// interpolation; factor == 1.0 copies through.
// NOTE(review): in the row-upscale branch, (*this)[i_part+1] is fetched
// even when i_part+1 == rows() (the value is then unused) -- that
// row-pointer read is out of range; confirm and guard if needed.
template<class T>
_DPlane<T> _DPlane<T>::bilinear_rescale(float row_factor, float col_factor) const
{
  _DPlane<T> result1(int(this->rows() * row_factor), this->cols());
  _DPlane<T> result2(int(this->rows() * row_factor), int(this->cols() * col_factor));
  float row_inc = float( 1.0 ) / row_factor;
  assert(!_DMatrix<T>::is_any_nan());
  if(row_factor == 1.0)
    result1 = *this;
  else if(row_factor < 1.0) // subsample
  {
    for(int i=0; i<result1.rows(); i++)
    {
      // weights covering the fractional source span for output row i
      int sz;
      float *vec;
      get_subsample_vector(i * row_inc, row_inc, sz, vec);
      int i_beg = int(i * row_inc);
      T *result1_cp = result1[i];
      for(int j=0; j<result1.cols(); j++)
      {
        float sum = 0;
        float norm=0;
        int i_max = i_beg + sz;
        for(int ii = i_beg, cp=0; ii < i_max; ii++, cp++)
          if(ii < this->rows())
            sum += (*this)[ii][j] * vec[cp], norm += vec[cp];
        result1_cp[j] = T(sum / norm);
      }
      delete[] vec;
    }
  }
  else
  {
    // upscale rows: linear interpolation between adjacent source rows
    for(int i=0; i<result1.rows(); i++)
    {
      double new_i = i * row_inc;
      int i_part = int(new_i);
      double f_part = float_part(new_i);
      T *in_ptr_i = (*this)[i_part], *in_ptr_i1 = (*this)[i_part+1], *result1_ptr = result1[i];
      for(int j=0; j<result1.cols(); j++)
      {
        if(i_part+1 < this->rows())
          result1_ptr[j] = T(in_ptr_i[j] * (1.0 - f_part) + in_ptr_i1[j] * f_part);
        else
          result1_ptr[j] = T(in_ptr_i[j]);
      }
    }
  }
  assert(!result1.is_any_nan());
  float col_inc = float( 1.0 ) / col_factor;
  if(col_factor == 1.0)
    result2 = result1;
  else if(col_factor < 1.0) // subsample
  {
    for(int j=0; j<result2.cols(); j++)
    {
      float *vec = NULL;
      int sz = 0;
      get_subsample_vector(j * col_inc, col_inc, sz, vec);
      int j_beg = int(j * col_inc);
      for(int i=0; i<result1.rows(); i++)
      {
        float sum = 0;
        float norm = 0;
        T *result1_i = result1[i];
        for(int jj = j_beg, cp=0; jj < j_beg + sz; jj++, cp++)
          if(jj < this->cols())
          {
            sum += result1_i[jj] * vec[cp], norm += vec[cp];
          }
        result2[i][j] = T(sum / norm);
      }
      delete[] vec;
    }
  }
  else
  {
    // upscale columns: linear interpolation between adjacent columns
    for(int i=0; i<result2.rows(); i++)
    {
      T *result2_ptr = result2[i], *result1_ptr = result1[i];
      for(int j=0; j<result2.cols(); j++)
      {
        double new_j = j * col_inc;
        int i_part = int(new_j);
        double f_part = float_part(new_j);
        if(i_part+1 < this->cols())
          result2_ptr[j] = T(result1_ptr[i_part] * (1.0 - f_part) + result1_ptr[i_part+1] * f_part);
        else
          result2_ptr[j] = T(result1_ptr[i_part]);
      }
    }
  }
  assert(!result2.is_any_nan());
  return result2;
}
// Rotates the image by `angle` radians about its center using bilinear
// interpolation.  The output is enlarged (to odd dimensions) so the
// whole rotated image fits; destination pixels with no source are set
// to bg_val.  Implemented as an inverse mapping: for each output pixel,
// the source coordinate is computed incrementally and sampled.
template<class T>
_DPlane<T> _DPlane<T>::rotate_image(double angle, T bg_val) const
{
  // cerr << "in rotate image" << endl;
  // rotate the four corners to find the bounding box of the result
  DRect current_size = DRect(DPoint(0,0), _DMatrix<T>::size());
  DPoint new_corner1 = current_size.top_left().rotate(static_cast< float >(angle));
  DPoint new_corner2 = current_size.top_right().rotate(static_cast< float >(angle));
  DPoint new_corner3 = current_size.bottom_left().rotate(static_cast< float >(angle));
  DPoint new_corner4 = current_size.bottom_right().rotate(static_cast< float >(angle));
  // cout << " -- corners " << new_corner1 << " " << new_corner2 << " " << new_corner3 << " " << new_corner4 << endl;
  DPoint new_bottomright = elementwise_max( elementwise_max( new_corner1, new_corner2 ), elementwise_max( new_corner3, new_corner4 ) );
  DPoint new_topleft = elementwise_min( elementwise_min( new_corner1, new_corner2 ), elementwise_min( new_corner3, new_corner4 ) );
  DPoint new_size = new_bottomright - new_topleft + DPoint(5,5);
  // cout << " - sizes " << new_bottomright << " " << new_size << " " << new_topleft << endl;
  // force odd output dimensions so the center is a whole pixel
  int new_rows = new_size.row() + 1-(new_size.row() % 2);
  int new_cols = new_size.col() + 1-(new_size.col() % 2);
  // cerr << new_rows << " " << new_cols << endl;
  _DPlane<T> new_I(new_rows, new_cols);
  int row_half = (_DMatrix<T>::rows()/2);
  int col_half = (_DMatrix<T>::cols()/2);
  int new_row_half = (new_rows/2);
  int new_col_half = (new_cols/2);
  float c = static_cast< float >(cos(angle));
  float s = static_cast< float >(sin(angle));
  T *cp = new_I[0];
  for(int i=0; i<new_rows; ++i)
  {
    // source coordinate for column -1 of this output row; advanced by
    // (s, c) per output column inside the inner loop
    float i2 = (c*(i-new_row_half)+s*(-1-new_col_half)) + row_half;
    float j2 = (-s*(i-new_row_half)+c*(-1-new_col_half)) + col_half;
    for(int j=0; j<new_cols; ++j, ++cp)
    {
      i2 += s; j2 += c;
      if(i2 >= 0 && j2 >= 0 && i2 < _DMatrix<T>::rows()-1 && j2 < _DMatrix<T>::cols()-1)
      {
        // compute upper-left pixel
        int low_i = int(i2);
        int low_j = int(j2);
        float i_diff = i2-low_i;
        float j_diff = j2-low_j;
        T *cp1 = (*this)[low_i];
        T *cp2 = (*this)[low_i+1];
        // bilinear blend of the 2x2 source neighborhood
        *cp = T(((1-i_diff)*(1-j_diff)*cp1[low_j]) +
                ((1-i_diff)*j_diff*cp1[low_j+1]) +
                (i_diff*(1-j_diff)*cp2[low_j]) +
                (i_diff * j_diff * cp2[low_j+1]));
      }
      else
      {
        *cp = T(bg_val);
      }
    }
  }
  // cerr << "returning new I" << endl;
  return new_I;
}
// Rotates the image by `angle` radians about its center using
// nearest-neighbor sampling; the square output is the input's diagonal
// on a side so any rotation fits.
// NOTE(review): unmapped pixels are filled with T(1e100) as a sentinel;
// for integral T that conversion overflows (undefined/implementation-
// defined value) -- confirm callers only use floating-point planes here.
template<class T>
_DPlane<T> _DPlane<T>::rotate_image_nn(double angle) const
{
  int new_rows = int(ceil(sqrt((double) _DMatrix<T>::rows()*_DMatrix<T>::rows()+_DMatrix<T>::cols()*_DMatrix<T>::cols())));
  int new_cols = new_rows;
  _DPlane<T> new_I(new_rows, new_cols);
  new_I = T(1e100);
  int row_half = (_DMatrix<T>::rows()/2);
  int col_half = (_DMatrix<T>::cols()/2);
  int new_row_half = (new_rows/2);
  int new_col_half = (new_cols/2);
  float c = static_cast< float >(cos(angle));
  float s = static_cast< float >(sin(angle));
  T *cp = new_I[0];
  for(int i=0; i<new_rows; ++i)
  {
    // source coordinate for column -1, advanced by (s, c) per column
    float i2 = (c*(i-new_row_half)+s*(-1-new_col_half)) + row_half;
    float j2 = (-s*(i-new_row_half)+c*(-1-new_col_half)) + col_half;
    for(int j=0; j<new_cols; ++j, ++cp)
    {
      i2 += s; j2 += c;
      if(i2 < 0 || j2 < 0 || i2>=_DMatrix<T>::rows()-1 || j2>=_DMatrix<T>::cols()-1)
        continue;
      int rnd_i = (int) round(i2);
      int rnd_j = (int) round(j2);
      *cp = T((*this)[rnd_i][rnd_j]);
    }
  }
  return new_I;
}
// One pass of morphological thinning on a binary image: each interior
// pixel's 8-neighborhood is packed into an 8-bit index into a lookup
// table (built by make_thin_lookup_table, defined elsewhere) that
// decides whether the pixel survives.  Border pixels are left untouched.
// NOTE(review): the lookup table is static yet re-filled on every call
// -- redundant work, and the shared static buffer is not thread-safe.
template<class T>
_DPlane<T> _DPlane<T>::binary_thin(void) const
{
  static bool lookup_table[256];
  make_thin_lookup_table(lookup_table);
  _DPlane<T> result = (*this);
  for(int i=1; i<_DMatrix<T>::rows()-1; i++)
    for(int j=1; j<_DMatrix<T>::cols()-1; j++)
    {
      // encode the 8 neighbors (center excluded) as bits of `val`
      int val=0, n=1;
      for(int i2=-1; i2<=1; i2++)
      {
        T *result_row_cp = result[i+i2];
        for(int j2=-1; j2<=1; j2++)
        {
          if(i2 == 0 && j2 == 0)
            continue;
          if(result_row_cp[j+j2])
            val += n;
          n=n << 1;
        }
      }
      if(!lookup_table[val])
        result[i][j] = 0;
    }
  return result;
}
// Binary dilation with an all-ones square structuring element of the
// given side length.
template<class T>
_DPlane<T> _DPlane<T>::dilate(int square_size) const
{
  _DPlane<T> se(square_size, square_size);
  se = 1;
  return dilate(se);
}
// Binary dilation with an arbitrary structuring element, anchored at the
// element's center.
template<class T>
_DPlane<T> _DPlane<T>::dilate(const _DPlane<T> &kernel) const
{
  DPoint center(kernel.rows()/2, kernel.cols()/2);
  return dilate(kernel, center);
}
// Binary dilation with an arbitrary structuring element anchored at
// se_origin.  Nonzero kernel entries are precomputed as linear-index
// offsets into this image's buffer, then stamped around every set pixel.
// Output pixels are 0/1 regardless of the input's nonzero values.
// NOTE(review): the scan skips a fixed margin of
// kernel.rows()*this->cols() + kernel.cols() pixels at each end as a
// border guard; a structuring element whose extent (relative to
// se_origin) exceeds that margin could still write out of bounds --
// verify for large or strongly asymmetric kernels.
template<class T>
_DPlane<T> _DPlane<T>::dilate(const _DPlane<T> &kernel, const DPoint &se_origin) const
{
  int seorigin = kernel.cols() * se_origin.row() + se_origin.col();
  int *se2 = new int[kernel.total_pixel_count()];
  T *se = kernel[0];
  int numpoints = 0;
  // convert each set SE pixel to a (row, col) offset from the origin,
  // expressed as a single linear offset in this image's geometry
  for (int i=0; i < kernel.total_pixel_count(); i++)
  {
    if(se[i])
      se2[numpoints++] = ( (i / kernel.cols()) - seorigin / kernel.cols()) * this->cols() +
        ((i % kernel.cols())-(seorigin % kernel.cols()));
  }
  _DPlane<T> out(this->rows(), this->cols());
  out = 0;
  T *outbuf = out[0];
  T *sbuf = (*this)[0];
  int i_end = this->total_pixel_count() - kernel.rows() * this->cols() + kernel.cols();
  for (int i = kernel.rows() * this->cols() + kernel.cols(); i < i_end; i++)
  {
    if (sbuf[i])
    {
      outbuf[i] = 1;
      for (int j=0; j < numpoints; j++)
        outbuf[i+se2[j]] = 1;
    }
  }
  delete[] se2;
  return out;
}
// Explicit template instantiations for every supported pixel type, so
// the definitions in this .cpp are emitted for linkers of client code.
#define DECLARE(x) \
template class _DPlane<x>;
DECLARE(double)
DECLARE(short)
DECLARE(int)
DECLARE(float)
DECLARE(char)
DECLARE(unsigned char)
<file_sep>/src/experiment1.sh
#!/bin/bash
#
# Synthetic-graph experiments: for each graph type, sweep the noise ratio
# 0.0..0.9 and run 100 trials per ratio, skipping results that already
# exist.  The four previously copy-pasted loops are factored into one
# function.
#
# run_graph <graph_id> <n> <edge_density> <bias> <inc>
run_graph() {
  local id=$1 n=$2 density=$3 bias=$4 inc=$5
  local ratio eid outdir
  for ratio in `seq 0 9`; do
    outdir="results/graph${id}_uniform_ratio0${ratio}"
    mkdir -p "$outdir"
    for eid in `seq 1 100`; do
      if [ ! -f "$outdir/$eid" ]; then
        make synthetic n=$n edge_density=$density bias=$bias inc=$inc \
          noise_type=2 noise_ratio=0.${ratio} decay=0.9 \
          output="$outdir/$eid"
      fi
    done
  done
}

#graph type 1
run_graph 1 2000 0.2 1.0 0.0
#graph type 2
run_graph 2 2000 0.2 0.05 0.15
#graph type 3
run_graph 3 20000 0.002 1.0 0.0
#graph type 4
# NOTE(review): uses edge_density=0.2 at n=20000 while graph type 3 uses
# 0.002 -- preserved as-is, but worth confirming it is intentional.
run_graph 4 20000 0.2 0.005 0.015
<file_sep>/data/disco-bp/DLib/DIntegralImage.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DIntegralImage.h"
// Builds the summed-area table for `plane`: afterwards
// integral_image[i][j] holds the sum of all input pixels with row <= i
// and col <= j.  Computed as a horizontal prefix-sum pass followed by a
// vertical one over the row-prefix sums.
void DIntegralImage::construct_integral_image(DPlane &plane)
{
  integral_image = DPlane(plane.rows(), plane.cols());
  // horizontal pass
  double *in_ptr = plane[0];
  double *ii_ptr = integral_image[0];
  for(int i=0; i<plane.rows(); i++)
  {
    double sum_so_far = 0;
    for(int j=0; j<plane.cols(); j++, in_ptr++, ii_ptr++)
      sum_so_far = *ii_ptr = sum_so_far + *in_ptr;
  }
  // vertical pass
  int col_count = plane.cols();
  for(int j=0; j<plane.cols(); j++)
  {
    ii_ptr = integral_image[0]+j;
    double sum_so_far = 0;
    for(int i=0; i<plane.rows(); i++, ii_ptr += col_count)
      sum_so_far = *ii_ptr += sum_so_far;
  }
}
// Sum of the pixels inside the inclusive rectangle, via the standard
// four-corner summed-area-table identity:
//   S = II(b,r) - II(t-1,r) - II(b,l-1) + II(t-1,l-1)
// with any term whose index would leave the image treated as zero.
double DIntegralImage::get_sum(DRect &rect)
{
  const bool has_above = rect.top() != 0;
  const bool has_beside = rect.left() != 0;
  double above = has_above ? integral_image[rect.top()-1][rect.right()] : 0;
  double beside = has_beside ? integral_image[rect.bottom()][rect.left()-1] : 0;
  double corner = (has_above && has_beside) ? integral_image[rect.top()-1][rect.left()-1] : 0;
  return integral_image[rect.bottom()][rect.right()] + corner - above - beside;
}
<file_sep>/data/disco-bp/DLib/DPoint.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DPoint.h"
#include <iostream>
using namespace std;
// Component-wise arithmetic on points; row and col are combined
// independently.
template<class T>
_DPoint<T> operator+(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  T r = p1.row() + p2.row();
  T c = p1.col() + p2.col();
  return _DPoint<T>(r, c);
}

template<class T>
_DPoint<T> operator-(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  T r = p1.row() - p2.row();
  T c = p1.col() - p2.col();
  return _DPoint<T>(r, c);
}

// Subtracts the same scalar from both coordinates.
template<class T>
_DPoint<T> operator-(const _DPoint<T> &p1, double p2)
{
  T r = T(p1.row() - p2);
  T c = T(p1.col() - p2);
  return _DPoint<T>(r, c);
}

template<class T>
_DPoint<T> operator*(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  T r = p1.row() * p2.row();
  T c = p1.col() * p2.col();
  return _DPoint<T>(r, c);
}

// Scales both coordinates by a scalar factor.
template<class T>
_DPoint<T> operator*(const _DPoint<T> &p1, double factor)
{
  T r = T(p1.row() * factor);
  T c = T(p1.col() * factor);
  return _DPoint<T>(r, c);
}

// Divides both coordinates by a scalar.
template<class T>
_DPoint<T> operator/(const _DPoint<T> &p1, double p2)
{
  T r = T(p1.row() / p2);
  T c = T(p1.col() / p2);
  return _DPoint<T>(r, c);
}

template<class T>
_DPoint<T> operator/(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  T r = T(p1.row() / p2.row());
  T c = T(p1.col() / p2.col());
  return _DPoint<T>(r, c);
}
// Lexicographic ordering on (row, col): rows are compared first and the
// columns break ties.
template<class T>
bool operator<(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  if(p1.row() != p2.row())
    return p1.row() < p2.row();
  return p1.col() < p2.col();
}

template<class T>
bool operator>(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  if(p1.row() != p2.row())
    return p1.row() > p2.row();
  return p1.col() > p2.col();
}

template<class T>
bool operator<=(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  if(p1 == p2)
    return true;
  return p1 < p2;
}

template<class T>
bool operator>=(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  if(p1 == p2)
    return true;
  return p1 > p2;
}

template<class T>
bool operator==(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  return p1.col() == p2.col() && p1.row() == p2.row();
}

template<class T>
bool operator!=(const _DPoint<T> &p1, const _DPoint<T> &p2)
{
  return !(p1 == p2);
}
// Streams a point as "(row, col)".
template<class T>
ostream &operator<<(ostream &os, const _DPoint<T> &p)
{
  os << '(' << p.row() << ", " << p.col() << ')';
  return os;
}
// Component-wise sqrt of a point.
template<class T>
_DPoint<T> sqrt(const _DPoint<T> &p)
{
  return _DPoint<T>((T)sqrt((double) p.row()), (T)sqrt((double) p.col()));
}

// Component-wise exp of a point.
template<class T>
_DPoint<T> exp(const _DPoint<T> &p)
{
  return _DPoint<T>((T)exp((double) p.row()), (T)exp((double) p.col()));
}

// Component-wise natural log of a point.
template<class T>
_DPoint<T> log(const _DPoint<T> &p)
{
  return _DPoint<T>((T)log((double) p.row()), (T)log((double) p.col()));
}

// NOTE(review): unimplemented stub -- always returns 0 (the intended
// component-wise version is left commented out).  Callers relying on a
// real arctangent will silently get 0.
template<class T>
double atan(const _DPoint<T> &p)
{
  return 0; //_DPoint<T>((int)atan(p.row()), (int)atan(p.col()));
}
// Parses a point in the form "(row, col)" -- the inverse of operator<<.
// Skips spaces before each delimiter and asserts (rather than setting
// the stream's fail state) when the expected '(' ',' ')' characters are
// missing.
template<class T>
std::istream &operator>>(std::istream &is, _DPoint<T> &p){
  char c;
  do { is >> c; } while(c == ' ');
  assert(c=='(');
  is >> p._row;
  do { is >> c; } while(c == ' ');
  assert(c==',');
  is >> p._col;
  do { is >> c; } while(c == ' ');
  assert(c==')');
  return is;
}
// Explicit instantiations of every _DPoint operator/function for the
// supported coordinate types, so the template definitions in this .cpp
// are available to other translation units.
#define DECLARE(x) \
template _DPoint<x> operator+(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template _DPoint<x> operator-(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template _DPoint<x> operator-(const _DPoint<x> &p1, double p2); \
template _DPoint<x> operator*(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template _DPoint<x> operator*(const _DPoint<x> &p1, double factor); \
template bool operator<(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template bool operator>(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template bool operator<=(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template bool operator>=(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template bool operator==(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template bool operator!=(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template _DPoint<x> operator/(const _DPoint<x> &p1, double p2); \
template _DPoint<x> operator/(const _DPoint<x> &p1, const _DPoint<x> &p2); \
template _DPoint<x> sqrt(const _DPoint<x> &p); \
template _DPoint<x> log(const _DPoint<x> &p); \
template _DPoint<x> exp(const _DPoint<x> &p); \
template std::ostream &operator<<(std::ostream &os, const _DPoint<x> &p); \
template std::istream &operator>>(std::istream &is, _DPoint<x> &p);
DECLARE(double)
DECLARE(int)
DECLARE(float)
<file_sep>/data/disco-bp/DLib/DGaussianPyramid.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <DGaussianPyramid.h>
// Builds a Gaussian pyramid with `_scales` levels from the input plane.
// The old code pre-filled every level with a dummy 100x100 plane that
// compute_gaussian_pyramid immediately overwrote; that wasted
// allocation is removed.
DGaussianPyramid::DGaussianPyramid(int _scales, const DPlane &in_plane)
{
  plane_data = new DPlane[_scales];
  scale_count = _scales;
  compute_gaussian_pyramid(in_plane);
}
// Releases the per-scale plane array.
DGaussianPyramid::~DGaussianPyramid()
{
  delete[] plane_data;
}
// Deep copy constructor.  Copies directly instead of delegating to
// operator= as before: routing construction through assignment meant
// operator= ran with an uninitialized plane_data member, which blocks
// ever adding the delete[] that operator= needs to avoid leaking.
DGaussianPyramid::DGaussianPyramid(const DGaussianPyramid &other)
{
  scale_count = other.scales();
  plane_data = new DPlane[scale_count];
  for(int i=0; i<scale_count; i++)
    plane_data[i] = DPlane(other[i]);
}
// Deep-copy assignment: clones every plane of the other pyramid.
// NOTE(review): the previously held plane_data array is never freed, so
// assigning onto an already-initialized pyramid leaks it, and there is
// no self-assignment guard.  A delete[] cannot safely be added here in
// isolation while the copy constructor may delegate to this operator
// with an uninitialized plane_data pointer -- fix both together.
DGaussianPyramid &DGaussianPyramid::operator=(const DGaussianPyramid &other)
{
  scale_count = other.scales();
  plane_data = new DPlane[other.scales()];
  for(int i=0; i<scale_count; i++)
    plane_data[i] = DPlane(other[i]);
  return *this;
}
// Fills plane_data with the pyramid: level 0 is the input image, and
// each subsequent level is the previous one smoothed with a 7x7
// Gaussian (sigma 1.0) and subsampled by 2 in each direction.
// NOTE(review): declared const but rewrites the planes reached through
// the plane_data pointer (legal C++, but surprising); also prints
// per-scale progress to stdout.
void DGaussianPyramid::compute_gaussian_pyramid(const DPlane &in_image) const
{
  DGaussianKernel g_filter(1.0, 1.0, 7, 7);
  // first plane (original image)
  plane_data[0] = in_image;
  for(int i=1; i<scales(); i++)
  {
    printf("scale %d\n", i);
    DPlane smoothed = plane_data[i-1].cross_correlate(g_filter);
    plane_data[i] = smoothed.subsample(2, 2);
  }
}
<file_sep>/src/utils.py
import numpy
import os
def process(folder):
    """Aggregate the per-ratio ``ratio<r>_summary`` files under *folder*.

    Each summary file must contain exactly 100 lines of four
    space-separated floats: TL2 min-loss, TL2 time, CD min-loss, CD time.

    Returns an 11-tuple (min_TL2, median_TL2, max_TL2, min_CD,
    median_CD, max_CD, tmean_TL2, tmean_CD, zp_TL2, zp_CD, ratios),
    where zp_* is the fraction of runs with |loss| < 1e-2 and ratios is
    1 - r/100 for r = 0..99.
    """
    min_TL2 = []
    median_TL2 = []
    max_TL2 = []
    min_CD = []
    median_CD = []
    max_CD = []
    tmean_TL2 = []
    tmean_CD = []
    zp_TL2 = []
    zp_CD = []
    ratios = range(100)
    for ratio in ratios:
        ml_TL2 = []
        time_TL2 = []
        ml_CD = []
        time_CD = []
        with open(folder+'/ratio'+str(ratio)+'_summary', 'r') as fin:
            lines = fin.readlines()
            # one line per experiment run is required
            assert len(lines) == 100
            for line in lines:
                vals = [float(token) for token in line.strip().split(' ')]
                ml_TL2.append(vals[0])
                time_TL2.append(vals[1])
                ml_CD.append(vals[2])
                time_CD.append(vals[3])
        if len(ml_TL2) > 0 and len(ml_CD) > 0:
            min_TL2.append(min(ml_TL2))
            median_TL2.append(numpy.median(ml_TL2))
            max_TL2.append(max(ml_TL2))
            min_CD.append(min(ml_CD))
            median_CD.append(numpy.median(ml_CD))
            max_CD.append(max(ml_CD))
            # fraction of runs that effectively reached zero loss
            zp_TL2.append(len([e for e in ml_TL2 if abs(e) < 1e-2])/100.0)
            zp_CD.append(len([e for e in ml_CD if abs(e) < 1e-2])/100.0)
            tmean_CD.append(numpy.mean(time_CD))
            tmean_TL2.append(numpy.mean(time_TL2))
    ratios = [1.0-ratio/100.0 for ratio in ratios]
    return min_TL2, median_TL2, max_TL2, min_CD, median_CD, max_CD, tmean_TL2, \
        tmean_CD, zp_TL2, zp_CD, ratios
def _best_loss(log_path):
    """Parse one solver log and return ``(min_loss, elapsed)``.

    Each line holds comma-separated ``key=value`` tokens; ``min_loss=``
    and ``elapsed=`` are extracted per line.  Returns the smallest loss
    seen and the elapsed time recorded on the same line, stopping early
    once the loss drops below 1e-5.  (A line without a ``min_loss``
    token counts as loss 0.0, preserving the original behavior.)
    """
    min_loss = 1e100
    time = 0.0
    with open(log_path) as fin:
        for line in fin:
            ml = 0.0
            t = 0.0
            for token in line.strip().split(', '):
                if token.startswith('min_loss'):
                    ml = float(token.split('=')[1])
                if token.startswith('elapsed'):
                    t = float(token.split('=')[1])
            if min_loss > ml:
                min_loss = ml
                time = t
            if min_loss < 1e-5:
                break
    return min_loss, time


def process_in_details(folder):
    """Aggregate per-run ``ratio<r>_<eid>.TL2`` / ``.CD`` log pairs.

    Same return shape as process(): an 11-tuple of min/median/max losses
    for TL2 and CD, mean times, zero-loss fractions, and ratios.  Only
    runs where both logs exist are counted; per-ratio statistics are
    appended only when at least one such pair was found.

    Refactored: the previously duplicated inline log parsing for the two
    solvers is shared via _best_loss().
    """
    min_TL2 = []
    median_TL2 = []
    max_TL2 = []
    min_CD = []
    median_CD = []
    max_CD = []
    tmean_TL2 = []
    tmean_CD = []
    zp_TL2 = []
    zp_CD = []
    ratios = range(100)
    for ratio in ratios:
        ml_TL2 = []
        time_TL2 = []
        ml_CD = []
        time_CD = []
        for eid in range(100):
            name_TL2 = folder+'/ratio'+str(ratio)+'_'+str(eid)+'.TL2'
            name_CD = folder+'/ratio'+str(ratio)+'_'+str(eid)+'.CD'
            if os.path.isfile(name_TL2) and os.path.isfile(name_CD):
                loss, t = _best_loss(name_TL2)
                ml_TL2.append(loss)
                time_TL2.append(t)
                loss, t = _best_loss(name_CD)
                ml_CD.append(loss)
                time_CD.append(t)
        if len(ml_TL2) > 0 and len(ml_CD) > 0:
            min_TL2.append(min(ml_TL2))
            median_TL2.append(numpy.median(ml_TL2))
            max_TL2.append(max(ml_TL2))
            min_CD.append(min(ml_CD))
            median_CD.append(numpy.median(ml_CD))
            max_CD.append(max(ml_CD))
            zp_TL2.append(len([e for e in ml_TL2 if abs(e) < 1e-2])/100.0)
            zp_CD.append(len([e for e in ml_CD if abs(e) < 1e-2])/100.0)
            tmean_CD.append(numpy.mean(time_CD))
            tmean_TL2.append(numpy.mean(time_TL2))
    ratios = [1.0-ratio/100.0 for ratio in ratios]
    return min_TL2, median_TL2, max_TL2, min_CD, median_CD, max_CD, tmean_TL2, \
        tmean_CD, zp_TL2, zp_CD, ratios
<file_sep>/data/disco-bp/DLib/DMatrix.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DMatrix.h"
#include <string>
#include <vector>
#include <complex>
#include <fstream>
using namespace std;
// Releases the row-pointer table and the contiguous element buffer.
// Safe to call repeatedly: both pointers are nulled after deletion.
template<class T>
void _DTwoDimArray<T>::deallocate_storage()
{
  if(data)
  {
    delete[] data;
    delete[] data_area;
    data = 0;
    data_area = 0;
  }
}

// (Re)allocates storage for _rows x _cols elements: one contiguous
// data_area buffer plus a data[] table of per-row pointers into it.
// Any previous allocation is freed first; element values are left
// uninitialized.
template<class T>
void _DTwoDimArray<T>::initialize_storage()
{
  // profiler->begin(2);
  if(data)
    deallocate_storage();
  if(_rows > 0)
  {
    data = new T *[_rows];
    data_area = new T[_rows * _cols];
  }
  else
  {
    data = 0;
    data_area = 0;
  }
  // point each data[i] at the start of row i within data_area
  T *cp = data_area;
  for(int i=0; i<_rows; i++, cp+=_cols)
    data[i] = cp;
  // profiler->end(2);
}
// Default-constructs an empty (0x0) array with no storage.
template<class T>
_DTwoDimArray<T>::_DTwoDimArray()
{
  data = 0;
  data_area = 0;
  _rows = _cols = 0;
}

// Allocates an uninitialized __rows x __cols array.
template<class T>
_DTwoDimArray<T>::_DTwoDimArray(int __rows, int __cols)
{
  _rows = __rows;
  _cols = __cols;
  data = 0;
  data_area = 0;
  initialize_storage();
}

// Allocates a __rows x __cols array and copies `array` into it
// (row-major; must hold at least __rows*__cols elements).
template<class T>
_DTwoDimArray<T>::_DTwoDimArray(int __rows, int __cols, const T *array)
{
  _rows = __rows;
  _cols = __cols;
  data = 0;
  data_area = 0;
  initialize_storage();
  memcpy(data_area, array, _rows * _cols * sizeof(T));
}
// Builds a 1 x vec.size() row matrix from a std::vector.
template<class T>
_DMatrix<T>::_DMatrix(const std::vector<T> &vec)
{
  _rows = 1;
  _cols = (int) vec.size();
  data = 0;
  data_area = 0;
  initialize_storage();
  set_row(0, vec);
}

// Builds a __rows x __cols matrix with every element set to val.
template<class T>
_DMatrix<T>::_DMatrix(int __rows, int __cols, T val)
{
  _rows = __rows;
  _cols = __cols;
  data = 0;
  data_area = 0;
  initialize_storage();
  (*this) = val;
}

// Builds a matrix filled per `type`: uniform random in [0,1) (via
// drand48 -- note the generator is not seeded here), the identity,
// all zeros, or all ones.  Throws a std::string for unknown types.
template<class T>
_DMatrix<T>::_DMatrix(int __rows, int __cols, matrix_init_type type)
{
  _rows = __rows;
  _cols = __cols;
  data = 0;
  data_area = 0;
  initialize_storage();
  if(type == random)
  {
    int sz = _rows * _cols;
    for(int i=0; i<sz; i++)
      data_area[i] = T(drand48());
  }
  else if(type == identity)
  {
    for(int i=0; i<_rows; i++)
      for(int j=0; j<_cols; j++)
        if(i == j)
          (*this)[i][j] = 1;
        else
          (*this)[i][j] = 0;
  }
  else if(type == zeros)
  {
    (*this) = 0;
  }
  else if(type == ones)
  {
    (*this) = 1;
  }
  else
    throw(std::string("unknown matrix initializer type"));
}
// True iff both matrices have identical dimensions.
template<class T>
bool same_size(const _DMatrix<T> &m1, const _DMatrix<T> &m2)
{
  return m1.rows() == m2.rows() && m1.cols() == m2.cols();
}
// Element-wise addition; throws if the shapes differ.
template<class T>
_DMatrix<T> _DMatrix<T>::operator+(const _DMatrix<T> &other) const
{
  _DMatrix<T> result(_rows, _cols);
  if(!same_size(*this, other))
    throw string("Size mismatch in DMatrix operator +");
  const int count = _rows * _cols;
  T *a = data_area, *b = other.data_area, *dest = result.data_area;
  for(int k = 0; k < count; k++)
    dest[k] = a[k] + b[k];
  return result;
}

// Adds a scalar to every element.
template<class T>
_DMatrix<T> _DMatrix<T>::operator+(T value) const
{
  _DMatrix<T> result(_rows, _cols);
  const int count = _rows * _cols;
  T *a = data_area, *dest = result.data_area;
  for(int k = 0; k < count; k++)
    dest[k] = a[k] + value;
  return result;
}

// Element-wise subtraction; throws if the shapes differ.
template<class T>
_DMatrix<T> _DMatrix<T>::operator-(const _DMatrix<T> &other) const
{
  _DMatrix<T> result(_rows, _cols);
  if(!same_size(*this, other))
    throw string("Size mismatch in DMatrix operator -");
  const int count = _rows * _cols;
  T *a = data_area, *b = other.data_area, *dest = result.data_area;
  for(int k = 0; k < count; k++)
    dest[k] = a[k] - b[k];
  return result;
}

// Subtracts a scalar from every element.
template<class T>
_DMatrix<T> _DMatrix<T>::operator-(T value) const
{
  _DMatrix<T> result(_rows, _cols);
  const int count = _rows * _cols;
  T *a = data_area, *dest = result.data_area;
  for(int k = 0; k < count; k++)
    dest[k] = a[k] - value;
  return result;
}

// Unary negation of every element.
template<class T>
_DMatrix<T> operator-(const _DMatrix<T> &m)
{
  _DMatrix<T> result(m.rows(), m.cols());
  const int count = m.rows() * m.cols();
  T *src = m.data_ptr(), *dest = result.data_ptr();
  for(int k = 0; k < count; k++)
    dest[k] = -src[k];
  return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator*(const _DMatrix<T> &other) const
{
if(_cols != other._rows)
throw string("Size mismatch in DMatrix operator *");
_DMatrix<T> result(_rows, other._cols);
for(int i=0; i<_rows; i++)
{
T *data_i = data[i];
for(int j=0; j<other._cols; j++)
{
T res=0;
for(int k=0; k<other._rows; k++)
res += data_i[k] * other.data[k][j];
result[i][j] = res;
}
}
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator*(T value) const
{
_DMatrix<T> result(_rows, _cols);
T *cp1 = data_area, *cp_out = result.data_area;
for(int i=0; i<_rows; i++)
for(int j=0; j<_cols; j++, cp1++, cp_out++)
*cp_out = T(*cp1 * value);
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator/(double value) const
{
_DMatrix<T> result(_rows, _cols);
T *cp1 = data_area, *cp_out = result.data_area;
for(int i=0; i<_rows; i++)
for(int j=0; j<_cols; j++, cp1++, cp_out++)
*cp_out = T(*cp1 / value);
return result;
}
template<class T>
_DMatrix<T> operator*(T value, const _DMatrix<T> &other)
{
return(other * value);
}
template<class T>
void _DMatrix<T>::operator+=(const _DMatrix<T> &other)
{
assert(same_size(other, *this));
T *in = other.data_ptr();
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] += in[i];
}
template<class T>
void _DMatrix<T>::operator-=(const _DMatrix<T> &other)
{
assert(same_size(other, *this));
T *in = other.data_ptr();
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] -= in[i];
}
template<class T>
void _DMatrix<T>::operator+=(T value)
{
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] += value;
}
template<class T>
void _DMatrix<T>::operator-=(T value)
{
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] -= value;
}
template<class T>
void _DMatrix<T>::operator*=(double value)
{
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] = T(out[i] * value);
}
template<class T>
void _DMatrix<T>::operator/=(T value)
{
T *out = data_ptr();
int count = rows() * cols();
for(int i=0; i<count; i++)
out[i] /= value;
}
template<class T>
T& _DMatrix<T>::operator[](const DPoint &point) const
{
return data[point.row()][point.col()];
}
template<class T>
_DTwoDimArray<T>::~_DTwoDimArray()
{
deallocate_storage();
}
template<class T>
_DTwoDimArray<T> &_DTwoDimArray<T>::operator=(const _DTwoDimArray<T> &other)
{
if(this == &other)
return *this;
// profiler->begin(4);
if(!data || _rows != other.rows() || _cols != other.cols())
{
_rows = other.rows();
_cols = other.cols();
initialize_storage();
}
memcpy(data_area, other.data_area, _rows * _cols * sizeof(T));
// profiler->end(4);
return *this;
}
template<class T>
_DMatrix<T> &_DMatrix<T>::operator=(T other)
{
T *cp = data_ptr();
int sz = rows() * cols();
for(int i=0; i<sz; i++)
*(cp++) = other;
return *this;
}
template<class T>
_DTwoDimArray<T>::_DTwoDimArray(const _DTwoDimArray<T> &other)
{
assert(this != &other);
data = 0;
data_area = 0;
*this = other;
}
template<class T>
_DMatrix<T> _DMatrix<T>::transpose() const
{
  // Returns a new matrix with rows and columns exchanged.
  _DMatrix<T> result(_cols, _rows);
  for(int i=0; i<_rows; i++)
  {
    const T *src_row = data[i];
    for(int j=0; j<_cols; j++)
      result[j][i] = src_row[j];
  }
  return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::rotate_180() const
{
  // Rotates the matrix by 180 degrees, which is equivalent to reversing
  // the row-major element order.
  _DMatrix<T> result(_rows, _cols);
  int sz = _rows * _cols;
  const T *src = data_ptr();
  T *dst = result.data_ptr();
  for(int i=0; i<sz; i++)
    dst[i] = src[sz - 1 - i];
  return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::flip_horizontal() const
{
_DMatrix<T> result(_rows, _cols);
T *out_cp = result.data_ptr();
for(int i=0; i<_rows; i++)
{
const T *in_cp = data[i] + _cols - 1;
for(int j=0; j<_cols; j++)
*(out_cp++) = *(in_cp--);
}
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::flip_vertical() const
{
_DMatrix<T> result(_rows, _cols);
T *out_cp = result.data_ptr();
for(int i=0; i<_rows; i++)
{
const T *in_cp = data[_rows-i-1];
for(int j=0; j<_cols; j++)
*(out_cp++) = *(in_cp++);
}
return result;
}
template<class T>
_DMatrix<T> operator+(T value, const _DMatrix<T> &other)
{
return(other + value);
}
template<class T>
_DMatrix<T> operator-(T value, const _DMatrix<T> &other)
{
_DMatrix<T> result(other.rows(), other.cols());
T *cp1 = other.data_area, *cp_out = result.data_area;
for(int i=0; i<other.rows(); i++)
for(int j=0; j<other.cols(); j++, cp1++, cp_out++)
*cp_out = -*cp1 + value;
return result;
}
template<class T>
istream &operator>>(istream &is, _DMatrix<T> &matrix)
{
int _rows=0, _cols=0;
is >> _rows >> _cols;
matrix = _DMatrix<T>(_rows, _cols);
for(int i=0; i<matrix.rows(); i++)
for(int j=0; j<matrix.cols(); j++)
is >> matrix[i][j];
return is;
}
template<class T>
ostream &operator<<(ostream &os, const _DMatrix<T> &matrix)
{
ofstream os2;
os2.copyfmt(os);
// ios::fmtflags fflags = os.flags();
os << matrix.rows() << " " << matrix.cols() << endl;
for(int i=0; i<matrix.rows(); i++)
{
for(int j=0; j<matrix.cols(); j++)
{
os.copyfmt(os2);
os << matrix[i][j] << " ";
}
os << endl;
}
return os;
}
template<class T2>
void fread(_DMatrix<T2> &matrix, FILE *fp, bool enforce_size)
{
  // Reads a matrix written by fwrite(): a header of (rows, cols, element
  // size) followed by the raw row-major data.
  //
  // Fixes over the previous version: every fread() return value is now
  // checked (a truncated file used to leave the matrix silently filled with
  // garbage), and the element-size check is a thrown error instead of an
  // assert (which vanishes under NDEBUG).
  int rows, cols, type;
  if(fread(&rows, sizeof(int), 1, fp) != 1 ||
     fread(&cols, sizeof(int), 1, fp) != 1 ||
     fread(&type, sizeof(int), 1, fp) != 1)
    throw(std::string("short read of matrix header during fread"));
  if(type != (int) sizeof(T2))
    throw(std::string("element size mismatch during fread"));
  if(rows < 0 || cols < 0)
    throw(std::string("negative matrix dimensions during fread"));
  if(enforce_size && (matrix.rows() != rows || matrix.cols() != cols))
    throw(std::string("unexpected matrix size encountered during fread"));
  if(!enforce_size)
    matrix = _DMatrix<T2>(rows, cols);
  size_t count = (size_t) rows * (size_t) cols;
  if(fread(matrix.data_area, sizeof(T2), count, fp) != count)
    throw(std::string("short read of matrix data during fread"));
}
template<class T2>
void fread(_DMatrix<T2> &matrix, FILE *fp)
{
return fread(matrix, fp, false);
}
template<class T2>
void fwrite(const _DMatrix<T2> &matrix, FILE *fp)
{
int rows = matrix.rows(), cols = matrix.cols(), type = sizeof(T2);
fwrite(&rows, 1, sizeof(int), fp);
fwrite(&cols, 1, sizeof(int), fp);
fwrite(&type, 1, sizeof(int), fp);
fwrite(matrix.data_area, rows*cols, sizeof(T2), fp);
}
template<class T>
_DMatrix<T> _DMatrix<T>::LU_factor()
{
  // Gaussian elimination without pivoting: for each pivot column j, every
  // row below the pivot is reduced by subtracting a multiple of the
  // (already reduced) pivot row, leaving the result upper-triangular.
  //
  // NOTE(review): the previous version computed the multiplier as
  // result[j][j] / result[j][i] and subtracted a multiple of the row being
  // eliminated (taken from the *unreduced* input matrix) from itself, which
  // does not perform elimination.  No pivoting is done, so a zero pivot
  // still produces inf/nan entries -- TODO confirm callers only pass
  // well-conditioned matrices.
  _DMatrix<T> result((*this));
  for(int j=0; j<cols(); j++)
    for(int i=j+1; i<rows(); i++)
    {
      T alpha = result[i][j] / result[j][j];
      _DMatrix<T> pivot_row = result.extract_row(j);
      _DMatrix<T> this_row = result.extract_row(i);
      result.set_row(i, this_row - pivot_row * alpha);
    }
  return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::extract_row(int row) const
{
  // Returns the given row as a 1 x cols() matrix (bounds-checked by extract()).
  return extract(DRect(row, 0, row, cols()-1));
}
template<class T>
_DMatrix<T> _DMatrix<T>::extract_col(int col) const
{
  // Returns the given column as a rows() x 1 matrix (bounds-checked by extract()).
  return extract(DRect(0, col, rows()-1, col));
}
template<class T>
_DMatrix<T> _DMatrix<T>::extract_rows(const std::vector<int> &row_list) const
{
_DMatrix<T> result((int) row_list.size(), cols());
vector<int>::const_iterator iter;
int out_i=0;
for(iter = row_list.begin(); iter != row_list.end(); ++iter, ++out_i)
{
result.copy_row_from(*this, *iter, out_i);
}
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::extract_cols(const std::vector<int> &col_list) const
{
_DMatrix<T> result(rows(),(int) col_list.size());
vector<int>::const_iterator iter;
int out_j=0;
for(iter = col_list.begin(); iter != col_list.end(); ++iter, ++out_j)
{
result.copy_col_from(*this, *iter, out_j);
}
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::extract(const DRect &rect) const
{
  // Returns a copy of the submatrix covered by rect (inclusive bounds).
  // Throws a descriptive string if rect extends outside this matrix.
  if(rect.top() < 0 || rect.left() < 0 || rect.right() >= cols() || rect.bottom() >= rows())
  {
    ostringstream oss;
    oss << "in _DMatrix::extract() : couldn't extract " << rect << " from " << DRect(DPoint(0,0), size()) << endl;
    throw oss.str();
  }
  _DMatrix<T> result(rect.height(), rect.width());
  // Degenerate (empty) rectangle: nothing to copy.
  if(result.rows() == 0 || result.cols() == 0)
    return result;
  T *cp = result.data_ptr();
  // Hoist the loop bounds, then copy the requested window row by row
  // into the contiguous output buffer.
  int rect_bottom = rect.bottom(), rect_right = rect.right();
  for(int i=rect.top(); i <= rect_bottom; i++)
  {
    const T *in_cp = (*this)[i];
    for(int j=rect.left(); j <= rect_right; j++, cp++)
      *cp = in_cp[j];
  }
  return result;
}
template<class T>
void _DMatrix<T>::set_submatrix(const DPoint &pt, const _DMatrix<T> &in)
{
if(!(pt.row() + in.rows() <= rows() && pt.row() >= 0) ||
!(pt.col() + in.cols() <= cols() && pt.col() >= 0))
{
ostringstream oss;
oss << "in _DMatrix::set_submatrix() : couldn't do set_submatrix at " << pt << " on matrix of size " << size() << " with matrix of size " << in.size() << endl;
throw oss.str();
}
const T *in_ptr = in.data_ptr();
for(int i=0; i<in.rows(); i++)
{
T *out_ptr = (*this)[i+pt.row()] + pt.col();
for(int j=0; j<in.cols(); j++)
out_ptr[j]=*(in_ptr++);
}
}
template<class T>
void _DMatrix<T>::set_submatrix(const DRect &rect, T val)
{
assert(rect.top() >= 0 && rect.left() >= 0 && rect.bottom() < rows() && rect.right() < cols());
for(int i=rect.top(); i <= rect.bottom(); i++)
{
T *out_ptr = (*this)[i];
for(int j=rect.left(); j <= rect.right(); j++)
out_ptr[j] = val;
}
}
template<class T>
void _DMatrix<T>::set_row(int row, const _DMatrix<T> &in)
{
assert(in.cols() == cols());
assert(row >= 0 && row < rows());
copy_row_from(in, 0, row);
}
template<class T>
void _DMatrix<T>::set_row(int row, const std::vector<T> &in)
{
assert(int(in.size()) == cols());
assert(row >= 0 && row < rows());
T *out_cp = (*this)[row];
typename std::vector<T>::const_iterator iter;
for(iter = in.begin(); iter != in.end(); ++iter, ++out_cp)
*out_cp = *iter;
}
template<class T>
void _DMatrix<T>::set_col(int col, const _DMatrix<T> &in)
{
assert(in.rows() == rows());
assert(col >= 0 && col < cols());
copy_col_from(in, 0, col);
}
template<class T>
_DMatrix<T> _DMatrix<T>::reshape(int new_rows, int new_cols)
{
assert(new_rows * new_cols == rows() * cols());
_DMatrix<T> result(new_rows, new_cols);
T *in_cp = data_ptr();
T *out_cp = result.data_ptr();
for(int i=0; i<new_rows*new_cols; i++, out_cp++, in_cp++)
*out_cp = *in_cp;
return result;
}
template<class T>
_DMatrix<T> sqrt(const _DMatrix<T> &m1)
{
_DMatrix<T> result(m1.rows(), m1.cols());
T *in_cp = m1.data_ptr(), *out_cp = result.data_ptr();
for(int i=0; i<m1.rows(); i++)
for(int j=0; j<m1.cols(); j++, in_cp++, out_cp++)
*out_cp = T(sqrt((double) *in_cp));
return result;
}
template<class T>
_DMatrix<T> sqr(const _DMatrix<T> &m1)
{
_DMatrix<T> result(m1.rows(), m1.cols());
T *in_cp = m1.data_ptr(), *out_cp = result.data_ptr();
for(int i=0; i<m1.rows(); i++)
for(int j=0; j<m1.cols(); j++, in_cp++, out_cp++)
*out_cp = (*in_cp) * (*in_cp);
return result;
}
template<class T>
void _DMatrix<T>::sqr_ip()
{
T *in_cp = data_ptr();
for(int i=0; i<rows(); i++)
for(int j=0; j<cols(); j++, in_cp++)
*in_cp = (*in_cp) * (*in_cp);
return;
}
template<class T>
_DMatrix<T> pointwise_multiply(const _DMatrix<T> &m1, const _DMatrix<T> &m2)
{
assert(same_size(m1, m2));
_DMatrix<T> result(m1.rows(), m2.cols());
const T *cp1 = m1.data_ptr(), *cp2 = m2.data_ptr();
T *out_cp = result.data_ptr();
int count = m1.rows() * m1.cols();
for(int i=0; i<count; i++)
out_cp[i] = cp1[i] * cp2[i];
return result;
}
template<class T>
_DMatrix<T> pointwise_divide(const _DMatrix<T> &m1, const _DMatrix<T> &m2)
{
assert(same_size(m1, m2));
_DMatrix<T> result(m1.rows(), m2.cols());
for(int i=0; i<m1.rows(); i++)
for(int j=0; j<m1.cols(); j++)
result[i][j] = m1[i][j] / m2[i][j];
return result;
}
template<class T>
_DMatrix<T> pointwise_min(const _DMatrix<T> &m1, const _DMatrix<T> &m2)
{
assert(same_size(m1, m2));
_DMatrix<T> result(m1.rows(), m2.cols());
T *cp1 = m1.data_ptr(), *cp2 = m2.data_ptr(), *cpr = result.data_ptr();
for(int i=0; i<m1.rows(); i++)
for(int j=0; j<m1.cols(); j++, cp1++, cp2++, cpr++)
if(*cp1 < *cp2)
*cpr = *cp1;
else
*cpr = *cp2;
return result;
}
template<class T>
_DMatrix<T> pointwise_max(const _DMatrix<T> &m1, const _DMatrix<T> &m2)
{
assert(same_size(m1, m2));
_DMatrix<T> result(m1.rows(), m2.cols());
T *cp1 = m1.data_ptr(), *cp2 = m2.data_ptr(), *cpr = result.data_ptr();
for(int i=0; i<m1.rows(); i++)
for(int j=0; j<m1.cols(); j++, cp1++, cp2++, cpr++)
if(*cp1 > *cp2)
*cpr = *cp1;
else
*cpr = *cp2;
return result;
}
template<class T>
_DMatrix<T> pointwise_min(const _DMatrix<T> &m1, T val)
{
_DMatrix<T> result(m1);
T *cpr = result.data_ptr();
int sz = m1.rows() * m1.cols();
for(int i=0; i<sz; i++)
if(val < cpr[i])
cpr[i] = val;
return result;
}
template<class T>
_DMatrix<T> pointwise_max(const _DMatrix<T> &m1, T val)
{
_DMatrix<T> result(m1);
T *cpr = result.data_ptr();
int sz = m1.rows() * m1.cols();
for(int i=0; i<sz; i++)
if(val > cpr[i])
cpr[i] = val;
return result;
}
template<class T>
_DMatrix<T> fabs(const _DMatrix<T> &m)
{
_DMatrix<T> result(m.rows(), m.cols());
const T *in = m.data_ptr();
T *out = result.data_ptr();
int sz = m.rows() * m.cols();
for(int i=0; i<sz; i++)
out[i] = T(fabs(in[i]));
return result;
}
template<class T>
_DMatrix<T> log(const _DMatrix<T> &m)
{
_DMatrix<T> result(m.rows(), m.cols());
const T *in = m.data_ptr();
T *out = result.data_ptr();
int sz = m.rows() * m.cols();
for(int i=0; i<sz; i++)
out[i] = T(log((double) in[i]));
return result;
}
template<class T>
_DMatrix<T> exp(const _DMatrix<T> &m, bool fast)
{
_DMatrix<T> result(m.rows(), m.cols());
const T *in = m.data_ptr();
T *out = result.data_ptr();
int sz = m.rows() * m.cols();
if(!fast)
for(int i=0; i<sz; i++)
out[i] = T(exp((double) in[i]));
else
for(int i=0; i<sz; i++)
out[i] = T(fast_exp(in[i]));
return result;
}
template<class T>
_DMatrix<T> exp(const _DMatrix<T> &m)
{
return exp(m, false);
}
// compute mean of all entries in matrix
template<class T>
T _DMatrix<T>::mean() const
{
  // Note: for integer T this is integer division, as before.
  int count = rows() * cols();
  return sum() / count;
}
// compute sum of all entries in matrix
template<class T>
T _DMatrix<T>::sum() const
{
  T total = 0;
  const T *cp = data_ptr();
  // Walk the contiguous storage front to back (same accumulation order
  // as before, so floating-point results are unchanged).
  for(int remaining = rows() * cols(); remaining > 0; remaining--, cp++)
    total += *cp;
  return total;
}
// compute sum of all entries in each matrix row
template<class T>
_DMatrix<T> _DMatrix<T>::sum_rows() const
{
_DMatrix<T> result(rows(), 1);
const T *in_ptr = data_ptr();
for(int i=0; i<rows(); i++)
{
T row_sum = 0;
for(int j=0; j<cols(); j++, in_ptr++)
row_sum += *in_ptr;
result[i][0] = row_sum;
}
return result;
}
// compute sum of all entries in each matrix column
template<class T>
_DMatrix<T> _DMatrix<T>::sum_cols() const
{
_DMatrix<T> result(1, cols());
result = 0;
T *res_ptr = result[0];
for(int i=0; i<rows(); i++)
{
const T *row_ptr = (*this)[i];
for(int j=0; j<cols(); j++)
res_ptr[j] += row_ptr[j];
}
return result;
}
// compute median of all entries in matrix
template<class T>
T _DMatrix<T>::median() const
{
  // Flatten all entries into a vector, order it, and take the middle
  // element (the upper median when the count is even).
  int sz = rows() * cols();
  const T *cp = data_ptr();
  vector<T> vect(cp, cp + sz);
  sort(vect.begin(), vect.end());
  return vect[sz / 2];
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator==(T value)
{
_DMatrix<T> result(rows(), cols());
int sz = rows() * cols();
const T *in_ptr = data_ptr();
T *out_ptr = result.data_ptr();
for(int i=0; i<sz; i++)
out_ptr[i] = (in_ptr[i] == value);
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator<(T value)
{
_DMatrix<T> result(rows(), cols());
for(int i=0; i<rows(); i++)
for(int j=0; j<cols(); j++)
result[i][j] = (*this)[i][j] < value;
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::operator>(T value)
{
_DMatrix<T> result(rows(), cols());
for(int i=0; i<rows(); i++)
for(int j=0; j<cols(); j++)
result[i][j] = (*this)[i][j] > value;
return result;
}
// computes the mean of *each column* of the input matrix.
//
template<class T>
_DMatrix<T> _DMatrix<T>::means() const
{
_DMatrix<T> result(1, cols());
T *res_ptr = result.data_ptr();
const T *in_ptr = data_ptr();
for(int j=0; j<cols(); j++)
{
const T *in_cp = in_ptr+j;
T mean=0;
for(int i=0; i<rows(); i++, in_cp += cols())
mean += *(in_cp);
res_ptr[j] = mean / (rows());
}
return result;
}
template<class T>
_DMatrix<T> _DMatrix<T>::trimmed_means(int leaveout_count) const
{
_DMatrix<T> sorted = (*this);
sorted.sort_cols();
assert(sorted.rows() > leaveout_count * 2);
return (sorted.extract(DRect(leaveout_count, 0, rows()-1-leaveout_count, cols()-1))).means();
}
template<class T>
void _DMatrix<T>::sort_cols()
{
assert(0);
// (*this) = transpose();
// sort_rows();
// (*this) = transpose();
}
template<class T>
int compar_sortrows(const void *t1, const void *t2)
{
  // qsort()-style comparator for two elements of type T:
  // negative / zero / positive for less-than / equal / greater-than.
  const T &lhs = *((const T *) t1);
  const T &rhs = *((const T *) t2);
  if(lhs > rhs)
    return 1;
  if(lhs < rhs)
    return -1;
  return 0;
}
// FIXME: get rid of global variable
int __sort_colcount = 0;
template<class T>
int compar_sortwholerows(const void *t1, const void *t2)
{
T *i1 = (T *) t1, *i2 = (T *) t2;
for(int i=0; i<__sort_colcount; i++)
if(i1[i] < i2[i])
return -1;
else if(i1[i] > i2[i])
return 1;
return 0;
}
template<class T>
void _DMatrix<T>::sort_rows()
{
for(int i=0; i<rows(); i++)
qsort((*this)[i], cols(), sizeof(T), compar_sortrows<T>);
}
template<class T>
void _DMatrix<T>::sort_wholerows()
{
__sort_colcount = cols();
qsort(data_ptr(), rows(), sizeof(T) * cols(), compar_sortwholerows<T>);
}
template<class T>
_DMatrix<T> _DMatrix<T>::unique_rows(std::vector<int> &row_counts) const
{
row_counts.clear();
_DMatrix<T> tmp(*this);
tmp.sort_wholerows();
int cur_count = 1;
for(int i=1; i<rows(); i++)
if(compar_sortwholerows<T>((void *)((*this)[i-1]), (void *)((*this)[i])) != 0)
{
row_counts.push_back(cur_count);
cur_count = 1;
}
else
cur_count++;
row_counts.push_back(cur_count);
_DMatrix<T> result((int) row_counts.size(), cols());
int cum_row = 0;
for(int i=0; i<(int)row_counts.size(); i++)
{
result.copy_row_from((*this), cum_row, i);
cum_row += row_counts[i];
}
return result;
}
template<class T>
T _DMatrix<T>::max(int &out_row, int &out_col) const
{
const T *in_ptr = data_ptr();
int max_i = 0;
T max_element = in_ptr[0];
int sz = rows() * cols();
for(int i=0; i < sz; i++)
if(max_element < in_ptr[i])
{
max_element = in_ptr[i];
max_i = i;
}
out_row = max_i / cols(), out_col = max_i % cols();
return max_element;
}
template<class T>
T _DMatrix<T>::min(int &out_row, int &out_col) const
{
  // Returns the smallest element; out_row/out_col receive the position of
  // its first occurrence in row-major order.
  const T *cp = data_ptr();
  int sz = rows() * cols();
  int best = 0;
  for(int i=1; i<sz; i++)
    if(cp[i] < cp[best])
      best = i;
  out_row = best / cols();
  out_col = best % cols();
  return cp[best];
}
// Returns the same value as max(). If there is a unique global maximum,
// also returns the same point as max(); otherwise returns a randomly-chosen
// maximum.
template<class T>
T _DMatrix<T>::max_random(DPoint &out_location) const
{
const T *cp = data_ptr();
int out_row = 0, out_col = 0;
T max_element = *cp;
int max_count = 0;
for(int i=0; i<rows(); i++)
for(int j=0; j<cols(); j++, cp++)
if(max_element < *cp)
max_element = *cp, out_row = i, out_col = j, max_count = 1;
else if(max_element == *cp)
{
max_count++;
// now possibly switch max_element to the new maximum,
// with probability 1/max_count. This is the same as choosing among
// the maximums with a uniform distribution.
if(drand48() * max_count < 1.0)
out_row = i, out_col = j;
}
out_location = DPoint(out_row, out_col);
return max_element;
}
template<class T>
bool _DMatrix<T>::is_any_nan() const
{
int sz = rows() * cols();
for(int i=0; i<sz; i++)
if(isnan(data_area[i]))
return true;
return false;
}
template<class T>
bool _DMatrix<T>::is_any_inf() const
{
int sz = rows() * cols();
for(int i=0; i<sz; i++)
if(isinf(data_area[i]))
return true;
return false;
}
// assumes each row is an observation, each column is
// a feature.
template<class T>
_DMatrix<T> _DMatrix<T>::covariance() const
{
_DMatrix<T> mean_vector = means();
return covariance(mean_vector);
}
template<class T>
_DMatrix<T> _DMatrix<T>::covariance(const _DMatrix<T> &mean_vector) const
{
  // Sample covariance of the data matrix, where each row is an observation
  // and each column a feature.  mean_vector is a 1 x cols() row of
  // per-column means (normally the output of means()).  Uses the unbiased
  // (n-1) normalization.
  // NOTE(review): divides by rows()-1, so this assumes rows() >= 2 --
  // confirm callers never pass fewer observations.
  _DMatrix<T> cov(cols(), cols());
  const T *mean_vec = mean_vector.data_ptr();
  // handle special case of cols=2 separately, for speed
  if(cols() == 2)
  {
    // Accumulate the three distinct entries of the symmetric 2x2 result in
    // scalars while walking the contiguous data one row at a time.
    T var00=0, var01=0, var11=0;
    const T *row_cp = data_ptr();
    for(int k=0; k<rows(); k++, row_cp += cols())
    {
      T a0 = (row_cp[0] - mean_vec[0]);
      T a1 = (row_cp[1] - mean_vec[1]);
      var00 += a0 * a0;
      var11 += a1 * a1;
      var01 += a0 * a1;
    }
    cov[0][0] = var00 / (rows()-1);
    cov[1][1] = var11 / (rows()-1);
    cov[0][1] = cov[1][0] = var01 / (rows()-1);
  }
  else
  {
    // General case: O(cols()^2 * rows()).  Each (i, j) entry is computed
    // independently; symmetry of the result is not exploited.
    for(int i=0; i<cols(); i++)
      for(int j=0; j<cols(); j++)
      {
        T var = 0;
        const T *row_cp = data_ptr();
        for(int k=0; k<rows(); k++, row_cp += cols())
          var += (row_cp[i]-mean_vec[i]) * (row_cp[j]-mean_vec[j]); // was mean_vec[i]
        cov[i][j] = var / (rows()-1);
      }
  }
  return cov;
}
template<class T>
void _DMatrix<T>::swap_rows(int row1, int row2)
{
// ideally, we'd just have to swap the pointers, except that
// some routines assume that the matrix is arranged
// contiguously in row-major order. So we have to move
// memory around also (unfortunately).
if(row1 == row2)
return;
_DMatrix<T> temp(1, cols());
temp.copy_row_from(*this, row1, 0);
copy_row_from(*this, row2, row1);
copy_row_from(temp, 0, row2);
}
template<class T>
void _DMatrix<T>::copy_row_from(const _DMatrix<T> &other, int src_row, int dest_row)
{
assert(other.cols() == cols());
assert(src_row >= 0 && src_row < other.rows());
assert(dest_row >= 0 && dest_row < rows());
memcpy((*this)[dest_row], other[src_row], sizeof(T) * other.cols());
}
template<class T>
void _DMatrix<T>::copy_row_from(const DPoint &other, int dest_row)
{
assert(cols() == 2);
(*this)[dest_row][0] = other.row();
(*this)[dest_row][1] = other.col();
}
template<class T>
void _DMatrix<T>::copy_col_from(const _DMatrix<T> &other, int src_col, int dest_col)
{
assert(other.rows() == rows());
assert(src_col >= 0 && src_col < other.cols());
assert(dest_col >= 0 && dest_col < cols());
T *in_ptr = &(other.data_ptr()[src_col]);
T *out_ptr = &(data_ptr()[dest_col]);
for(int i=0; i<rows(); i++, in_ptr += other.cols(), out_ptr += cols())
*out_ptr = *in_ptr;
}
template<class T>
vector<DPoint> _DMatrix<T>::sample_probabilities(int sample_count) const
{
int s=0;
vector<T> samples(sample_count);
vector<DPoint> result(sample_count);
T _sum = sum();
for(s=0; s<sample_count; s++)
samples[s] = T(drand48()) * _sum;
sort(samples.begin(), samples.end());
_sum = 0;
s=0;
T *prob_map_cp = data_ptr();
T this_sample = samples[s];
for(int i=0, cp=0; i<rows(); i++)
for(int j=0; j<cols(); j++, cp++)
{
_sum += prob_map_cp[cp];
while(_sum > this_sample)
{
result[s].row(i), result[s].col(j);
s++;
this_sample = samples[s];
if(s >= sample_count)
goto done;
}
}
done:
static int count = 0;
count++;
return result;
}
template<class T>
vector<DPoint> _DMatrix<T>::sample_likelihoods(int sample_count, bool fast) const
{
_DMatrix<T> prob_map = exp((*this) - max(), fast);
// prob_map = pointwise_max<T>(prob_map, T(1e-20));
return prob_map.sample_probabilities(sample_count);
}
#ifdef GSL_SUPPORT
template<class T>
gsl_matrix *DMatrix_to_gsl(const _DMatrix<T> &dm)
{
gsl_matrix *gm = gsl_matrix_alloc(dm.rows(), dm.cols());
T *cp = dm.data_ptr();
for(int i=0; i<dm.rows(); i++)
for(int j=0; j<dm.cols(); j++, cp++)
gsl_matrix_set(gm, i, j, *cp);
return gm;
}
template<class T>
_DMatrix<T> gsl_to_DMatrix(gsl_matrix *gm)
{
_DMatrix<T> dm((int) gm->size1, (int) gm->size2);
T *cp = dm.data_ptr();
for(int i=0; i<dm.rows(); i++)
for(int j=0; j<dm.cols(); j++, cp++)
*cp = T(gsl_matrix_get(gm, i, j));
return dm;
}
template<class T>
_DMatrix<T> gsl_to_DMatrix(gsl_vector *gm)
{
_DMatrix<T> dm(1, (int) gm->size);
T *cp = dm.data_ptr();
for(int i=0; i<dm.cols(); i++, cp++)
*cp = T(gsl_vector_get(gm, i));
return dm;
}
#endif
template<class T>
_DMatrix<T> _DMatrix<T>::inverse() const
{
#ifdef GSL_SUPPORT
assert(rows() == cols());
if(rows() == 0)
return _DMatrix<T>();
gsl_matrix *gm = DMatrix_to_gsl(*this);
gsl_permutation *p = gsl_permutation_alloc(rows());;
int signum;
gsl_linalg_LU_decomp(gm, p, &signum);
gsl_matrix *inverse = gsl_matrix_alloc(rows(), cols());
gsl_linalg_LU_invert(gm, p, inverse);
_DMatrix<T> dm = gsl_to_DMatrix<T>(inverse);
gsl_permutation_free(p);
gsl_matrix_free(gm);
gsl_matrix_free(inverse);
return dm;
#else
throw string("no gsl support");
#endif
}
template<class T>
T _DMatrix<T>::determinant() const
{
#ifdef GSL_SUPPORT
assert(rows() == cols());
if(rows() == 0)
return T(1);
else if(rows() == 1)
return (*data_area);
else if(rows() == 2)
return (data_area[0] * data_area[3] - data_area[1] * data_area[2]);
gsl_matrix *gm = DMatrix_to_gsl(*this);
gsl_permutation *p = gsl_permutation_alloc(rows());;
int signum;
gsl_linalg_LU_decomp(gm, p, &signum);
T det = T(gsl_linalg_LU_det(gm, signum));
_DMatrix<T> dm = gsl_to_DMatrix<T>(gm);
gsl_permutation_free(p);
gsl_matrix_free(gm);
return det;
#else
throw string("no gsl support");
#endif
}
template<class T>
_DMatrix<T> _DMatrix<T>::eigen(_DMatrix<T> &d_eigvec) const
{
#ifdef GSL_SUPPORT
assert(rows() == cols());
gsl_matrix *gm = DMatrix_to_gsl(*this);
gsl_vector *eigval = gsl_vector_alloc(rows());
gsl_matrix *eigvec = gsl_matrix_alloc(rows(), rows());
gsl_eigen_symmv_workspace *worksp = gsl_eigen_symmv_alloc(rows());
gsl_eigen_symmv(gm, eigval, eigvec, worksp);
gsl_eigen_symmv_sort (eigval, eigvec, GSL_EIGEN_SORT_ABS_ASC);
d_eigvec = gsl_to_DMatrix<T>(eigvec);
_DMatrix<T> d_eigval = gsl_to_DMatrix<T>(eigval);
gsl_matrix_free(gm);
gsl_matrix_free(eigvec);
gsl_vector_free(eigval);
gsl_eigen_symmv_free(worksp);
return d_eigval;
#else
throw string("no gsl support");
#endif
}
template<class T>
void _DMatrix<T>::svd(_DMatrix<T> &u, _DMatrix<T> &s, _DMatrix<T> &v) const
{
#ifdef GSL_SUPPORT
gsl_matrix *gm = DMatrix_to_gsl(*this);
gsl_vector *S = gsl_vector_alloc(cols());
gsl_matrix *V = gsl_matrix_alloc(cols(), cols());
gsl_vector *work = gsl_vector_alloc(cols());
gsl_linalg_SV_decomp(gm, V, S, work);
u = gsl_to_DMatrix<T>(gm);
v = gsl_to_DMatrix<T>(V);
s = gsl_to_DMatrix<T>(S);
gsl_matrix_free(gm);
gsl_matrix_free(V);
gsl_vector_free(S);
gsl_vector_free(work);
return;
#else
throw string("no gsl support");
#endif
}
template<class T2>
_DMatrix<T2> horiz_concat(const _DMatrix<T2> &m1, const _DMatrix<T2> &m2)
{
assert(m1.rows() == m2.rows());
_DMatrix<T2> result(m1.rows(), m1.cols() + m2.cols());
result.set_submatrix(DPoint(0, 0), m1);
result.set_submatrix(DPoint(0, m1.cols()), m2);
return result;
}
template<class T2>
_DMatrix<T2> vert_concat(const _DMatrix<T2> &m1, const _DMatrix<T2> &m2)
{
assert(m1.cols() == m2.cols());
_DMatrix<T2> result(m1.rows() + m2.rows(), m1.cols());
result.set_submatrix(DPoint(0, 0), m1);
result.set_submatrix(DPoint(m1.rows(), 0), m2);
return result;
}
template<class T>
std::vector<T> _DMatrix<T>::extract_row_as_vector(int row) const
{
assert(row >= 0 && row < rows());
std::vector<T> result(cols());
const T *row_ptr = (*this)[row];
for(int j=0; j<cols(); j++)
result[j] = row_ptr[j];
return result;
}
template<class T>
void _DMatrix<T>::search_and_replace_ip(T src, T dest)
{
T *cp = data_ptr();
int sz = rows() * cols();
for(int i=0; i<sz; i++)
if(cp[i] == src) cp[i] = dest;
return;
}
template<class T2>
_DMatrix<T2> operator>(const _DMatrix<T2> &m1, T2 val)
{
_DMatrix<T2> result(m1);
T2 *cp = result.data_ptr();
int sz = m1.rows() * m1.cols();
for(int i=0; i<sz; i++)
if(cp[i] > val)
cp[i] = 1;
else
cp[i] = 0;
return result;
}
template<class T2>
_DMatrix<T2> operator<(const _DMatrix<T2> &m1, T2 val)
{
_DMatrix<T2> result(m1);
T2 *cp = result.data_ptr();
int sz = m1.rows() * m1.cols();
for(int i=0; i<sz; i++)
if(cp[i] < val)
cp[i] = 1;
else
cp[i] = 0;
return result;
}
template<class T2, class T3>
void change_type(const _DMatrix<T2> &in, _DMatrix<T3> &result)
{
if(!(in.rows() == result.rows() && in.cols() == result.cols()))
result = _DMatrix<T3>(in.rows(), in.cols());
int sz = in.rows() * in.cols();
T3 *res_ptr = result.data_ptr();
const T2 *in_ptr = in.data_ptr();
for(int i=0; i<sz; i++)
res_ptr[i] = (T3) in_ptr[i];
}
#define DECLARE(x) \
template class _DTwoDimArray<x>; \
template class _DTwoDimArray<std::complex<x> >; \
template class _DMatrix<x>; \
template _DMatrix<x> operator-(x value, const _DMatrix<x> &other); \
template _DMatrix<x> pointwise_multiply(const _DMatrix<x> &m1, const _DMatrix<x> &m2); \
template _DMatrix<x> pointwise_divide(const _DMatrix<x> &m1, const _DMatrix<x> &m2); \
template ostream &operator<<(ostream &os, const _DMatrix<x> &matrix); \
template istream &operator>>(istream &is, _DMatrix<x> &matrix); \
template _DMatrix<x> operator-(const _DMatrix<x> &m); \
template _DMatrix<x> pointwise_min(const _DMatrix<x> &, const _DMatrix<x> &); \
template _DMatrix<x> pointwise_max(const _DMatrix<x> &, const _DMatrix<x> &); \
template _DMatrix<x> pointwise_min(const _DMatrix<x> &, x val); \
template _DMatrix<x> pointwise_max(const _DMatrix<x> &, x val); \
template _DMatrix<x> sqr(const _DMatrix<x> &); \
template _DMatrix<x> log(const _DMatrix<x> &); \
template _DMatrix<x> exp(const _DMatrix<x> &); \
template _DMatrix<x> exp(const _DMatrix<x> &, bool); \
template _DMatrix<x> sqrt(const _DMatrix<x> &); \
template _DMatrix<x> fabs(const _DMatrix<x> &); \
template _DMatrix<x> horiz_concat(const _DMatrix<x> &m1, const _DMatrix<x> &m2); \
template _DMatrix<x> vert_concat(const _DMatrix<x> &m1, const _DMatrix<x> &m2); \
template _DMatrix<x> operator<(const _DMatrix<x> &m1, x val); \
template _DMatrix<x> operator>(const _DMatrix<x> &m1, x val); \
template _DMatrix<x> operator*(x value, const _DMatrix<x> &other); \
template void fread(_DMatrix<x> &, FILE *fp, bool enforce_size); \
template void fread(_DMatrix<x> &, FILE *fp); \
template void fwrite(const _DMatrix<x> &, FILE *fp); \
template bool same_size(const _DMatrix<x> &m1, const _DMatrix<x> &m2);
#define DECLARE2(x,y) \
template void change_type(const _DMatrix<x> &m1, _DMatrix<y> &m2);
DECLARE(double)
DECLARE(short)
DECLARE(int)
DECLARE(float)
DECLARE(char)
DECLARE(unsigned char)
DECLARE2(double, double)
DECLARE2(float, float)
DECLARE2(double, unsigned char)
DECLARE2(double, int)
DECLARE2(double, short)
DECLARE2(double, float)
DECLARE2(float, unsigned char)
DECLARE2(float, int)
DECLARE2(float, short)
DECLARE2(float, double)
DECLARE2(int, unsigned char)
DECLARE2(int, float)
DECLARE2(int, short)
DECLARE2(int, double)
DECLARE2(short, float)
DECLARE2(short, double)
DECLARE2(short, int)
DECLARE2(short, unsigned char)
DECLARE2(unsigned char, int)
DECLARE2(unsigned char, float)
DECLARE2(unsigned char, short)
DECLARE2(unsigned char, double)
<file_sep>/data/disco-bp/DLib/DDiagMatrix.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include "DDiagMatrix.h"
#include <string>
using namespace std;
DDiagMatrix::DDiagMatrix()
{
_rows = _cols = 0;
}
DDiagMatrix::DDiagMatrix(int __rows, int __cols)
{
_rows = __rows;
_cols = __cols;
data = DMatrix(1, _rows);
}
DDiagMatrix::DDiagMatrix(const DMatrix &source_data)
{
assert(source_data.rows() == 1);
data = source_data;
_rows = source_data.cols();
_cols = source_data.cols();
}
DDiagMatrix DDiagMatrix::operator+(const DDiagMatrix &other) const
{
DMatrix result = data + other.data;
return DDiagMatrix(result);
}
DDiagMatrix DDiagMatrix::operator+(double value) const
{
DMatrix result = data + value;
return DDiagMatrix(result);
}
DDiagMatrix DDiagMatrix::operator-(const DDiagMatrix &other) const
{
DMatrix result = data - other.data;
return DDiagMatrix(result);
}
DDiagMatrix DDiagMatrix::operator-(double value) const
{
DMatrix result = data - value;
return DDiagMatrix(result);
}
DDiagMatrix DDiagMatrix::operator*(const DDiagMatrix &other) const
{
return DDiagMatrix(pointwise_multiply(data, other.data));
}
DDiagMatrix DDiagMatrix::operator*(double value) const
{
return DDiagMatrix(data * value);
}
DDiagMatrix DDiagMatrix::operator/(double value) const
{
return DDiagMatrix(data / value);
}
DDiagMatrix operator*(double value, const DDiagMatrix &other)
{
return(other * value);
}
double DDiagMatrix::operator[](int entry) const
{
return data[0][entry];
}
DDiagMatrix &DDiagMatrix::operator=(const DDiagMatrix &other)
{
_rows = other.rows();
_cols = other.cols();
data = other.data;
return *this;
}
DDiagMatrix::DDiagMatrix(const DDiagMatrix &other)
{
*this = other;
}
DDiagMatrix operator+(double value, const DDiagMatrix &other)
{
return(other + value);
}
DDiagMatrix operator-(double value, const DDiagMatrix &other)
{
return DDiagMatrix(value - other.data);
}
// Diagonal * dense: scales row i of `other` by the i-th diagonal entry.
DMatrix DDiagMatrix::operator*(const DMatrix &other) const
{
  assert(cols() == other.rows());
  DMatrix result(rows(), other.cols());
  for(int i=0; i<rows(); i++)
    for(int j=0; j<other.cols(); j++)
      result[i][j] = data[0][i] * other[i][j];
  return result;
}
// Set one diagonal entry; off-diagonal writes are disallowed (row == col).
void DDiagMatrix::set_entry(int row, int col, double value)
{
  assert(row == col);
  data[0][row] = value;
}
// Dense * diagonal: scales column j of `reg` by the j-th diagonal entry.
// (Accesses diag.data directly — presumably declared a friend in the
// class header; confirm there.)
DMatrix operator*(const DMatrix &reg, const DDiagMatrix &diag)
{
  assert(reg.cols() == diag.rows());
  DMatrix result(reg.rows(), diag.cols());
  for(int i=0; i<reg.rows(); i++)
    for(int j=0; j<diag.cols(); j++)
      result[i][j] = reg[i][j] * diag.data[0][j];
  return result;
}
// Diagonal + dense: copy `other`, then add each diagonal entry onto the
// matching (i, i) position of the copy.
DMatrix DDiagMatrix::operator+(const DMatrix &other) const
{
  assert(other.rows() == cols() && other.rows() == rows());
  DMatrix result(other.rows(), other.cols());
  result = other;
  for(int i=0; i<other.rows(); i++)
    result[i][i] += (*this)[i];
  return result;
}
// Diagonal - dense: negate `other`, then add the diagonal entries onto the
// main diagonal (i.e. computes this - other).
DMatrix DDiagMatrix::operator-(const DMatrix &other) const
{
  assert(other.rows() == other.cols() && other.rows() == rows());
  DMatrix result(other.rows(), other.cols());
  result = -other;
  for(int i=0; i<other.rows(); i++)
    result[i][i] += (*this)[i];
  return result;
}
// Stream a diagonal matrix in dense form: off-diagonal positions print as
// literal zeros, diagonal positions print the stored entries, one row of
// the matrix per output line.
ostream &operator<<(ostream &os, const DDiagMatrix &matrix)
{
  for(int r=0; r<matrix.rows(); r++)
  {
    for(int c=0; c<matrix.cols(); c++)
    {
      if(r == c)
        os << matrix[r] << " ";
      else
        os << "0 ";
    }
    os << endl;
  }
  return os;
}
// Implicit conversion to a dense DMatrix: materialize the diagonal into a
// full rows() x cols() matrix with zeros everywhere off the diagonal.
DDiagMatrix::operator DMatrix() const
{
  DMatrix dense(rows(), cols());
  for(int r=0; r<rows(); r++)
    for(int c=0; c<cols(); c++)
      dense[r][c] = (r == c) ? data[0][r] : 0;
  return dense;
}
<file_sep>/src/CoordinateDescent.h
#ifndef CD_H
#define CD_H
#include <iostream>
#include <vector>
#include <cstring>
#include <fstream>
#include <cmath>
#include <cassert>
#include "graph.h"
#include <algorithm>
#include <time.h>
#include <omp.h>
#include "util.h"
using namespace std;
// Coordinate-descent solver for graph translation synchronization: each
// sweep resets every vertex coordinate to the median of (neighbour
// coordinate + measured edge offset), then removes a degree-weighted mean
// shift so the globally translation-ambiguous solution stays centred.
class CoordinateDescent : public Solver{
    public:
        int max_iter;  // maximum number of full sweeps over all vertices

        CoordinateDescent(Params* params){
            this->max_iter = params->max_iter;
        }
        CoordinateDescent(int max_iter){
            this->max_iter = max_iter;
        }

        // Run the descent on `graph`, writing one log line per iteration
        // (losses, per-sweep movement, wall time) followed by the final
        // coordinates to `filename`.  Returns the smallest l-infinity
        // loss observed against the ground truth graph.x.
        inline double solve(Graph& graph, string filename){
            ofstream fout(filename);
            double start_time = omp_get_wtime();
            int n = graph.n;
            vector<pair<double, int>>* v = graph.adj;
            double* x = new double[n];
            for (int i = 0; i < n; i++){
                x[i] = graph.x0[i];
            }
            double* old_x = new double[n];
            int iter = 0;
            double min_loss = 1e100;
            int* d = new int[n];  // vertex degrees, used as shift weights
            for (int i = 0; i < n; i++){
                d[i] = v[i].size();
            }
            double elapsed_time = 0.0;
            while (iter++ < max_iter){
                double up = 0.0, down = 0.0;
                double delta_x = 0.0;
                for (int i = 0; i < n; i++){
                    // candidate positions suggested by each incident edge
                    vector<double> c;
                    c.clear();
                    for (vector<pair<double, int>>::const_iterator it_i = v[i].cbegin(); it_i != v[i].cend(); it_i++){
                        int j = it_i->second;
                        double t_ij = it_i->first;
                        c.push_back(x[j] + t_ij);
                    }
                    // median via nth_element; even-sized candidate lists
                    // average the two middle elements
                    int middle_point = c.size() / 2;
                    assert(c.size() != 0);
                    nth_element(c.begin(), c.begin()+middle_point, c.end());
                    double new_x = c[middle_point];
                    if (c.size() % 2 == 0){
                        nth_element(c.begin(), c.begin()+middle_point-1, c.end());
                        new_x = 0.5*(new_x + c[middle_point-1]);
                    }
                    up += new_x*sqrt(d[i]);
                    down += sqrt(d[i]);
                    old_x[i] = x[i];
                    x[i] = new_x;
                }
                // remove the sqrt(degree)-weighted mean to fix the global
                // translation ambiguity
                double shift = up/down;
                for (int i = 0; i < n; i++){
                    x[i] -= shift;
                    delta_x += fabs(x[i]-old_x[i]);
                }
                double loss = linf_loss(n, x, graph.x);
                if (loss < min_loss){
                    min_loss = loss;
                }
                elapsed_time = (omp_get_wtime() - start_time);
                fout << "iter=" << iter;
                fout << ", linf_loss=" << loss;
                fout << ", l1_loss=" << l1_loss(n, x, graph.x);
                fout << ", min_loss=" << min_loss;
                fout << ", delta_x=" << delta_x;
                fout << ", elapsed_time=" << elapsed_time;
                fout << endl;
                if (fabs(delta_x) < 1e-6){
                    break;  // converged: coordinates no longer moving
                }
            }
            for (int i = 0; i < n; i++){
                fout << x[i] << endl;
            }
            fout.close();
            // BUGFIX: these buffers are allocated with new[]; plain
            // `delete` on an array allocation is undefined behaviour.
            delete [] d;
            delete [] x;
            delete [] old_x;
            return min_loss;
        }
};
#endif
<file_sep>/data/temp/ArtsQuad_dataset_results/compare_src/CompareModels.cpp
/*
* CompareModels.cpp
*
* Compare two models in terms of points and cameras distances, after
* performing an alignment between the two camera sets (with a
* similarity transform)
*
* Author: <NAME>
* Copyright (c) 2011 Cornell University
*
*/
#include <math.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <ext/hash_map>
#include <assert.h>
#include "defines.h"
#include "sfm.h"
#include "matrix.h"
#include "horn.h"
#include "qsort.h"
#include "util.h"
/* An (image index, keypoint index) pair identifying one observation. */
typedef std::pair<int, int> ImageKey;
/* A (model-1 point index, model-2 point index) correspondence. */
typedef std::pair<int, int> MatchPair;
/* A reconstructed 3D point: position, RGB color, and the list of
 * (image, key) observations that see it. */
typedef struct
{
    double pos[3];
    double color[3];
    std::vector<ImageKey> views;
} point_t;
/* Hash specialization so ImageKey can key a __gnu_cxx::hash_map. */
namespace __gnu_cxx {
        template<> struct hash< ImageKey > {
                size_t operator()( const ImageKey& x ) const {
                        return x.first * 17 + x.second;
                }
        };
}
/* Parse a Bundler "bundle.out" file into camera and (optionally) point
 * lists.  Handles the v0.1 (no header), v0.2 ("v..." header) and v0.3+
 * ("# Bundle file v..." header) variants; the detected version is
 * returned through bundle_version.  Points are read only when use_points
 * is true.
 * NOTE(review): fgets/fscanf return values are unchecked, so a truncated
 * or malformed file fails silently — confirm inputs are trusted. */
void ReadBundleFile(char *bundle_file, bool use_points,
                    std::vector<camera_params_t> &cameras,
                    std::vector<point_t> &points, double &bundle_version)
{
    FILE *f = fopen(bundle_file, "r");
    if (f == NULL) {
        printf("Error opening file %s for reading\n", bundle_file);
        return;
    }
    int num_images, num_points;
    char first_line[256];
    /* The first line encodes the file-format version (or, for v0.1,
     * already holds the image/point counts). */
    fgets(first_line, 256, f);
    if (first_line[0] == '#') {
        double version;
        sscanf(first_line, "# Bundle file v%lf", &version);
        bundle_version = version;
        printf("[ReadBundleFile] Bundle version: %0.3f\n", version);
        fscanf(f, "%d %d\n", &num_images, &num_points);
    } else if (first_line[0] == 'v') {
        double version;
        sscanf(first_line, "v%lf", &version);
        bundle_version = version;
        printf("[ReadBundleFile] Bundle version: %0.3f\n", version);
        fscanf(f, "%d %d\n", &num_images, &num_points);
    } else {
        bundle_version = 0.1;
        sscanf(first_line, "%d %d\n", &num_images, &num_points);
    }
    printf("[ReadBundleFile] Reading %d images and %d points...\n",
           num_images, num_points);
    /* Read cameras */
    for (int i = 0; i < num_images; i++) {
        double focal_length, k0, k1;
        double R[9];
        double t[3];
        if (bundle_version < 0.2) {
            /* Focal length */
            fscanf(f, "%lf\n", &focal_length);
        } else {
            /* v0.2+ adds two radial-distortion coefficients (discarded) */
            fscanf(f, "%lf %lf %lf\n", &focal_length, &k0, &k1);
        }
        /* Rotation */
        fscanf(f, "%lf %lf %lf\n%lf %lf %lf\n%lf %lf %lf\n",
               R+0, R+1, R+2, R+3, R+4, R+5, R+6, R+7, R+8);
        /* Translation */
        fscanf(f, "%lf %lf %lf\n", t+0, t+1, t+2);
        // if (focal_length == 0.0)
        //     continue;
        /* Cameras with f == 0 are kept as placeholders so indices stay
         * aligned between models. */
        camera_params_t cam;
        cam.f = focal_length;
        memcpy(cam.R, R, sizeof(double) * 9);
        memcpy(cam.t, t, sizeof(double) * 3);
        cameras.push_back(cam);
    }
    if (!use_points) {
        fclose(f);
        return;
    }
    /* Read points */
    for (int i = 0; i < num_points; i++) {
        point_t pt;
        /* Position */
        fscanf(f, "%lf %lf %lf\n",
               pt.pos + 0, pt.pos + 1, pt.pos + 2);
        /* Color */
        fscanf(f, "%lf %lf %lf\n",
               pt.color + 0, pt.color + 1, pt.color + 2);
        int num_visible;
        fscanf(f, "%d", &num_visible);
        for (int j = 0; j < num_visible; j++) {
            int view, key;
            fscanf(f, "%d %d", &view, &key);
            /* v0.3+ stores image coordinates per observation (discarded) */
            double x, y;
            if (bundle_version >= 0.3)
                fscanf(f, "%lf %lf", &x, &y);
            pt.views.push_back(ImageKey(view, key));
        }
        points.push_back(pt);
    }
    fclose(f);
}
/* Build point correspondences between two models by matching shared
 * (image, key) observations: each model-2 point is paired with the
 * model-1 point that sees any one of the same image features (first hit
 * wins per model-2 point). */
std::vector<MatchPair>
FindPointCorrespondence(const std::vector<point_t> &points1,
                        const std::vector<point_t> &points2)
{
    __gnu_cxx::hash_map<ImageKey, int> view_map;
    /* Index every observation of model 1 by its (image, key) pair. */
    int num1 = (int) points1.size();
    for (int p = 0; p < num1; p++) {
        const std::vector<ImageKey> &views = points1[p].views;
        int nv = (int) views.size();
        for (int v = 0; v < nv; v++)
            view_map[views[v]] = p;
    }
    /* Probe model 2's observations against the index. */
    std::vector<MatchPair> pairs;
    int num2 = (int) points2.size();
    for (int p = 0; p < num2; p++) {
        const std::vector<ImageKey> &views = points2[p].views;
        int nv = (int) views.size();
        for (int v = 0; v < nv; v++) {
            if (view_map.find(views[v]) != view_map.end()) {
                pairs.push_back(MatchPair(view_map[views[v]], p));
                break;
            }
        }
    }
    printf("[FindPointCorrespondence] Found %d matches\n", (int) pairs.size());
    return pairs;
}
void AlignModels(const std::vector<camera_params_t> &cameras1,
const std::vector<point_t> &points1,
const std::vector<camera_params_t> &cameras2,
const std::vector<point_t> &points2,
const std::vector<MatchPair> &pairs, double *T,
double ransac_threshold)
{
int num_matches = (int) pairs.size();
v3_t *left = NULL;
v3_t *right = NULL;
if (false && num_matches >= 6) {
left = new v3_t[num_matches];
right = new v3_t[num_matches];
for (int i = 0; i < num_matches; i++) {
int i1 = pairs[i].first;
int i2 = pairs[i].second;
const double *p1 = points1[i1].pos;
const double *p2 = points2[i2].pos;
left[i] = v3_new(p2[0], p2[1], p2[2]);
right[i] = v3_new(p1[0], p1[1], p1[2]);
}
} else {
/* Align using the cameras */
printf("[AlignModels] Using cameras for alignment\n");
int num_cameras = (int) cameras1.size();
num_matches = num_cameras;
left = new v3_t[num_matches];
right = new v3_t[num_matches];
int count = 0;
for (int i = 0; i < num_cameras; i++) {
double pos1[3], pos2[3];
if (cameras1[i].f == 0.0 || cameras2[i].f == 0.0)
continue;
matrix_transpose_product(3, 3, 3, 1,
(double *) cameras1[i].R,
(double *) cameras1[i].t, pos1);
matrix_transpose_product(3, 3, 3, 1,
(double *) cameras2[i].R,
(double *) cameras2[i].t, pos2);
matrix_scale(3, 1, pos1, -1.0, pos1);
matrix_scale(3, 1, pos2, -1.0, pos2);
left[count] = v3_new(pos2[0], pos2[1], pos2[2]);
right[count] = v3_new(pos1[0], pos1[1], pos1[2]);
count++;
}
num_matches = count;
}
/* Run the horn solver */
int inliers = align_horn_3D_ransac(num_matches, right, left,
4096, ransac_threshold, 0.0, T);
printf("[AlignModels] %d / %d inliers\n", inliers, num_matches);
matrix_print(4, 4, T);
}
/* Report camera- and point-position discrepancies between two models,
 * given the 4x4 similarity transform T mapping model 2 into model 1's
 * frame.  Writes per-camera position errors to dists.txt and
 * viewing-angle differences to viewangles.txt, and prints
 * average/median/max statistics for both cameras and matched points.
 * NOTE(review): the averages divide by good_cameras / num_matches — a
 * model pair with no common cameras or matches would divide by zero;
 * confirm callers guarantee non-empty overlap. */
void CompareModels(const std::vector<camera_params_t> &cameras1,
                   const std::vector<point_t> &points1,
                   const std::vector<camera_params_t> &cameras2,
                   const std::vector<point_t> &points2,
                   const std::vector<MatchPair> &pairs,
                   double *T)
{
    /* Compare camera positions */
    int num_cameras = (int) cameras1.size();
    double max_dist = 0.0;
    double sum_dist = 0.0;
    int max_cam = -1;
    int good_cameras = 0, good_cameras1 = 0, good_cameras2 = 0;
    double *dists = new double[num_cameras];
    /* Extract the rotation part of T and remove its scale factor S
     * (taken from the norm of T's first row). */
    double R[9] = { T[0], T[1], T[2],
                    T[4], T[5], T[6],
                    T[8], T[9], T[10] };
    double S = sqrt(T[0] * T[0] + T[1] * T[1] + T[2] * T[2]);
    matrix_scale(3, 3, R, 1.0 / S, R);
    FILE *f = fopen("dists.txt", "w");
    FILE *fr = fopen("viewangles.txt", "w");
    for (int i = 0; i < num_cameras; i++) {
        double pos1[3], pos2[4], Tpos2[4];
        if (cameras1[i].f > 0.0)
            good_cameras1++;
        if (cameras2[i].f > 0.0)
            good_cameras2++;
        /* only compare cameras registered in both models */
        if (cameras1[i].f <= 0.0 || cameras2[i].f <= 0.0)
            continue;
        /* camera center = -R^T t, for each model */
        matrix_transpose_product(3, 3, 3, 1,
                                 (double *) cameras1[i].R,
                                 (double *) cameras1[i].t, pos1);
        matrix_transpose_product(3, 3, 3, 1,
                                 (double *) cameras2[i].R,
                                 (double *) cameras2[i].t, pos2);
        matrix_scale(3, 1, pos1, -1.0, pos1);
        matrix_scale(3, 1, pos2, -1.0, pos2);
        /* map model-2 center through T (homogeneous) and measure the gap */
        pos2[3] = 1.0;
        matrix_product(4, 4, 4, 1, T, pos2, Tpos2);
        double diff[3];
        matrix_diff(3, 1, 3, 1, pos1, Tpos2, diff);
        double dist = matrix_norm(3, 1, diff);
        fprintf(f, "%d %0.8f\n", i, dist);
        /* angle between viewing directions (third rows of the rotations),
         * after rotating model 2's direction by the de-scaled R */
        double *viewdir1 = (double *) cameras1[i].R + 6;
        double *viewdir2 = (double *) cameras2[i].R + 6;
        double Rvd2[3];
        matrix_product(3, 3, 3, 1, R, viewdir2, Rvd2);
        double dot;
        matrix_product(1, 3, 3, 1, viewdir1, Rvd2, &dot);
        double angle = RAD2DEG(acos(CLAMP(dot, -1.0, 1.0)));
        fprintf(fr, "%d %0.8f\n", i, angle);
        if (dist > max_dist) {
            max_dist = dist;
            max_cam = i;
        }
        sum_dist += dist;
        dists[good_cameras] = dist;
        good_cameras++;
    }
    fclose(f);
    fclose(fr);
    double med_dist = median(good_cameras, dists);
    delete [] dists;
    printf("[CompareModels] good cameras (1): %d / %d\n",
           good_cameras1, num_cameras);
    printf("[CompareModels] good cameras (2): %d / %d\n",
           good_cameras2, num_cameras);
    printf("[CompareModels] camera dist (avg): %0.4e\n",
           sum_dist / good_cameras);
    printf("[CompareModels] camera dist (med): %0.4e %0.4f\n",
           med_dist, med_dist * 1.7588);
    printf("[CompareModels] camera dist (max): %0.4e [cam: %d]\n",
           max_dist, max_cam);
    /* Compare point positions */
    int num_matches = (int) pairs.size();
    max_dist = 0.0;
    sum_dist = 0.0;
    dists = new double[num_matches];
    for (int i = 0; i < num_matches; i++) {
        int i1 = pairs[i].first;
        int i2 = pairs[i].second;
        const double *p1 = points1[i1].pos;
        const double *p2 = points2[i2].pos;
        /* map the model-2 point through T and measure the gap */
        double p2_4[4] = { p2[0], p2[1], p2[2], 1.0 }, Tp2[4];
        matrix_product(4, 4, 4, 1, T, p2_4, Tp2);
        double diff[3];
        matrix_diff(3, 1, 3, 1, (double *) p1, Tp2, diff);
        double dist = matrix_norm(3, 1, diff);
        if (dist > max_dist)
            max_dist = dist;
        dists[i] = dist;
        sum_dist += dist;
    }
    med_dist = median(num_matches, dists);
    delete [] dists;
    printf("[CompareModels] num points (1): %d\n", (int) points1.size());
    printf("[CompareModels] num points (2): %d\n", (int) points2.size());
    printf("[CompareModels] point dist (avg): %0.4e\n",
           sum_dist / num_matches);
    printf("[CompareModels] point dist (med): %0.4e %0.4f\n",
           med_dist, med_dist * 1.7588);
    printf("[CompareModels] point dist (max): %0.4e\n", max_dist);
}
/* Apply the 4x4 similarity transform T in place to every point and camera
 * of a model: points are mapped directly; each camera's center is mapped
 * through T and its pose rotated by T's rotation part (with the scale
 * factored back out so R stays orthonormal). */
void TransformWorld(std::vector<camera_params_t> &cameras,
                    std::vector<point_t> &points,
                    double *T)
{
    int num_images = (int) cameras.size();
    int num_points = (int) points.size();
    /* Transform the points */
    for (int i = 0; i < num_points; i++) {
        double *pos = points[i].pos;
        double p[4] = { pos[0], pos[1], pos[2], 1.0 }, Tp[4];
        matrix_product(4, 4, 4, 1, T, p, Tp);
        memcpy(pos, Tp, 3 * sizeof(double));
    }
    /* Transform the cameras */
    for (int i = 0; i < num_images; i++) {
        /* unregistered placeholder cameras stay untouched */
        if (cameras[i].f == 0.0)
            continue;
        double *R = cameras[i].R;
        double *t = cameras[i].t;
        /* recover pose (R^T) and camera center (-R^T t) */
        double pose[9], pos[4];
        matrix_transpose(3, 3, R, pose);
        matrix_product(3, 3, 3, 1, pose, t, pos);
        matrix_scale(3, 1, pos, -1.0, pos);
        pos[3] = 1.0;
        /* copy the upper-left 3x3 of T and normalize away its scale */
        double M3x3[9];
        memcpy(M3x3 + 0, T + 0, 3 * sizeof(double));
        memcpy(M3x3 + 3, T + 4, 3 * sizeof(double));
        memcpy(M3x3 + 6, T + 8, 3 * sizeof(double));
        double scale;
        matrix_product(1, 3, 3, 1, M3x3, M3x3, &scale);
        matrix_scale(3, 3, M3x3, 1.0 / sqrt(scale), M3x3);
        double pose_new[9], pos_new[4];
        matrix_product(3, 3, 3, 3, M3x3, pose, pose_new);
        matrix_product(4, 4, 4, 1, T, pos, pos_new);
        /* Factor out the scaling */
        double mag = matrix_norm(3, 1, pose_new);
        matrix_scale(3, 3, pose_new, 1.0 / mag, pose_new);
        // m_image_data[i].m_camera.SetPose(pose_new);
        // m_image_data[i].m_camera.SetPosition(pos_new);
        /* write back R = pose^T and t = -R * center */
        matrix_transpose(3, 3, pose_new, R);
        matrix_product(3, 3, 3, 1, R, pos_new, t);
        matrix_scale(3, 1, t, -1.0, t);
    }
}
/* Write cameras and points out in Bundler v0.3 format.  Cameras with zero
 * focal length are emitted as all-zero placeholder blocks (keeping camera
 * indices stable); keypoint image coordinates are written as 0.0 dummies
 * since they are not tracked here. */
void OutputBundleFile_v3(const std::vector<camera_params_t> &cameras,
                         const std::vector<point_t> &points,
                         const char *output_file)
{
    /* Output the new bundle.out file */
    FILE *f = fopen(output_file, "w");
    if (f == NULL) {
        printf("[OutputBundleFile] Error opening file %s "
               "for writing\n", output_file);
        return;
    }
    fprintf(f, "# Bundle file v0.3\n");
    int num_images = (int) cameras.size();
    int num_points = (int) points.size();
    fprintf(f, "%d %d\n", num_images, num_points);
    /* Dump cameras */
    for (int i = 0; i < num_images; i++) {
        if (cameras[i].f == 0.0) {
            /* unregistered camera: focal/distortion, 3 rotation rows,
             * translation — all zeros */
            fprintf(f, "%0.9e %0.9e %0.9e\n", 0.0, 0.0, 0.0);
            fprintf(f, "%0.9e %0.9e %0.9e\n", 0.0, 0.0, 0.0);
            fprintf(f, "%0.9e %0.9e %0.9e\n", 0.0, 0.0, 0.0);
            fprintf(f, "%0.9e %0.9e %0.9e\n", 0.0, 0.0, 0.0);
            fprintf(f, "%0.9e %0.9e %0.9e\n", 0.0, 0.0, 0.0);
        } else {
            const double *R = cameras[i].R;
            const double *t = cameras[i].t;
            /* distortion coefficients written as zeros */
            fprintf(f, "%0.9e %0.9e %0.9e\n", cameras[i].f, 0.0, 0.0);
            fprintf(f, "%0.9e %0.9e %0.9e\n", R[0], R[1], R[2]);
            fprintf(f, "%0.9e %0.9e %0.9e\n", R[3], R[4], R[5]);
            fprintf(f, "%0.9e %0.9e %0.9e\n", R[6], R[7], R[8]);
            fprintf(f, "%0.9e %0.9e %0.9e\n", t[0], t[1], t[2]);
        }
    }
    /* Dump points */
    for (int i = 0; i < num_points; i++) {
        const point_t &p = points[i];
        const double *pos = p.pos;
        const double *color = p.color;
        /* Position */
        fprintf(f, "%0.9e %0.9e %0.9e\n", pos[0], pos[1], pos[2]);
        /* Color */
        fprintf(f, "%d %d %d\n",
                iround(color[0]), iround(color[1]), iround(color[2]));
        // fprintf(f, "0\n"); /* no points for now */
        int num_visible = (int) p.views.size();
        fprintf(f, "%d", num_visible);
        for (int j = 0; j < num_visible; j++) {
            int view = p.views[j].first;
            int key = p.views[j].second;
            /* Dummy keypoint locations */
            double x = 0.0;
            double y = 0.0;
            fprintf(f, " %d %d %0.4f %0.4f", view, key, x, y);
        }
        fprintf(f, "\n");
    }
    fclose(f);
}
/* Aligns model 2 to model 1 (but outputs a transformed model 1) */
/* Entry point.  Reads two bundle files, establishes point correspondences,
 * aligns model 2 onto model 1 with a RANSAC similarity transform, prints
 * comparison statistics, and (optionally) writes model 1 transformed by
 * the inverse alignment to an output bundle file. */
int main(int argc, char **argv)
{
    if (argc != 3 && argc != 4 && argc != 5 && argc != 6) {
        printf("Usage: %s <bundle1.out> <bundle2.out> "
               "[ransac_threshold (25.0)] [read_points (0)] "
               "[output_file1]\n", argv[0]);
        return 1;
    }
    char *bundle1 = argv[1];
    char *bundle2 = argv[2];
    double ransac_threshold = 25.0;
    if (argc >= 4) {
        ransac_threshold = atof(argv[3]);
    }
    bool use_points = false;
    if (argc >= 5) {
        use_points = atoi(argv[4]);
    }
    char *output_file = NULL;
    if (argc == 6) {
        output_file = argv[5];
    }
    printf("RANSAC threshold: %0.3f\n", ransac_threshold);
    /* Read bundle files */
    std::vector<camera_params_t> cameras1;
    std::vector<point_t> points1;
    double bundle_version1;
    ReadBundleFile(bundle1, use_points, cameras1, points1, bundle_version1);
    std::vector<camera_params_t> cameras2;
    std::vector<point_t> points2;
    double bundle_version2;
    ReadBundleFile(bundle2, use_points, cameras2, points2, bundle_version2);
    /* the two models must cover the same camera list */
    assert(cameras1.size() == cameras2.size());
    /* Establish a point correspondence */
    std::vector<MatchPair> pairs = FindPointCorrespondence(points1, points2);
    /* Align the models */
    double T[16];
    AlignModels(cameras1, points1, cameras2, points2, pairs, T,
                ransac_threshold);
    /* Finally, compare the models */
    CompareModels(cameras1, points1, cameras2, points2, pairs, T);
    /* Write the output file if a destination is given */
    if (output_file != NULL) {
        double Tinv[16];
        matrix_invert(4, T, Tinv);
        TransformWorld(cameras1, points1, Tinv);
        OutputBundleFile_v3(cameras1, points1, output_file);
    }
    return 0;
}
<file_sep>/spotlight/show.py
import matplotlib.pyplot as plt
import numpy
def truncate(all_sample, skew_noise, th, c, k):
    """Remove samples farther than th * c**k from the mean of all_sample.

    Parameters
    ----------
    all_sample : numpy.ndarray
        Full sample set (clean samples plus injected noise); its mean
        defines the truncation center.
    skew_noise : numpy.ndarray
        The noise subset, filtered with the same threshold so the caller
        can keep plotting surviving outliers separately.
    th : float
        Base truncation threshold (epsilon).
    c : float
        Per-iteration shrink factor.
    k : int
        Iteration index; effective threshold is th * c**k.

    Returns
    -------
    tuple of numpy.ndarray
        (surviving all_sample, surviving skew_noise, entries removed
        from all_sample this round), each preserving original order.
    """
    mean = numpy.mean(all_sample)
    th_k = th * (c ** k)
    # Boolean masks replace the original one-at-a-time numpy.delete loop
    # (accidentally O(n^2)); the kept/removed sets are identical.
    keep_sample = numpy.abs(all_sample - mean) <= th_k
    truncated = all_sample[~keep_sample]
    all_sample = all_sample[keep_sample]
    skew_noise = skew_noise[numpy.abs(skew_noise - mean) <= th_k]
    return all_sample, skew_noise, truncated
# Demo script: renders a sequence of EPS "scenes" illustrating how the
# iteratively-truncated mean tracks the true mean of a uniform
# distribution under symmetric and skewed outlier noise, compared with
# the plain mean and the median.
# NOTE(review): strings like '$U[-\sigma, \sigma]$' rely on Python keeping
# unknown escapes (\s) verbatim; raw strings would be cleaner but the
# rendered text is unchanged.
delta = 1.0
x = numpy.random.uniform(-delta, delta, size=20)
sym_noise = numpy.random.uniform(-5, 5, size=6)
skew_noise = numpy.random.uniform(5, 8, size=6)
# line styles for each estimator drawn in the figures
color = {'gt':'c-', 'mean':'k--+', 'median':'g-+', 'trunc':'k-+'}
loc = 'upper left'
ft = 20
"""Scene #0"""
# Scene 0: the uniform density alone, then with the true mean marked.
h0 = plt.figure()
plt.xlim(-10, 10)
plt.ylim(-1.0, 1.0)
#plt.tick_params(axis='x', which='both', bottom='off', top='off',
#labelbottom='off')
plt.plot([-10, -1], [0.0, 0.0], 'b')
plt.plot([1, 10], [0.0, 0.0], 'b')
plt.plot([-1, -1], [0.0, 0.5], 'b')
plt.plot([1, 1], [0.0, 0.5], 'b')
plt.plot([-1, 1], [0.5, 0.5], 'b')
#plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
plt.text(2, 0.7, '$U[-\sigma, \sigma]$', fontsize=20)
plt.savefig('scene0_1.eps', bbox_inches='tight')
plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
plt.savefig('scene0_2.eps', bbox_inches='tight')
"""Scene #1"""
# Scene 1: clean samples and their empirical mean.
h1 = plt.figure()
plt.xlim(-10, 10)
plt.ylim(-1.0, 1.0)
plt.axis('off')
plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene1_1.eps', bbox_inches='tight')
plt.plot([numpy.mean(x)]*2, [-0.8, 0.8], color['mean'], label='mean')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene1_2.eps', bbox_inches='tight')
"""Scene #2: Adding noise in"""
# Scene 2: symmetric noise barely moves the mean.
h2 = plt.figure()
plt.xlim(-10, 10)
plt.ylim(-1.0, 1.0)
plt.axis('off')
plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
all_sample = numpy.concatenate((x, sym_noise))
plt.plot([numpy.mean(all_sample)]*2, [-0.8, 0.8], color['mean'], label='mean')
plt.plot(sym_noise, [0.0 for n_i in sym_noise], 'rx', label='noise')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene2_1.eps', bbox_inches='tight')
"""Scene #3: Skewed Noise"""
# Scene 3: one-sided noise biases the mean; the median resists it.
h3 = plt.figure()
plt.xlim(-10, 10)
plt.ylim(-1.0, 1.0)
plt.axis('off')
plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
all_sample = numpy.concatenate((x, skew_noise))
plt.plot([numpy.mean(all_sample)]*2, [-0.8, 0.8], color['mean'], label='mean')
plt.plot(skew_noise, [0.0 for n_i in skew_noise], 'rx', label='noise')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene3_1.eps', bbox_inches='tight')
plt.plot([numpy.median(all_sample)]*2, [-0.8, 0.8], color['median'], label='median')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene3_2.eps', bbox_inches='tight')
"""Scene #4-9: Remove Noise 1-6"""
# Scenes 4-9: iteratively delete samples outside a shrinking band
# (threshold th * c**k) around the current mean and replot.
trunc_sample = numpy.copy(all_sample)
th = 6
c = 0.9
h = plt.figure()
plt.xlim(-10, 10)
plt.ylim(-1.0, 1.0)
plt.axis('off')
plt.text(2, 0.7, 'Iteratively Delete', fontsize=20)
plt.text(2, 0.5, '$\{j | \quad |x^{(k)} - t_j| < \epsilon c^k\}$', fontsize=20)
plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
#trunc_sample, skew_noise, deleted = truncate(trunc_sample, skew_noise, th, c, scene)
#for d in deleted:
#    plt.arrow(d, 0.7, 0, -0.6, shape='full', lw=0, length_includes_head=True,
#              head_width=.15)
plt.plot([numpy.mean(all_sample)]*2, [-0.8, 0.8], color['mean'], label='mean')
#plt.plot([numpy.mean(trunc_sample)]*2, [-0.8, 0.8], color['trunc'], label='truncated')
plt.plot(skew_noise, [0.0 for n_i in skew_noise], 'rx', label='noise')
plt.plot([numpy.median(all_sample)]*2, [-0.8, 0.8], color['median'], label='median')
#plt.scatter(deleted, [0.0 for d in deleted], s=100, edgecolors='b',
#            facecolors='none')
#plt.legend(loc=loc, fontsize=ft)
plt.savefig('scene%d_1.eps' % (4), bbox_inches='tight')
for scene in range(6):
    h = plt.figure()
    plt.xlim(-10, 10)
    plt.ylim(-1.0, 1.0)
    plt.axis('off')
    plt.text(2, 0.7, 'Iteratively Delete', fontsize=20)
    plt.text(2, 0.5, '$\{j | \quad |x^{(k)} - t_j| < \epsilon c^k\}$', fontsize=20)
    plt.plot([0.0]*2, [-1.0, 1.0], color['gt'])
    plt.plot(x, [0.0 for x_i in x], 'x', label='U[$\mu - \sigma, \mu + \sigma$]')
    # one truncation round per scene; deleted noise vanishes from the plot
    trunc_sample, skew_noise, deleted = truncate(trunc_sample, skew_noise, th, c, scene)
    #for d in deleted:
    #    plt.arrow(d, 0.7, 0, -0.6, shape='full', lw=0, length_includes_head=True,
    #              head_width=.15)
    plt.plot([numpy.mean(all_sample)]*2, [-0.8, 0.8], color['mean'], label='mean')
    plt.plot([numpy.mean(trunc_sample)]*2, [-0.8, 0.8], color['trunc'], label='truncated')
    plt.plot(skew_noise, [0.0 for n_i in skew_noise], 'rx', label='noise')
    plt.plot([numpy.median(all_sample)]*2, [-0.8, 0.8], color['median'], label='median')
    #plt.scatter(deleted, [0.0 for d in deleted], s=100, edgecolors='b',
    #            facecolors='none')
    #plt.legend(loc=loc, fontsize=ft)
    plt.savefig('scene%d_1.eps' % (scene+5), bbox_inches='tight')
    if len(skew_noise) == 0:
        break
<file_sep>/src/run_parallel.sh
#!/bin/bash
# Launch <num_workers> background runs of run_graph.sh, one per worker
# index 0 .. num_workers-1; each worker's stdout+stderr is captured in
# logs/log_<idx>_<total>.
#
# Usage: ./run_parallel.sh <num_workers>
total="$1"
mkdir -p logs
# iterate 0-based directly ($(..) over backticks; quoted expansions so
# odd values of $1 cannot word-split the command line)
for j in $(seq 0 $((total - 1))); do
    ./run_graph.sh "${j}" "${total}" &> "logs/log_${j}_${total}" &
done
<file_sep>/data/disco-bp/DLib/DColorCluster.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <DPlane.h>
#include <DColorCluster.h>
// k clusters per channel = k^3 clusters
// Run k-means over the RGB pixels of `input` using k^3 clusters whose
// means start on a uniform grid of the color cube.  Iterates until fewer
// than 0.1% of pixels change assignment.  Returns a plane of 1-based
// cluster labels; label 0 marks pixels skipped as pure black when
// ignore_black is set.
DPlane DColorCluster::do_clustering(DImage &input, int k, bool ignore_black)
{
  int cluster_count = k*k*k;
  for(int i=0; i<cluster_count; i++)
    clusters.push_back(DRGBCluster());
  DPlane result(input.rows(), input.cols());
  // seed means at the center of k evenly spaced bins per channel
  int delta = 256/(k*2);
  for(int a=0, cl=0; a<k; a++)
    for(int b=0; b<k; b++)
      for(int c=0; c<k; c++, cl++)
        clusters[cl].set_mean(
          DTriple((a+1)*delta, (b+1)*delta, (c+1)*delta));
  int changes=1000000000;
  bool done=false;
  while(!done) {
    changes=0;
    // assignment step: label each pixel with its nearest cluster
    for(int i=0; i<input.rows(); i++)
      for(int j=0; j<input.cols(); j++)
      {
        if(ignore_black && input[0][i][j] == 0 && input[1][i][j] == 0 &&
           input[2][i][j] == 0)
        {
          result[i][j] = 0;
          continue;
        }
        int closest_cluster=0;
        double min_dist=1000000000;
        DTriple sample(input[0][i][j], input[1][i][j], input[2][i][j]);
        for(int c=0; c<cluster_count; c++)
          if(clusters[c].distance_to(sample) < min_dist)
          {
            min_dist = clusters[c].distance_to(sample);
            closest_cluster = c;
          }
        clusters[closest_cluster].add_sample(sample);
        if(closest_cluster+1 != result[i][j])
        {
          result[i][j] = closest_cluster+1;
          changes++;
        }
      }
    // convergence test, then update step: recompute means and reset
    if( changes < input.rows() * input.cols() * 0.001)
      done=true;
    else
      for(int i=0; i<cluster_count; i++)
      {
        clusters[i].update_mean();
        clusters[i].remove_all();
      }
    printf("there were %d changes\n",changes);
  }
  return result;
}
// Assign each pixel of `input` to its nearest existing RGB cluster
// (`clusters` must already be populated, e.g. by do_clustering).
// Returns a plane of 1-based cluster labels.
DPlane DColorCluster::apply_clustering(const DImage &input)
{
  DPlane result(input.rows(), input.cols());
  for(int i=0; i<input.rows(); i++)
    for(int j=0; j<input.cols(); j++)
    {
      int closest_cluster=0;
      double min_dist=1000000000;
      DTriple sample(input[0][i][j], input[1][i][j], input[2][i][j]);
      for(int c=0; c<(int)clusters.size(); c++)
      {
        // cache the distance: previously computed twice per cluster
        double dist = clusters[c].distance_to(sample);
        if(dist < min_dist)
        {
          min_dist = dist;
          closest_cluster = c;
        }
      }
      result[i][j] = closest_cluster+1;
    }
  return result;
}
<file_sep>/spotlight/arrow.py
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
import numpy
import os
#def to_texstring(s):
# s = s.replace("<", r"$<$")
# s = s.replace(">", r"$>$")
# s = s.replace("|", r"$|$")
# return s
# --- module-wide figure state, shared by the draw_* helpers below -------
counter = 0    # scene index used by save_figure() for 'scene<N>.eps' names
sig = 3.0      # half-width (sigma) of the uniform distribution
shift = 0.0    # current distribution mean (mu)
x = numpy.random.uniform(-sig+shift, sig+shift, size=25)
nl = 3.        # left edge of the outlier band
nr = 9.        # right edge of the outlier band
th = 7.5       # base truncation threshold (epsilon)
c = 0.9        # per-iteration threshold shrink factor
noise_x = numpy.linspace(nl, nr, 100)
noise_y = [1.0/6.0+numpy.random.rand()*0.1 for x_i in noise_x]
sym_noise = numpy.random.uniform(-5, 5, size=8)
skew_noise = numpy.random.uniform(nl, nr, size=10)
# line styles for the estimators drawn in each scene
color = {'gt':'c-', 'mean':'k-', 'median':'g-', 'trunc':'k-'}
current_mean = 1e100  # last mean drawn by draw_smean(); read by draw_idea_text()
def draw_arrow(fig, ax, noise=False):
    """Draw the number-line axis, the uniform-density box around the
    current (sig, shift) globals, and — when noise is True — the outlier
    band between nl and nr."""
    global sig, shift
    styles = mpatches.ArrowStyle.get_styles()
    fontsize = 20
    styleclass = styles['->']
    # horizontal axis arrow spanning the scene
    ax.annotate('', (10.0, 0.0), (-10.0, 0.0),
                ha="right", va="center", size=fontsize,
                arrowprops=dict(arrowstyle='->',
                                patchB=None,
                                shrinkA=5,
                                shrinkB=5,
                                fc="k", ec="k",
                                connectionstyle="arc3,rad=-0.00",
                                ),
                bbox=dict(boxstyle="square", fc="w"))
    # uniform-density box [shift-sig, shift+sig]
    ax.plot([-sig+shift, -sig+shift], [0., 1.5/(2*sig)], 'k-')
    ax.plot([sig+shift, sig+shift], [1.5/(2.*sig), 0.], 'k-')
    ax.plot([-sig+shift, sig+shift], [1.5/(2.*sig), 1.5/(2*sig)], 'k-')
    ax.text(-10, .35, '$U[\mu-\sigma, \mu+\sigma]$', fontsize=fontsize)
    # sigma tick labels, offset differently for narrow distributions
    if sig > 2.0:
        ax.text(sig+shift-0.5, -0.015*sig, '$\sigma$', fontsize=fontsize)
        ax.text(-sig+shift-0.7, -0.015*sig, '$-\sigma$', fontsize=fontsize)
    else:
        ax.text(sig+shift-0.1, -0.15*sig, '$\sigma$', fontsize=fontsize)
        ax.text(-sig+shift-0.7, -0.15*sig, '$-\sigma$', fontsize=fontsize)
    if noise:
        global noise_x, noise_y, nl, nr
        ax.plot(noise_x, noise_y, 'r-')
        ax.plot([nl, nl], [0.0, noise_y[0]], 'r-')
        ax.plot([nr, nr], [0.0, noise_y[-1]], 'r-')
        ax.text(5.5, 0.6, 'Outliers', fontsize=fontsize)
def draw_samples(sample, fig, ax, noise=False):
    """Scatter the given samples (blue x) slightly below the axis, plus
    the global skew_noise (red x) when noise is True."""
    global x
    global sig
    # vertical offset of the sample markers scales with sigma
    if sig > 2.0:
        gap = 0.1
    else:
        gap = 0.3
    ax.plot(sample, [-gap for x_i in sample], 'bx', markersize=10)
    if noise:
        global skew_noise
        ax.plot(skew_noise, [-gap for n_i in skew_noise], 'rx', markersize=10)
def draw_gt(fig, ax):
    """Mark the true mean (global `shift`) with a vertical cyan line and
    an annotated mu label."""
    styles = mpatches.ArrowStyle.get_styles()
    fontsize = 30
    styleclass = styles['->']
    global shift
    ax.plot([shift, shift], [-0.7/sig, 1.0/sig], color['gt'])
    ax.annotate('$\mu$', (shift, -0.7/sig), (-9.0, -.5/sig),
                ha="right", va="center", size=fontsize, color='c',
                arrowprops=dict(arrowstyle='->',
                                patchB=None,
                                shrinkA=5,
                                shrinkB=5,
                                fc='c', ec='c',
                                connectionstyle="arc3,rad=-0.1",
                                ),
                bbox=None)
def get_fig_and_ax():
    """Create a frameless full-bleed figure/axes pair with limits sized
    by the global sigma."""
    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1], frameon=False)
    ax.set_xlim(-11.0, 11.0)
    ax.set_ylim(-1.5/sig, 1.5/sig)
    ax.xaxis.set_visible(False)
    ax.yaxis.set_visible(False)
    return fig, ax
def draw_smean(x, fig, ax):
    """Draw the sample mean of x as a vertical line with a 'mean' label,
    and record it in the global current_mean for draw_idea_text()."""
    styles = mpatches.ArrowStyle.get_styles()
    fontsize = 30
    styleclass = styles['-|>']
    global current_mean
    mean = numpy.mean(x)
    current_mean = mean
    ax.plot([mean]*2, [-0.7/sig, 1.0/sig], color['mean'])
    ax.annotate('mean', (mean, 1.0/sig), (8.0, 1.3/sig),
                ha="right", va="center", size=fontsize,
                arrowprops=dict(arrowstyle='-|>',
                                patchB=None,
                                shrinkA=5,
                                shrinkB=5,
                                fc="k", ec="k",
                                connectionstyle="arc3,rad=0.2",
                                ),
                bbox=None)
def draw_smedian(x, fig, ax):
    """Draw the sample median of x as a vertical green line with a
    'median' label."""
    styles = mpatches.ArrowStyle.get_styles()
    fontsize = 30
    styleclass = styles['-|>']
    median = numpy.median(x)
    ax.plot([median]*2, [-0.7/sig, 1.0/sig], color['median'])
    ax.annotate('median', (median, 0.8/sig), (-6.0, 1.3/sig),
                ha="right", va="center", size=fontsize, color='g',
                arrowprops=dict(arrowstyle='-|>',
                                patchB=None,
                                shrinkA=5,
                                shrinkB=5,
                                fc="g", ec="g",
                                connectionstyle="arc3,rad=0.3",
                                ),
                bbox=None)
def save_figure():
    """Save the current figure as scene<counter>.eps, bump the counter,
    show it, and block until the user presses ENTER.
    NOTE(review): raw_input implies this script targets Python 2."""
    global counter
    plt.savefig('scene%d.eps' % counter)
    counter += 1
    plt.draw()
    plt.show(block=False)
    raw_input('Press <ENTER> to continue')
def draw_idea_text(fig, ax, itr = 0, stage=1):
    """Write the truncation-rule caption and draw dashed lines at
    current_mean +/- th*c**itr (the current truncation band)."""
    if itr == 0:
        cst = ''
    else:
        cst = 'c^%d' % itr
    ax.text(-10.0, -0.3, '1. Delete Sample $t_j$ if $|t_j - $mean$| > \epsilon '+cst+'$',
            fontsize=25)
    if stage > 1:
        ax.text(-10.0, -0.4, '2. Recompute mean and Shrink threshold', fontsize=25)
    global sig, th, c
    ax.plot([current_mean - th*(c ** itr)] * 2, [-0.7/sig, 1.0/sig], 'k--')
    ax.plot([current_mean + th*(c ** itr)] * 2, [-0.7/sig, 1.0/sig], 'k--')
# Scene sequence 1: clean uniform samples, true mean, and sample mean.
fig, ax = get_fig_and_ax()
draw_arrow(fig, ax)
save_figure()
draw_gt(fig, ax)
save_figure()
draw_samples(x, fig, ax)
save_figure()
draw_smean(x, fig, ax)
save_figure()
plt.close(fig)
"""Scene #2"""
# Scene sequence 2: shift the distribution left, add skewed outliers, and
# contrast the (biased) mean with the median.
sig = 3.0
shift=-4.0
x = x + shift
fig, ax = get_fig_and_ax()
draw_arrow(fig, ax, noise=True)
save_figure()
draw_samples(x, fig, ax, noise=True)
save_figure()
draw_gt(fig, ax)
all_sample = numpy.concatenate((x, skew_noise))
truncated_sample = all_sample
draw_smean(all_sample, fig, ax)
save_figure()
draw_smedian(all_sample, fig, ax)
save_figure()
draw_idea_text(fig, ax)
save_figure()
plt.close(fig)
def truncate(x, skew_noise, th, c, k):
    """Drop entries of x and skew_noise farther than th * c**k from the
    mean of their union.

    Parameters
    ----------
    x : numpy.ndarray
        Clean samples.
    skew_noise : numpy.ndarray
        Outlier samples (filtered with the same rule).
    th : float
        Base truncation threshold (epsilon).
    c : float
        Per-iteration shrink factor.
    k : int
        Iteration index; effective threshold is th * c**k.

    Returns
    -------
    tuple of numpy.ndarray
        (surviving x, surviving skew_noise), each preserving order.
    """
    # mean over the union of both sample sets defines the band center
    mean = numpy.mean(numpy.concatenate((x, skew_noise)))
    th_k = th * (c ** k)
    # Boolean masks replace the original one-at-a-time numpy.delete loop
    # (accidentally O(n^2)); the kept sets are identical.
    x = x[numpy.abs(x - mean) <= th_k]
    skew_noise = skew_noise[numpy.abs(skew_noise - mean) <= th_k]
    return x, skew_noise
"""delete samples"""
fig, ax = get_fig_and_ax()
draw_arrow(fig, ax, noise=True)
draw_gt(fig, ax)
draw_smean(numpy.concatenate((x, skew_noise)), fig, ax)
draw_smedian(all_sample, fig, ax)
draw_idea_text(fig, ax, 0)
x, skew_noise = truncate(x, skew_noise, th, c, 0)
draw_samples(x, fig, ax, noise=True)
save_figure()
plt.close(fig)
"""Need to update mean and threshold"""
fig, ax = get_fig_and_ax()
draw_arrow(fig, ax, noise=True)
draw_gt(fig, ax)
draw_smean(numpy.concatenate((x, skew_noise)), fig, ax)
draw_smedian(all_sample, fig, ax)
draw_idea_text(fig, ax, 1, stage=2)
draw_samples(x, fig, ax, noise=True)
save_figure()
plt.close(fig)
for k in range(1, 5):
x, skew_noise = truncate(x, skew_noise, th, c, k)
fig, ax = get_fig_and_ax()
draw_arrow(fig, ax, noise=True)
draw_gt(fig, ax)
draw_smean(numpy.concatenate((x, skew_noise)), fig, ax)
draw_smedian(all_sample, fig, ax)
draw_idea_text(fig, ax, k+1, stage=2)
draw_samples(x, fig, ax, noise=True)
save_figure()
plt.close(fig)
<file_sep>/data/disco-bp/DLib/test_gaussian.cpp
//
// test_gaussian.cpp
//
// This code exercises many parts of DLib, by computing a 2-D gaussian
// probability distribution in several different ways:
//
// 1. using DGaussianModel::get_likelihood_plane(), which computes
// the distribution directly from the Gaussian PDF
//
// 2. doing DPlane::convolve_gaussian() on an impulse function, which
// uses image rotations and two 1-D convolutions of Gaussians
//
// 3. using a distance transform, which uses image rotations and
// the fast L2 distance transform algorithm of [FH05]
//
// 4. doing DPlane::cross_correlate() on an impulse function and using
// a DGaussianKernel as the convolution kernel
//
// 5. same as #4, but using DPlane::cross_correlate_fft()
//
#include <DMatrix.h>
#include <DGaussianModel.h>
#include <DPlane.h>
#include <DistanceTransform.h>
#include <iomanip>
#include <DImageIO.h>
#include <DGaussianKernel.h>
using namespace std;
// Exercise several equivalent ways of computing a 2-D Gaussian
// log-probability (direct PDF evaluation, separable convolution, distance
// transform, spatial and FFT cross-correlation) and print one row of each
// result side by side for comparison.
//
// Usage: test_gaussian <sigma_rr> <sigma_rc> <sigma_cc>
//
// FIX: declared the return type explicitly — "implicit int" is not valid C++.
int main(int argc, char *argv[])
{
  try {
    // Build the 2x2 covariance matrix from the command line.
    _DMatrix<double> sigma(2,2);
    sigma = 0;
    sigma[0][0] = atof(argv[1]);
    sigma[1][0] = sigma[0][1] = atof(argv[2]);
    sigma[1][1] = atof(argv[3]);

    cout << setw(5) << setprecision(4) << sigma << endl;

    const int sz = 101;

    // Impulse image: a single 1 at the center of an sz x sz grid.
    _DMatrix<double> test(sz,sz);
    test = 0;
    test[sz/2][sz/2] = 1;

    // One row (row 50) of each of the 5 methods' results.
    _DMatrix<double> res(5,sz);

    _DMatrix<double> mean(2,1);
    // NOTE(review): mean is 2x1, so mean[0][1] relies on contiguous
    // row-major storage to alias mean[1][0] — confirm _DMatrix layout.
    mean[0][0]=mean[0][1]=sz/2;
    DGaussianModel<double> model(sigma, mean);

    cout.flags(ios::fixed | ios::right);
    // Method 1: direct evaluation of the Gaussian PDF.
    res.set_row(0, model.get_likelihood_plane(sz,sz).extract_row(50));

    const int sigma_count = 7;
    cout.flags(ios::fixed | ios::right);
    // Method 2: separable Gaussian convolution of the impulse.
    res.set_row(1, log(_DPlane<double>(test).convolve_gaussian(sigma, sigma_count)).extract_row(50));

    // Method 3: L2 distance transform of a 2x2 block of zero-cost pixels,
    // converted to a log-likelihood.  Covariance is doubled to feed the
    // distance-transform parameterization.
    _DMatrix<double> sigmas2 = sigma * 2.0;
    DistanceTransform_2D<double> dt(sigmas2);
    _DMatrix<double> test2(sz,sz);
    test2 = 1e100;
    test2[sz/2][sz/2] = 0;
    test2[sz/2][sz/2+1] = 0;
    test2[sz/2+1][sz/2] = 0;
    test2[sz/2+1][sz/2+1] = 0;
    _DMatrix<double> dt_image = dt.do_transform(test2);
    // Add the Gaussian log-normalizer so the result is comparable.
    dt_image= -dt_image - (1/2.0) * 2.0 * log(2.0*M_PI) - (1 / 2.0) * log(sigma.determinant());
    res.set_row(2, dt_image.extract_row(50));

    // Methods 4 and 5: cross-correlation with an explicit Gaussian kernel,
    // spatially and via FFT.
    res.set_row(3, log(_DPlane<double>(test).cross_correlate(_DGaussianKernel<double>(sigma, sigma_count)).extract_row(50)));
    res.set_row(4, log(_DPlane<double>(test).cross_correlate_fft(_DGaussianKernel<double>(sigma, sz, sz)).extract_row(50)));

    cout << setw(7) << setprecision(2) << res.transpose() << endl;

    // Dump normalized [0,255] renderings of methods 1-3 for visual checks.
    _DMatrix<double> m1 = model.get_likelihood_plane(sz,sz);
    m1 = m1-m1.min();
    _DMatrix<unsigned char> ooo;
    change_type(m1/m1.max()*255, ooo);
    SaveDImage("out1.png", ooo);
    cout << (m1/m1.max()*255).extract_row(sz/2) << endl;

    // Clamp the log-convolution at -50 so the rendering isn't dominated by
    // -inf values far from the peak.
    m1 = pointwise_max(log(_DPlane<double>(test).convolve_gaussian(sigma, sigma_count)), _DMatrix<double>(sz,sz,-50));
    m1 = m1-m1.min();
    change_type(m1/m1.max()*255, ooo);
    SaveDImage("out2.png", ooo);
    cout << (m1/m1.max()*255).extract_row(sz/2) << endl;

    m1 = dt_image;
    m1 = m1-m1.min();
    change_type(m1/m1.max()*255, ooo);
    SaveDImage("out3.png", ooo);
    cout << (m1/m1.max()*255).extract_row(sz/2) << endl;
  } catch(string &str)
    {
      // DLib reports errors by throwing std::string.
      cout << str << endl;
    }
  return 0;
}
<file_sep>/src/graph.h
#ifndef GRAPH_H
#define GRAPH_H
#include <iostream>
#include <fstream>
#include <string>
#include <random>
#include <stdlib.h>
#include <string>
#include <algorithm>
using namespace std;
// Experiment configuration: random-graph parameters, per-edge noise model,
// and solver knobs, parsed from the command line.
class Params{
public:
    //Graph specific parameters:
    int n;              // #nodes
    double p;           // probability of each edge
    double noise_ratio; // probability that a sample has noise
    int noise_type;
    // possible noise types:
    // 0: gaussian with mean 0 and std_dev=0.001
    // 1: uncentered gaussian with mean 1 and std_dev=1
    // 2: uncentered uniform [-1, 3]
    bool load_graph = false; // if true, load graph from file
    char* graph_file_name = nullptr; // FIX: was uninitialized after default ctor
    double bias;
    double inc;
    double* s = nullptr; // FIX: was uninitialized, making ~Params() UB for
                         // default-constructed instances
    //weight of each node, s[i] = bias + inc*i/(n-1)
    //edge (i,j) exists w.p. p*s[i]*s[j]

    //sample specific parameters:
    double a, b;     // uniform outlier-noise range [a, b]
    double sigma;    // inlier noise is uniform in [-sigma, sigma]
    double stopping;
    default_random_engine generator;

    //solver specific parameters:
    int max_iter = 1000;
    double decay = 0.9;

    Params(){
    }

    // Parse all parameters from argv.  Expects argc >= 15:
    //   argv[1..11] = n p bias inc noise_type noise_ratio max_iter decay a b sigma
    //   argv[13]    = optional graph file (used when argc > 13)
    //   argv[14]    = stopping threshold
    // (argv[12] is not read here.)
    Params(int argc, char** argv){
        n = atoi(argv[1]);
        p = atof(argv[2]);
        bias = atof(argv[3]);
        inc = atof(argv[4]);
        noise_type = atoi(argv[5]);
        noise_ratio = atof(argv[6]);
        s = new double[n];
        for (int i = 0; i < n; i++){
            s[i] = bias + inc*i/(n-1);
        }
        max_iter = atoi(argv[7]);
        decay = atof(argv[8]);
        a = atof(argv[9]);
        b = atof(argv[10]);
        sigma = atof(argv[11]);
        stopping = atof(argv[14]);
        if (argc > 13){
            load_graph = true;
            graph_file_name = argv[13];
        } else {
            load_graph = false;
        }
        cerr << "Initiating Parameters: ";
        cerr << "n=" << n;
        cerr << ", edge_density=" << p;
        cerr << ", bias=" << bias;
        cerr << ", incremental=" << inc;
        cerr << ", noise_type=" << noise_type;
        cerr << ", noise_ratio=" << noise_ratio;
        cerr << ", sigma=" << sigma;
        cerr << ", noise_parameters=(" << a << "," << b << ")";
        cerr << ", stopping=" << stopping;
        cerr << endl;
    }

    ~Params(){
        delete[] s; // FIX: s is allocated with new[], so delete[] is required
    }

    // Draw one edge-measurement noise value: uniform in [-sigma, sigma],
    // replaced with probability noise_ratio by an outlier uniform in [a, b].
    double noise(){
        uniform_real_distribution<double> small_noise(-sigma, sigma);
        uniform_real_distribution<double> uniform(a, b);
        uniform_real_distribution<double> dice(0.0, 1.0);
        double noise = small_noise(generator);
        if (dice(generator) <= noise_ratio){
            noise = uniform(generator);
        }
        return noise;
    }

    // Draw one ground-truth node value.
    double ground_truth(){
        normal_distribution<double> ground_truth(0.0, 1.0); // should not matter, can be just zero
        return ground_truth(generator);
    }
};
// Random (or file-loaded) undirected graph carrying one noisy pairwise
// measurement t_ij ≈ x_i - x_j per edge, plus the ground-truth x and an
// initial guess x0.  The constructor guarantees connectivity.
class Graph{
public:
    int n; // #nodes
    int m; // #edges
    double* x;  // ground-truth value per node
    double* x0; // initial guess per node
    vector<pair<double, int>>* adj; // adjacency lists: (t_ij, neighbor)
    vector<pair<int, int>> edges;   // edge list, pairs ordered (i < j)
    Params* params;
    bool* visited;   // scratch used only during construction (connectivity)
    int num_visited;

    Graph(){
    }

    // Build the graph and attach a noisy sample to every edge.
    Graph(Params* _params){
        params = _params;
        n = params->n;
        //generate ground truth x
        x = new double[n];
        x0 = new double[n];
        for (int i = 0; i < n; i++){
            x[i] = params->ground_truth();
            x0[i] = i;
        }
        adj = new vector<pair<double, int>>[n];
        //generate samples, possibly with noise
        if (params->load_graph){
            // NOTE(review): adj was sized with params->n above; if the file's
            // node count exceeds params->n, the pushes below overflow adj —
            // confirm inputs always satisfy file_n <= params->n.
            edges.clear();
            ifstream fin(params->graph_file_name);
            fin >> n >> m;
            for (int e = 0; e < m; e++){
                int i, j;
                fin >> i >> j;
                assert(i < j);
                edges.push_back(make_pair(i, j));
                adj[i].push_back(make_pair(0.0, j));
                adj[j].push_back(make_pair(0.0, i));
            }
            fin.close();
            cerr << "Done Loading Graph From " << params->graph_file_name;
            cerr << "#nodes=" << n;
            cerr << ", #edges=" << m;
            cerr << endl;
        } else {
            m = 0;
            double p = params->p;
            double* s = params->s;
            edges.clear();
            // Sample each edge (i,j) independently with probability p*s_i*s_j.
            for (int i = 0; i < n; i++){
                double s_i = s[i];
                for (int j = i+1; j < n; j++){
                    double s_j = s[j];
                    double dice = rand()*1.0 / RAND_MAX;
                    if (dice <= p * s_i * s_j){
                        edges.push_back(make_pair(i, j));
                        double t_ij = x[i] - x[j] + params->noise();
                        adj[i].push_back(make_pair(t_ij, j));
                        adj[j].push_back(make_pair(-t_ij, i));
                        m++;
                    }
                }
            }
            // Force connectivity: attach every node unreachable from node 0
            // to a uniformly chosen already-visited node.
            visited = new bool[n];
            num_visited = 0;
            memset(visited, false, sizeof(bool)*n);
            dfs(0);
            int num_extra_edges = 0;
            for (int i = 1; i < n; i++){
                if (!visited[i]){
                    int dice = rand() % num_visited;
                    int count = 0;
                    for (int j = 0; j < n; j++){
                        if (visited[j]){
                            if (count == dice){
                                //add edge to (i, j) and then dfs(j)
                                double t_ij = x[i] - x[j] + params->noise();
                                adj[i].push_back(make_pair(t_ij, j));
                                adj[j].push_back(make_pair(-t_ij, i));
                                edges.push_back(make_pair(min(i, j), max(i, j)));
                                dfs(i);
                                m++;
                                num_extra_edges++;
                                break;
                            } else {
                                count++;
                            }
                        }
                    }
                }
            }
            delete[] visited; // FIX: allocated with new[]; was plain `delete`
            cerr << "Done Generating Graph: ";
            cerr << "#nodes=" << n;
            cerr << ", #edges=" << m << " (#extras=" << num_extra_edges << ")";
            cerr << endl;
        }
        //ground truth normalization/shift
        int* d = new int[n];
        memset(d, 0, sizeof(int)*n);
        for (auto e = edges.begin(); e != edges.end(); e++){
            int i = e->first, j = e->second;
            d[i]++;
            d[j]++;
        }
        normalize(n, d, x);
        normalize(n, d, x0);
        delete[] d; // FIX: was leaked
    }

    ~Graph(){
        delete[] adj; // FIX: was only cleared element-by-element and leaked
        delete[] x0;  // FIX: delete[] for new[] allocations (was `delete`)
        delete[] x;
    }

    // Shift z so that the sqrt(degree)-weighted mean is zero.
    inline void normalize(int n, int* d, double* z){
        double up = 0.0, down = 0.0;
        for (int i = 0; i < n; i++){
            up += sqrt(d[i])*z[i];
            down += sqrt(d[i]);
        }
        double shift = up/down;
        for (int i = 0; i < n; i++){
            z[i] -= shift;
        }
    }

    // Deep-copy the graph (adjacency samples are NOT copied; resample()
    // or the caller must fill them).
    // NOTE(review): Graph has no user copy constructor, so returning the
    // local by value relies on NRVO to avoid the destructor freeing the
    // arrays of the returned copy — all mainstream compilers apply NRVO
    // here, but a copying build would dangle.  Consider deleting the copy
    // ctor or returning via pointer.
    inline Graph copy(){
        Graph g;
        g.n = this->n;
        g.m = this->m;
        g.adj = new vector<pair<double, int>>[g.n];
        g.edges = this->edges;
        g.x = new double[g.n];
        g.x0 = new double[g.n];
        for (int i = 0; i < g.n; i++){
            g.x[i] = x[i];
            g.x0[i] = x0[i];
        }
        g.params = this->params;
        return g;
    }

    // Re-draw all edge measurements (fresh noise) and randomize the
    // initial guess x0.
    void resample(){
        for (int i = 0; i < n; i++){
            adj[i].clear();
            x0[i] = (rand()*1.0 / RAND_MAX) * 100.0;
        }
        for (auto e = edges.begin(); e != edges.end(); e++){
            int i = e->first, j = e->second;
            double t_ij = x[i] - x[j] + params->noise();
            adj[i].push_back(make_pair(t_ij, j));
            adj[j].push_back(make_pair(-t_ij, i));
        }
    }

    // Write "n m" followed by one "i j" edge per line.
    void dump(char* filename){
        ofstream fout(filename);
        fout << n << " " << m << endl;
        for (size_t e = 0; e < edges.size(); e++){
            int i = edges[e].first, j = edges[e].second;
            assert(i < j);
            fout << i << " " << j << endl;
        }
        fout.close();
    }

private:
    // Depth-first search used to detect (and later repair) disconnected
    // components during construction.
    void dfs(int i){
        num_visited++;
        visited[i] = true;
        for (auto it = adj[i].begin(); it != adj[i].end(); it++){
            int j = it->second;
            if (!visited[j]){
                dfs(j);
            }
        }
    }
};
// Abstract base for translation-synchronization solvers.  Concrete solvers
// (e.g. Truncated_L2 below) override solve() and return the final loss.
class Solver{
public:
    // FIX: polymorphic base needs a virtual destructor for safe deletion
    // through a Solver*.
    virtual ~Solver() = default;
    // FIX: the body was empty, so a non-void function fell off the end
    // (undefined behavior if ever called); it was also private by default.
    virtual double solve(Graph& graph, string output_name){
        return 0.0;
    }
};
#endif
<file_sep>/data/disco-bp/DLib/DGaussianModel.cpp
#include <vector>
#include <numeric>
#include <DGaussianModel.h>
using namespace std;
// Construct from an explicit covariance matrix and mean column vector.
// When reverse_dims is set and the model is 2-D, the row/col components of
// both parameters are swapped in place.
template<class T>
DGaussianModel<T>::DGaussianModel(const _DMatrix<T> &__covariance, const _DMatrix<T> &__mean, bool reverse_dims) :
  _covariance(__covariance), _mean(__mean)
{
  parameter_sanity_check();
  if(reverse_dims && _covariance.cols() == 2)
    {
      T tmp = _covariance[0][0];
      _covariance[0][0] = _covariance[1][1];
      _covariance[1][1] = tmp;
      tmp = _mean[0][0];
      _mean[0][0] = _mean[1][0];
      _mean[1][0] = tmp;
    }
  // Derived quantities (inverse covariance, log normalizer) are computed
  // lazily by compute_constants().
  constants_clean = false;
}

// Copy constructor: delegates to operator=.
template<class T>
DGaussianModel<T>::DGaussianModel(const DGaussianModel<T> &other)
{
  *this = other;
}

// Fit the model to data (one sample per row) at construction time.
template<class T>
DGaussianModel<T>::DGaussianModel(const _DMatrix<T> &data)
{
  learn(data);
}

// Default: unparameterized model; set_mean()/set_covariance() (or learn())
// must be called before use.
template<class T>
DGaussianModel<T>::DGaussianModel()
{
  constants_clean = false;
}

// Estimate mean and covariance from data (one sample per row).
template<class T>
void DGaussianModel<T>::learn(const _DMatrix<T> &data)
{
  constants_clean = false;
  _mean = data.means().transpose();
  _covariance = data.covariance(_mean);
}
// Per-sample Gaussian LOG-likelihoods for each row of `data`, written into
// `likelihoods`; returns their sum.  The 2-D case is special-cased with a
// hand-unrolled quadratic form for speed.  Throws std::string if the
// covariance is (nearly) singular.
template<class T>
T DGaussianModel<T>::get_data_likelihood(const _DMatrix<T> &data, std::vector<T> &likelihoods)
{
  if(data.rows() == 0 || data.cols() == 0)
    return 0;

  compute_constants();
  if(fabs(determinant_of_covariance()) < 1e-10)
    throw std::string("nearly singular");

  likelihoods = vector<T>(data.rows());
  const T *data_cp = data[0];

  if(data.cols() == 2)
    {
      // Fast path: expand (v-mu)^T S^-1 (v-mu) / 2 explicitly.
      T i_cov_11 = _inverse_covariance[0][0] / 2.0, i_cov_22 = _inverse_covariance[1][1] / 2.0;
      T i_cov_12 = _inverse_covariance[0][1];
      T mean_row = _mean[0][0], mean_col = _mean[1][0];

      typename vector<T>::iterator likelihood_iter = likelihoods.begin();
      for(; likelihood_iter != likelihoods.end(); ++likelihood_iter)
        {
          T a = *(data_cp++) - mean_row;
          T b = *(data_cp++) - mean_col;
          *likelihood_iter = log_c - ( a * a * i_cov_11 + a * b * i_cov_12 + b * b * i_cov_22);
        }
    }
  else
    {
      // General path: full matrix quadratic form per sample.
      for(int i=0; i<data.rows(); i++)
        {
          _DMatrix<T> _data = (data.extract_row(i)).transpose();
          likelihoods[i] = log_c - ((_data - _mean).transpose() * _inverse_covariance * (_data - _mean) / 2.0)[0][0];
        }
    }

  return accumulate(likelihoods.begin(), likelihoods.end(), T(0.0));
}

// Log-likelihood of a single scalar observation (1-D model only).
template<class T>
T DGaussianModel<T>::get_data_likelihood(T data)
{
  compute_constants();
  assert(dimensionality() == 1);
  if(fabs(determinant_of_covariance()) < 1e-10)
    throw std::string("nearly singular");

  double __mean = _mean[0][0];
  T likelihood = log_c - ((data - __mean) * _inverse_covariance[0][0] * (data - __mean) / 2.0);
  return likelihood;
}

// Evaluate the 1-D log-likelihood at every pixel of `data`, returning a
// matrix of the same shape.
template<class T>
_DMatrix<T> DGaussianModel<T>::get_likelihood_pointwise(const _DMatrix<T> &data)
{
  _DMatrix<T> result(data.rows(), data.cols());

  compute_constants();
  assert(dimensionality() == 1);
  if(fabs(determinant_of_covariance()) < 1e-10)
    throw std::string("nearly singular");

  double __mean = _mean[0][0];
  double __invcov = _inverse_covariance[0][0];

  int sz = data.rows() * data.cols();
  const T *in_ptr = data[0];
  T *out_ptr = result[0];
  for(int i=0; i<sz; i++)
    out_ptr[i] = log_c - ((in_ptr[i] - __mean) * __invcov * (in_ptr[i] - __mean) / 2.0);

  return result;
}

// Convenience wrapper: total log-likelihood only, discarding the per-sample
// values.
template<class T>
T DGaussianModel<T>::get_data_likelihood(const _DMatrix<T> &data)
{
  vector<T> likelihoods;
  return get_data_likelihood(data, likelihoods);
}
// Helper pairing a per-sample likelihood with its original row index, so
// that nth_element can reorder without losing track of the source rows.
template<class T>
class like_item
{
public:
  T likelihood;
  int index;
};

template<class T>
inline bool operator<(const like_item<T> &i1, const like_item<T> &i2)
{
  return i1.likelihood < i2.likelihood;
}

// trim percent is the fraction **you want to trim off**
// Drop the trim_percent least likely samples, returning the total
// log-likelihood of the survivors; the surviving rows and their individual
// likelihoods come back through result_data / result_likelihoods.
template<class T>
T DGaussianModel<T>::get_data_likelihood_trimmed(const _DMatrix<T> &data, float trim_percent, std::vector<T> &result_likelihoods, _DMatrix<T> &result_data)
{
  vector<T> likelihood_list;
  get_data_likelihood(data, likelihood_list);

  // FIXME
  vector<like_item<T> > likelihoods(data.rows());
  typename vector<like_item<T> >::iterator out_iter = likelihoods.begin();
  typename vector<T>::iterator in_iter = likelihood_list.begin();
  for(int i=0 ; out_iter != likelihoods.end(); ++in_iter, ++out_iter, i++)
    {
      out_iter->likelihood = *in_iter;
      out_iter->index = i;
    }

  int worst_count = int(likelihoods.size() * trim_percent);
  int good_count = data.rows() - worst_count;
  // Partition so the worst_count lowest-likelihood items occupy the front.
  nth_element(likelihoods.begin(), likelihoods.begin() + worst_count, likelihoods.end());

  result_data = _DMatrix<T>(good_count, data.cols());
  result_likelihoods = std::vector<T>(good_count);

  T total_likelihood = 0;
  for(int i=worst_count, new_row=0; i<data.rows(); i++, new_row++)
    {
      result_data.copy_row_from(data, likelihoods[i].index, new_row);
      result_likelihoods[new_row] = likelihoods[i].likelihood;
      total_likelihood += likelihoods[i].likelihood;
    }

  return total_likelihood;
}
// Evaluate the 2-D Gaussian log-likelihood at every (row, col) of a
// rows x cols grid.  The row-dependent part of the quadratic form is hoisted
// out of the inner loop.
template<class T>
_DMatrix<T> DGaussianModel<T>::get_likelihood_plane(int rows, int cols)
{
  compute_constants();
  if(fabs(determinant_of_covariance()) < 1e-10)
    throw std::string("nearly singular");

  T i_cov_11 = _inverse_covariance[0][0] / 2.0, i_cov_22 = _inverse_covariance[1][1] / 2.0;
  T i_cov_12 = _inverse_covariance[0][1];
  T mean_row = _mean[0][0], mean_col = _mean[1][0];

  _DMatrix<T> result(rows, cols);
  for(int i=0; i<rows; i++)
    {
      T a = i - mean_row;
      T row_pre = log_c - (a * a * i_cov_11);
      T b = 0 - mean_col;
      T *out_cp = result[i];

      for(int j=0; j<cols; j++)
        {
          out_cp[j] = row_pre - ( a * b * i_cov_12 + b * b * i_cov_22 );
          b++;
        }
    }

  return result;
}

// Lazily (re)compute the log normalization constant and the inverse
// covariance; a no-op when the cached values are still valid.
template<class T>
void DGaussianModel<T>::compute_constants()
{
  if(constants_clean)
    return;

  // assume one data point
  const T N=1.0;
  T det = _covariance.determinant();
  log_c = -0.5 * _covariance.rows() * N * log(2.0 * M_PI) - 0.5 * N * log(det);
  _inverse_covariance = _covariance.inverse();
  constants_clean = true;
}

// Assert the basic shape invariants: square covariance, mean is a matching
// column vector.  (Positive-definiteness/symmetry checks are TODO.)
template<class T>
void DGaussianModel<T>::parameter_sanity_check()
{
  assert(_covariance.rows() == _covariance.cols());
  //  assert(covariance.is_positive_definite();
  //  assert(covariance.is_symmetric());
  assert(_mean.rows() == _covariance.rows() && _mean.cols() == 1);
}
// Assignment: copies the parameters and the cached derived quantities, so
// no recomputation is needed if `other` was already clean.
template<class T>
DGaussianModel<T> &DGaussianModel<T>::operator=(const DGaussianModel<T> &other)
{
  _covariance = other.covariance();
  _mean = other.mean();
  _inverse_covariance = other._inverse_covariance;
  constants_clean = other.constants_clean;
  log_c = other.log_c;
  return *this;
}

// Log normalization constant of the Gaussian PDF (computed lazily).
template<class T>
T DGaussianModel<T>::log_constant()
{
  compute_constants();
  return log_c;
}

// Inverse of the covariance matrix (computed lazily).
template<class T>
_DMatrix<T> DGaussianModel<T>::inverse_covariance()
{
  compute_constants();
  return _inverse_covariance;
}

// Replace the mean with the given column vector (shape-checked).
template<class T>
void DGaussianModel<T>::set_mean(const _DMatrix<T> &new_mean)
{
  _mean = new_mean;
  parameter_sanity_check();
}
// Set the model mean from a 2-D image point (row, col).
template<class T>
void DGaussianModel<T>::set_mean(const DPoint &new_mean)
{
  _mean = _DMatrix<T>(2,1);
  _mean[0][0] = new_mean.row();
  // FIX: _mean is a 2x1 column vector, so the second component belongs at
  // [1][0].  The previous [0][1] indexed past the end of row 0 and only
  // produced the right value if the matrix storage happened to be
  // contiguous row-major.
  _mean[1][0] = new_mean.col();
  parameter_sanity_check();
}
// Replace the covariance (shape-checked) and invalidate the cached inverse
// and log constant.
template<class T>
void DGaussianModel<T>::set_covariance(const _DMatrix<T> &new_covariance)
{
  _covariance = new_covariance;
  constants_clean = false;
  parameter_sanity_check();
}

// Zero the off-diagonal covariance entries by multiplying pointwise with
// the identity pattern.
template<class T>
void DGaussianModel<T>::force_diagonal_covariance()
{
  constants_clean = false;
  set_covariance(pointwise_multiply(covariance(), _DMatrix<T>(_covariance.rows(), _covariance.cols(), _DMatrix<T>::identity)));
}
// Stream a human-readable dump of the model parameters.
template<class T>
ostream & operator<<(ostream &os, const DGaussianModel<T> &model)
{
  // FIX: write to the stream that was passed in — the original wrote to
  // cout unconditionally, so streaming into anything else (a file, a
  // stringstream) silently went to stdout instead.
  os << "mean: " << model.mean() << endl;
  os << "covariance: " << model.covariance() << endl;
  return os;
}
// check whether the covariance in at least one dimension (when
// rotated to be axis oriented) is less than some threshold.
// This function checks whether the smallest eigenvalue is less than
// the threshold.
//
template<class T>
inline bool DGaussianModel<T>::is_covariance_bad(double threshold) const
{
  _DMatrix<T> cov = covariance();
  T a = cov[0][0], b = cov[0][1], d = cov[1][1];
  // Closed-form smaller eigenvalue of the symmetric 2x2 matrix [[a,b],[b,d]].
  T low_eigenvalue = 0.5 * ((a+d) - sqrt(4*b*b + (a-d)*(a-d)));
  return low_eigenvalue < threshold;
}

// Rescale the model for a resized coordinate system: the mean scales
// linearly, the covariance quadratically.
template<class T>
void DGaussianModel<T>::rescale(float scale_factor)
{
  set_mean(mean() * scale_factor);
  set_covariance(covariance() * scale_factor * scale_factor);
}
#define DECLARE(x) \
template class DGaussianModel<x>; \
template ostream &operator<<(ostream &os, const DGaussianModel<x> &matrix);
DECLARE(double)
DECLARE(float)
<file_sep>/src/graph2.py
import sys
import numpy
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
from settings import *
import shapely
from shapely.geometry import Polygon
from descartes import PolygonPatch
def plot_coords(ax, ob):
    """Mark the vertices of a geometry ``ob`` (any object exposing ``.xy``)
    on the matplotlib axes ``ax`` with grey '+' markers."""
    xs, ys = ob.xy
    ax.plot(xs, ys, '+', color='grey')
folder = 'uniform_n1p2/graph2_final/'
print("drawing figure with data under %s" % folder)
# process() (from settings) returns min/median/max error curves for the CD
# and TranSync (TL2) solvers, their trimmed-mean running times, and the
# x-axis edge-density ratios.
min_TL2, median_TL2, max_TL2, min_CD, median_CD, max_CD, tmean_TL2, tmean_CD, \
    ratios = process(folder)
if sys.argv[1].startswith('graph'):
    # Error plot: shade the min..max band of each method and the overlap.
    plots = {'min CD': min_CD, 'median CD': median_CD, 'max CD': max_CD,
             'min TranSync': min_TL2, 'median TranSync': median_TL2, 'max TranSync': max_TL2}
    # Build polygons tracing each method's min curve forward and max curve
    # backward, so shapely can compute the band and their intersection.
    down_CD = [(x, y) for (x, y) in zip(ratios, min_CD)]
    up_CD = [(x, y) for (x, y) in zip(ratios, max_CD)]
    up_CD.reverse()
    down_TL2 = [(x, y) for (x, y) in zip(ratios, min_TL2)]
    up_TL2 = [(x, y) for (x, y) in zip(ratios, max_TL2)]
    up_TL2.reverse()
    area_1 = Polygon(down_CD+up_CD)
    area_2 = Polygon(down_TL2+up_TL2)
    solution = area_1.intersection(area_2)
    fig, ax = plt.subplots()
    #plot_coords(ax, area_1.exterior)
    patch = PolygonPatch(area_1, facecolor=colors['min CD'],
                         edgecolor=colors['min CD'], alpha=1.0)
    ax.add_patch(patch)
    #plot_coords(ax, area_2.exterior)
    patch = PolygonPatch(area_2, facecolor=colors['min TranSync'],
                         edgecolor=colors['min TranSync'], alpha=1.0)
    ax.add_patch(patch)
    #plot_coords(ax, solution.exterior)
    # Overlap of the two bands drawn in red on top.
    patch = PolygonPatch(solution, facecolor='r', edgecolor='r')
    ax.add_patch(patch)
    for label in ['min CD', 'median CD', 'max CD', 'min TranSync',
                  'median TranSync', 'max TranSync']:
        ax.plot(ratios, plots[label], color=colors[label], label=label,
                linestyle=linestyles[label], linewidth=linewidths[label],
                marker=markers[label])
    #ax.fill_between(ratios, min_CD, max_CD, facecolor=colors['median CD'], interpolate=True)
    #ax.fill_between(ratios, min_TL2, max_TL2, facecolor=colors['median TL2'], interpolate=True)
    legend = ax.legend(loc=(0.55, 0.55), shadow=True, fontsize=15)
    plt.title('Graph $G_{di}$', fontsize=40)
    plt.xlabel('$p$', fontsize=25)
    plt.ylabel('$\|x^*-x^{gt}\|_{\infty}$', fontsize=25)
    plt.axis([0.01, 1.0, 0, 1])
    plt.savefig('graph2.eps')
else:
    # Running-time plot: trimmed-mean wall time of each solver vs p.
    fig, ax = plt.subplots()
    plots = {'CD': tmean_CD, 'TranSync': tmean_TL2}
    for label in ['CD', 'TranSync']:
        ax.plot(ratios, plots[label], color=colors[label], label=label,
                linestyle=linestyles[label], linewidth=linewidths[label],
                marker=markers[label])
    legend = ax.legend(loc=(0.55, 0.55), shadow=True, fontsize=15)
    plt.title('Graph $G_{dr}$', fontsize=40)
    plt.xlabel('$p$', fontsize=25)
    plt.ylabel('$Average Running Time$', fontsize=25)
    plt.axis([0.01, 1.0, 0, 3])
    plt.savefig('time1.eps')
<file_sep>/src/plot.py
import sys
import numpy
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
from settings import *
import shapely
from shapely.geometry import Polygon
from descartes import PolygonPatch
from utils import *
fig, ax = plt.subplots()
def get_dist(filename):
    """Read one float per line from ``filename`` and return its empirical CDF.

    Returns ``(values, cdf)`` where ``values`` is the list of distinct
    consecutive values in file order and ``cdf[i]`` is the fraction of all
    lines at or before the last occurrence of ``values[i]``.
    NOTE(review): this is only a CDF if the file is sorted ascending —
    confirm against the producer of the ``*.dist`` files.
    """
    # FIX: the file handle was never closed; also removed the unused local
    # ``density``.
    with open(filename, 'r') as fin:
        a = [float(line.strip()) for line in fin.readlines()]
    b = [a[0]]
    c = [1]
    for i in range(1, len(a)):
        if a[i] == b[-1]:
            # Repeated value: extend the cumulative count of the last entry.
            c[-1] += 1
        else:
            b.append(a[i])
            c.append(c[-1]+1)
    # Normalize counts to fractions of the total number of samples.
    c = [num*1.0/len(a) for num in c]
    return b, c
# Plot the empirical CDFs of per-edge residuals for the two solvers.
# NOTE(review): assumes CD.dist / TL2.dist exist in the working directory
# and contain sorted residual magnitudes, one per line.
CD_x, CD_y = get_dist('CD.dist')
TL2_x, TL2_y = get_dist('TL2.dist')
ax.plot(CD_x, CD_y, color='k', label='CD')
ax.plot(TL2_x, TL2_y, color='b', label='TranSync')
legend = ax.legend(loc=(0.13, 0.03), shadow=True, fontsize=40)
plt.title('Cumulative Density Function', fontsize=30)
plt.xlabel('$\|t_{ij}-(x_i-x_j)\|$', fontsize=20)
plt.ylabel('Cumulative Density', fontsize=25)
plt.axis([0.0, 200, 0, 1])
plt.savefig('cdf.eps')
<file_sep>/data/disco-bp/install_deps.sh
#!/bin/sh
make -C sparsehash-1.11 install<file_sep>/data/disco-bp/DLib/DCanny.cpp
#include <DCanny.h>
#include <vector>
#include <DGaussianKernel.h>
using namespace std;
// Run the Canny edge detector on img_in and return a binary (0/1) edge map.
// Steps: Gaussian smoothing, derivative-of-Gaussian gradients, non-maximum
// suppression, single- or double-threshold hysteresis, binary thinning.
// If both thresholds are <= 0 they are chosen automatically from the
// gradient-magnitude histogram (70th percentile, low = 0.4 * high).
// Side effects: fills the member planes dx, dy, mag, canny_edges.
template<class T>
_DPlane<unsigned char> DCanny<T>::do_canny(const _DPlane<T> &img_in, bool relative, float thresh1, float thresh2, float sigma, int filter_size)
{
  // Convert the input to float for filtering.
  _DPlane<float> img(img_in.rows(), img_in.cols());
  int pixel_count = img_in.rows() * img_in.cols();
  const T *img_in_cp = img_in[0];
  float *img_cp = img[0];
  for(int i=0; i<pixel_count; i++)
    img_cp[i] = float(img_in_cp[i]);

  // Build a normalized Gaussian (dog_1) and its x-derivative (dog_2).
  _DGaussianKernel<float> _gaussian(sigma, filter_size);
  _DMatrix<float> dog_1 = _gaussian / _gaussian.max();
  _DMatrix<float> dog_2(1, filter_size);
  int center = filter_size / 2;
  for(int j=0; j<filter_size; j++)
    {
      int x = j-center, y=0;
      dog_2[0][j] = -x * exp(-(x*x+y*y) / (2 * sigma * sigma)) / (M_PI * sigma * sigma);
    }

  // Smooth, then take separable derivative-of-Gaussian gradients.
  img = img.cross_correlate_separable(_gaussian, _gaussian.transpose(), false);
  dx = img.cross_correlate_separable(dog_2, dog_1.transpose(), false);
  dy = img.cross_correlate_separable(dog_1, dog_2.transpose(), false);

  canny_edges = _DPlane<unsigned char>(img.rows(), img.cols());
  canny_edges = 0;

  // mag holds the SQUARED gradient magnitude.
  mag = _DPlane<float>(dx.rows(), dx.cols());
  float *_mag = mag[0], *_dx = dx[0], *_dy = dy[0];
  int sz = dx.rows() * dx.cols();
  for(int i=0; i<sz; i++)
    _mag[i] = _dx[i]*_dx[i]+_dy[i]*_dy[i];

  if(thresh1 <= 0.0 && thresh2 <= 0.0)
    {
      // Automatic thresholds from a 64-bucket magnitude histogram.
      float max_val = sqrt(mag.max());
      float inc = max_val / 64.0;

      std::vector<int> hist(64, 0);
      int count = img_in.rows() * img_in.cols();
      for(int i=0; i<count; i++)
        {
          hist[int(ceil((float(sqrt(_mag[i])) / inc)-0.5))]++;
        }

      // High threshold = 70th percentile of gradient magnitudes.
      int cum_sum = 0;
      int _i=0;
      for(int i=0; i<64; i++)
        {
          cum_sum += hist[i];
          if(cum_sum >= 0.7 * count)
            {
              _i = i;
              break;
            }
        }

      thresh1 = max_val * (_i+1) / 64.0;
      thresh2 = 0.4 * thresh1;
    }

  if (0 && relative)           // NOTE(review): dead branch — noise-relative
    {   /* thresholds are relative => get noise */    // thresholding is disabled.
      float noise;   /* and scale them by it. */
      noise = cannyNoiseEstimate(mag);
      assert(noise > 0.0);

      /* Noise is actually noise squared times 4.0, but, thresholds need to be squared
       * since they should be thresholds for the gradient magnitude squared * 4.0.
       * Be sure to preserve the signs of the thresholds, since thresh1 <= 0
       * means no thresholding, and thresh2 < 0.0 means only one threshold.
       */
      CannyThreshold1 = thresh1*noise;  /* save values in global vars */
      CannyThreshold2 = thresh2*noise;
      thresh1 = thresh1 * thresh1 * noise * ((thresh1 < 0.0) ? -1.0 : 1.0);
      thresh2 = thresh2 * thresh2 * noise * ((thresh2 < 0.0) ? -1.0 : 1.0);
    }
  else
    {  /* make thresholds right for grad-mag squared * 4.0 */
      CannyThreshold1 = thresh1;  /* save values in global vars */
      CannyThreshold2 = thresh2;
      thresh1 = thresh1*thresh1;   // square to compare against squared magnitudes
      thresh2 = thresh2*thresh2;
      //      thresh1 = thresh1 * thresh1 * ((thresh1 < 0.0) ? -1.0 : 1.0); // * 4.0;
      //      thresh2 = thresh2 * thresh2 * ((thresh2 < 0.0) ? -1.0 : 1.0); // * 4.0;
    }

  if (thresh2 <= 0.0) {    /* only one threshold specified */
    canny_edge(img_in, dx, dy, mag, thresh1, canny_edges);
  }
  else
    {  /* two thresholds were specified */
      if (thresh1 > thresh2)
        {  /* swap 'em if necessary */
          float tmp = thresh1;
          thresh1 = thresh2;
          thresh2 = tmp;
        }
      canny_edge2(img_in, dx, dy, mag, thresh1, thresh2, canny_edges);
    }

  // Thin the edge map to single-pixel-wide contours.
  canny_edges = canny_edges.binary_thin();

  return canny_edges;
}
/* This is based on Chapter 2 of <NAME>' SM Thesis
 * "Finding Testure Boundaries In Images," AI Tech Rpt 968/
 *
 * The histogram of the gradient magnitude should be a Rayleigh
 * distribution: f(x) = (x/a)*(e^(-x^2/2a^2)) and F(x) = 1 - e^(-x^2/2a^2)
 * Our noise estimate is the peak of f(x), which occurs when x = a.
 */
#define MAX(a,b) (((a)>(b))?(a):(b))
#define MIN(a,b) (((a)<(b))?(a):(b))

// Estimate the image noise level from the histogram of the (squared)
// gradient magnitude plane `mag`.  Also stores the result in the member
// CannyNoiseEstimate.
template<class T>
float DCanny<T>::cannyNoiseEstimate(const _DPlane<float> &mag) /* given gradient magnitude sqaured */
{                                                              /* (times 4) of the image, give an */
  std::vector<float> hist(NOISE_BUCKETS, 0.0), hists(NOISE_BUCKETS), histd(NOISE_BUCKETS);
  float tmp, scale, mxval;
  int index;
  int initial;
  // FIX: `sum` was read (sum += 1.0 in the histogram loop below) before ever
  // being initialized; its value there is discarded later, but reading an
  // uninitialized float is still undefined behavior.
  float noise, sum = 0.0, cdf;
  float sigma;
  int i, j;
  float minderiv, maxderiv, deriv;
  int estimate;

  mxval = -1.0;                   /* Get maximum gradient magnitude value */
  for (float *scan = mag[0], *end = scan + mag.total_pixel_count(); scan < end; scan++) {
    if ((tmp = *scan) > mxval)
      mxval = tmp;
  }

  mxval *= NOISE_RANGE * NOISE_RANGE;  /* max mag-sqrd value to histogram */
  scale = NOISE_BUCKETS / sqrt(mxval); /* scale for histograming mag vals */

  /* now histogram the data */
  for (float *scan = mag[0], *end = scan + mag.total_pixel_count(); scan < end; scan++) {
    if ((tmp = *scan) <= mxval) {               /* ok to histogram? */
      hist[((int) (sqrt(tmp)*scale))] += 1.0;   /* yes .. scale & histogram */
      sum += 1.0;
    }
  }

  /* partly kill the peak there *//* maybe use init est to figure how many to kill */
  hist[0] = hist[1] = hist[2] = 0.0;

  // Initial estimate: bucket at the 40th percentile of the histogram mass.
  for (sum = 0.0, initial = 0; initial < NOISE_BUCKETS; initial++) {
    sum += hist[initial];
  }
  sum *= 0.40;
  for (cdf = 0.0, initial = 0; initial < NOISE_BUCKETS && cdf < sum; initial++) {
    cdf += hist[initial];
  }
  noise = (initial + 0.5)/scale;
  noise = noise*noise;

  // Smooth the histogram with a Gaussian whose width tracks the initial
  // estimate (clamped to [NOISE_MIN_SIGMA, NOISE_MAX_SIGMA]).
  sigma = MIN(NOISE_MAX_SIGMA, MAX(NOISE_SIGMA*initial, NOISE_MIN_SIGMA));
  _DGaussianKernel<float> _gaussian(sigma, 5);

  hist[0] = 0.0;
  int len = _gaussian.cols();
  float *gaussian = _gaussian[0];
  for (i = 0; i < NOISE_BUCKETS; i++) {
    sum = gaussian[0] * hist[i];
    for (j = 1; j < len; j++) {
      sum += gaussian[j]*((i-j >= 0 ? hist[i-j] : -hist[j-i]) +
                          (i+j < NOISE_BUCKETS ? hist[i+j] : 0.0));
    }
    hists[i] = sum;
  }

  /* derivative based approach */
  minderiv = maxderiv = 0.0;  /* compute histogram derivative */
  histd[0] = 0.0;
  for (index = 1; index < NOISE_BUCKETS; index++) {
    deriv = histd[index] = hists[index] - hists[index-1];
    if (deriv > maxderiv) maxderiv = deriv;
    if (deriv < minderiv) minderiv = deriv;
  }

  /* now use as estimate the beginning of the largest region of negative
     valued derivative. */
  { int nareas = 0, begin;
    typedef struct { int begin; float area; } AREA;
    AREA areas[2];
    float area;

    for (i = 0; i < NOISE_BUCKETS - 1; i++) {
      if (histd[i] >= 0.0 && histd[i+1] < 0.0) {
        begin = ++i;
        area = histd[i];
        for ( ; i < NOISE_BUCKETS - 1 && histd[i] < 0.0; i++) {
          area += histd[i];
        }
        area = -area; i--;
        // Keep the two largest negative-derivative regions seen so far.
        areas[nareas >= 2
              ? (areas[0].area < areas[1].area
                 ? 0
                 : 1)
              : nareas++] = ((AREA) {begin: begin, area: area});
      }
    }
    // NOTE(review): if no negative-derivative region exists (nareas == 0),
    // areas[0].begin is read uninitialized here — confirm inputs always
    // produce at least one region.
    estimate = (nareas == 2
                ? (areas[0].area > areas[1].area
                   ? 0
                   : 1)
                : 0);
    estimate = areas[estimate].begin;
    noise = (estimate + 0.5)/scale;
    noise *= noise;
  }

  CannyNoiseEstimate = noise;   /* save in global var */
  return noise;
}
// Return a plane of edge orientations (radians, atan(dx/dy)) at pixels
// marked in canny_edges; non-edge pixels are left at 0.  Must be called
// after do_canny() has filled dx, dy, and canny_edges.
template<class T>
_DPlane<float> DCanny<T>::get_edgedir_plane()
{
  _DPlane<float> result(dx.rows(), dx.cols());
  result = 0;

  unsigned char *edge_cp = canny_edges[0];
  float *out_cp = result[0];
  for(int i=0; i<dx.rows(); i++)
    for(int j=0; j<dx.cols(); j++, edge_cp++, out_cp++)
      {
        if(*edge_cp)
          {
            float this_dx = dx[i][j], this_dy = dy[i][j];
            // Vertical gradient => orientation is exactly pi/2 (avoids the
            // division by zero below).
            if(this_dy == 0)
              *out_cp = M_PI / 2.0;
            else
              *out_cp = atan(this_dx / this_dy);
          }
      }

  return result;
}
/******************************* Local Functions *******************************/

// Non-maximum suppression at one pixel: interpolate the gradient magnitude
// at the two neighbors along the gradient direction (using the precomputed
// offset1/offset2 tables) and return 1 iff `mag` is >= both.
template<class T>
int DCanny<T>::NMS(float *magn, const float mag, const float dx, const float dy)
{
  float theta, m1, m2;
  int n, o1, o2;
  float interp, interp2;

  /* map [-pi/2 .. pi/2] to [0.0 .. 4.0] */
  if (dx == 0.0)                        /* Don't divide by 0.0 */
    theta = 0.0;                        /* 90 degrees */
  else
    // NOTE(review): atanpi is presumably a DLib macro for atan(x)/pi —
    // confirm its definition.
    theta = 4.0*(0.5 + atanpi(dy/dx));  /* 4*(.5 + PI*theta) */
  n = (int) theta;                      /* region we're in */
  interp = theta - (float) n;           /* interpolating factors */
  interp2 = 1.0 - interp;
  o1 = offset1[n];                      /* get offsets & interpolate */
  o2 = offset2[n];
  m1 = *(magn + o1)*interp2 + *(magn + o2)*interp;
  m2 = *(magn - o1)*interp2 + *(magn - o2)*interp;
  return (mag>=m1 && mag>=m2 && m1!=m2); /* return 1 iff passes NMS */
}
// Single-threshold Canny: mark a pixel as edge iff its squared gradient
// magnitude is >= thresh AND it survives non-maximum suppression.  The
// one-pixel image border is skipped (left as 0 / uninitialized by this
// routine; the output plane was zeroed by the caller).
template<class T>
int DCanny<T>::canny_edge(const _DPlane<T> &img, const _DPlane<float> &_dx, const _DPlane<float> &_dy, const _DPlane<float> &_magn,
                          const float thresh, _DPlane<unsigned char> &_output)
{
  int x_m = img.cols(), y_m = img.rows();

  // Neighbor-offset tables indexed by gradient-direction octant (used by NMS).
  offset1[0] = x_m;   offset2[0] = x_m-1;   /* 225..270 and 45..90 */
  offset1[1] = x_m-1; offset2[1] = -1;      /* 180..225 and 0..45 */
  offset1[2] = 1;     offset2[2] = x_m+1;   /* 315..360 and 135..180 */
  offset1[3] = x_m+1; offset2[3] = x_m;     /* 270..315 and 90..135 */
  offset1[4] = x_m;   offset2[4] = x_m-1;   /* 225..270 and 45..90 */

  unsigned char *output = _output[0] + x_m + 1;  /* skip first row */
  float *dx = _dx[0] + x_m + 1;                  /* leave zero=>no edge */
  float *dy = _dy[0] + x_m + 1;
  float *magn_max, *magn = _magn[0];

  for (magn_max = magn + x_m*(y_m - 1) - 1, magn += x_m + 1; magn < magn_max; ) {
    for (float *magn_max_x = magn + x_m - 2; magn < magn_max_x;
         output++, dx++, dy++, magn++)
      {
        float mag;
        if ((mag = *magn) < thresh)  /* don't even need to do NMS */
          *output = 0;
        else                         /* interpolate gradient & do NMS */
          *output = NMS(magn, mag, *dx, *dy);
      }
    output += 2;   /* skip over last pixel on this line */
    dx += 2;       /* and first pixel on next line */
    dy += 2;
    magn += 2;
  }

  return 0;
}
// Double-threshold Canny with hysteresis: pixels >= thresh2 become definite
// edges (1) and seed a stack; pixels in [thresh, thresh2) become candidates
// (2) and are promoted to 1 only if 8-connected to a definite edge.
// Remaining candidates are cleared to 0 at the end.
template<class T>
int DCanny<T>::canny_edge2(const _DPlane<T> &img, const _DPlane<float> &_dx, const _DPlane<float> &_dy,
                           const _DPlane<float> &_magn, const float thresh, const float thresh2,
                           _DPlane<unsigned char> &output)
{
  int x_m = img.cols(), y_m = img.rows();

  // Neighbor-offset tables indexed by gradient-direction octant (used by NMS).
  offset1[0] = x_m;   offset2[0] = x_m-1;   /* 225..270 and 45..90 */
  offset1[1] = x_m-1; offset2[1] = -1;      /* 180..225 and 0..45 */
  offset1[2] = 1;     offset2[2] = x_m+1;   /* 315..360 and 135..180 */
  offset1[3] = x_m+1; offset2[3] = x_m;     /* 270..315 and 90..135 */
  offset1[4] = x_m;   offset2[4] = x_m-1;   /* 225..270 and 45..90 */

  // Worklist of definite-edge pixel addresses for hysteresis propagation.
  unsigned char **stack = new unsigned char *[img.rows() * img.cols()];
  unsigned char **stktop = stack;

  unsigned char *out = output[0] + x_m + 1;   /* skip over first row */
  float *dx = _dx[0] + x_m + 1;
  float *dy = _dy[0] + x_m + 1;
  float * magn = _magn[0];
  float *magn_max;

  for (magn_max = magn + x_m*(y_m - 1) - 1, magn += x_m + 1; magn < magn_max; ) {
    for (float *magn_max_x = magn + x_m - 2; magn < magn_max_x;
         out++, dx++, dy++, magn++)
      {
        float mag;
        if ((mag = *magn) < thresh)   /* no possible edge */
          *out = 0;
        else
          {
            if (NMS(magn, mag, *dx, *dy))   /* check if passes NMS */
              if (mag >= thresh2) {
                *out = 1;             /* definitely have an edge */
                *(stktop++) = out;    /* put edge pixel addr on stack */
              }
              else
                *out = 2;             /* maybe have an edge */
            else
              *out = 0;               /* no edge here */
          }
      }
    out += 2;    /* skip over last pixel on this line */
    dx += 2;     /* and first pixel on next line */
    dy += 2;
    magn += 2;
  }

  // Hysteresis: promote candidate (2) neighbors of definite edges to 1.
  while (stktop > stack) {
    out = *--stktop;
    /* look at neighbors. if a neighbor is 2 make it 1 and add to stack */
    /* continue until stack empty */
    if (*(out -= x_m + 1) == 2)   /* upper-left */
      *out = 1, *(stktop++) = out;
    if (*(++out) == 2)            /* upper-middle */
      *out = 1, *(stktop++) = out;
    if (*(++out) == 2)            /* upper-right */
      *out = 1, *(stktop++) = out;
    if (*(out += x_m) == 2)       /* middle-right */
      *out = 1, *(stktop++) = out;
    if (*(out -= 2) == 2)         /* middle-left */
      *out = 1, *(stktop++) = out;
    if (*(out += x_m) == 2)       /* lower-left */
      *out = 1, *(stktop++) = out;
    if (*(++out) == 2)            /* lower-middle */
      *out = 1, *(stktop++) = out;
    if (*(++out) == 2)            /* lower-right */
      *out = 1, *(stktop++) = out;
  }

  {
    unsigned char *outend = output[0] + x_m*y_m;   /* get rid of any remaining 2's */
    for (out = output[0]; out < outend; out++ )
      if (*out == 2)
        *out = 0;
  }

  delete[] stack;
  return 0;
}
// Computes image gradients with a 2x2 diagonal-difference (Roberts-cross
// style) stencil.  Allocates and fills three planes the same size as the
// input: dx, dy and mag.
// NOTE(review): `mag` holds the SQUARED gradient magnitude (x*x + y*y),
// not its square root — callers must take this into account.
// The last row and last column of all three outputs are set to 0.0
// because the 2x2 stencil has no right/bottom neighbour there.
// Always returns 0.
template<class T>
int DCanny<T>::gradientFull(const _DPlane<float> &img, _DPlane<float> &dx, _DPlane<float> &dy, _DPlane<float> &mag)
{
  float *img_xendp;
  register float x, y;
  register float a, b, c, d;
  register float *img_endp;
  int width = img.cols(), height = img.rows();
  dx = _DPlane<float>(height, width);
  dy = _DPlane<float>(height, width);
  mag = _DPlane<float>(height, width);
  float *imgp = img[0];
  float *dxp = dx[0], *dyp = dy[0], *magp = mag[0];
  // Sweep row by row keeping the 2x2 window in registers:
  // a = I(r,c), b = I(r,c+1), c = I(r+1,c), d = I(r+1,c+1).
  for (img_endp = imgp + width*(height-1) - 1; imgp < img_endp; ) {
    a = *imgp;
    c = *(imgp+width);
    for (img_xendp = imgp + width - 1; imgp < img_xendp; ) { /* this really works! */
      imgp++;
      b = *(imgp);
      d = *(imgp+width);
      // Diagonal differences: sum/difference give the two derivative
      // estimates (x ~ dx response, y ~ dy response).
      a = d - a;
      c = b - c;
      x = a + c;
      y = a - c;
      // Slide the window right: current right column becomes the next
      // step's left column.
      a = b;
      c = d;
      *(dxp++) = x;
      *(dyp++) = y;
      *(magp++) = x*x + y*y;
    }
    imgp++; /* last column gets 0.0 */
    *(dxp++) = *(dyp++) = *(magp++) = 0.0;
  }
  for (img_endp += width; imgp <= img_endp; imgp++) /* last row gets 0.0 */
    *(dxp++) = *(dyp++) = *(magp++) = 0.0;
  return 0;
}
// Explicit template instantiations: emit DCanny<T> for every supported
// pixel type so the template definitions in this translation unit are
// available to other translation units at link time.
#define DECLARE(x) \
template class DCanny<x>;
DECLARE(double)
DECLARE(short)
DECLARE(int)
DECLARE(float)
DECLARE(char)
DECLARE(unsigned char)
<file_sep>/src/Truncated_L2.h
#ifndef TRUNCATED_L2_H
#define TRUNCATED_L2_H
#include <iostream>
#include <vector>
#include <cstring>
#include <fstream>
#include <cmath>
#include <cassert>
#include "graph.h"
#include <algorithm>
#include <time.h>
#include <omp.h>
#include "util.h"
#include <string>
#include <random>
using namespace std;
// Truncated least-squares solver for translation synchronization
// (TranSync).  Starting from graph.x0 it alternates:
//   * an inner weighted-averaging pass that re-solves the least-squares
//     problem restricted to edges whose residual is below `threshold`,
//   * an outer step that shrinks `threshold` (clamped to the largest
//     surviving residual, then multiplied by `decay`) and permanently
//     prunes edges above it from graph.adj.
// Iteration stops at max_iter, when too few edges remain, when some
// vertex loses all of its edges, or when the largest surviving residual
// drops below `stopping`.
// NOTE: solve() mutates graph.adj in place (pruned edges are removed).
class Truncated_L2 : public Solver{
  public:
  int max_iter;     // maximum number of outer (threshold-shrinking) iterations
  double decay;     // multiplicative shrink factor for the residual threshold
  double stopping;  // early-exit bound on the largest surviving residual

  Truncated_L2(Params* params){
    this->max_iter = params->max_iter;
    this->decay = params->decay;
    this->stopping = params->stopping;
  }

  Truncated_L2(int max_iter, double decay){
    this->max_iter = max_iter;
    this->decay = decay;
    // BUG FIX: `stopping` was left uninitialized by this constructor but
    // is read in solve() (`max_diff < stopping`), which is undefined
    // behavior.  0.0 disables the early exit (max_diff is never negative).
    this->stopping = 0.0;
  }

  // Runs the solver on `graph`, writing one diagnostic line per outer
  // iteration to `filename`.  Returns the final l-infinity error of the
  // estimate against the ground truth graph.x.
  inline double solve(Graph& graph, string filename){
    ofstream fout(filename);
    double start_time = omp_get_wtime();
    int n = graph.n;
    vector<pair<double, int>>* v = graph.adj;
    // Current estimate, initialized from the supplied starting point.
    double* x = new double[n];
    for (int i = 0; i < n; i++){
      x[i] = graph.x0[i];
    }
    double* new_x = new double[n];
    int iter = 0;
    double min_loss = 1e100;   // kept for the log format; set on return
    bool disconnected = false;
    double threshold = 1e100;  // residual acceptance bound (shrinks over time)
    // Vertex degrees, used to weight the gauge-fixing shift below.
    int* d = new int[n];
    memset(d, 0, sizeof(int)*n);
    double up = 0.0;
    double down = 0.0;
    for (int i = 0; i < n; i++){
      for (vector<pair<double, int>>::const_iterator it_i = v[i].cbegin(); it_i != v[i].cend(); it_i++){
        int j = it_i->second;
        d[i]++;
        d[j]++;
      }
    }
    int edges_remain = 0;
    for (int i = 0; i < n; i++){
      down += sqrt(d[i]);
      edges_remain += d[i];
    }
    while (iter++ < max_iter && (edges_remain >= 2*(n-1)) && (!disconnected)){
      double delta_x = 1e100;
      int inner = 0;
      double max_diff = 0.0;
      vector<int> remove_list;
      // Inner loop: iterate the truncated weighted average to a fixed
      // point under the current threshold.
      while (delta_x > 1e-2 && (!disconnected) && (edges_remain >= 2*(n-1))){
        inner++;
        max_diff = 0.0;
        up = 0.0;
        for (int i = 0; i < n; i++){
          int d_i = 0;
          double weighted_sum = 0.0;
          remove_list.clear();
          for (vector<pair<double, int>>::const_iterator it_i = v[i].begin(); it_i != v[i].end(); it_i++){
            int j = it_i->second;
            double t_ij = it_i->first;
            double diff_i = fabs(t_ij - (x[i] - x[j]));
            if ( diff_i < threshold ){
              // Edge survives: neighbour j votes x[j] + t_ij for x[i].
              weighted_sum += x[j] + t_ij;
              d_i++;
              if (diff_i > max_diff){
                max_diff = diff_i;
              }
            } else {
              remove_list.push_back(it_i-v[i].cbegin());
            }
          }
          // Prune rejected edges back-to-front with swap-with-last so
          // the recorded indices stay valid during removal.
          for (int k = remove_list.size()-1; k >= 0; k--){
            int to_remove = remove_list[k];
            v[i][to_remove] = v[i][v[i].size()-1];
            v[i].pop_back();
            edges_remain--;
          }
          if (d_i == 0){
            // Vertex i lost all of its edges; the system is no longer
            // connected, so stop iterating.
            disconnected = true;
            break;
          }
          new_x[i] = weighted_sum / d_i;
          up += new_x[i] * sqrt(d[i]);
        }
        // Gauge fixing: the solution is only determined up to a global
        // shift, so subtract the sqrt-degree-weighted mean.
        double shift = up/down;
        delta_x = 0.0;
        for (int i = 0; i < n; i++){
          new_x[i] -= shift;
          delta_x += fabs(new_x[i] - x[i]);
        }
        double* temp = new_x; new_x = x; x = temp;
      }
      // Shrink the threshold: clamp to the largest surviving residual,
      // then decay geometrically.
      if (iter == 1){
        threshold = max_diff;
      }
      if (threshold > max_diff){
        threshold = max_diff;
      }
      threshold = threshold * decay;
      double loss = linf_loss(n, x, graph.x);
      fout << "iter=" << iter;
      fout << ", #inner=" << inner;
      fout << ", linf_loss=" << loss;
      fout << ", l1_loss=" << l1_loss(n, x, graph.x);
      fout << ", min_loss=" << min_loss;
      fout << ", threshold=" << threshold;
      fout << ", max_diff=" << max_diff;
      fout << ", elapsed_time=" << (omp_get_wtime() - start_time);
      fout << endl;
      if (max_diff < stopping){
        break;
      }
    }
    min_loss = linf_loss(n, x, graph.x);
    fout.close();
    // BUG FIX: these buffers come from new[], so they must be released
    // with delete[] (the original used scalar delete — undefined
    // behavior — and leaked an `old_x` buffer that was written but never
    // read; that dead buffer and the unused `index` array have been
    // removed altogether, along with large blocks of commented-out
    // experimental code).
    delete[] d;
    delete[] x;
    delete[] new_x;
    return min_loss;
  }
};
#endif
<file_sep>/src/artsquad.cpp
#include <iostream>
#include <vector>
#include <cstring>
#include <fstream>
#include <cmath>
#include <cassert>
#include "graph.h"
#include <algorithm>
#include <time.h>
#include <omp.h>
#include "Truncated_L2.h"
#include "CoordinateDescent.h"
using namespace std;
// A pair of doubles; used both for node positions (x, y) read from the
// input file and for per-edge offset measurements.
struct point{
  double x, y;
};
int main(){
ifstream fin("sample.txt");
Graph graph;
int n;
int m;
fin >> n >> m;
graph.n = n;
graph.m = m;
graph.x = new double[n];
graph.x0 = new double[n];
graph.adj = new vector<pair<double, int>>[n];
cerr << "0" << endl;
point* p = new point[n];
for (int i = 0; i < n; i++){
fin >> p[i].x >> p[i].y;
graph.x[i] = p[i].y;
graph.x0[i] = 0.0;
}
cerr << "1" << endl;
point* edge = new point[m];
int* d = new int[n];
for (int e = 0; e < m; e++){
int i, j;
fin >> i >> j >> edge[e].x >> edge[e].y;
i--;
j--;
d[i]++;
d[j]++;
graph.adj[i].push_back(make_pair(edge[e].y,j));
graph.adj[j].push_back(make_pair(-edge[e].y,i));
}
cerr << "2" << endl;
for (int i = 0; i < n; i++){
if (d[i] == 0){
cerr << "i=" << i << endl;
}
}
cerr << "hey" << endl;
graph.normalize(n, d, graph.x);
CoordinateDescent CD(10000);
double ans_CD = CD.solve(graph, "dataI.CDY");
cout << "CD=" << ans_CD << endl;
Truncated_L2 TL2(10000, 0.8);
double ans_TL2 = TL2.solve(graph, "dataI.TL2Y");
cout << "TL2=" << ans_TL2 << endl;
}
<file_sep>/data/disco-bp/DLib/test_gradients.cpp
#include <DMatrix.h>
#include <DGaussianModel.h>
#include <DPlane.h>
#include <DistanceTransform.h>
#include <iomanip>
#include <DImageIO.h>
#include <DGaussianKernel.h>
using namespace std;
// Loads the image named on the command line, computes gradient planes
// and (squared-sum) magnitude of the luma channel, convolves the plane
// with an anisotropic Gaussian, and writes the converted result to
// mag.png.  DLib's string exceptions are caught and printed.
// BUG FIX: the original declared main() with an implicit int return
// type, which is ill-formed in standard C++.
int main(int argc, char *argv[])
{
  try {
    _DImage<unsigned char> img = LoadDImage(argv[1]);
    _DPlane<double> plane;
    change_type(img.get_luma_plane(), plane);
    // compute x gradients
    _DPlane<double> x_grad = plane.get_x_gradient();
    _DPlane<double> y_grad = plane.get_y_gradient();
    // NOTE(review): `mag` is computed but never used below — the saved
    // image comes from the Gaussian-convolved plane.  TODO confirm intent.
    _DPlane<double> mag = sqrt(sqr(x_grad) + sqr(y_grad));
    DMatrix cov(2,2);
    cov=0;
    cov[0][0] = 10;
    cov[1][1] = 5;
    _DPlane<double> conv_result = plane.convolve_gaussian(cov, 20);
    _DPlane<unsigned char> result;
    change_type(conv_result, result);
    SaveDImage("mag.png", result);
  } catch(string &str)
  {
    cout << str << endl;
  }
  return 0;
}
<file_sep>/makefile
# Graph-type list passed to the MATLAB plotting routine, and the
# directory that holds the result files / generated figures.
graph_types=[1, 2, 3]
folder=./results
# Render the comparison figures in MATLAB, then copy the generated .eps
# files to the public web directory.
draw:
	matlab -nodesktop -r "draw_figure($(graph_types), '$(folder)'); exit;"
	scp $(folder)/Graph_*.eps <EMAIL>:./public_html/figures/TranslationSync/$(folder)/
<file_sep>/data/disco-bp/DLib/FrequencyTransforms.cpp
/*#include <FrequencyTransforms.h>
template<class T>
DiscreteCosineTransform<T>::DiscreteCosineTransform(const DPoint &size, bool _normalize) : normalize(_normalize)
{
input = _DMatrix<double>(size.row(), size.col());
output = _DMatrix<double>(size.row(), size.col());
p = fftw_plan_r2r_2d(size.col(), size.row(), input[0], output[0], FFTW_REDFT10, FFTW_REDFT10, FFTW_ESTIMATE);
}
template<class T>
_DMatrix<T> DiscreteCosineTransform<T>::do_transform(const _DMatrix<T> &_input)
{
change_type(_input, input);
fftw_execute(p);
if(normalize)
output /= 2.0 * output.rows() * 2.0 * output.cols();
_DMatrix<T> result;
change_type(output, result);
return result;
}
template<class T>
DiscreteCosineTransform<T>::~DiscreteCosineTransform()
{
fftw_destroy_plan(p);
}
template<class T>
DiscreteRealFourierTransform<T>::DiscreteRealFourierTransform(const DPoint &_size, bool _normalize) : size(_size), normalize(_normalize)
{
input = _DMatrix<double>(size.row(), size.col());
output = _DComplexMatrix<double>(size.row(), size.col() / 2 + 1);
p = fftw_plan_dft_r2c_2d(size.row(), size.col(), input[0], reinterpret_cast<fftw_complex*>(output[0]), FFTW_ESTIMATE);
}
template<class T>
_DComplexMatrix<T> DiscreteRealFourierTransform<T>::do_transform(const _DMatrix<T> &_input)
{
change_type(_input, input);
fftw_execute(p);
_DComplexMatrix<T> result;
change_type(output, result);
return result;
}
template<class T>
DiscreteRealFourierTransform<T>::~DiscreteRealFourierTransform()
{
fftw_destroy_plan(p);
}
template<class T>
DiscreteInverseRealFourierTransform<T>::DiscreteInverseRealFourierTransform(const DPoint &_size, bool _normalize) : size(_size), normalize(_normalize)
{
input = _DComplexMatrix<double>(size.row(), size.col() / 2 + 1);
output = _DMatrix<double>(size.row(), size.col());
p = fftw_plan_dft_c2r_2d(size.row(), size.col(), reinterpret_cast<fftw_complex*>(input[0]), output[0], FFTW_ESTIMATE);
}
template<class T>
_DMatrix<T> DiscreteInverseRealFourierTransform<T>::do_transform(const _DComplexMatrix<T> &_input)
{
change_type(_input, input);
fftw_execute(p);
if(normalize)
output /= output.rows() * output.cols();
_DMatrix<T> result;
change_type(output, result);
return result;
}
template<class T>
DiscreteInverseRealFourierTransform<T>::~DiscreteInverseRealFourierTransform()
{
fftw_destroy_plan(p);
}
template<class T>
FFT_Convolution<T>::FFT_Convolution(const DPoint &_image_size, const DPoint &_kernel_size) : kernel_size(_kernel_size), image_size(_image_size)
{
new_size = kernel_size + image_size - DPoint(1,1);
fft = DiscreteRealFourierTransform<T>(new_size, true);
ifft = DiscreteInverseRealFourierTransform<T>(new_size, true);
}
template<class T>
void FFT_Convolution<T>::compute_image_fft(const _DPlane<T> &in_image)
{
assert(in_image.size() == image_size);
_DPlane<T> padded_input(new_size.row(), new_size.col());
padded_input = 0;
padded_input.set_submatrix(DPoint(0,0), in_image);
_input_fft = fft.do_transform(padded_input);
}
template<class T>
void FFT_Convolution<T>::compute_kernel_fft(const _DPlane<T> &kernel)
{
assert(kernel.size() == kernel_size);
_DPlane<T> padded_kernel(new_size.row(), new_size.col());
padded_kernel = 0;
padded_kernel.set_submatrix(DPoint(0,0), kernel.rotate_180());
_kernel_fft = fft.do_transform(padded_kernel);
}
template<class T>
_DPlane<T> FFT_Convolution<T>::do_transform()
{
_DComplexMatrix<T> product = pointwise_multiply(_input_fft, _kernel_fft);
_DPlane<T> p = (ifft.do_transform(product)).extract(DRect(kernel_size/2, kernel_size/2 + image_size - DPoint(1,1)));
return p;
}
template<class T>
_DPlane<T> FFT_Convolution<T>::do_transform(const _DPlane<T> &in_image, const _DPlane<T> &kernel)
{
compute_image_fft(in_image);
compute_kernel_fft(kernel);
return do_transform();
}
#define DECLARE(x) \
template class FFT_Convolution<x>; \
template class DiscreteInverseRealFourierTransform<x>; \
template class DiscreteRealFourierTransform<x>; \
template class DiscreteCosineTransform<x>;
DECLARE(double);
DECLARE(float);
*/
<file_sep>/data/ArtsQuad_dataset_results/preprocess.cpp
#include <iostream>
#include <fstream>
#include <vector>
#include <cassert>
#include <cmath>
using namespace std;
// A camera record: focal length f, radial distortion k1/k2, a 3x3
// rotation matrix r (9 entries, read row by row from the input file),
// and a translation t (3 entries).
struct point{
  double f, k1, k2;
  vector<double> r;
  vector<double> t;
};
// A relative measurement between cameras i and j: the measured relative
// translation t_ij (3 entries).
struct comparison{
  int i, j;
  vector<double> t_ij;
};
// Returns the transpose of an n-by-m row-major matrix `a` as an
// m-by-n row-major matrix.
inline std::vector<double> transpose(int n, int m, std::vector<double> a){
  std::vector<double> out;
  out.reserve(a.size());
  for (int col = 0; col < m; ++col)
    for (int row = 0; row < n; ++row)
      out.push_back(a[row*m + col]);
  return out;
}
// Splits `a` into consecutive chunks of b.size() elements and returns
// the dot product of each chunk with `b` (so a matrix stored in `a`
// times the vector `b`).  Requires a.size() to be a multiple of
// b.size(), enforced by the assert.
// BUG FIX: removed a dead `it_b` declaration that was immediately
// shadowed by the for-loop's own iterator.
inline std::vector<double> multiply(std::vector<double> a, std::vector<double> b){
  std::vector<double> ans;
  int n = a.size(), m = b.size();
  assert(n % m == 0);
  auto it_a = a.cbegin();
  for (int i = 0; i < n; i+=m){
    double dot_product = 0.0;
    for (auto it_b = b.cbegin(); it_b != b.cend(); it_b++, it_a++){
      dot_product += *it_b*(*it_a);
    }
    ans.push_back(dot_product);
  }
  return ans;
}
// Computes the element-wise difference a - b.
// BUG FIX (documentation): the original comment claimed
// "assume a.size() > b.size()", but the code asserts the two vectors
// have EQUAL length — the comment was a copy-paste from multiply().
inline std::vector<double> subtract(std::vector<double>& a, std::vector<double>& b){
  std::vector<double> ans;
  int n = a.size(), m = b.size();
  assert(n == m);
  auto it_a = a.cbegin();
  for (auto it_b = b.cbegin(); it_b != b.cend(); it_b++, it_a++){
    ans.push_back((*it_a)-(*it_b));
  }
  return ans;
}
void get_cameras(char* filename, vector<point>& ans){
ifstream fin(filename);
int n, m;
fin >> n >> m;
for (int i = 0; i < n; i++){
point camera;
fin >> camera.f >> camera.k1 >> camera.k2;
for (int j = 0; j < 9; j++){
double r_j;
fin >> r_j;
camera.r.push_back(r_j);
}
for (int j = 0; j < 3; j++){
double t_j;
fin >> t_j;
camera.t.push_back(t_j);
}
ans.push_back(camera);
}
fin.close();
}
void get_relative_translations(char* filename, vector<comparison>& rel){
ifstream fin(filename);
int n, m;
fin >> n >> m;
for (int e = 0; e < m; e++){
comparison comp;
fin >> comp.i >> comp.j;
for (int k = 0; k < 9; k++){
double temp;
fin >> temp;
}
for (int k = 0; k < 3; k++){
double t_ij_k;
fin >> t_ij_k;
comp.t_ij.push_back(t_ij_k);
}
for (int k = 0; k < 4; k++){
double temp;
fin >> temp;
}
rel.push_back(comp);
}
fin.close();
}
// Prints the elements of v to stdout, each followed by a space, then a
// newline.
inline void print_vec(std::vector<double> v){
  for (std::size_t k = 0; k < v.size(); k++){
    std::cout << v[k] << " ";
  }
  std::cout << std::endl;
}
// Validates a bundle solution: for each relative measurement
// (i, j, t_ij), compares t_ij — scaled by the least-squares factor
// lambda — against r_i^T (t_j - t_i), and prints the accumulated and
// average residual norms.
int main(int argc, char** argv){
  // BUG FIX: both argv[1] and argv[2] are required below; the original
  // only checked `argc <= 1` and then dereferenced argv[2].
  if (argc < 3){
    cerr << "./preprocess [camera] [relative translation]" << endl;
    return -1;
  }
  vector<point> camera;
  get_cameras(argv[1], camera);
  vector<comparison> rel;
  get_relative_translations(argv[2], rel);
  double loss = 0.0;
  double norm = 0.0;
  for (auto it = rel.cbegin(); it != rel.cend(); it++){
    int i = it->i, j = it->j;
    vector<double> t_ij = it->t_ij;
    vector<double>& r_i = camera[i].r;
    vector<double>& t_i = camera[i].t;
    vector<double>& t_j = camera[j].t;
    // Relative translation expressed in camera i's frame.
    vector<double> rhs = multiply(transpose(3,3, r_i), subtract(t_j, t_i));
    double norm_l = 0.0, norm_r = 0.0;
    double norm_rhs = 0.0;
    for (int k = 0; k < 3; k++){
      norm_r += rhs[k]*t_ij[k];
      norm_l += t_ij[k]*t_ij[k];
      norm_rhs += rhs[k]*rhs[k];
    }
    norm += sqrt(norm_rhs);
    // Least-squares scale aligning t_ij with rhs.
    double lambda = 1.0;
    if (fabs(norm_l) > 1e-20){
      lambda = norm_r/norm_l;
    } else {
      assert(false);   // zero-length measurement: input data is corrupt
    }
    double dist = 0.0;
    for (int k = 0; k < 3; k++){
      double diff = t_ij[k]*lambda-rhs[k];
      dist += diff*diff;
    }
    loss += sqrt(dist);
  }
  cout << "loss=" << loss;
  cout << ", avg_loss=" << loss/rel.size();
  cout << ", norm=" << norm;
  cout << ", avg_norm=" << norm/rel.size();
  cout << endl;
  return 0;
}
<file_sep>/src/cdf.cpp
#include <iostream>
#include <fstream>
#include <vector>
#include <cmath>
#include <algorithm>
using namespace std;
// Computes the empirical residual distribution of a solved layout: for
// every edge in sample.txt, records the absolute residuals of the
// solved coordinates (TL2.x / TL2.y) against the measured offsets,
// sorts them, and writes them (an empirical CDF) to TL2.dist.
int main(){
  ifstream finx("TL2.x");
  int n = 6001;   // node count hard-coded to match the solution files
  double* x = new double[n];
  for (int i = 0; i < n; i++){
    finx >> x[i];
  }
  finx.close();
  ifstream finy("TL2.y");
  double* y = new double[n];
  for (int i = 0; i < n; i++){
    finy >> y[i];
  }
  finy.close();
  int m = 140381;
  ifstream fin("sample.txt");
  // NOTE(review): this overwrites n and m from the file; if the file's
  // n exceeds 6001 the edge loop below would index past x/y — TODO
  // verify the inputs always agree.
  fin >> n >> m;
  for (int i = 0; i < n; i++){
    double t;
    fin >> t;   // skip the two stored node coordinates
    fin >> t;
  }
  vector<double> dists;
  for (int e = 0; e < m; e++){
    int i, j;
    double t_ij_x;
    double t_ij_y;
    fin >> i >> j >> t_ij_x >> t_ij_y;
    i--; j--;   // file endpoints are 1-indexed
    dists.push_back(fabs(t_ij_x-(x[i]-x[j])));
    dists.push_back(fabs(t_ij_y-(y[i]-y[j])));
  }
  fin.close();
  sort(dists.begin(), dists.end());
  ofstream fout("TL2.dist");
  for (int e = 0; e < m*2; e++){
    fout << dists[e] << endl;
  }
  fout.close();
  // BUG FIX: arrays from new[] must be released with delete[] (the
  // original used scalar delete on x), and y was leaked entirely.
  delete[] x;
  delete[] y;
  return 0;
}
<file_sep>/src/graph3.py
import sys
import numpy
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
from settings import *
import shapely
from shapely.geometry import Polygon
from descartes import PolygonPatch
def plot_coords(ax, ob):
    """Scatter-plot the coordinate sequence of a shapely geometry part
    (e.g. a polygon exterior) as grey '+' markers on the given axes."""
    xs, ys = ob.xy
    ax.plot(xs, ys, '+', color='grey')
# Directory containing the per-run result logs for graph type 3.
folder = 'uniform_n1p2/graph3_final/'
print("drawing figure with data under %s" % folder)
# Per-ratio summary statistics (min/median/max of the best loss) for the
# two solvers: TL2 (TranSync) and CD (coordinate descent).
min_TL2 = []
median_TL2 = []
max_TL2 = []
min_CD = []
median_CD = []
max_CD = []
ratios = []
for ratio in range(100):
    # Losses/times over all experiment repetitions (eid) at this ratio.
    ml_TL2 = []
    time_TL2 = []
    ml_CD = []
    time_CD = []
    for eid in range(100):
        name_TL2 = folder+'/ratio'+str(ratio)+'_'+str(eid)+'.TL2'
        name_CD = folder+'/ratio'+str(ratio)+'_'+str(eid)+'.CD'
        if os.path.isfile(name_TL2) and os.path.isfile(name_CD):
            # Record each distinct ratio at most once on the x axis.
            if len(ratios) == 0 or ratios[-1] < ratio/100.0-0.005:
                ratios.append(ratio/100.0)
            with open(name_TL2) as fin:
                lines = fin.readlines()
                min_loss=1e100
                time=0.0
                # Scan the solver log for the iteration with the smallest
                # reported min_loss; remember its elapsed time.
                for line in lines:
                    ml = 0.0
                    t = 0.0
                    for token in line.strip().split(', '):
                        if token.startswith('min_loss'):
                            ml = float(token.split('=')[1])
                        if token.startswith('elapsed'):
                            t = float(token.split('=')[1])
                    if min_loss > ml:
                        min_loss = ml
                        time = t
                    if min_loss < 1e-5:
                        break
                ml_TL2.append(min_loss)
                time_TL2.append(time)
            with open(name_CD) as fin:
                lines = fin.readlines()
                min_loss=1e100
                time=0.0
                # Same scan for the coordinate-descent log.
                for line in lines:
                    ml = 0.0
                    t = 0.0
                    for token in line.strip().split(', '):
                        if token.startswith('min_loss'):
                            ml = float(token.split('=')[1])
                        if token.startswith('elapsed'):
                            t = float(token.split('=')[1])
                    if min_loss > ml:
                        min_loss = ml
                        time = t
                    if min_loss < 1e-5:
                        break
                ml_CD.append(min_loss)
                time_CD.append(time)
    if len(ml_TL2) > 0 and len(ml_CD) > 0:
        min_TL2.append(min(ml_TL2))
        median_TL2.append(numpy.median(ml_TL2))
        max_TL2.append(max(ml_TL2))
        min_CD.append(min(ml_CD))
        median_CD.append(numpy.median(ml_CD))
        max_CD.append(max(ml_CD))
# The x axis is p = 1 - corruption ratio.
ratios = [1.0-ratio for ratio in ratios]
plots = {'min CD':min_CD, 'median CD':median_CD, 'max CD':max_CD,
        'min TranSync':min_TL2, 'median TranSync':median_TL2, 'max TranSync':max_TL2}
# Build the min/max envelope polygons for both methods and shade their
# intersection in red.
down_CD = [(x, y) for (x, y) in zip(ratios, min_CD)]
up_CD = [(x, y) for (x, y) in zip(ratios, max_CD)]
up_CD.reverse()
down_TL2 = [(x, y) for (x, y) in zip(ratios, min_TL2)]
up_TL2 = [(x, y) for (x, y) in zip(ratios, max_TL2)]
up_TL2.reverse()
area_1 = Polygon(down_CD+up_CD)
area_2 = Polygon(down_TL2+up_TL2)
solution = area_1.intersection(area_2)
fig, ax = plt.subplots()
#plot_coords(ax, area_1.exterior)
patch = PolygonPatch(area_1, facecolor=colors['min CD'],
        edgecolor=colors['min CD'], alpha=1.0)
ax.add_patch(patch)
#plot_coords(ax, area_2.exterior)
patch = PolygonPatch(area_2, facecolor=colors['min TranSync'],
        edgecolor=colors['min TranSync'], alpha=1.0)
ax.add_patch(patch)
#plot_coords(ax, solution.exterior)
patch = PolygonPatch(solution, facecolor='r', edgecolor='r')
ax.add_patch(patch)
# Overlay the six summary curves (styles come from settings.py).
for label in ['min CD', 'median CD', 'max CD', 'min TranSync',
        'median TranSync','max TranSync']:
    ax.plot(ratios, plots[label], color=colors[label], label=label,
            linestyle=linestyles[label], linewidth=linewidths[label],
            marker=markers[label])
#ax.fill_between(ratios, min_CD, max_CD, facecolor=colors['median_CD'], interpolate=True)
#ax.fill_between(ratios, min_TL2, max_TL2, facecolor=colors['median_TL2'], interpolate=True)
legend = ax.legend(loc=(0.03, 0.03), shadow=True, fontsize=15)
plt.title('Graph $G_{sr}$', fontsize=40)
plt.xlabel('$p$', fontsize=25)
plt.ylabel('$\|x^*-x^{gt}\|_{\infty}$', fontsize=25)
plt.axis([0.01, 1.0, 0, 1.8])
plt.savefig('graph3.eps')
<file_sep>/src/process.py
import sys
# NOTE(review): this is a Python 2 script — it uses the print statement
# ("print line") and relies on integer division in "(count % 8) / 2".
# Reads a results table (one "name=value, name=value, ..." line per
# experiment) and emits rows of a LaTeX comparison table, bolding the
# better of the two methods in each column pair.
with open(sys.argv[1], 'r') as fin:
    lines = fin.readlines()
# One list of LaTeX rows per graph type id (0..3).
strings = {0:[], 1:[], 2:[], 3:[]}
for count, line in enumerate(lines):
    print line
    # Parse every "name=value" token into a float; values may be wrapped
    # in parentheses.
    a = [float(token.split('=')[-1].strip().lstrip('(').rstrip(')')) for token in line.split(',')]
    p = 1.0-a[0]
    # The second half of the input lines uses the larger noise level.
    if count >= 8:
        sigma = 0.04
    else:
        sigma = 0.01
    # Graph type: two consecutive lines per graph, four graphs per sigma
    # block (integer division under Python 2).
    ID = (count % 8) / 2
    if ID == 0:
        name = '$G_{dr}$'
    if ID == 1:
        name = '$G_{di}$'
    if ID == 2:
        name = '$G_{sr}$'
    if ID == 3:
        name = '$G_{si}$'
    ans = ''
    ans += '%s & %.1f & %.2f ' % (name, p, sigma, )
    # Error columns for method B (indices 4-6) vs method A (1-3):
    # bold whichever is smaller; columns 7/8 are running times.
    for i in range(4, 7):
        if a[i] < a[i-3]:
            ans += '& \\textbf{%.2fe-2}' % (a[i]*100)
        else:
            ans += '& %.2fe-2' % (a[i]*100)
    if a[8] < a[7]:
        ans += '& \\textbf{%.3fs}' % a[8]
    else:
        ans += '& %.3fs' % a[8]
    for i in range(1, 4):
        if a[i] < a[i+3]:
            ans += '& \\textbf{%.2fe-2}' % (a[i]*100)
        else:
            ans += '& %.2fe-2' % (a[i]*100)
    if a[7] < a[8]:
        ans += '& \\textbf{%.3fs}' % a[7]
    else:
        ans += '& %.3fs' % a[7]
    ans += '\\\\'
    strings[ID].append(ans)
# Emit the rows grouped by graph type, reordered to match the paper's
# table layout, with an \hline after each group.
for j in range(4):
    string = strings[j]
    print(string[1])
    print(string[3])
    print(string[0])
    print(string[2])
    print('\\hline')
<file_sep>/bar.py
import numpy as np
import matplotlib.pyplot as plt
def draw(name, dictg):
    """Draw a grouped bar chart comparing l1-minimization (CD) against
    TranSync (TL) error statistics for one benchmark graph.

    dictg carries six parallel lists ('CD_min', 'CD_median', 'CD_max',
    'TL_min', 'TL_median', 'TL_max') plus 'ps', the four x-axis tick
    labels.  All values are normalized by the corresponding CD maximum
    before plotting; asymmetric error bars span [min, max] around the
    median.  Shows the figure interactively (plt.show()).
    """
    CD_median = dictg['CD_median']
    CD_min = dictg['CD_min']
    CD_max = dictg['CD_max']
    TL_median = dictg['TL_median']
    TL_min = dictg['TL_min']
    TL_max = dictg['TL_max']
    ps = dictg['ps']
    # Normalize everything by the CD maximum of each setting, so the CD
    # max bar is exactly 1.0.
    CD_median = [x/max_x for x, max_x in zip(CD_median, CD_max)]
    CD_min = [x/max_x for x, max_x in zip(CD_min, CD_max)]
    TL_median = [x/max_x for x, max_x in zip(TL_median, CD_max)]
    TL_min = [x/max_x for x, max_x in zip(TL_min, CD_max)]
    TL_max = [x/max_x for x, max_x in zip(TL_max, CD_max)]
    CD_max = [1.0 for x in CD_max]
    # Asymmetric error bars: distance from the median down to the min
    # and up to the max.
    CD_err_up = []
    for median_i, max_i in zip(CD_median, CD_max):
        CD_err_up.append(max_i - median_i)
    CD_err_down = []
    for median_i, min_i in zip(CD_median, CD_min):
        CD_err_down.append(median_i - min_i)
    CD_err = [CD_err_down, CD_err_up]
    TL_err_up = []
    for median_i, max_i in zip(TL_median, TL_max):
        TL_err_up.append(max_i - median_i)
    TL_err_down = []
    for median_i, min_i in zip(TL_median, TL_min):
        TL_err_down.append(median_i - min_i)
    TL_err = [TL_err_down, TL_err_up]
    error_kw = dict(lw=4, capsize=5, capthick=3)
    N = 4
    #N = 5
    #men_means = (20, 35, 30, 35, 27)
    #men_std = [[2, 3, 4, 1, 2], [0,0,0,0,0]]
    ind = np.arange(N)  # the x locations for the groups
    width = 0.40  # the width of the bars
    fig, ax = plt.subplots()
    #rects1 = ax.bar(ind, men_means, width, color='r', yerr=men_std)
    rects1 = ax.bar(ind, CD_median, width, color='y', yerr=CD_err,
            error_kw=error_kw)
    #women_means = (25, 32, 34, 20, 25)
    #women_std = (3, 5, 2, 3, 3)
    #rects2 = ax.bar(ind + width, women_means, width, color='y', yerr=women_std)
    rects2 = ax.bar(ind + width, TL_median, width, color='r', yerr=TL_err,
            error_kw=error_kw)
    # add some text for labels, title and axes ticks
    ax.set_xlabel('$\{p, \sigma\}$', fontsize=20)
    ax.set_ylabel('Normalized Error (Min, Median, Max)', fontsize=20)
    ax.set_ylim(0, 1.2)
    ax.set_title(name, fontsize=50)
    ax.set_xticks(ind + width / 2)
    ax.set_xticklabels((ps[0], ps[1], ps[2], ps[3]))
    ax.legend((rects1[0], rects2[0]), ('$\ell_1$ min', 'TranSync'))
    def autolabel(rects):
        """
        Attach a text label above each bar displaying its height
        """
        for rect in rects:
            height = rect.get_height()
            ax.text(rect.get_x() + rect.get_width()/2.0, 1.05*height, '', ha='center', va='bottom')
    autolabel(rects1)
    autolabel(rects2)
    plt.show()
# Summary error statistics (min / median / max) for the two methods on
# each of the four benchmark graphs, transcribed from the experiment
# tables; 'ps' lists the four {p, sigma} settings.  Each draw() call
# pops up one grouped bar chart.
# Dense regular graph.
dict_dr = {
    'CD_min':[0.95e-2, 3.87e-2, 0.3e-2, 1.19e-2],
    'CD_median':[1.28e-2, 4.73e-2, 0.34e-2, 1.35e-2],
    'CD_max':[11.40e-2, 18.59e-2, 0.41e-2, 1.78e-2],
    'TL_min':[0.30e-2, 1.04e-2, 0.16e-2, 0.57e-2],
    'TL_median':[0.37e-2, 1.22e-2, 0.18e-2, 0.70e-2],
    'TL_max':[0.60e-2, 1.59e-2, 0.28e-2, 0.87e-2],
    'ps':['0.4, 0.01', '0.4, 0.04', '0.8, 0.01', '0.8, 0.04']
    }
draw('Dense Regular', dict_dr)
# Dense irregular graph.
dict_di = {
    'CD_min':[2.17e-2, 5.46e-2, 0.34e-2, 1.39e-2],
    'CD_median':[17.59e-2, 19.40e-2, 0.42e-2, 1.66e-2],
    'CD_max':[50.51e-2, 53.88e-2, 0.58e-2, 2.30e-2],
    'TL_min':[0.39e-2, 1.25e-2, 0.17e-2, 0.68e-2],
    'TL_median':[0.52e-2, 1.55e-2, 0.24e-2, 0.86e-2],
    'TL_max':[0.93e-2, 2.42e-2, 0.33e-2, 1.16e-2],
    'ps':['0.4, 0.01', '0.4, 0.04', '0.8, 0.01', '0.8, 0.04']
    }
draw('Dense Irregular', dict_di)
# Sparse regular graph (note the different p settings).
dict_sr = {
    'CD_min':[0.58e-2, 2.35e-2, 0.45e-2, 1.84e-2],
    'CD_median':[0.65e-2, 2.62e-2, 0.5e-2, 1.99e-2],
    'CD_max':[0.79e-2, 3.54e-2, 0.58e-2, 2.36e-2],
    'TL_min':[0.38e-2, 1.35e-2, 0.28e-2, 1.14e-2],
    'TL_median':[0.45e-2, 1.55e-2, 0.32e-2, 1.29e-2],
    'TL_max':[0.61e-2, 2.05e-2, 0.39e-2, 1.60e-2],
    'ps':['0.8, 0.01', '0.8, 0.04', '1.0, 0.01', '1.0, 0.04']
    }
draw('Sparse Regular', dict_sr)
# Sparse irregular graph.
dict_si = {
    'CD_min':[0.72e-2, 2.88e-2, 0.53e-2, 2.24e-2],
    'CD_median':[0.85e-2, 3.38e-2, 0.62e-2, 2.52e-2],
    'CD_max':[75.85e-2, 11.48e-2, 0.77e-2, 3.12e-2],
    'TL_min':[0.52e-2, 1.79e-2, 0.37e-2, 1.44e-2],
    'TL_median':[0.64e-2, 2.16e-2, 0.43e-2, 1.72e-2],
    'TL_max':[1.10e-2, 3.59e-2, 0.57e-2, 2.47e-2],
    'ps':['0.8, 0.01', '0.8, 0.04', '1.0, 0.01', '1.0, 0.04']
    }
draw('Sparse Irregular', dict_si)
<file_sep>/data/disco-bp/DLib/DistanceTransform.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include <stdlib.h>
#include <math.h>
#include <vector>
#include "DistanceTransform.h"
using namespace std;
/* Implementation of fast squared Euclidean distance transform algorithm
using amortized algorithm for lower envelope of quadratics.
For a description see
www.cs.cornell.edu/~dph/matchalgs/iccv2003-tutorial.pdf */
// hacked for C++ by crandall, 9/2003
// Constructs a 2-D squared-Euclidean distance transform parameterized by
// a 2x2 covariance matrix `sigma`.  A diagonal sigma yields axis-aligned
// inverse scalings taken straight from the diagonal; otherwise sigma is
// eigendecomposed so the transform can operate in the rotated frame
// (rotation angle from the eigenvectors, scalings from the eigenvalues).
template<class T>
DistanceTransform_2D<T>::DistanceTransform_2D(const _DMatrix<T> &sigma, bool _old_algo) : old_algo(_old_algo)
{
  rotate = true;
  if(sigma.is_diagonal())
    rotate = false;
  if(rotate)
    {
      _DMatrix<T> eig_vectors;
      _DMatrix<T> eig_values = sigma.eigen(eig_vectors);
      // Guard against a vertical eigenvector (would divide by zero).
      if(eig_vectors[1][1] ==0)
        angle=0;
      else
        angle = -atan(eig_vectors[0][1]/eig_vectors[1][1]);
      scaling_x = 1/eig_values[0][1];
      scaling_y = 1/eig_values[0][0];
    }
  else
    {
      scaling_x = 1/sigma[1][1];
      scaling_y = 1/sigma[0][0];
    }
}
// Returns the square of its argument.
template<class T>
inline T square(T value)
{
  T product = value * value;
  return product;
}
/* dt helper function */
// Recursive divide-and-conquer 1-D distance-transform pass: for every
// destination index d in [d1, d2], finds the source index s in [s1, s2]
// minimizing src[s] + lut[s-d], where lut is a precomputed table of
// scale*(offset^2) indexed by signed offset (the commented-out lines
// show the original direct square(s-d)*scale computation it replaces).
// The midpoint is solved by brute force; the two halves then recurse
// with the source range narrowed around the winning s, exploiting that
// the minimizers are monotone in d.  Results are written to dst at
// stride col_count starting at col_off, so the same routine serves both
// row and column passes.
template<class T>
void dt(T *src, T *dst, int s1, int s2, int d1, int d2, T scale, T *lut, int col_count, int col_off) {
  //  if (d2 >= d1)
  {
    int d = (d1+d2) >> 1;   // midpoint destination
    int s = s1;
    for (int p = s1; p <= s2; p++)
      //      if (src[s] + square(s-d) * scale> src[p] + square(p-d) * scale)
      if (src[s] + lut[s-d]> src[p] + lut[p-d])
        s = p;
    //    dst[d*col_count+col_off] = src[s] + square(s-d) * scale;
    dst[d*col_count+col_off] = src[s] + lut[s-d];
    if(d-1 >= d1)
      dt(src, dst, s1, s, d1, d-1, scale, lut, col_count, col_off);
    if(d2>=d+1)
      dt(src, dst, s, s2, d+1, d2, scale, lut, col_count, col_off);
  }
}
template<class T>
_DPlane<T> DistanceTransform_2D<T>::do_transform(const _DPlane<T> & in_im, bool need_locations, T scaling_x, T scaling_y)
{
if(old_algo)
{
_DPlane<T> im = in_im;
// records closest pixel
if(need_locations)
row_locations = _DPlane< int >(in_im.rows(), in_im.cols()), col_locations = _DPlane< int >(in_im.rows(), in_im.cols());
int width = im.cols();
int height = im.rows();
int k;
float s;
float sp;
int x, y;
int *z = (int *)malloc(sizeof(int)*(max(width, height)+1));
int *v = (int *)malloc(sizeof(int)*(max(width, height)));
float *vref = (float *)malloc(sizeof(float)*(max(width, height)));
if ((z == NULL) || (v == NULL) || (vref == NULL)) {
assert(0);
}
float *lut1 = new float[max(height,width)*2+1];
float sx_inv = 1.0/scaling_x;
for(int i=-width,j=0; i<width; i++, j++)
lut1[j] = 1.0/(2.0 * i) * sx_inv;
/* do x transform */
for (y = 0; y < height; y++) {
k = 0; /* Number of boundaries between parabolas */
z[0] = 0; /* Indexes of locations of boundaries,
order by increasing x */
z[1] = width;
v[0] = 0; /* Indexes of locations of visible parabola bases,
ordered by increasing x */
T *im_y=im[y];
int *closest_row_y, *closest_col_y;
if(need_locations)
closest_row_y = row_locations[y], closest_col_y = col_locations[y];
int v_k = v[k], v_k_2 = v[k]*v[k];
for (x = 1; x < width; x++)
{
// # of times here = 1.0
do {
/* compute Vornoi border: intersection of parabola at x
with rightmost currently visible parabola */
// s = ((im[y][x] + x*x) - (im[y][v[k]] + v[k]*v[k])) /
// (2 * (x - v[k]));
// s = ((im_y[x] + scaling_x*x*x) - (im_y[v[k]] + scaling_x*v[k]*v[k])) /
// (2 * scaling_x * (x - v[k]));
// count 1.1
s = (im_y[x] - im_y[v_k] + scaling_x*(x*x - v_k_2)) *
lut1[x-v_k+width]; // * sx_inv;
// / (2 * scaling_x * (x - v_k));
sp = ceil(s); // floor(ceil(s)); // OPTME
/* case one: intersection is to the right of the array,
so this parabola is not visible (and nothing to do) */
if (sp >= width)
{
// count 0.8
break;
}
/* case two: intersection is at larger x than rightmost current
intersection, so this parabola is visible on the right (add
it to the end) */
if (sp > z[k])
{
z[k+1] = int(sp);
z[k+2] = width;
v[k+1] = x;
k++;
v_k = v[k];
v_k_2 = v_k*v_k;
// count 0.2
break;
}
/* case three: intersection is at smaller x than the
rightmost current intersection, so this parabola hides the
rightmost parabola (remove the rightmost parabola, if there
are still remaining potentially visible parabolas iterate to
complete the addition of this parabola). */
if (k == 0)
{
v[k] = x;
v_k = v[k];
v_k_2 = v_k*v_k;
// count 0.002
break;
}
else
{
z[k] = width;
k--;
v_k = v[k];
v_k_2 = v_k*v_k;
// count 0.09
}
} while (1);
}
/* compute transform values from visible parabolas */
/* get value of input image at each parabola base */
for (x = 0; x <= k; x++)
{
vref[x] = im_y[v[x]];
}
k = 0;
/* iterate over pixels, calculating value for closest parabola */
v_k=v[k];
if(need_locations)
for (x = 0; x < width; x++)
{
if (x == z[k+1]) k++, v_k=v[k];
im_y[x] = vref[k] + (v_k-x)*(v_k-x) * scaling_x;
closest_row_y[x] = y, closest_col_y[x] = v_k;
}
else
for (x = 0; x < width; x++)
{
if (x == z[k+1]) k++, v_k=v[k];
im_y[x] = vref[k] + (v_k-x)*(v_k-x) * scaling_x;
}
}
/* do y transform - analogous computation in y-direction applied to
result of x-transform */
float sy_inv = 1.0/scaling_y;
for(int i=-height,j=0; i<height; i++, j++)
lut1[j] = 1.0/(2.0 * i) * sy_inv;
_DPlane< int > closest2_row(row_locations), closest2_col(col_locations);
for (x = 0; x < width; x++)
{
k = 0;
z[0] = 0;
z[1] = height;
v[0] = 0;
int v_k = v[k];
int v_k_2 = v[k]*v[k];
T im_vk_x = im[v_k][x];
T *im_x_cp=im[1]+x;
for (y = 1; y < height; y++, im_x_cp+=width)
{
do {
/* compute vornoi border */
float s1 = *im_x_cp;
s1+=(y*y-v_k_2)*scaling_y;
s1 -= im_vk_x; // OPTME
// float s2 = (2 * (y - v_k) * scaling_y);
float s=s1*lut1[y-v_k+height]; //*sy_inv;
sp = ceil(s); // OPTME get rid of ceil
/* case one */
if (sp >= height)
break;
/* case two */
if (sp > z[k]) {
z[k+1] = int(sp);
z[k+2] = height;
v[k+1] = y;
k++;
v_k = v[k];
v_k_2 = v_k*v_k;
im_vk_x = im[v_k][x];
break;
}
/* case three */
if (k == 0) {
v[0] = y;
v_k = v[k];
v_k_2 = v_k*v_k;
im_vk_x = im[v_k][x];
break;
} else {
z[k] = height;
k--;
v_k = v[k];
v_k_2 = v_k*v_k;
im_vk_x = im[v_k][x];
}
} while (1);
}
for (y = 0; y <= k; y++)
vref[y] = im[v[y]][x];
k = 0;
v_k = v[k];
if(need_locations)
{
int *closest2_row_x = closest2_row[0]+x, *closest2_col_x = closest2_col[0]+x;
int *closest_in_row_x = row_locations[v_k]+x, *closest_in_col_x = col_locations[v_k]+x;
for (y = 0; y < height; y++, closest2_row_x+=width, closest2_col_x+=width)
{
if (y == z[k+1])
{
k++;
closest_in_row_x += width*(v[k]-v_k);
closest_in_col_x += width*(v[k]-v_k);
v_k = v[k];
}
im[y][x] = vref[k] + (v_k-y)*(v_k-y)*scaling_y;
*closest2_row_x = *closest_in_row_x;
*closest2_col_x = *closest_in_col_x;
}
}
else
{
T *im_x_ptr = im[0]+x;
for (y = 0; y < height; y++, im_x_ptr += width)
{
if (y == z[k+1])
{
k++;
v_k = v[k];
}
*im_x_ptr = vref[k] + (v_k-y)*(v_k-y)*scaling_y; // OPTME ::: get rid of im[y]
}
}
}
row_locations = closest2_row, col_locations = closest2_col;
free(z);
free(v);
free(vref);
delete[] lut1;
return im;
}
else
{
assert(!need_locations);
int lut_pivot = max(in_im.rows(), in_im.cols())+5;
int lut_size = lut_pivot * 2;
// static T *lut=0;
// if(!lut)
T *lut = new T[lut_size];
for(int i=0; i<lut_size; i++)
lut[i] = square(T(i)-lut_pivot) * scaling_x;
_DPlane<T> result(in_im.cols(), in_im.rows());
for(int i=0; i<in_im.rows(); i++)
dt(in_im[i], result[0], 0, in_im.cols()-1,0, in_im.cols()-1, scaling_x, lut+lut_pivot, in_im.rows(), i);
for(int i=0; i<lut_size; i++)
lut[i] = square(T(i)-lut_pivot) * scaling_y;
// result=result.transpose();
_DMatrix<T> result2(in_im.rows(), in_im.cols());
for(int i=0; i<in_im.cols(); i++)
dt(result[i], result2[0], 0, in_im.rows()-1,0, in_im.rows()-1, scaling_y, lut + lut_pivot, in_im.cols(), i);
delete[] lut;
return result2;//.transpose();
}
}
// Distance transform with arbitrary (i.e. possibly non-diagonal) covariance
// matrix.  When 'rotate' is set, anisotropy along rotated axes is handled by
// rotating the image into axis-aligned coordinates, running the separable
// transform, then rotating the result (and nearest-point maps) back.
template<class T>
_DPlane<T> DistanceTransform_2D<T>::do_transform(const _DPlane<T> & orig_im, bool need_locations)
{
_DPlane<T> in_im;
if(rotate)
in_im = orig_im.rotate_image(-angle);
else
in_im = orig_im;
// Axis-aligned (separable) transform on the possibly-rotated image.
_DPlane<T> dt_result = do_transform(in_im, need_locations, scaling_x, scaling_y);
if(rotate)
{
_DPlane<T> I4 = dt_result.rotate_image(angle);
// Rotation pads the image; compute offsets to crop back to the
// original geometry.
DPoint p1 = orig_im.size();
DPoint p2 = I4.size();
DPoint new_half = in_im.size() / 2;
DPoint pp1 = (p2 - p1) / 2;
DPoint pp2 = (in_im.size() - p1) / 2;
dt_result = I4.extract(DRect(pp1, pp1+p1-DPoint(1,1)));
// fix closest_rows, closest_cols here (to compensate for rotation)
if(need_locations)
{
row_locations = row_locations.rotate_image(angle);
col_locations = col_locations.rotate_image(angle);
row_locations = row_locations.extract(DRect(pp1, pp1+p1-DPoint(1,1)));
col_locations = col_locations.extract(DRect(pp1, pp1+p1-DPoint(1,1)));
double cos_ = cos(-angle);
double sin_ = sin(-angle);
// Rotate each stored nearest-point coordinate about the padded image
// center, then shift back into the cropped coordinate frame.
for(int i=0; i<p1.row(); i++)
{
int* closest_row = row_locations[i], *closest_col = col_locations[i];
int c1 = p1.col();
for(int j=0; j<c1; j++)
{
DPoint old = DPoint(closest_row[j], closest_col[j]) - new_half;
DPoint new_pt(int(cos_ * old.row() + sin_ * old.col()), int(-sin_ * old.row() + cos_ * old.col()));
DPoint pt = new_pt + new_half - pp2;
closest_row[j] = pt.row(), closest_col[j] = pt.col();
}
}
}
}
return dt_result;
}
// Explicit template instantiation: emit DistanceTransform_2D code for the
// element types the rest of the project links against.
#define DECLARE(x) \
template class DistanceTransform_2D<x>;
DECLARE(double);
DECLARE(float);
<file_sep>/data/disco-bp/bp-trans.cpp
#include "DImage.h"
#include <vector>
#include <istream>
#include <fstream>
//#include <io.h>
#include <DistanceTransform.h>
#include <algorithm>
#include <numeric>
#include <map>
#include <DImageIO.h>
#include <DrawText.h>
#include <ext/hash_set>
#include "bp-trans-common.h"
#include <iomanip>
#include <DProfile.h>
#ifdef HADOOP_VERSION
#include "hadoop/Pipes.hh"
#include "hadoop/TemplateFactory.hh"
#include "hadoop/StringUtils.hh"
#include <hadoop_utils.h>
#endif
using namespace std;
// input files
char *pairwise_file_g = 0, *unary_file_g = 0;
// # of BP iterations to run
int max_ii_g = 100;
// relative weighting of unary potentials
double unary_weighting_g = 100;
// truncation points for unary, pairwise costs
double pair_trunc_g = 10, prior_trunc_g = 10;
// ground truth file
char *gt_g = 0;
// if nonzero, initialize unary potentials based on ground truth. value indicates
// fraction of nodes that should have unary potentials.
double known_from_gt_g = 0;
// hacks to prevent collapsing to trivial solution (all cameras at same point)
double anticollapse_g = 0, anticollapse_penalty_g = 0;
// only use high-confidence geotags for initializing unary potentials
bool geo_highconf_only_g = false;
// specifies whether to view pairwise constraints as rays (false) or lines (true)
bool bidir_g = true;
// set >1 to enable "banding", which improves the accuracy of the objective function
// estimates.
int bands_g = 1;
// save memory (but increase compute time) by not caching priors
bool precompute_priors_off_g = false;
// set to true to disable removing cut edges
bool no_remove_cut_edges_g = false;
// For debugging...
// stretch_g / bufsz_g: debug-image rendering parameters passed to BPTrans_Debug
double stretch_g = 4;
int bufsz_g = 4000;
// node_g: set by --node; presumably selects a single node for debugging (usage not visible here)
int node_g = -1;
// if set, path of an estimates file to score against ground truth (see rescore())
// instead of running BP
char *rescore_g = 0;
// number of worker threads for the per-iteration message updates
int thread_count_g = 1;
// write all messages to iter_NNN_messages.txt each iteration
bool dump_messages_g = false;
// translate entire coordinate frame
DPoint translate_g(0,0);
// set to true to estimate size of label space from geotags
bool est_label_space_g = false;
// size of label space per dimension
int bins_g = 201;
// selects between different gt file formats
bool gt_trans_only_g = false;
// label-space grid size (bins per dimension, must be odd) and bin granularity
// (see the --labelspace option)
int GRID_COUNT2_g = 71;
double DIST_GRAN2_g = 0.08;
using namespace __gnu_cxx;
using namespace std;
// Parse the command line, filling in the file-scope *_g option globals.
// Options are consumed until the first unrecognized token; the remaining
// (at most two) tokens are the pairwise-constraints file and the optional
// unary-potentials file.  Exits with status 1 on malformed input or when
// the required pairwise file is missing.
void parse_opts(int argc, char *argv[])
{
int ii=1;
for(ii=1; ii<argc; ii++)
{
if(!strcmp(argv[ii], "--iters"))
max_ii_g = atoi(argv[++ii]);
else if(!strcmp(argv[ii], "--rescore"))
rescore_g = argv[++ii];
else if(!strcmp(argv[ii], "--unweight"))
unary_weighting_g = atof(argv[++ii]);
else if(!strcmp(argv[ii], "--bands"))
bands_g = atoi(argv[++ii]);
else if(!strcmp(argv[ii], "--gt"))
gt_g = argv[++ii];
else if (!strcmp(argv[ii], "--gtformat")) {
ii++;
if (!strcmp(argv[ii], "trans"))
gt_trans_only_g = true;
else if (!strcmp(argv[ii], "rottrans"))
gt_trans_only_g = false;
else {
cerr << "unknown ground truth file format " << argv[ii] << endl;
exit(1);
}
}
else if(!strcmp(argv[ii], "--nocutedgefilter"))
no_remove_cut_edges_g = true;
else if(!strcmp(argv[ii], "--threads"))
thread_count_g = atoi(argv[++ii]);
else if(!strcmp(argv[ii], "--knownfromgt"))
known_from_gt_g = atof(argv[++ii]);
else if(!strcmp(argv[ii], "--node"))
node_g = atoi(argv[++ii]);
else if(!strcmp(argv[ii], "--pairtrunc"))
pair_trunc_g = atof(argv[++ii]);
else if(!strcmp(argv[ii], "--priortrunc"))
prior_trunc_g = atof(argv[++ii]);
else if(!strcmp(argv[ii], "--bidir"))
bidir_g = false;
else if(!strcmp(argv[ii], "--highconf"))
geo_highconf_only_g = true;
else if(!strcmp(argv[ii], "--png"))
{
stretch_g = atof(argv[++ii]);
bufsz_g = atoi(argv[++ii]);
}
else if(!strcmp(argv[ii], "--anticollapse"))
{
anticollapse_g = atof(argv[++ii]);
anticollapse_penalty_g = atof(argv[++ii]);
}
else if(!strcmp(argv[ii], "--translate"))
{
// two integer arguments: row and column offset of the coordinate frame
int row = atoi(argv[++ii]);
int col = atoi(argv[++ii]);
translate_g = DPoint(row, col);
}
else if(!strcmp(argv[ii], "--estimatelabelspace"))
{
est_label_space_g = true;
bins_g = atoi(argv[++ii]);
assert(bins_g % 2 == 1);
}
else if(!strcmp(argv[ii], "--labelspace"))
{
GRID_COUNT2_g = atoi(argv[++ii]); //101; // 21; // 101; // 41; // 131; // 131 breaks it -- WHY????
DIST_GRAN2_g = atof(argv[++ii]); // 0.15; // 0.025; // 3.0; // 7.0; // 0.1;
assert(GRID_COUNT2_g % 2 == 1);
}
else if(!strcmp(argv[ii], "--dumpmessages"))
dump_messages_g = true;
else
break; // first non-option token: fall through to positional arguments
}
if(ii < argc)
pairwise_file_g = argv[ii++];
if(ii < argc)
unary_file_g = argv[ii++];
// Guard against streaming a null char* (undefined behavior for ostreams).
cerr << (pairwise_file_g ? pairwise_file_g : "(none)") << " "
<< (unary_file_g ? unary_file_g : "(none)") << endl;
// Fail fast if there are leftover tokens or the required pairwise file is
// missing (previously a missing file caused a null-pointer crash later).
if(argc != ii || !pairwise_file_g)
{
cerr << "bad commandline options." << endl;
exit(1);
}
}
void *bp_thread(void *_p)
{
BPtrans_Params *p = (BPtrans_Params *)_p;
p->inf->do_iter_edge_subset(p->beg_edge, p->end_edge, *p->edge_list, *p->deltas, *p->last_ms, *p->current_ms, *p->known_locations, *p->D_planes, *p->final, p->ii);
return 0;
}
// Multi-threaded driver for one BP iteration: recursively splits the edge
// range [beg_edge, end_edge] over thread_count pthreads.  Each recursion
// level spawns one worker for the first 1/thread_count share of the range,
// recurses on the remainder with thread_count-1, then joins its worker.
void do_iter_edge_subset(int beg_edge, int end_edge, const vector<Edge> &edge_list, Pairs &deltas,
PackedMessageSet &last_ms, PackedMessageSet &current_ms, map<int, DMatrix> &known_locations, PriorsMap &D_planes,
map< int, Message > &final, int ii, int thread_count, const BPTrans_Inference *bp)
{
if(thread_count == 0)
return;
pthread_t p;
// This level's worker covers [beg_edge, beg_edge + share]; the recursive
// call below deals the rest out among the remaining threads.
BPtrans_Params params(beg_edge, (end_edge-beg_edge)/thread_count + beg_edge, &edge_list, &deltas, &last_ms, &current_ms, &known_locations, &D_planes, &final, ii, bp);
// NOTE(review): pthread_create's return value is not checked; joining an
// uncreated thread is undefined behavior -- TODO confirm/handle.
pthread_create(&p, NULL, &bp_thread, (void *) &params);
do_iter_edge_subset(params.end_edge+1, end_edge, edge_list, deltas, last_ms, current_ms, known_locations, D_planes, final, ii, thread_count-1, bp);
pthread_join(p, 0);
}
// Estimate how large the discrete label space must be to cover the
// geotagged cameras.  Two bounding rectangles over the (x, z) ground-plane
// coordinates are computed: 'bound' over cameras with a nonzero position
// AND positive confidence, 'bound2' over all cameras.  For each, the
// half-extent is inflated by 20% and a bins-per-unit granularity for the
// current grid size is printed to stdout.
void estimate_labelspace_size(const hash_map<int, UnaryPosition> &unary, const BPTrans_Inference &bp)
{
_DRect<double> bound, bound2;
for(hash_map<int, UnaryPosition>::const_iterator iter = unary.begin(); iter != unary.end(); ++iter)
{
// confident, non-origin geotags only
if(fabs(iter->second.position).sum() && iter->second.confidence > 0.0)
bound = bounding_rectangle(bound, _DRect<double>(_DPoint<double>(iter->second.position[0][0], iter->second.position[0][2]),
_DPoint<double>(iter->second.position[0][0], iter->second.position[0][2])));
// all geotags, regardless of confidence
bound2 = bounding_rectangle(bound2, _DRect<double>(_DPoint<double>(iter->second.position[0][0], iter->second.position[0][2]),
_DPoint<double>(iter->second.position[0][0], iter->second.position[0][2])));
}
// largest absolute coordinate in either direction, padded by 20%
_DPoint<double> b1 = elementwise_max(-bound.top_left(), bound.bottom_right());
double max_b1 = max(b1.row(), b1.col()) * 1.2;
cout << bound << " " << bins_g << " " << bp.grid_count()/2/max_b1 << endl;
_DPoint<double> b2 = elementwise_max(-bound2.top_left(), bound2.bottom_right());
double max_b2 = max(b2.row(), b2.col()) * 1.2;
cout << bound2 << " " << bins_g << " " << bp.grid_count()/2/max_b2 << endl;
}
// Score a file of externally-produced camera-position estimates against the
// ground truth instead of running BP.  Each line of rescore_file is
// "camera_id x y z"; the summary line is printed in the same "iter ..."
// format as the BP loop (with iteration and cost fields zeroed) so
// downstream log parsing is unchanged.
void rescore(const char *rescore_file, const BPTrans_State &bps)
{
hash_map<int, DMatrix> estimates;
ifstream ifs(rescore_file, ios::in);
while(ifs.good())
{
int cid;
double r1, r2, r3;
ifs >> cid >> r1 >> r2 >> r3;
// stop on a short/failed read (e.g. trailing whitespace or EOF)
if(!ifs.good())
break;
DMatrix vec(1,3);
vec[0][0] = r1; vec[0][1] = r2; vec[0][2] = r3;
estimates[cid] = vec;
}
double corr_err;
double err = compare_with_gt(bps.gt, bps.photo_count, estimates, bps.reachable, bps.known_locations, corr_err);
cerr << "DONE RESCORE " << err << endl;
double cost = 0;
cout << "iter " << 0 << " of " << 0 << " " << cost << " " << err << " " << corr_err << endl;
}
#ifndef HADOOP_VERSION
// Single-machine entry point: load pairwise/unary inputs, then run max_ii_g
// iterations of belief propagation, printing per-camera labels, energy, and
// ground-truth error each iteration (and optionally dumping all messages).
int main(int argc, char *argv[])
{
parse_opts(argc, argv);
BPTrans_Inference bp(prior_trunc_g, unary_weighting_g, pair_trunc_g, bands_g, anticollapse_g,
anticollapse_penalty_g, bidir_g, precompute_priors_off_g, translate_g, GRID_COUNT2_g, DIST_GRAN2_g);
try {
BPTrans_State bps;
bps.initialize(pairwise_file_g, unary_file_g, gt_g, no_remove_cut_edges_g, translate_g, gt_trans_only_g,
known_from_gt_g, geo_highconf_only_g, bp);
// Two early-exit utility modes: label-space estimation and rescoring.
if(est_label_space_g)
{
estimate_labelspace_size(bps.unary, bp);
return 0;
}
else if(rescore_g)
{
rescore(rescore_g, bps);
return 0;
}
BPTrans_Debug debug(stretch_g, bp, bufsz_g, bps.gt);
debug.make_gt_image(bps.known_locations, bps.photo_count, bps.reachable);
// FIXME - this is a hack. if there are a lot of known camera positions, turn off pre-computing the priors.
// (saves memory, at the expense of speed)
if(bps.known_locations.size() > 5000)
{
cerr << "WARNING: Turning off prior pre-computation to save memory." << endl;
precompute_priors_off_g = true;
bp.disable_precompute_priors();
}
// calculate priors
PriorsMap D_planes;
if(!precompute_priors_off_g)
for(map<int, DMatrix>::const_iterator iter = bps.known_locations.begin(); iter != bps.known_locations.end(); ++iter)
D_planes[iter->first] = bp.compute_priors(iter->second);
// initialize message buffers
PackedMessageSet last_ms(bps.photo_count);
PackedMessageSet current_ms = last_ms;
map< int, Message > final; //(photo_count, vector<double>(LABEL_COUNT2));
for(int ii=0; ii<max_ii_g; ii++)
{
cerr << ii << endl;
int beg_edge = 0, end_edge = bps.edge_count - 1;
// multi-threaded or single-threaded message-update pass
if(thread_count_g > 1)
do_iter_edge_subset(beg_edge, end_edge, bps.edge_list, bps.deltas, last_ms, current_ms, bps.known_locations, D_planes, final, ii, thread_count_g, &bp);
else
bp.do_iter_edge_subset(beg_edge, end_edge, bps.edge_list, bps.deltas, last_ms, current_ms, bps.known_locations, D_planes, final, ii);
last_ms = current_ms;
cerr << endl;
// compute MAP estimates from last iteration
// 0th iteration labels and costs aren't valid, because state likelihood distributions for iteration ii are
// computed on the ii+1-th iteration.
if(ii > 0)
{
vector<int> labels = bp.map_estimate(final, bps.photo_count);
for(int i=0; i<bps.photo_count; i++)
cout << "labels " << ii << " " << i << " " << labels[i] << " " << bp.to_dist2(bp.unpack_v0(labels[i])) << " " << bp.to_dist2(bp.unpack_v1(labels[i])) << " " << bp.to_dist2(bp.unpack_v2(labels[i])) << endl;
double corr_err;
double cost = bp.compute_iter_energy(labels, bps.deltas, bps.edge_list);
double err = bp.compute_gt_error(labels, bps.deltas, bps.gt, corr_err, bps.reachable, bps.known_locations);
cout << "iter " << ii << " of " << max_ii_g << " " << cost << " " << err << " " << corr_err << endl;
debug.make_iter_image(bps.known_locations, labels, bps.photo_count, bps.reachable, ii);
}
// optional full message dump, one file per iteration
if(dump_messages_g)
{
char temp[1024];
sprintf(temp, "iter_%03d_messages.txt", ii);
ofstream ofs(temp);
for(PackedMessageSet::const_iterator dest_iter = current_ms.begin(); dest_iter != current_ms.end(); ++dest_iter)
{
int new_dest = dest_iter->first;
for(hash_map<int, PackedMessage>::const_iterator src_iter = dest_iter->second.begin(); src_iter != dest_iter->second.end(); ++src_iter)
{
int new_src = src_iter->first;
const PackedMessage &pm = src_iter->second;
ofs << new_dest << " " << new_src << " " << setprecision(10) << pm << endl;
}
}
}
}
} catch(const string &str)
{
// project code signals fatal errors by throwing std::string
cerr << str << endl;
}
}
#else
// Re-parse the command line embedded in the Hadoop job configuration:
// split 'cmdline' on whitespace into an argv-style array and hand it to
// parse_opts().  The array (and its strings) are intentionally never freed:
// parse_opts stores raw pointers into argv (e.g. pairwise_file_g), so the
// memory must outlive this call.
void parse_hadoop_cmdline(const string &cmdline)
{
// +2: a valid (unused) argv[0] slot plus headroom; a token needs at least
// one character plus a separator, so cmdline.size()+2 always suffices --
// the previous cmdline.size() allocation overflowed for an empty cmdline.
char **argv = new char *[cmdline.size() + 2];
argv[0] = new char[1];
argv[0][0] = '\0';
istringstream iss(cmdline);
string word;
int i = 1;
// Test the extraction itself rather than iss.good(): the old loop stored
// a stale duplicate of the last token when the command line ended in
// whitespace (extraction fails but the loop body still ran).
while(iss >> word)
{
argv[i] = new char[word.size() + 1];
strcpy(argv[i], word.c_str());
i++;
}
int argc = i;
parse_opts(argc, argv);
}
// The reducer takes a set of incoming messages for a node, and outputs a set of outgoing messages.
//
// input: dest node_count src1 msg_len1 msg... src2 msg_len2 msg... ...
// output: (dest, src msg_len msg) ...
//
// Hadoop Pipes mapper: each input record carries all incoming messages for
// one destination node; the mapper runs one BP update over that node's
// edges and emits the resulting outgoing messages (plus, after the first
// iteration, the node's current MAP label under the special key "-1").
class BPTrans_Mapper : public HadoopPipes::Mapper
{
public:
BPTrans_Mapper(HadoopPipes::TaskContext& context)
{
cerr << "init mapper" << endl;
// Recover the original command line from the job configuration.
string cmdline = get_init_string("bptrans.cmdline", context);
parse_hadoop_cmdline(cmdline);
// Priors are always computed on demand in the Hadoop path (memory).
precompute_priors_off_g = true;
bp = BPTrans_Inference(prior_trunc_g, unary_weighting_g, pair_trunc_g, bands_g, anticollapse_g,
anticollapse_penalty_g, bidir_g, precompute_priors_off_g, translate_g, GRID_COUNT2_g, DIST_GRAN2_g);
bps.initialize(pairwise_file_g, unary_file_g, gt_g, no_remove_cut_edges_g, translate_g, gt_trans_only_g,
known_from_gt_g, geo_highconf_only_g, bp);
cerr << "done init mapper" << endl;
}
// Input value format: "dest node_count (src msg_flag [msg])...".
// msg_flag==0 marks a missing message, i.e. iteration 0.
void map(HadoopPipes::MapContext& context)
{
cerr << "in mapper" << endl;
PriorsMap D_planes;
// initialize message buffers
PackedMessageSet last_ms;
std::map< int, Message > final;
// cerr << context.getInputValue() << endl;
/*
// FIXME : HACK to remove problematic "inf"'s
string str2 = context.getInputValue();
int pos1=0;
while(pos1 != string::npos)
{
pos1 = str2.find("inf", pos1);
if(pos1 != string::npos)
str2 = str2.replace(pos1, 3, "10e100");
}
cerr << str2 << endl;
*/
istringstream iss(context.getInputValue());
int node_count;
int dest;
iss >> dest;
iss >> node_count;
int iter=1;
vector<Edge> edge_list2; //(node_count);
for(int i=0; i<node_count; i++)
{
PackedMessage pm;
int src, msg_flag;
iss >> src >> msg_flag;
if(msg_flag)
{
iss >> pm;
last_ms[dest][src] = pm;
}
// make sure we actually care about this edge
Edge e(dest, src);
if( binary_search(bps.edge_list.begin(), bps.edge_list.end(), e, compare_first) )
edge_list2.push_back(e);
// any missing message means this is the first (0th) iteration
if(!msg_flag)
iter=0;
}
PackedMessageSet current_ms;
// FIXME: could possibly use multi-threaded version here
int ii=iter; // fake iteration number
node_count = edge_list2.size();
cout << "dest " << dest << " node count " << node_count << endl;
bp.do_iter_edge_subset(0, node_count-1, edge_list2, bps.deltas, last_ms, current_ms, bps.known_locations, D_planes, final, ii);
cerr << endl;
// Emit each freshly-computed outgoing message, keyed by its recipient.
for(PackedMessageSet::const_iterator dest_iter = current_ms.begin(); dest_iter != current_ms.end(); ++dest_iter)
{
int new_dest = dest_iter->first;
assert(dest_iter->second.size() == 1);
assert(dest_iter->second.begin()->first == dest);
const PackedMessage &pm = dest_iter->second.begin()->second;
ostringstream key, val;
key << new_dest;
val << dest << " 1 ";
val << setprecision(10) << pm;
context.emit(key.str(), val.str());
}
// After iteration 0, also emit this node's MAP label under key "-1"
// so the reducer can aggregate labels and report energy/error.
if(ii > 0)
{
assert(final.size() == 1);
ostringstream oss1;
// oss1 << dest << " " << setprecision(10) << final[dest] << endl;
int label = bp.map_estimate_singlecamera(final[dest]);
oss1 << dest << " " << label;
context.emit("-1", oss1.str());
}
}
BPTrans_State bps;
BPTrans_Inference bp;
};
// The reducer just collects all of the messages destined for each node into a single
// key-value pair
//
// input: dest src msg_len msg...
// output: dest node_count src1 msg_len1 msg... src2 msg_len2 msg... ...
//
// Hadoop Pipes reducer.  For the special key "-1" it gathers every node's
// MAP label, writes per-node labels plus the iteration's energy and
// ground-truth error to "iter_output", and copies that file into the job's
// HDFS output directory.  For any other key it concatenates all incoming
// messages for that destination node into a single value, prefixed by the
// message count, ready to feed the next iteration's mapper.
class BPTrans_Reducer: public HadoopPipes::Reducer
{
public:
BPTrans_Reducer(HadoopPipes::TaskContext& context)
{
string cmdline = get_init_string("bptrans.cmdline", context);
parse_hadoop_cmdline(cmdline);
// Priors are always computed on demand in the Hadoop path (memory).
precompute_priors_off_g = true;
bp = BPTrans_Inference(prior_trunc_g, unary_weighting_g, pair_trunc_g, bands_g, anticollapse_g,
anticollapse_penalty_g, bidir_g, precompute_priors_off_g, translate_g, GRID_COUNT2_g, DIST_GRAN2_g);
bps.initialize(pairwise_file_g, unary_file_g, gt_g, no_remove_cut_edges_g, translate_g, gt_trans_only_g,
known_from_gt_g, geo_highconf_only_g, bp);
}
void reduce(HadoopPipes::ReduceContext& context)
{
// current iteration number, passed through the job configuration
int ii=get_init_param("bptrans.iternum", context);
if(atoi(context.getInputKey().c_str()) == -1)
{
// label-aggregation branch (DProfile sections time each stage)
DProfile prof(10);
prof.begin(0);
ofstream ofs("iter_output");
vector<int> labels(bps.photo_count);
prof.begin(1);
while(context.nextValue())
{
int node;
int label;
// Message msg;
istringstream iss(context.getInputValue());
// iss >> node >> msg;
iss >> node >> label;
labels[node] = label;
// final[node] = msg;
}
prof.end(1);
// compute MAP estimates from last iteration
prof.begin(2);
// vector<int> labels = bp.map_estimate(final, bps.photo_count);
prof.end(2);
prof.begin(3);
for(int i=0; i<bps.photo_count; i++)
ofs << "labels " << ii << " " << i << " " << labels[i] << " " << bp.to_dist2(bp.unpack_v0(labels[i])) << " " << bp.to_dist2(bp.unpack_v1(labels[i])) << " " << bp.to_dist2(bp.unpack_v2(labels[i])) << endl;
prof.end(3);
double corr_err;
prof.begin(4);
double cost = bp.compute_iter_energy(labels, bps.deltas, bps.edge_list);
prof.end(4);
prof.begin(5);
double err = bp.compute_gt_error(labels, bps.deltas, bps.gt, corr_err, bps.reachable, bps.known_locations, ofs);
prof.end(5);
ofs << "iter " << ii << " of " << max_ii_g << " " << cost << " " << err << " " << corr_err << endl;
// push the per-iteration report into the job's HDFS output dir
system((string("/usr/local/hadoop/bin/hadoop dfs -copyFromLocal iter_output ") + get_init_string("mapred.output.dir", context)).c_str());
prof.end(0);
}
else
{
// message-concatenation branch: "count msg1 msg2 ..." per dest node
ostringstream out, out2;
int cnt=0;
while(context.nextValue())
{
out << context.getInputValue() << " ";
cnt++;
}
out2 << cnt << " " << out.str();
context.emit(context.getInputKey(), out2.str());
}
}
BPTrans_State bps;
BPTrans_Inference bp;
};
int main(int argc, char *argv[])
{
try
{
return HadoopPipes::runTask(HadoopPipes::TemplateFactory<BPTrans_Mapper, BPTrans_Reducer>());
}
catch(const string &str)
{
cerr << str << endl;
}
}
#endif
<file_sep>/data/disco-bp/DLib/DCluster.cpp
#include <DCluster.h>
#include <set>
using namespace std;
// NOTE(review): integer-only swap macro; appears unused in this translation
// unit (std::swap is used instead at the call sites below) -- confirm before
// removing.
#define SWAP(a, b) { int tmp = b; b = a; a = tmp; }
// Run k-means 'replicate_count' times from fresh random initializations and
// keep the replicate with the lowest total within-cluster error.  The
// winning run's centers, assignments, error, and cluster count are written
// back into the object's members; the assignments are also returned.
template<class T>
_DMatrix<int> KMeansCluster<T>::do_clustering(const _DMatrix<T> &in_matrix)
{
// Seed the "best so far" record with the first replicate.
_DMatrix<int> best_assignments = do_clustering_singlereplicate(in_matrix);
_DMatrix<T> best_centers = get_cluster_centers();
double best_error = get_total_error();
int best_cluster_count = get_actual_cluster_count();
// Remaining replicates: keep whichever run scores lowest.
for(int rep = 1; rep < replicate_count; rep++)
{
do_clustering_singlereplicate(in_matrix);
if(get_total_error() < best_error)
{
best_error = get_total_error();
best_centers = get_cluster_centers();
best_assignments = get_assignments();
best_cluster_count = get_actual_cluster_count();
}
}
// Publish the winning replicate's results.
assignments = best_assignments;
cluster_centers = best_centers;
total_error = best_error;
actual_cluster_count = best_cluster_count;
return assignments;
}
// Assignment step of k-means for rows [beg, end] of in_matrix: assign each
// point to its nearest cluster center (squared Euclidean distance, linear
// scan over the centers) and accumulate the total distance into err.
// Returns true iff no point changed cluster, i.e. this slice has converged.
// This is the hot inner loop; the raw-pointer walks avoid per-element
// indexing overhead.
template<class T>
bool KMeansCluster<T>::find_assignments(int beg, int end, const _DMatrix<T> &in_matrix, int *assignments_ptr, double &err, const _DMatrix<T> &cluster_centers, int actual_cluster_count)
{
err = 0;
bool done = true;
int dim_count = in_matrix.cols();
// cerr << "--> " << beg << " " << end << " " << actual_cluster_count << " " << dim_count << endl;
for(int i=beg; i <= end; i++)
{
// cerr << "== " << i << " " << actual_cluster_count << " " << dim_count << endl;
const T *in_matrix_cp = in_matrix[i];
T _min = 1e20;
int _min_i = 0;
// progress dots, one per 10000 points
if(!(i % 10000))
cerr << ".";
// centers are laid out row-major, so one running pointer covers
// all actual_cluster_count * dim_count entries
const T *cluster_centers_cp = cluster_centers.data_ptr();
for(int j=0; j < actual_cluster_count; j++)
{
float dist = 0;
for(int k=0; k<dim_count; k++)
{
float a = in_matrix_cp[k] - *(cluster_centers_cp++);
dist += a * a;
}
if(dist < _min)
{
_min = dist;
_min_i = j;
}
}
// a point being reassigned means that convergence
// hasn't yet been reached.
if(assignments_ptr[i] != _min_i)
{
assignments_ptr[i] = _min_i;
done = false;
}
err += double((_min));
}
return done;
}
// pthread entry point for one slice of the k-means assignment step.  The
// (done, error) outcome is heap-allocated and handed to pthread_exit;
// run_cluster_thread joins the thread, reads the result, and deletes it.
template<class T>
void *KMeansCluster<T>::cluster_thread(void *_p)
{
ClusterThreadParams<T> *p = (ClusterThreadParams<T> *)_p;
double err;
// cerr << "CT " << p->actual_cluster_count << " " << p->in_matrix->cols() << endl;
bool done = find_assignments(p->beg, p->end, *(p->in_matrix), p->assignments_ptr, err, *p->cluster_centers, p->actual_cluster_count);
ClusterThreadResult *res = new ClusterThreadResult(done, err);
pthread_exit((void *)res);
// unreachable: pthread_exit does not return; kept to satisfy the compiler
return res;
}
// Recursively fan the assignment step out over thread_count pthreads, each
// covering a contiguous slice of [beg, end].  total_error accumulates the
// per-slice errors; returns true only if every slice reported convergence.
template<class T>
bool KMeansCluster<T>::run_cluster_thread(int thread_count, int beg, int end, const _DMatrix<T> &in_matrix, int *assignments, double &total_error)
{
if(thread_count == 0)
return true;
pthread_t p;
// This level's worker covers [beg, beg + share]; the recursive call below
// deals the remainder out among thread_count-1 further threads.
ClusterThreadParams<T> params(beg, (end-beg)/thread_count + beg, &in_matrix, &cluster_centers, assignments, actual_cluster_count);
// cerr << "CT33 " << params.actual_cluster_count << endl;
// NOTE(review): pthread_create's return value is unchecked -- TODO confirm.
pthread_create(&p, NULL, &cluster_thread, (void *) &params);
double err2=0;
bool done2 = run_cluster_thread(thread_count-1, params.end+1, end, in_matrix, assignments, err2);
ClusterThreadResult *result;
pthread_join(p, (void **)&result);
// cerr << "joined " << (long int) result << endl;
// cerr << "joined2 " << result->error<< endl;
total_error = err2 + result->error;
bool done = result->done;
// the worker heap-allocated its result; this side owns and frees it
delete result;
// cerr << "returning" << endl;
return done && done2;
}
// columns are features, rows are observations
//
// If a cluster is empty, then nothing in assignments will point to it and
// the corresponding entries of cluster_centers are nan
//
// One complete k-means run: random center initialization, then alternate
// assignment and center-update steps until no point moves or max_iters is
// reached.  Sets assignments, cluster_centers, total_error, and
// actual_cluster_count as side effects.
// NOTE(review): uses random() without seeding here -- presumably the caller
// seeds the RNG; confirm.
template<class T>
_DMatrix<int> KMeansCluster<T>::do_clustering_singlereplicate(const _DMatrix<T> &in_matrix)
{
cerr << " clustering: setting up... " << endl;
int dim_count = in_matrix.cols();
int pt_count = in_matrix.rows();
// can't have more clusters than points
actual_cluster_count = min(requested_cluster_count, pt_count);
cerr << " clustering: " << requested_cluster_count << " " << dim_count << endl;
cluster_centers = _DMatrix<T>(requested_cluster_count, dim_count);
set<int> used_pts;
cerr << " clustering: assigning initial clusters..." << endl;
// randomly select points for initial cluster centers
// (also make sure no 2 clusters have the same center)
// FIXME: this only checks that clusters have different
// data index ids, not actually that the pts are different
//
for(int i=0; i<actual_cluster_count; i++)
{
int pt;
do {
pt = random() % pt_count;
} while (used_pts.find(pt) != used_pts.end());
cluster_centers.set_row(i, in_matrix.extract_row(pt));
used_pts.insert(pt);
}
cerr << " clustering..." << endl;
bool done=false;
// -1 forces every point to count as "reassigned" on the first pass
assignments = _DMatrix<int>(1, pt_count);
assignments = -1;
int *assignments_ptr = assignments.data_ptr();
int iter_count = 0;
cerr << " starting..." << endl;
while(!done && iter_count < max_iters)
{
// first: assign points to closest clusters
total_error=0;
cerr << " pt count " << pt_count;
#ifdef HAVE_PTHREADS
if(thread_count > 1)
done = run_cluster_thread(thread_count, 0, pt_count-1, in_matrix, assignments_ptr, total_error);
else
#endif
done = find_assignments(0, pt_count-1, in_matrix, assignments_ptr, total_error, cluster_centers, actual_cluster_count);
cerr << endl;
cerr << " total error " << total_error << endl;
// second: recompute cluster centers as the mean of assigned points
cluster_centers = 0;
int counts[actual_cluster_count];
memset(counts, 0, sizeof(int) * actual_cluster_count);
T *in_cp = in_matrix.data_ptr();
for(int i=0; i < pt_count; i++)
{
int which_cluster = assignments_ptr[i];
counts[which_cluster]++;
T *center_cp = cluster_centers[which_cluster];
for(int j=0; j < dim_count; j++)
center_cp[j] += *(in_cp++);
}
// NOTE(review): an empty cluster divides 0/0 here, producing NaNs for
// floating-point T; those rows are swapped out of range by the
// empty-cluster pass just below, so the NaNs never survive.
T *cluster_centers_cp = cluster_centers.data_ptr();
for(int i=0; i < actual_cluster_count; i++)
for(int j=0; j < dim_count; j++)
*(cluster_centers_cp++) /= counts[i];
cerr << " second " << endl;
// check for the case of an empty cluster. Handle this by
// moving cluster to the end and decreasing actual_cluster_count.
for(int i=0; i<actual_cluster_count; i++)
{
if(counts[i] == 0)
{
assignments.search_and_replace_ip(actual_cluster_count-1, i);
cluster_centers.swap_rows(actual_cluster_count-1, i);
swap(counts[actual_cluster_count-1], counts[i]);
actual_cluster_count--; i--;
}
}
cerr << "iteration " << iter_count << endl;
iter_count++;
}
// mark empty cluster centers as "nan"
_DMatrix<T> no_cluster(1, dim_count, T(nan("")));
for(int i=actual_cluster_count; i < requested_cluster_count; i++)
cluster_centers.set_row(i, no_cluster);
return assignments;
}
// Return (cluster index, squared Euclidean distance) of the cluster center
// nearest to vec, scanning all actual_cluster_count non-empty centers.
// vec must have exactly one entry per feature dimension.
template<class T>
pair<int, T> KMeansCluster<T>::get_assignment(const vector<T> &vec)
{
// cast avoids a signed/unsigned comparison warning
assert((int)vec.size() == cluster_centers.cols());
double _min = 1e100;
// Initialize the index: previously it was uninitialized, so a model with
// zero clusters returned an indeterminate value (undefined behavior).
int _min_i = 0;
// centers are row-major; one running pointer walks every entry
const T *cluster_centers_cp = cluster_centers.data_ptr();
for(int j=0; j < actual_cluster_count; j++)
{
T dist = 0;
for(int k=0; k<cluster_centers.cols(); k++)
{
T a = vec[k] - *(cluster_centers_cp++);
dist += a * a;
}
if(dist < _min)
{
_min = dist;
_min_i = j;
}
}
return make_pair(_min_i, _min);
}
//#define DECLARE(x) \
// template class KMeansCluster<x>;
//DECLARE(double)
//DECLARE(float)
// Explicit instantiations: force the compiler to emit KMeansCluster code
// for the element types the rest of the project links against.
template class KMeansCluster<float>;
template class KMeansCluster<double>;
<file_sep>/data/disco-bp/run_transbp.sh
#!/bin/sh
export LD_LIBRARY_PATH=./corona-1.0.2/src/.libs/:$LD_LIBRARY_PATH
# Run transbp on the Acropolis dataset. The energy and labels are printed each iteration.
# Note: --translate takes two integer arguments (row, col); it was previously
# placed directly before the input filenames, which made parse_opts consume
# both filenames as the (atoi'd, i.e. 0 0) translation and left the program
# with no input files.  It has been removed; the two positional arguments are
# the pairwise-constraints file and the unary-potentials file.
./bin/bp-trans --iters 15 --threads 10 --anticollapse 0.00001 1.0 --labelspace 101 0.2 --unweight 2 --dumpmessages data/acropolis/global.translations.txt data/acropolis/global.poses.txt
<file_sep>/src/overnight.sh
#!/bin/bash
# Overnight batch driver: for each sigma, bump the "stopping" threshold by
# 0.05 and invoke the enabled graph make targets for every parameter combo.
stopping=0.0
for sigma in '0.01' '0.04';
do
# bc prints fractions with a bare leading dot (".05"); prepend a zero only
# when needed.  The old unconditional '0' prefix mangled any sum >= 1 into
# e.g. "01.05".
stopping=`echo "0.05+${stopping}" | bc | sed 's/^\./0./'`;
echo ${stopping}
#for g in `seq 1 2`;
#do
#	for i in '2' '6';
#	do
#		make graph${g} noise_ratio=0.${i} sigma=${sigma} stopping=${stopping}
#	done
#done
# only graphs 3 and 4 with noise_ratio 0.0 are currently enabled
for g in `seq 3 4`;
do
for i in '0';
do
make graph${g} noise_ratio=0.${i} sigma=${sigma} stopping=${stopping}
done
done
done
<file_sep>/data/disco-bp/bp-rot.cpp
#include "DImage.h"
#include <DMultiDMatrix.h>
#include <vector>
#include <istream>
#include <fstream>
#include <ext/hash_map>
#include <set>
using namespace std;
#include <io.h>
#include <algorithm>
#include <numeric>
#include <map>
#include <math.h>
#include "bp-common.h"
#include <stack>
using namespace __gnu_cxx;
using namespace std;
// input files
char *pairwise_file_g = 0; //, *unary_file_g = 0;
// # of BP iterations to run
int max_ii_g = 100;
// truncation point of the pairwise cost (see dist() below)
double pair_trunc_g = 1.0;
// older file format doesn't have confidence field
bool no_confidence_in_file_g = false, two_confidence_in_file_g = false;
// ground truth file
char *gt_g = 0;
// for debugging...
double stretch_g = 4;
int bufsz_g = 4000;
int verbose_g=0;
int node_g = -1;
char *rescore_g = 0;
// linear (vs truncated-quadratic) pairwise cost; see the disabled branch in dist()
bool linear_cost_g = false;
bool discrete_g = true;
// print to stdout info about geotag-based edge priors then exit
bool write_geo_priors_g = true;
// geotag-based unary potential parameters (weights / truncation / noise;
// NOTE(review): exact semantics not visible in this chunk -- see usage sites)
bool scale_geotag_unary_g = false;
double scaled_geotag_weight_g = 0.22;
double unary_geotag_weight_g = 0.05;
double unary_geotag_trunc_g = 0.7;
double geotag_stdev_g = 0; // 53
int num_unary_samples_g = 1;
bool geotag_pan_g = false;
/// Rotations
// twist (in-plane rotation) binning; currently a single bin at 0
//int TWIST_BIN_COUNT = 5;
//double TWIST_BIN_SPACING = 0.15;
//double TWIST_BIN_MIN = -0.3;
int TWIST_BIN_COUNT = 1;
double TWIST_BIN_SPACING = 0;
double TWIST_BIN_MIN = 0;
// label grid: GRID_COUNT bins per translation axis, DIST_GRAN bins per unit
// distance (see the pack/unpack/from_dist helpers below)
int GRID_COUNT = 21; // 21; // 11; // 21;
int LABEL_COUNT = GRID_COUNT * GRID_COUNT * GRID_COUNT * TWIST_BIN_COUNT;
double DIST_GRAN = 10.0; // 10.0; // 5.0; // 10.0;
double DIST_STEP = 0.1;
int ref_node_g = -1;
int thread_count_g = 1;
// misc option flags (semantics follow their command-line option names)
bool random_ref_g = false;
bool no_remove_cut_g = false;
bool ccref_highest_degree_g = false;
bool no_labels_g = false;
bool write_edges_g = false;
bool prepropagate_g = true;
bool param_est_g = false;
char *vanish_g = 0;
char *perfect_pairs_g = 0;
char *geoplanar_g = 0;
double unary_scale_g = 1.0, unary_trunc_g = 1.0;
DistributionMap unary_dists;
typedef _DMultiDMatrix<double> Msgtmp_type ;
//typedef hash_map<int, hash_map<int, PairwiseDiff > > Pairs;
// Euclidean (Frobenius) norm of a matrix: sqrt of the sum of its squared
// entries.
template<class T>
inline double vector_norm(const _DMatrix<T> &matrix)
{
return sqrt(pointwise_multiply(matrix, matrix).sum());
}
// Label packing: a state is a (twist, t0, t1, t2) tuple flattened row-major
// into one integer, with GRID_COUNT bins per translation axis and
// TWIST_BIN_COUNT twist bins (see pack() for the layout).
inline int unpack_tw(int packed) { return packed / (GRID_COUNT * GRID_COUNT * GRID_COUNT); }
inline int unpack_t0(int packed) { return (packed / (GRID_COUNT * GRID_COUNT)) % GRID_COUNT; }
inline int unpack_t1(int packed) { return (packed / GRID_COUNT) % GRID_COUNT; }
inline int unpack_t2(int packed) { return packed % GRID_COUNT; }
// Map a continuous coordinate to/from its grid bin: DIST_GRAN bins per unit
// of distance, with bin GRID_COUNT/2 centered on coordinate 0.
inline int from_dist(double s) { return (int)round(s * double(DIST_GRAN) + GRID_COUNT/2); }
inline double to_dist(double s) { return (s - GRID_COUNT/2) / double(DIST_GRAN); }
inline int pack(int t0, int t1, int t2, int tz=0) { return tz * GRID_COUNT * GRID_COUNT * GRID_COUNT + t0 * GRID_COUNT * GRID_COUNT + t1 * GRID_COUNT + t2; }
// Twist <-> bin index, clamped into [0, TWIST_BIN_COUNT).
inline int from_twist(double s) { int s2 = (int) round((s-TWIST_BIN_MIN) / TWIST_BIN_SPACING); if(s2 < 0) s2 = 0; if(s2 >= TWIST_BIN_COUNT) s2 = TWIST_BIN_COUNT-1; return s2; }
inline double to_twist(double s) { return TWIST_BIN_MIN + s * TWIST_BIN_SPACING; }
// Square of a value; works for any type with operator*.
template<class T>
inline T sqr(T value) { return value * value; }
// FIXME: this function is called by inner_loop. Make it faster!
// it would be good to get rid of the conditional here. That's possible with integers using bit twiddling,
// but I'm not sure how to do it with floats. We could possibly move to fixed-precision arithmetic.
// Truncated quadratic penalty: min(s^2, pair_trunc_g^2).  The disabled code
// below preserves the abandoned linear-cost variant and an explicit
// conditional form of the same truncation.
inline double dist(double s)
{
/*
if(linear_cost_g)
{
if(s < -pair_trunc_g) return pair_trunc_g;
else if(s > pair_trunc_g) return pair_trunc_g;
else if(s < 0) return -s;
else return s;
}
*/
// if(s < -pair_trunc_g) return pair_trunc_g*pair_trunc_g;
// else if(s > pair_trunc_g) return pair_trunc_g*pair_trunc_g;
// else return s*s;
return min(s*s, pair_trunc_g * pair_trunc_g);
}
/*
double distance2_correct(int my_state, int other_state, const PairwiseDiff &diff, const vector< _DMatrix<TYPE> > &rotation_matrices)
{
_DMatrix<TYPE> diff2;
change_type(-diff.rot.extract_col(2), diff2);
_DMatrix<TYPE> predicted_dir = rotation_matrices[my_state] * diff2;
_DMatrix<TYPE> other = -rotation_matrices[other_state].extract_col(2);
return(dist(predicted_dir[0][0]-other[0][0]) + dist(predicted_dir[0][1]-other[0][1]) + dist(predicted_dir[0][2] - other[0][2]));
}
*/
double distance2_correct(const PairwiseDiff &diff, DMatrix &mystate_rot, const DMatrix &other)
{
DMatrix predicted_dir = mystate_rot * -diff.rot.extract_col(2);
// double predicted_dir[3];
// for(int i=0; i<3; i++)
// for(int k=0; k<3; k++)
// predicted_dir[i] = mystate_rot[i][k] * -diff.rot[2+k*3];
return(dist(predicted_dir[0][0]-other[0][0]) + dist(predicted_dir[0][1]-other[0][1]) + dist(predicted_dir[0][2] - other[0][2]));
// double *other_ptr = other[0];
// return dist(predicted_dir[0] - other_ptr[0]) + dist(predicted_dir[1] - other_ptr[1]) + dist(predicted_dir[2] - other_ptr[2]);
}
// Returns 1 iff point (x,y,z) lies inside or on the sphere of radius r
// centered at the origin, else 0.
int inside_sphere(double x, double y, double z, double r = 1.0)
{
  const double dist_sq = x*x + y*y + z*z;
  return (dist_sq <= r*r) ? 1 : 0;
}
// Returns true iff the grid cell for `state`'s viewing direction intersects
// the surface of the unit sphere (i.e. the cell can contain a unit-length
// direction). The answer is precomputed for every label on the first call
// and cached in a static lookup table.
// NOTE(review): the lazy static initialization is not thread-safe; main()
// calls is_state_on_sphere(0) once before any worker threads start, which is
// what makes later concurrent reads safe -- confirm that invariant holds.
bool is_state_on_sphere(int state)
{
  static bool first = true;
  static bool *intersect = 0;
  if(first)
  {
    intersect = new bool[LABEL_COUNT];
    for(int s=0; s<LABEL_COUNT; s++)
    {
      double x = to_dist(unpack_t0(s));
      double y = to_dist(unpack_t1(s));
      double z = to_dist(unpack_t2(s));
      // there are eight corners of this cube. if at least 1 is inside the sphere, and
      // at least 1 is outside the sphere, then the surface of the sphere intersects this cube.
      double half_bin = 1.0/(2.0 * DIST_GRAN);
      int in_count=0;
      for(int xx = -1; xx <= 1; xx+=2)
        for(int yy = -1; yy <= 1; yy+=2)
          for(int zz = -1; zz <= 1; zz+=2)
            in_count += inside_sphere(x + xx*half_bin, y + yy*half_bin, z + zz*half_bin);
      intersect[s] = in_count > 0 && in_count < 8;
    }
    // diagnostic: report how many labels lie on the sphere surface
    int c=0;
    for(int i=0; i<LABEL_COUNT; i++)
      if(intersect[i]) c++;
    first = false;
    cout << "(" << c << " " << LABEL_COUNT << " " << DIST_GRAN << ") " << endl;
  }
  return intersect[state];
}
// *all* geotags projected onto a plane
void read_2d_locations_file(const string &fname, hash_map<int, DMatrix> &locations) {
ifstream f(fname.c_str());
if (!f.good()) {
cerr << "cannot find " << fname << endl;
}
assert(f.good());
while (f.good()) {
int id;
DMatrix xyz(3,1);
f >> id >> xyz[0][0] >> xyz[2][0];
xyz[1][0] = 0;
locations[id] = xyz;
//cerr << unary[id].position << endl;
}
f.close();
}
// Parses command-line flags into the global configuration variables, then
// treats the next argument as the pairwise-constraints filename. Exits the
// process on unrecognized trailing arguments.
void parse_opts(int argc, char *argv[])
{
  int ii=1;
  for(ii=1; ii<argc; ii++)
  {
    if(!strcmp(argv[ii], "--iters"))
      max_ii_g = atoi(argv[++ii]);
    else if(!strcmp(argv[ii], "--gt"))
      gt_g = argv[++ii];
    else if(!strcmp(argv[ii], "--threads"))
      thread_count_g = atoi(argv[++ii]);
    else if(!strcmp(argv[ii], "--verbose"))
      verbose_g++;
    else if(!strcmp(argv[ii], "--writeedges"))
      write_edges_g = true;
    else if(!strcmp(argv[ii], "--twist"))
    {
      // enable the twist dimension of the label space (5 bins)
      TWIST_BIN_COUNT=5;
      TWIST_BIN_SPACING = 0.1;
      TWIST_BIN_MIN = -0.2;
    }
    else if(!strcmp(argv[ii], "--noconf"))
      no_confidence_in_file_g = true;
    else if(!strcmp(argv[ii], "--hideg"))
      ccref_highest_degree_g = true;
    else if(!strcmp(argv[ii], "--nocut"))
      no_remove_cut_g = true;
    else if(!strcmp(argv[ii], "--randomref"))
      random_ref_g = true;
    else if(!strcmp(argv[ii], "--twoconf"))
      two_confidence_in_file_g = true;
    else if(!strcmp(argv[ii], "--rescore"))
      rescore_g = argv[++ii];
    else if(!strcmp(argv[ii], "--pairtrunc"))
      pair_trunc_g = atof(argv[++ii]);
    else if(!strcmp(argv[ii], "--linear"))
      linear_cost_g = true;
    else if(!strcmp(argv[ii], "--refnode"))
      ref_node_g = atoi(argv[++ii]);
    else if(!strcmp(argv[ii], "--nolabels"))
      no_labels_g = true;
    else if(!strcmp(argv[ii], "--nopreprop"))
      prepropagate_g = false;
    else if(!strcmp(argv[ii], "--makeperfectpairs"))
      perfect_pairs_g = argv[++ii];
    else if(!strcmp(argv[ii], "--vanish"))
      vanish_g = argv[++ii];
    else if(!strcmp(argv[ii], "--paramest"))
      param_est_g = true;
    else if(!strcmp(argv[ii], "--labelspace"))
    {
      // two values: grid cells per axis, and direction granularity
      GRID_COUNT = atoi(argv[++ii]);
      LABEL_COUNT = GRID_COUNT * GRID_COUNT * GRID_COUNT * TWIST_BIN_COUNT;
      DIST_GRAN = atof(argv[++ii]);
    }
    else if (!strcmp(argv[ii], "--geoplanar"))
      geoplanar_g = argv[++ii];
    else if (!strcmp(argv[ii], "--unarygeotagweight"))
      unary_geotag_weight_g = atof(argv[++ii]);
    else if (!strcmp(argv[ii], "--unarygeotagtrunc"))
      unary_geotag_trunc_g = atof(argv[++ii]);
    else if (!strcmp(argv[ii], "--geotagstdev"))
      geotag_stdev_g = atof(argv[++ii]);
    else if (!strcmp(argv[ii], "--numunarysamples"))
      num_unary_samples_g = atoi(argv[++ii]);
    else if (!strcmp(argv[ii], "--scalegeotagunary"))
      scale_geotag_unary_g = atoi(argv[++ii]);
    else if (!strcmp(argv[ii], "--scaledgeotagweight"))
      scaled_geotag_weight_g = atof(argv[++ii]);
    else if (!strcmp(argv[ii], "--geotagpan"))
      geotag_pan_g = atoi(argv[++ii]);
    else
      break;   // first non-flag argument: fall through to positional args
  }
  // linear pairwise costs currently disabled
  assert(!linear_cost_g);
  DIST_STEP = 1.0 / DIST_GRAN;
  if(ii < argc)
    pairwise_file_g = argv[ii++];
  cerr << pairwise_file_g << endl;
  if(argc != ii)
  {
    cerr << "bad commandline options." << endl;
    exit(1);
  }
}
// Reference (unoptimized) inner distance-transform loop: minimum over all
// source bins z2 of scratch[z2] plus the truncated quadratic distance between
// that bin's continuous value and the predicted value mu.
inline TYPE inner_loop(double *scratch, double mu)
{
  double best = INFINITY;
  for(int bin = 0; bin < GRID_COUNT; bin++)  // "mine"
  {
    double candidate = scratch[bin] + dist(to_dist(bin) - mu);
    if(candidate < best)
      best = candidate;
  }
  return best;
}
// this version gives slightly different results than inner_loop (due to numerical issues),
// but results should be of similar quality and this version is twice as fast:
// it replaces the per-iteration to_dist() call with an incrementally updated offset.
//
// FIXME: replace call to dist() with a lookup table?
inline TYPE inner_loop_opt(double *scratch, double mu)
{
  const double step = 1.0/DIST_GRAN;
  // continuous value of bin 0, shifted by the prediction mu
  double offset = (-GRID_COUNT / 2) / double(DIST_GRAN) - mu;
  double best = INFINITY;
  for(int bin = 0; bin < GRID_COUNT; bin++, offset += step)  // "mine"
  {
    double candidate = scratch[bin] + dist(offset);
    if(candidate < best)
      best = candidate;
  }
  return best;
}
// Dot product of a 3-element matrix row with a 3-element vector.
inline double multiply_row_with_vector(TYPE *row, TYPE *vec)
{
  double acc = row[0] * vec[0];
  acc = acc + row[1] * vec[1];
  acc = acc + row[2] * vec[2];
  return acc;
}
// have rotation matrix C0 estimated based on v0, so multiply C0*(P01*r).
// FIXME: there used to be a heuristic here that ignored columns that are all INF on the first dimension,
// leading to minor (few %) speedup
//
// d1, d2, d3: some permutation of the variable names x, y, z. DT is done over dimension d3 (i.e. for each value of d1,d2)
// D1, D2, D3: same as d1, d2, d3, but with capital variable names X, Y, Z
// row1, row2, row3: some permutation of 0, 1, 2: same order as d1, d2, d3, where 0=x, 1=y, 2=z
// P01r : final column of expected rotation matrix between the two cameras.
//
// One pass of the 3-d distance transform over dimension d3: for every (d1,d2)
// column, copy the column into `scratch`, then for each target bin compute the
// predicted coordinate mu (row `row3` of the target state's rotation matrix
// dotted with P01r) and take the min-plus-distance over the scratch column.
#define LOOP_CORRECT(d1,d2,d3, D1, D2, D3, row1, row2, row3, P01r, __msg_tmp, rot_mats) \
{ \
  for(int d1=0; d1<GRID_COUNT; d1++) \
    for(int d2=0; d2<GRID_COUNT; d2++) \
      { \
	for(int d3=0; d3<GRID_COUNT; d3++) \
	  scratch[d3] = __msg_tmp.get(x)[y][z];	\
	\
	for(int d3=0; d3<GRID_COUNT; d3++) \
	  { \
	    double mu = multiply_row_with_vector(rot_mats[pack(x,y,z,twist)][row3], P01r); \
	    __msg_tmp.get(x)[y][z] = inner_loop_opt(scratch, mu); \
	  } \
      } \
}
// FIXME: remove twist in above call to pack to speed up.
// Normalizes a message in place by subtracting the mean of its finite
// entries (the "dc component"), then masks labels whose grid cell does not
// intersect the unit sphere back to INFINITY.
void subtract_dc(Message &this_msg)
{
  // mean of the finite entries
  double total = 0;
  int finite_count = 0;
  for(int s = 0; s < LABEL_COUNT; s++)
  {
    if(!isinf(this_msg[s]))
    {
      total += this_msg[s];
      finite_count++;
    }
  }
  const double mean = total / double(finite_count == 0 ? 1 : finite_count);
  for(int s = 0; s < LABEL_COUNT; s++)
    this_msg[s] -= mean;
  // This step unnecessary, but makes messages more readable (for debugging)
  for(int s = 0; s < LABEL_COUNT; s++)
  {
    if(!is_state_on_sphere(s))
      this_msg[s] = INFINITY;
  }
}
// Initializes H to the unary cost table for camera `me`.
// NOTE(review): cc_ref is currently unused; presumably kept so callers don't
// change if reference-node-specific unaries return -- confirm before removing.
void make_H(Distribution &H, int me, int cc_ref)
{
  H = unary_dists[me];
}
// Accumulates into H the previous-iteration messages sent to node `me` by
// every neighbor except `him` (the recipient of the message being computed).
void read_messages(Distribution &H, int me, int him, const Pairs &deltas, const MessageSet &last_ms)
{
  Pairs::const_iterator nbrs_it = deltas.find(me);
  assert(nbrs_it != deltas.end());
  const NeighborPairs &neighbors = nbrs_it->second;
  MessageSet::const_iterator inbox_it = last_ms.find(me);
  assert(inbox_it != last_ms.end());
  const hash_map<int, Message> &inbox = inbox_it->second;
  for(NeighborPairs::const_iterator n = neighbors.begin(); n != neighbors.end(); ++n)
  {
    if(n->first == him)
      continue;  // never echo the recipient's own message back
    hash_map<int, Message>::const_iterator msg_it = inbox.find(n->first);
    assert(msg_it != inbox.end());
    const Message &msg = msg_it->second;
    for(int s = 0; s < LABEL_COUNT; s++)
      H[s] += msg[s];
  }
}
// Computes each node's final belief: the sum of all incoming messages plus
// its unary potential. Returns a map from camera id to per-label cost.
//
// Fix: the parameter list was corrupted by HTML-entity mangling
// ("MessageSet ¤t_ms" was originally "MessageSet &current_ms").
DistributionMap compute_final_distribution(MessageSet &current_ms, const vector<Edge> &edge_list, int cc_ref)
{
  DistributionMap result;
  // sum incoming messages for every edge target
  for(int i=0; i<(int)edge_list.size(); i++)
  {
    Edge edge = edge_list[i];
    int me = edge.first, him = edge.second;
    if(result.find(him) == result.end())
      result[him] = vector<TYPE>(LABEL_COUNT);
    const Message &this_msg = current_ms[him][me];
    vector<TYPE> &this_final = result[him];
    for(int j=0; j<LABEL_COUNT; j++)
      this_final[j] += this_msg[j];
  }
  // incorporate each node's unary potential
  for(DistributionMap::iterator iter = result.begin(); iter != result.end(); ++iter)
  {
    Distribution H;
    make_H(H, iter->first, cc_ref);
    for(int j=0; j<LABEL_COUNT; j++)
      iter->second[j] += H[j];
  }
  return result;
}
// Converts a packed label into a 4x1 vector: normalized viewing direction in
// components 0-2 and the twist angle in component 3.
// NOTE(review): include_twist is currently unused -- the twist component is
// always written; confirm whether any caller relies on suppressing it.
// NOTE(review): entries are addressed as vec[0][i] on a 4x1 matrix; this
// presumably relies on DMatrix's operator[] returning a pointer into
// contiguous storage -- verify against the DMatrix implementation.
DMatrix norm_dir_of_label(int my_state, bool include_twist)
{
  DMatrix vec(4,1);
  vec[0][0] = to_dist(unpack_t0(my_state)); vec[0][1] = to_dist(unpack_t1(my_state)); vec[0][2] = to_dist(unpack_t2(my_state)); vec[0][3] = 0;
  double norm = (sqrt(pointwise_multiply(vec, vec).sum()));
  if(norm > 0) vec /= norm;
  vec[0][3] = to_twist(unpack_tw(my_state));
  return vec;
}
// Extracts the MAP (minimum-cost) label for every camera from the final
// belief distribution, prints one "labels" line per camera (unless
// --nolabels), fills `estimated_dirs` with the corresponding normalized
// direction + twist vectors, and returns the chosen label per camera.
// Cameras with no distribution (unreachable) are reported with label -1.
vector<int> find_map_estimates(DistributionMap &final_distribution, int photo_count, map<int, DMatrix> &estimated_dirs, int ii)
{
  vector<int> labels(photo_count);
  for(int him=0; him<photo_count; him++)
  {
    if(!no_labels_g)
      cout << "labels " << ii << " " << him << " ";
    if(final_distribution.find(him) == final_distribution.end())
    {
      // no belief for this camera: emit a sentinel identity-like row
      cout << -1 << " " << 0 << " " << 0 << " " << 1 << " " << 0 << " " << 0 << " " << 0 << " " << 1 << endl;
      continue;
    }
    // argmin over labels whose cell touches the unit sphere
    double min_cost = INFINITY;
    int min_j=0;
    for(int j=0; j<LABEL_COUNT; j++)
      if(is_state_on_sphere(j))
        if(min_cost > final_distribution[him][j])
          min_cost = final_distribution[him][j], min_j = j;
    labels[him] = min_j;
    // estimated_dirs[him] = norm_dir_of_label(min_j);
    estimated_dirs[him] = norm_dir_of_label(min_j, true);
    // cout << him << ":" << "(" << min_j << ")," << to_dist(unpack_t0(min_j)) << "," << to_dist(unpack_t1(min_j)) << "," << to_dist(unpack_t2(min_j)) << "," << to_twist(unpack_tw(min_j)) << " ";
    if(!no_labels_g)
      cout << min_j << " " << to_dist(unpack_t0(min_j)) << " " << to_dist(unpack_t1(min_j)) << " " << to_dist(unpack_t2(min_j)) << " " << to_twist(unpack_tw(min_j)) << " " << estimated_dirs[him][0][0] << " " << estimated_dirs[him][0][1] << " " << estimated_dirs[him][0][2] << endl;
  }
  if(!no_labels_g)
    cout << endl;
  return labels;
}
// Sign of X: +1 for X >= 0, otherwise -1 (note: sign(0) == +1).
template<class T>
inline int sign(T X)
{
  return (X >= 0) ? 1 : -1;
}
// Builds the zero-twist 3x3 rotation matrix whose third column is the
// (normalized, negated) viewing direction (X,Y,Z); used as a fallback when
// the camera points straight up/down and the general construction degenerates.
// NOTE(review): the `twist` parameter is currently unused -- callers always
// pass 0; confirm before relying on it.
template<class T>
_DMatrix<T> compute_zerotwist_rot(double X, double Y, double Z, double twist=0)
{
  _DMatrix<T> result(3,3);
  double len = sqrt(X*X + Y*Y + Z*Z);
  X /= len; Y /= len; Z /= len;
  // projection of the direction onto the x-z plane, renormalized
  double len2 = sqrt(X*X + Z*Z);
  double X_ = X / len2, Z_ = Z / len2;
  result[0][0] = -Z_;  result[0][1] = -Y * X_;         result[0][2] = -X;
  result[1][0] = 0;    result[1][1] = Z * Z_ + X * X_; result[1][2] = -Y;
  result[2][0] = X_;   result[2][1] = -Y * Z_;         result[2][2] = -Z;
  return result;
}
// rotation_matrix_abstwist : produce a 3x3 rotation matrix from a viewing direction and twist angle
// X, Y, Z is the camera viewing direction
// abstwist represents the twist in terms of an absolute y coordinate (in abs x-y-z space)
//
#define QUANTUM 1e-10
// Produce a 3x3 rotation matrix from viewing direction (X,Y,Z) and twist,
// where `abstwist` is expressed as an absolute y coordinate of the camera's
// "right" axis (see rotation_matrix() for the angular form).
template<class T>
_DMatrix<T> rotation_matrix_abstwist(double X, double Y, double Z, double abstwist=0)
{
  _DMatrix<T> result(3,3);
  // last column of R is just -(X,Y,Z), (normalized just in case)
  double len = sqrt(X*X + Y*Y + Z*Z);
  X /= len; Y /= len; Z /= len;
  // if X and Z are both 0, then the camera is pointing straight up or down.
  // for now, just handle this as if abstwist were 0.
  if(fabs(X) < QUANTUM && fabs(Z) < QUANTUM)
    return compute_zerotwist_rot<T>(X, Y, Z, 0);
  // first column is (A, B, C), where B=asin(twist), |(A,B,C)=1|, and (A,B,C) . (X,Y,Z) = 0.
  // solve for C with quadratic equation:
  //   (Z^2/X^2 + 1) * C^2 + 2*Z*B*Y/X^2*C + (B^2*Y^2/X^2 - 1 + B^2)
  double B = abstwist;
  double A, C, A1, C1, A2, C2;
  if(fabs(X) < QUANTUM)
  {
    // degenerate case: X ~ 0, so C is determined directly
    C2 = C1 = B*Y/Z;
    A1 = sqrt(1-B*B*(1+Y*Y/(Z*Z)));
    A2 = -sqrt(1-B*B*(1+Y*Y/(Z*Z)));
  }
  else
  {
    double X_sqr_inv = 1.0/(X*X);
    double _a = (Z*Z)*X_sqr_inv + 1, _b = 2*Z*B*Y*X_sqr_inv, _c = B*B*Y*Y*X_sqr_inv - 1 + B*B;
    double _sqrt = sqrt(_b*_b - 4 * _a * _c);
    C1 = (-_b + _sqrt)/(2*_a), C2 = (-_b - _sqrt)/(2*_a);
    A1 = (-B*Y-C1*Z) / X, A2 = (-B*Y-C2*Z) / X;
  }
  C = C1, A = A1;
  // choose solution that is "90 left" of viewing direction (assumes twist angle
  // relatively small, definitely smaller than 90 degrees)
  double signmag = fabs(A2)+fabs(Z) - ( fabs(C2)+fabs(X) );
  if((sign(A2) != sign(Z) && signmag >= 0) || (sign(C2) == sign(X) && signmag < 0 ))
    C = C2, A = A2;
  // second column is -(A,B,C) x (X, Y, Z)
  result[0][0] = A;  result[0][1] = B*Z - C*Y;  result[0][2] = -X;
  result[1][0] = B;  result[1][1] = C*X - A*Z;  result[1][2] = -Y;
  result[2][0] = C;  result[2][1] = A*Y - B*X;  result[2][2] = -Z;
  return result;
}
// in this version, twist is an angle about the viewing direction (X,Y,Z)
// Build a rotation from viewing direction (X,Y,Z) and a twist *angle*
// (about the viewing direction), by converting to the absolute-twist form
// expected by rotation_matrix_abstwist().
template<class T>
_DMatrix<T> rotation_matrix(double X, double Y, double Z, double twist=0)
{
  const double abs_twist = sin(twist) * cos(Y);
  return rotation_matrix_abstwist<T>(X, Y, Z, abs_twist);
}
// Recovers the twist angle from a 3x3 rotation matrix.
// Fix: the acos argument was only clamped from above (min(..., 1.0)); a
// noisy/estimated rotation matrix can push the ratio below -1 (and the
// denominator toward 0), which made acos return NaN. Clamp both sides.
template<class T>
double twist_of_rotation_matrix(const _DMatrix<T> &rot)
{
  double c = (rot[0][0] * rot[2][2] - rot[2][0] * rot[0][2]) / sqrt(1 - rot[1][2]*rot[1][2]);
  if(c > 1.0) c = 1.0;
  if(c < -1.0) c = -1.0;
  return acos(c);
}
// Recovers the tilt angle (rotation above/below horizontal) from a 3x3
// rotation matrix.
// Fix: clamp the asin argument to [-1,1] -- numerical noise in an estimated
// rotation matrix can push |rot[1][2]| slightly above 1, which made asin
// return NaN.
template<class T>
double tilt_of_rotation_matrix(const _DMatrix<T> &rot)
{
  double s = -rot[1][2];
  if(s > 1.0) s = 1.0;
  if(s < -1.0) s = -1.0;
  return asin(s);
}
// Total energy of a labeling: sum of pairwise costs over all edges (truncated
// squared distance between each camera's predicted and estimated neighbor
// direction) plus each camera's unary cost at its estimated label.
// Throws a string describing the missing camera if an edge endpoint has no
// estimate.
double compute_labeling_cost(map<int, DMatrix> &estimated, Pairs &deltas, vector<Edge> &edge_list, int ii)
{
  double cost = 0;
  // pairwise costs
  for(int i=0; i<(int)edge_list.size(); i++)
  {
    Edge edge = edge_list[i];
    PairwiseDiff this_delta_rot = deltas[edge.first][edge.second];
    int me = edge.first, him = edge.second;
    if(estimated.find(me) == estimated.end() || estimated.find(him) == estimated.end())
    {
      ostringstream ofs;
      ofs << "missing estimate for camera " << me << " or " << him;
      throw(ofs.str());
    }
    // rebuild my full rotation matrix from direction + twist, then score
    DMatrix mystate_rots = rotation_matrix<double>(estimated[me][0][0], estimated[me][0][1], estimated[me][0][2], estimated[me][0][3]);
    cost += distance2_correct(this_delta_rot, mystate_rots, estimated[him]);
  }
  // incorporate unary costs
  for(map<int, DMatrix>::iterator iter = estimated.begin(); iter != estimated.end(); ++iter)
    cost += unary_dists[iter->first][pack(from_dist(iter->second[0][0]), from_dist(iter->second[0][1]), from_dist(iter->second[0][2]))];
  return cost;
}
// Finds the least-squares rotation aligning the estimated directions to the
// ground-truth directions: build the 3x3 column-correlation matrix, then
// project it onto the nearest orthogonal matrix via SVD (u * v^T).
//
// Fix: the second parameter name was corrupted by HTML-entity mangling
// ("const DMatrix >_matrix" was originally "const DMatrix &gt_matrix",
// which the body references as gt_matrix).
DMatrix compute_ls_rotation(const DMatrix &est_matrix, const DMatrix &gt_matrix)
{
  DMatrix corr(3,3);
  for(int i=0; i<3; i++)
    for(int j=0; j<3; j++)
    {
      // Pearson correlation between estimate column i and ground-truth column j
      DMatrix X = est_matrix.extract_col(i) - est_matrix.extract_col(i).mean();
      DMatrix Y = gt_matrix.extract_col(j) - gt_matrix.extract_col(j).mean();
      corr[i][j] = pointwise_multiply(X,Y).mean() / (sqrt(X.covariance()[0][0]) * sqrt(Y.covariance()[0][0]));
    }
  cerr << "CORR = " << corr << endl;
  // nearest rotation to the correlation matrix (orthogonal Procrustes)
  DMatrix u,s,v;
  corr.svd(u,s,v);
  corr = u * v.transpose();
  return corr;
}
// Compares estimated camera directions/twists against ground truth for all
// reachable cameras with a gt entry. First aligns the estimates to the gt
// frame with the least-squares rotation, then accumulates:
//   err (out)          -- mean unaligned direction error (vector norm)
//   twist_error (out)  -- mean absolute twist error
//   return value       -- mean aligned direction error
// Also prints one "gtcost" diagnostic line per camera.
//
// Fix: the second parameter was corrupted by HTML-entity mangling
// ("hash_map<int, PairwiseDiff> >" was originally
//  "hash_map<int, PairwiseDiff> &gt", which the body references as gt).
double compute_gt_error(double &err, hash_map<int, PairwiseDiff> &gt, const set<int> &reachable, map<int, DMatrix> &estimated_dirs, int photo_count, int ii, double &twist_error)
{
  double err2=0;
  err = 0;
  twist_error = 0;
  // stack up ground-truth and estimated viewing directions, one row per camera
  DMatrix gt_matrix(0,3), est_matrix(0,3);
  for(int i=0; i<photo_count; i++)
  {
    if(reachable.find(i) == reachable.end() || gt.find(i) == gt.end())
      continue;
    gt_matrix = vert_concat(gt_matrix, -gt[i].rot.extract_col(2).transpose());
    est_matrix = vert_concat(est_matrix, estimated_dirs[i].transpose().extract(DRect(0,0,0,2)));
  }
  cout << "GT " << est_matrix << endl << gt_matrix << endl;
  // find best (least squared error) rotation matrix between ground truth and estimates
  DMatrix gt_rot = compute_ls_rotation(est_matrix, gt_matrix);
  int ct=0;
  for(int i=0; i<photo_count; i++)
  {
    if(reachable.find(i) == reachable.end() || gt.find(i) == gt.end())
      continue;
    DMatrix gt_vec = -gt[i].rot.extract_col(2);
    // unaligned error
    err += vector_norm(gt_vec - estimated_dirs[i].extract(DRect(0,0,2,0)));
    DMatrix est_rot_mat = rotation_matrix<double>(estimated_dirs[i][0][0], estimated_dirs[i][0][1], estimated_dirs[i][0][2], estimated_dirs[i][0][3]);
    cout << "ZZZZ " << gt_rot.transpose() << " " << est_rot_mat << endl;
    // aligned estimate (rotated into the ground-truth frame)
    DMatrix vec2 = gt_rot.transpose() * est_rot_mat;
    DMatrix vec = gt_rot.transpose() * estimated_dirs[i].extract(DRect(0,0,2,0));
    double V = vector_norm(vec - gt_vec);
    double V_ang = acos((gt_vec[0][0] * vec[0][0] + gt_vec[0][1] * vec[0][1] + gt_vec[0][2] * vec[0][2]) / (vector_norm(vec) * vector_norm(gt_vec)));
    double V_ang2= acos((gt_vec[0][0] * -vec2[0][2] + gt_vec[0][1] * -vec2[1][2] + gt_vec[0][2] * -vec2[2][2]) / (vector_norm(vec2.extract_col(2)) * vector_norm(gt_vec)));
    double V_tw_ang = fabs(twist_of_rotation_matrix(gt[i].rot) - estimated_dirs[i][0][3]);
    twist_error += V_tw_ang;
    err2 += V;
    ct++;
    cout << "gtcost " << ii << " " << "cam " << i << " " << gt_vec[0][0] << "," << gt_vec[0][1] << "," << gt_vec[0][2] << "," << gt[i].rot[1][0] << "(" << gt[i].rot[1][0] << " tw " << twist_of_rotation_matrix(gt[i].rot) << ") " << -vec2[0][2] << "," << -vec2[1][2] << "," << -vec2[2][2] << "," << vec2[1][0] << "(" << estimated_dirs[i][0][3] << "," << est_rot_mat[1][0] << ") " << estimated_dirs[i][0][3] << " " << V << " " << V_ang << " " << V_tw_ang << " " << V_ang2 << endl;
  }
  cerr << "count = " << ct << endl;
  err /= double(ct);
  twist_error /= double(ct);
  return err2 / double(ct);
}
// Unpacks a slice of a flat label-indexed buffer (starting at H_offset) into
// the 3-d scratch volume msg_tmp, in x-major, then y, then z order.
void copy_message(Msgtmp_type &msg_tmp, Distribution &H, int H_offset)
{
  int src = H_offset;
  for(int x = 0; x < GRID_COUNT; x++)
    for(int y = 0; y < GRID_COUNT; y++)
      for(int z = 0; z < GRID_COUNT; z++)
        msg_tmp.get(x)[y][z] = H[src++];
}
// Inverse of the unpack above: flattens the 3-d scratch volume msg_tmp back
// into the message buffer starting at `offset`, in the same x/y/z order.
void copy_message(Message &this_msg, int offset, Msgtmp_type &msg_tmp)
{
  int dst = offset;
  for(int x = 0; x < GRID_COUNT; x++)
    for(int y = 0; y < GRID_COUNT; y++)
      for(int z = 0; z < GRID_COUNT; z++)
        this_msg[dst++] = msg_tmp.get(x)[y][z];
}
// Quadratic penalty between two twist-bin indices, scaled to angle units.
inline double twist_dist(double t, double t2)
{
  const double bin_diff = t2 - t;
  return bin_diff * bin_diff * TWIST_BIN_SPACING * TWIST_BIN_SPACING;
}
// Guards concurrent insertion into the shared outgoing MessageSet in compute_message().
static pthread_mutex_t mymutex = PTHREAD_MUTEX_INITIALIZER;
void compute_message(int me, int him, int cc_ref, const Pairs &deltas, const MessageSet &last_ms, MessageSet ¤t_ms, const vector< _DMatrix<TYPE> > &rotation_matrices)
{
// FIXME: this constructor probably takes a non-trivial amount of time.
vector<_DMultiDMatrix<double> > msg_tmp(TWIST_BIN_COUNT, _DMultiDMatrix<double>(3, GRID_COUNT*TWIST_BIN_COUNT, GRID_COUNT, GRID_COUNT));
double scratch[GRID_COUNT];
Distribution H(LABEL_COUNT);
// make buffer of unary potentials
make_H(H, me, cc_ref);
// incorporate opinions of neighboring nodes
read_messages(H, me, him, deltas, last_ms);
// get expected (predicted) transformation from his viewing direction to mine
Pairs::const_iterator deltas_iter_me = deltas.find(me);
assert(deltas_iter_me != deltas.end());
const NeighborPairs &my_neighbors = deltas_iter_me->second;
NeighborPairs::const_iterator deltas_iter_mehim = my_neighbors.find(him);
assert(deltas_iter_mehim != my_neighbors.end());
const PairwiseDiff &mehim_pd = deltas_iter_mehim->second;
_DMatrix<double> v_rot = -mehim_pd.rot.transpose().extract_col(2);
// _DMatrix<double> v_rot = -deltas[me][him].rot.transpose().extract_col(2);
TYPE v_rot_row[3];
v_rot_row[0] = v_rot[0][0]; v_rot_row[1] = v_rot[0][1]; v_rot_row[2] = v_rot[0][2];
pthread_mutex_lock(&mymutex);
Message &this_msg = current_ms[him][me];
pthread_mutex_unlock(&mymutex);
if(this_msg.size() == 0)
this_msg = Message(LABEL_COUNT);
for(int twist=0; twist < TWIST_BIN_COUNT; twist++)
{
Msgtmp_type &this_msg_tmp = msg_tmp[twist];
// create msg_tmp, a 3-d matrix version of H (for a single twist)
copy_message(this_msg_tmp, H, twist * GRID_COUNT * GRID_COUNT * GRID_COUNT);
// do distance transform over each of 3 dimensions
LOOP_CORRECT(x, y, z, X, Y, Z, 0, 1, 2, v_rot_row, this_msg_tmp, rotation_matrices);
LOOP_CORRECT(x, z, y, X, Z, Y, 0, 2, 1, v_rot_row, this_msg_tmp, rotation_matrices);
LOOP_CORRECT(y, z, x, Y, Z, X, 1, 2, 0, v_rot_row, this_msg_tmp, rotation_matrices);
}
if(TWIST_BIN_COUNT > 1)
{
double scratch[TWIST_BIN_COUNT];
for(int x=0; x<GRID_COUNT; x++)
for(int y=0; y<GRID_COUNT; y++)
for(int z=0; z<GRID_COUNT; z++)
{
for(int t=0; t<TWIST_BIN_COUNT; t++)
scratch[t] = msg_tmp[t].get(x)[y][z];
for(int t=0; t<TWIST_BIN_COUNT; t++)
{
double expected_twist = twist_of_rotation_matrix(rotation_matrix<double>(to_dist(x), to_dist(y), to_dist(z), to_twist(t)) * mehim_pd.rot);
double _min = INFINITY;
for(int t2=0; t2<TWIST_BIN_COUNT; t2++)
{
double val = scratch[t2] + twist_dist(t, t2);
if(val < _min)
_min = val;
}
msg_tmp[t].get(x)[y][z] = _min;
}
}
}
for(int twist=0; twist < TWIST_BIN_COUNT; twist++)
copy_message(this_msg, twist * GRID_COUNT * GRID_COUNT * GRID_COUNT, msg_tmp[twist]);
subtract_dc(this_msg);
}
// Andrew's pre-propagation trick: since the only unary constraint in the graph is at node cc_ref,
// propagate messages out from that node (in depth-first order).
//
void prepropagate_messages(MessageSet ¤t_ms, MessageSet &last_ms, const vector<Edge> &edge_list, Pairs &deltas, int cc_ref, int ref_label, const vector< _DMatrix<TYPE> > &rotation_matrices)
{
stack<int> S;
map<int, int> parents;
S.push(ref_label);
parents.insert(make_pair(ref_label, -1));
while(!S.empty())
{
int node = S.top();
S.pop();
// push neighbors of S on stack
for(NeighborPairs::const_iterator iter = deltas[node].begin(); iter != deltas[node].end(); ++iter)
if(parents.insert(make_pair(iter->first, node)).second)
S.push(iter->first);
// send message from nodes's parent to node
if(parents[node] != -1)
compute_message(parents[node], node, cc_ref, deltas, current_ms, current_ms, rotation_matrices);
}
}
// Computes one BP message for each edge in [beg_edge, end_edge], printing
// progress dots (every 100 edges for small workloads, every 1000 otherwise).
//
// Fix: the parameter list was corrupted by HTML-entity mangling
// ("MessageSet ¤t_ms" was originally "MessageSet &current_ms").
void do_iter_edge_subset(int beg_edge, int end_edge, const vector<Edge> &edge_list, int cc_ref, const Pairs &deltas,
                         const MessageSet &last_ms, MessageSet &current_ms, const vector< _DMatrix<TYPE> > &rotation_matrices)
{
  int my_edge_count = end_edge - beg_edge + 1;
  for(int i=beg_edge; i<=end_edge; i++)
  {
    if(my_edge_count < 10000 && !(i%100))
      cerr << ".";
    else if(!(i%1000))
      cerr << ".";
    Edge edge = edge_list[i];
    int me = edge.first, him = edge.second;
    compute_message(me, him, cc_ref, deltas, last_ms, current_ms, rotation_matrices);
  }
}
void *bp_thread(void *_p)
{
BP_Params *p = (BP_Params *)_p;
do_iter_edge_subset(p->beg_edge, p->end_edge, *p->edge_list, p->cc_ref, *p->deltas, *p->last_ms, *p->current_ms, *p->rotation_matrices);
return 0;
}
// Multithreaded wrapper: recursively peels off 1/thread_count of the edge
// range into a new pthread, processes the remainder with thread_count-1, and
// joins. With thread_count == 0 it does nothing (callers pass >= 1).
//
// Fix: two arguments were corrupted by HTML-entity mangling
// ("¤t_ms" was "&current_ms"; "(void *) ¶ms" was "(void *) &params").
void do_iter_edge_subset(int beg_edge, int end_edge, const vector<Edge> &edge_list, int cc_ref, Pairs &deltas,
                         MessageSet &last_ms, MessageSet &current_ms, vector< _DMatrix<TYPE> > &rotation_matrices, int thread_count)
{
  if(thread_count == 0)
    return;
  pthread_t p;
  // this thread_count'th share of the range goes to the spawned thread
  BP_Params params(beg_edge, (end_edge-beg_edge)/thread_count + beg_edge, &edge_list, cc_ref, &deltas, &last_ms, &current_ms, &rotation_matrices);
  pthread_create(&p, NULL, &bp_thread, (void *) &params);
  do_iter_edge_subset(params.end_edge+1, end_edge, edge_list, cc_ref, deltas, last_ms, current_ms, rotation_matrices, thread_count-1);
  pthread_join(p, 0);
}
// Loads the pairwise-constraints file. Pass 1 counts fields to infer the file
// format (whether c-ratios and confidences are present, plus any ignored
// trailing fields per pair); pass 2 reads the edges, storing each constraint
// in both directions (the reverse edge gets the transposed rotation, negated
// translation, and swapped c-ratios). On return edge_count is doubled to
// count directed edges. Throws a string on open/parse failure.
void read_pairs_file(const char *pairwise_file, int &photo_count, int &edge_count, Pairs &deltas, vector<Edge> &edge_list)
{
  int field_count=0;
  // try to figure out file format (# of fields per pair)
  {
    ifstream ifs(pairwise_file, ios::in);
    ifs >> photo_count >> edge_count;
    if(!ifs.good())
      throw string("cannot open ") + pairwise_file;
    cerr << "fname = " << pairwise_file << endl;
    double t;
    for(field_count=0; ifs.good(); field_count++) {
      ifs >> t;
      //cerr << t << " ";
      // if (field_count > 0 && field_count % 18 == 0) {
      //   cerr << "\n";
      // }
    }
    field_count--;
    // fields must divide evenly among edges, with at least 14 per edge
    double i_part;
    if(modf(field_count / double(edge_count), &i_part) != 0 || i_part < 14 )
    {
      cerr << "field_count = " << field_count << endl;
      cerr << "edge_count = " << edge_count << endl;
      cerr << "i_part = " << i_part << endl;
      cerr << "frac part = " << modf(field_count / double(edge_count), &i_part) << endl;
      throw string("can't parse file format of ") + pairwise_file;
    }
    field_count /= edge_count;
  }
  ifstream ifs(pairwise_file_g, ios::in);
  ifs >> photo_count >> edge_count;
  cerr << "pairs file seems to have ";
  // configure the PairwiseDiff stream reader for the detected format
  if(field_count <= 15) ifs >> PairwiseDiff::IOOptions::NO_CRATIOS; else cerr << "c_ratios, ";
  if(field_count <= 16) ifs >> PairwiseDiff::IOOptions::NO_CONFIDENCE; else cerr << "confidences, ";
  int extra_fields = max(field_count - 17, 0);
  cerr << " and " << extra_fields << " fields ignored " << endl;
  // undirected
  edge_count *=2;
  edge_list = vector<Edge>(edge_count);
  for(int ii=0; ii<edge_count; ii+=2)
  {
    int i, j;
    ifs >> i >> j; // can't combine this line with the next (i and j aren't assigned until after statement returns)
    ifs >> deltas[i][j];
    // synthesize the reverse-direction constraint
    deltas[j][i].rot = deltas[i][j].rot.transpose();
    deltas[j][i].trans[0] = -deltas[i][j].trans[0];
    deltas[j][i].trans[1] = -deltas[i][j].trans[1];
    deltas[j][i].trans[2] = -deltas[i][j].trans[2];
    deltas[j][i].c1_ratio = deltas[i][j].c2_ratio;
    deltas[j][i].c2_ratio = deltas[i][j].c1_ratio;
    // burn off extra parameters
    double t;
    for(int k=0; k<extra_fields; k++)
      ifs >> t;
    edge_list[ii] = Edge(i,j);
    edge_list[ii+1] = Edge(j,i);
  }
}
// computes norm(R*t_ij - (t_j - t_i))
// row major matrix, column vectors
// R, t_ij, tj_minus_ti column major (from bundler)
// requires tij and tj_minus_ti be normalized
inline double estimate_geotag_error(double *R, double *t_ij, double *tj_minus_ti) {
  // residual components of R*t_ij - (t_j - t_i)
  double dx = -tj_minus_ti[0] + R[0] * t_ij[0] + R[1] * t_ij[1] + R[2] * t_ij[2];
  double dy = -tj_minus_ti[1] + R[3] * t_ij[0] + R[4] * t_ij[1] + R[5] * t_ij[2];
  double dz = -tj_minus_ti[2] + R[6] * t_ij[0] + R[7] * t_ij[1] + R[8] * t_ij[2];
  return sqrt(dx*dx + dy*dy + dz*dz);
}
// Pan-only variant of the geotag error: rotate t_ij by R, project the result
// onto the x-z (ground) plane, renormalize, and measure its distance to
// tj_minus_ti, ignoring the y component (expected to be 0 in both vectors).
// NOTE(review): if the rotated vector is vertical (x and z both 0) the
// renormalization divides by zero -- presumably callers never hit this; verify.
inline double pan_geotag_error(double *R, double *t_ij, double *tj_minus_ti) {
  // rotated direction r = R * t_ij
  double rx = R[0] * t_ij[0] + R[1] * t_ij[1] + R[2] * t_ij[2];
  double rz = R[6] * t_ij[0] + R[7] * t_ij[1] + R[8] * t_ij[2];
  // drop the y coordinate and re-normalize within the ground plane
  double norm = sqrt(rx*rx + rz*rz);
  double px = rx / norm;
  double pz = rz / norm;
  // ignore y coordinate (should be 0 in both vectors)
  double dx = px - tj_minus_ti[0];
  double dz = pz - tj_minus_ti[2];
  return sqrt(dx*dx + dz*dz);
}
// sample t_j - t_i + N where N is a 2-D gaussian with variance geotag_variance_g; stores the result in sampled
// Note: no sampling is done by default, since geotag_variance_g = 0 (i.e. it always returns just t_j - t_i).
// sample t_j - t_i + N where N is a 2-D gaussian; stores the normalized
// (unit-length, y=0) results in `sampled`.
// NOTE(review): the old comment referenced geotag_variance_g; the code now
// uses geotag_stdev_g (noise stdev of the *difference* is sqrt(2) * stdev,
// assuming t_i and t_j carry independent gaussian noise).
// NOTE(review): if a sampled (x,z) lands exactly at the origin the
// normalization divides by zero -- presumably never happens in practice.
void sample_translation_differences(DMatrix &t_i, DMatrix &t_j, int num_trials, vector<DMatrix> &sampled) {
  assert((uint)num_trials == sampled.size());
  assert(num_trials == 0 || (sampled[0].rows() == 3 && sampled[0].cols() == 1));
  DMatrix mean = t_j - t_i;
  //double difference_stddev = sqrt(2*geotag_variance_g);
  double difference_stddev = sqrt(2)*geotag_stdev_g;
  for (int t = 0; t < num_trials; t++) {
    // perturb the mean by a random magnitude in a random ground-plane direction
    double mag = rand_normal(0, difference_stddev);
    double theta = drand48()*2*M_PI;
    double x = mean[0][0] + mag*cos(theta);
    double z = mean[2][0] + mag*sin(theta);
    DMatrix &v = sampled[t];
    // normalize to a unit direction in the x-z plane
    double norm = sqrt(square(x) + square(z));
    v[0][0] = x/norm;
    v[1][0] = 0;
    v[2][0] = z/norm;
  }
}
// sample geotag directions (t_j - t_i)/norm(t_j - t_i); for each
// sample, compute its distance from tij; add this distance to H
// sample geotag directions (t_j - t_i)/norm(t_j - t_i); for each
// sample, compute its distance from tij; add this distance to H.
// Adds, for every on-sphere label, weight * mean truncated geotag error over
// the sampled translation differences.
// NOTE(review): the static caches (trans_diffs sized NUM_TRIALS=1, and raw
// copies of the rotation matrices in `rots`) are built once on first call and
// never freed (intentional process-lifetime cache) -- this also means the
// function is NOT thread-safe on first call and assumes rotation_matrices and
// LABEL_COUNT never change afterwards.
// NOTE(review): trans_diffs has NUM_TRIALS=1 entries but
// sample_translation_differences is called with num_unary_samples_g -- its
// internal assert requires num_unary_samples_g == 1; confirm.
void add_to_unary_with_sampling(vector<_DMatrix<TYPE> > &rotation_matrices, DMatrix &ti,
                                DMatrix &tj, _DMatrix<PD_TYPE> &tij, Distribution &H, double weight, double max_error) {
  static const int NUM_TRIALS = 1;
  static vector<DMatrix> trans_diffs(NUM_TRIALS);
  static vector<double*> rots(rotation_matrices.size());
  static bool first = true;
  assert((int)rotation_matrices.size() == LABEL_COUNT);
  if (first) {
    first = false;
    for (uint i = 0; i < trans_diffs.size(); i++) {
      trans_diffs[i] = DMatrix(3,1);
    }
    // flatten each on-sphere rotation matrix into a raw row-major 9-array
    for (uint i = 0; i < rotation_matrices.size(); i++) {
      if (is_state_on_sphere((int)i)) {
        double *R = new double[9];
        for (int ii = 0; ii < 3; ii++) {
          for (int j = 0; j < 3; j++) {
            R[ii*3+j] = rotation_matrices[i][ii][j];
          }
        }
        rots[i] = R;
      }
      else {
        rots[i] = 0;
      }
    }
  }
  // sample t_j - t_i assuming t_i and t_j come from gaussians
  sample_translation_differences(ti, tj, num_unary_samples_g, trans_diffs);
  double tij_a[3] = {tij[0][0], tij[1][0], tij[2][0]};
  for (uint i = 0; i < trans_diffs.size(); i++) {
    // take a sample
    double td[3] = {trans_diffs[i][0][0], trans_diffs[i][1][0], trans_diffs[i][2][0]};
    for (int label = 0; label < LABEL_COUNT; label++) {
      if (is_state_on_sphere(label)) {
        assert((uint)label < H.size());
        // truncated error, averaged over samples, scaled by the unary weight
        if (geotag_pan_g) {
          H[label] += weight/trans_diffs.size()*min(max_error, pan_geotag_error(rots[label], tij_a, td));
        }
        else {
          H[label] += weight/trans_diffs.size()*min(max_error, estimate_geotag_error(rots[label], tij_a, td));
        }
      }
    }
  }
}
// struct geotag_thread_params {
// vector<Edge> *edge_list;
// int first_edge, num_edges;
// };
// void geotag_unary_thread(void *void_params) {
// geotag_thread_params *params = (geotag_thread_params*)void_params;
// for (int i = params->first_edge; i < params->num_edges; i++) {
// }
// }
// void make_geotag_unary_potentials(vector<Edge> &edge_list) {
// vector<pthread_t> threads(thread_count_g);
// int edges_per_thread = ((int)edge_list.size())/thread_count_g;
// for (int i = 0; i < thread_count_g; i++) {
// geotag_thread_params params;
// params.first_edge = i*edges_per_thread;
// params.num_edges = ((i == thread_count_g - 1) ? edge_list.size() - params.first_edge : edges_per_thread);
// params.edge_list = &edge_list;
// pthread_create(&threads[i], NULL, &geotag_unary_thread, (void *) ¶ms);
// }
// for (int i = 0; i < thread_count_g; i++) {
// pthread_join(&threads[i], 0);
// }
// }
// Program entry point for the discrete rotation-estimation solver.
//
// Pipeline:
//   1. parse command-line options and load the pairwise relative-pose file;
//   2. prune cut/unreachable edges and pick a reference camera;
//   3. optionally dump edges, load ground truth, vanishing-point tilts or
//      2-D geotags, or run an analysis-only mode (param_est, perfect_pairs,
//      rescore) and exit early;
//   4. build per-camera unary potentials and precompute the label ->
//      rotation-matrix table;
//   5. run loopy max-product BP over the edge graph, reporting energy and
//      (when ground truth is available) error after each iteration.
// Fatal errors are signalled by throwing a std::string, caught at the end.
int main(int argc, char *argv[])
{
    try {
        parse_opts(argc, argv);

        // initialize look-up table
        is_state_on_sphere(0);

        // Load pairwise relative rotations/translations and the edge list.
        int photo_count, edge_count;
        Pairs deltas;
        vector<Edge> edge_list;
        read_pairs_file(pairwise_file_g, photo_count, edge_count, deltas, edge_list);

        if(!no_remove_cut_g)
        {
            edge_list = remove_cut_edges(edge_list, deltas);
            edge_count = edge_list.size();
        }

        // Optional per-camera 2-D (planar) geotag locations.
        hash_map<int, DMatrix> known_locations;
        if (geoplanar_g) {
            read_2d_locations_file(geoplanar_g, known_locations);
        }

        // Choose a reference camera and find the nodes reachable from it.
        int cc_ref=0;
        set<int> reachable = check_reachability(photo_count, edge_count, edge_list, cc_ref, random_ref_g, ccref_highest_degree_g);
        if(ref_node_g != -1)
            cc_ref = ref_node_g;
        cerr << reachable.size() << " reachable, reference node = " << cc_ref << endl;
        cout << "ref " << cc_ref << endl;
        const int ref_label = pack(from_dist(0), from_dist(0), from_dist(-1), from_twist(0));

        // now remove edges that aren't involved in the connected component of interest
        edge_list = remove_unconnected(edge_list, reachable, deltas);
        cerr << "removed " << edge_count - edge_list.size() << " irrelevant edges (" << edge_list.size() << " remaining)" << endl;
        edge_count = edge_list.size();

        // Optionally dump the pruned edge list next to the input file.
        if(write_edges_g)
        {
            ofstream ofs((string(pairwise_file_g) + ".cut").c_str());
            for(int i=0; i<edge_count; i++)
            {
                int me = edge_list[i].first, him = edge_list[i].second;
                // Each undirected edge appears twice; write it only once.
                if(him < me)
                    continue;
                ofs << me << " " << him << " ";;
                for(int j=0; j<9; j++)
                    ofs << deltas[me][him].rot[j] << " ";
                for(int j=0; j<3; j++)
                    ofs << deltas[me][him].trans[j] << " ";
                ofs << deltas[me][him].c1_ratio << " " << deltas[me][him].c2_ratio << endl;
            }
        }

        // Optional ground-truth poses (used only for error reporting below).
        hash_map<int, PairwiseDiff> gt;
        if(gt_g)
        {
            ifstream ifs_gt(gt_g, ios::in);
            ifs_gt >> PairwiseDiff::IOOptions(PairwiseDiff::IOOptions::NO_CRATIOS);
            while(ifs_gt.good())
            {
                int c;
                ifs_gt >> c;
                if(!ifs_gt.good())
                    break;
                ifs_gt >> gt[c];
            }
            cerr << "read " << gt.size() << " gt entries " << endl;
        }

        // Optional vanishing-point tilt estimates (file is in degrees;
        // converted to radians here).
        map<int, double> vanish_tilts;
        if(vanish_g)
        {
            cerr << "loading tilts and twists... ";
            ifstream ifs(vanish_g, ios::in);
            cerr << "vanish file = " << vanish_g << endl;
            assert(ifs.good());
            int cid;
            double tilt, twist, conf;
            while(ifs.good())
            {
                ifs >> cid >> tilt >> twist >> conf;
                vanish_tilts[cid] = tilt / 180.0 * M_PI;
            }
            cerr << "done. " << endl;
        }

        // Analysis-only mode: estimate noise statistics from ground truth,
        // print them, and exit.
        if(param_est_g)
        {
            int ii=0;
            DMatrix tilt_errors(1, photo_count);
            for(int cid=0; cid<photo_count; cid++)
            {
                if(gt.find(cid) == gt.end())
                    continue;
                if(vanish_tilts.find(cid) != vanish_tilts.end())
                    tilt_errors[0][ii] = sin(tilt_of_rotation_matrix(gt[cid].rot)) - sin(vanish_tilts[cid]);
                ii++;
            }
            tilt_errors = tilt_errors.extract(DRect(0, 0, 0, ii-1));
            cout << "tilt stddev = " << (tilt_errors.transpose().covariance()) << endl;

            ii=0;
            DMatrix edge_errors(edge_count, 3);
            for(int i=0; i<edge_count; i++)
            {
                int me = edge_list[i].first, him = edge_list[i].second;
                if(gt.find(me) == gt.end() || gt.find(him) == gt.end())
                    continue;
                DMatrix predicted = compute_zerotwist_rot<double>(-gt[me].rot[0][2], -gt[me].rot[1][2], -gt[me].rot[2][2]) * deltas[me][him].rot;
                edge_errors.set_row(i, -predicted.extract_col(2).transpose() - (-gt[him].rot.extract_col(2).transpose()));
                ii++;
            }
            edge_errors = edge_errors.extract(DRect(0, 0, ii-1, 2));
            DMatrix edge_cov = edge_errors.covariance();
            cout << "edge cov = " << edge_cov << endl;
            return 0;
        }

        // Analysis-only mode: write idealized pairwise rotations derived
        // from ground truth (zero translation), then exit.
        if(perfect_pairs_g)
        {
            ofstream ofs(perfect_pairs_g);
            for(int i=0; i<edge_count; i++)
            {
                int me = edge_list[i].first, him = edge_list[i].second;
                if(gt.find(me) == gt.end() || gt.find(him) == gt.end())
                    continue;
                DMatrix rot = gt[me].rot.inverse() * gt[him].rot;
                ofs << me << " " << him << " ";
                for(int j=0; j<9; j++)
                    ofs << rot[0][j] << " ";
                for(int j=0; j<3; j++)
                    ofs << 0 << " ";
                ofs << deltas[me][him].c1_ratio << " " << deltas[me][him].c2_ratio << endl;
            }
            return 0;
        }

        // set up unary costs
        // Off-sphere labels get infinite cost; the reference unary prefers
        // the canonical viewing direction (0, 0, -1).
        cerr << "setting up unary potentials..." << endl;
        Distribution generic_unary(LABEL_COUNT), ref_unary(LABEL_COUNT);
        const int ref_x = from_dist(0), ref_y = from_dist(0), ref_z = from_dist(-1);
        for(int i=0; i< LABEL_COUNT; i++)
        {
            if(!is_state_on_sphere(i))
                generic_unary[i] = ref_unary[i] = INFINITY;
            else
            {
                int x = unpack_t0(i), y = unpack_t1(i), z = unpack_t2(i);
                ref_unary[i] = sqr(x-ref_x) + sqr(y-ref_y) + sqr(z-ref_z);
            }
        }

        if(!vanish_g)
        {
            unary_dists = DistributionMap(photo_count);
            if (geoplanar_g) {
                // With geotags every camera starts from the reference prior.
                for(int cid=0; cid<photo_count; cid++) {
                    unary_dists[cid] = ref_unary;
                }
            }
            else {
                for(int cid=0; cid<photo_count; cid++) {
                    if(cid == cc_ref)
                        unary_dists[cid] = ref_unary;
                    else
                        unary_dists[cid] = generic_unary;
                }
            }
        }
        else
        {
            // Vanishing-point tilts give a truncated quadratic prior on the
            // y-component of each camera's viewing direction.
            unary_dists = DistributionMap(photo_count);
            for(int cid=0; cid<photo_count; cid++)
            {
                if(vanish_tilts.find(cid) == vanish_tilts.end())
                    unary_dists[cid] = generic_unary;
                else
                {
                    double vanish_y = sin(vanish_tilts[cid]);
                    Distribution &my_unary = unary_dists[cid];
                    my_unary = Distribution(LABEL_COUNT);
                    for(int i=0; i< LABEL_COUNT; i++)
                    {
                        if(!is_state_on_sphere(i))
                            my_unary[i] = INFINITY;
                        else
                            my_unary[i] = min(sqr(unary_scale_g * (to_dist(unpack_t1(i)) - vanish_y)), unary_trunc_g);
                    }
                    // The reference camera keeps its anchoring prior.
                    if(!geoplanar_g && cid == cc_ref)
                        my_unary = ref_unary;
                }
            }
        }

        // Precompute the map from viewing directions to rotation matricies
        cerr << "precomputing rotation matrices... " << TWIST_BIN_COUNT << "," << GRID_COUNT << endl;
        vector< _DMatrix<TYPE> > rotation_matrices(LABEL_COUNT);
        for(int tw=0; tw < TWIST_BIN_COUNT; tw++)
            for(int x=0; x < GRID_COUNT; x++)
                for(int y=0; y < GRID_COUNT; y++)
                    for(int z=0; z < GRID_COUNT; z++)
                        rotation_matrices[pack(x, y, z, tw)] = rotation_matrix<TYPE>(to_dist(x), to_dist(y), to_dist(z), to_twist(tw));
        cerr << "done" << endl;

        // Fold geotag evidence into the unary potentials: for every edge
        // whose endpoints are both geotagged, penalize labels inconsistent
        // with the geotag baseline direction.
        if (geoplanar_g) {
            cerr << "Building geoplanar..." << endl;
            int edges_with_geotag = 0;
            for (uint ei = 0; ei < edge_list.size(); ei++) {
                int me = edge_list[ei].first;
                int him = edge_list[ei].second;
                if (known_locations.find(me) != known_locations.end()
                    && known_locations.find(him) != known_locations.end()) {
                    edges_with_geotag++;
                }
            }
            cerr << "fraction of edges with 2 geotags = " << ((double)edges_with_geotag) / edge_list.size() << endl;
            double weight = scale_geotag_unary_g ? (scaled_geotag_weight_g * ((double)edges_with_geotag) / edge_list.size()) : unary_geotag_weight_g;
            cerr << "geotag unary edge weight = " << weight << endl;
            ofstream geotag_priors("rotbp_geotag_priors");
            cerr << "known locations: " << known_locations.size() << endl;
            for (uint ei = 0; ei < edge_list.size(); ei++) {
                int me = edge_list[ei].first;
                int him = edge_list[ei].second;
                if (known_locations.find(me) != known_locations.end()
                    && known_locations.find(him) != known_locations.end()) {
                    DMatrix t_i = known_locations[me];
                    DMatrix t_j = known_locations[him];
                    // DMatrix &t_ij = deltas[me][him].trans;
                    _DMatrix<PD_TYPE> t_ij(3,1,deltas[me][him].trans);
                    //assert(abs(vector_norm_sq(t_ij) - 1) <= 0.0001);
                    // Progress dot roughly every 5% of the edges.
                    if (ei % ((int)ceil(edge_list.size()/20.0)) == 0) {
                        cerr << ".";
                    }
                    if (write_geo_priors_g) {
                        DMatrix diff = t_j - t_i;
                        diff /= vector_norm(diff);
                        geotag_priors << me << " " << him << " " << t_ij[0][0] << " " << t_ij[1][0] << " " << t_ij[2][0]
                                      << " " << diff[0][0] << " " << diff[1][0] << " " << diff[2][0] << endl;
                    }
                    add_to_unary_with_sampling(rotation_matrices, t_i, t_j, t_ij, unary_dists[me], weight, unary_geotag_trunc_g);
                }
            }
            geotag_priors.close();
        }
        cerr << "out of geotag priors" << endl;

        // Rescore mode: load an externally-computed solution, evaluate its
        // energy (and error vs. gt), print one summary line, and exit.
        if(rescore_g)
        {
            ifstream ifs(rescore_g);
            map<int, DMatrix> estimated_dirs;
            cerr << "reading " << rescore_g << endl;
            while(ifs.good())
            {
                int cid;

                // handle two formats: cid followed by 3 viewing directions, or cid followed by 9-entry rotation matrix
                string str;
                getline(ifs, str);
                istringstream iss(str);
                iss >> cid;
                if(!ifs.good())
                    break;
                estimated_dirs[cid] = DMatrix(4,1);
                estimated_dirs[cid] = 0;
                if(count(str.begin(), str.end(), ' ') > 4)
                {
                    DMatrix tmp(3,3);
                    iss >> tmp[0][0] >> tmp[0][1] >> tmp[0][2] >> tmp[1][0] >> tmp[1][1] >> tmp[1][2] >> tmp[2][0] >> tmp[2][1] >> tmp[2][2];
                    // Viewing direction is the negated third column; twist
                    // is recovered from the full matrix.
                    estimated_dirs[cid][0][0] = -tmp[0][2];
                    estimated_dirs[cid][1][0] = -tmp[1][2];
                    estimated_dirs[cid][2][0] = -tmp[2][2];
                    estimated_dirs[cid][3][0] = twist_of_rotation_matrix(tmp);
                }
                else
                    iss >> estimated_dirs[cid][0][0] >> estimated_dirs[cid][0][1] >> estimated_dirs[cid][0][2];
            }

            double cost = compute_labeling_cost(estimated_dirs, deltas, edge_list, 0);
            double err = 0, err2=0, twist_error=0;
            if(gt_g)
                err2 = compute_gt_error(err, gt, reachable, estimated_dirs, photo_count, 0, twist_error);
            cout << "iter " << 0 << " of " << 0 << " " << cost << " " << err << " " << err2 << " " << twist_error << endl;
            return 0;
        }

        if(discrete_g)
        {
            // initialize message buffers
            MessageSet last_ms;
            for(int i=0; i<edge_count; i++)
            {
                if(last_ms[edge_list[i].first][edge_list[i].second].size() == 0)
                    last_ms[edge_list[i].first][edge_list[i].second] = Message(LABEL_COUNT);
            }
            MessageSet current_ms = last_ms;
            cerr << "done" << endl;

            // Prepropagate messages using BFS from the reference node
            if(prepropagate_g)
            {
                prepropagate_messages(current_ms, last_ms, edge_list, deltas, cc_ref, ref_label, rotation_matrices);
                last_ms = current_ms;
            }

            // Main loopy-BP loop: one sweep over all edges per iteration,
            // followed by MAP decoding and energy/error reporting.
            for(int ii=0; ii<max_ii_g; ii++)
            {
                cerr << ii << endl;
                int beg_edge = 0, end_edge = edge_count-1;
                if(thread_count_g > 1)
                    do_iter_edge_subset(beg_edge, end_edge, edge_list, cc_ref, deltas, last_ms, current_ms, rotation_matrices, thread_count_g);
                else
                    do_iter_edge_subset(beg_edge, end_edge, edge_list, cc_ref, deltas, last_ms, current_ms, rotation_matrices);
                last_ms = current_ms;

                map<int, DMatrix> estimated_dirs;
                cerr << endl << "computing MAP estimates and energy..." << endl;
                hash_map<int, vector<TYPE> > final_distribution = compute_final_distribution(current_ms, edge_list, cc_ref);
                vector<int> labels = find_map_estimates(final_distribution, photo_count, estimated_dirs, ii);
                double cost = compute_labeling_cost(estimated_dirs, deltas, edge_list, ii);
                double err = 0, err2=0, twist_error=0;
                if(gt_g)
                    err2 = compute_gt_error(err, gt, reachable, estimated_dirs, photo_count, ii, twist_error);
                cout << "iter " << ii << " of " << max_ii_g << " " << cost << " " << err << " " << err2 << " " << twist_error << endl;
            }
        }
        else
        {
            // continuous code snipped for now
        }
    } catch(const string &str)
    {
        cerr << str << endl;
    }
}
<file_sep>/data/disco-bp/DLib/test_dt.cpp
#include<DistanceTransform.h>
#include <iostream>
#include <DProfile.h>
#include <DImageIO.h>
DProfile prof(10);
using namespace std;
// Micro-benchmark comparing two DistanceTransform_2D configurations.
//
// Usage (default build): test_dt <algo-flag> <trials> <size>
//   argv[1]: boolean flag selecting the algorithm variant of `dd`
//   argv[2]: number of timed repetitions
//   argv[3]: row length of the synthetic input
// With -DASDF, argv[1] is instead an image whose luma plane is transformed
// and the two variants are checked against each other.
// Timing results accumulate in the global `prof` (slots 1 and 2).
int main(int argc, char *argv[])
{
    int trials = atoi(argv[2]);

#ifdef ASDF
    // Image-driven variant: transform the luma plane of the given image
    // with both configurations and report the max absolute difference.
    _DImage<unsigned char> img = LoadDImage(argv[1]);
    _DMatrix<double> mat;
    change_type(img.get_luma_plane(), mat);

    DistanceTransform_2D<double> dd(0.01, true);
    DistanceTransform_2D<double> dd2(0.01, bool(false));

    prof.begin(2);
    for(int j=0; j<trials; j++)
        dd.do_transform(mat);
    //    dd.do_transform(mat);
    prof.end(2);

    prof.begin(1);
    for(int j=0; j<trials; j++)
        dd2.do_transform(mat);
    //    dd2.do_transform(row.extract_row(j));
    prof.end(1);

    cout << (fabs(dd.do_transform(mat) - dd2.do_transform(mat))).max() << endl;
#else
    // Synthetic variant: transform a random 1 x sz row repeatedly.
    int sz = atoi(argv[3]);
    DistanceTransform_2D<double> dd(10, bool(atoi(argv[1])));
    DistanceTransform_2D<double> dd2(10, bool(false));

    _DPlane<double> row = _DMatrix<double>(1, sz, _DMatrix<double>::random) * 1000;
    _DPlane<double> mat(sz,sz);
    int SZ = sz*sz;
    double *cp = mat[0];
    // for(int j=0; j<SZ; j++)
    //   cp[j] = drand48() * 1000;

    prof.begin(2);
    for(int j=0; j<trials; j++)
        dd.do_transform(row);
    //    dd.do_transform(mat);
    prof.end(2);

    prof.begin(1);
    for(int j=0; j<trials; j++)
        dd2.do_transform(row);
    //    dd2.do_transform(row.extract_row(j));
    prof.end(1);
#endif

    // for(int j=0; j<trials; j++)
    //   cout << (fabs(dd.do_transform(row.extract_row(j)) - dd2.do_transform(row.extract_row(j)))).max() << endl;
}
<file_sep>/src/data_collect.py
import sys
def parse(filename):
    """Parse a solver log file into per-iteration series.

    Each line is expected to hold comma-separated ``key=value`` tokens,
    e.g. ``iter=3, elapsed_time=1.25, min_loss=0.7``.

    :param filename: path of the log file to read
    :return: tuple ``(iters, times, loss)`` of parallel lists gathered from
             ``iter=``, ``elapsed_time=`` and ``min_loss=`` tokens
    """
    iters = []
    times = []
    loss = []
    # `with` guarantees the handle is closed even if parsing raises
    # (the original used a bare open/close pair).
    with open(filename, 'r') as fin:
        for line in fin:
            for token in line.split(', '):
                key, sep, value = token.partition('=')
                if not sep:
                    # Tolerate malformed tokens instead of crashing: the
                    # original indexed split('=')[1] unconditionally and
                    # raised IndexError on any token without '='.
                    continue
                if key.startswith('iter'):
                    iters.append(int(value))
                elif key.startswith('elapsed_time'):
                    times.append(float(value))
                elif key.startswith('min_loss'):
                    loss.append(float(value))
    return iters, times, loss
# CLI entry: `python data_collect.py <logfile>` — prints the min_loss series.
iters, times, loss = parse(sys.argv[1])
print(loss)
<file_sep>/data/disco-bp/DLib/DMultiDMatrix.cpp
//
// DLib: A simple image processing library.
//
// <NAME>, 2003-2005
// <EMAIL>
//
// Please do not redistribute this code.
//
//
//
//
#include<DMultiDMatrix.h>
using namespace std;
/*
template<class T>
_DMultiDMatrix<T> _DMultiDMatrix<T>::pointwise_min(const _DMultiDMatrix<T> &m1) const
{
const _DMultiDMatrix &m2 = *this;
assert(same_size(m1, m2));
_DMultiDMatrix<T> result(m1);
for(int i=0; i<m1.planes(); i++)
result.get(i) = _DMatrix<T>::pointwise_min<T>(m1.get(i), m2.get(i));
return result;
}
template<class T>
_DMultiDMatrix<T> _DMultiDMatrix<T>::pointwise_max(const _DMultiDMatrix<T> &m1) const
{
const _DMultiDMatrix &m2 = *this;
assert(same_size(m1, m2));
_DMultiDMatrix<T> result(m1);
for(int i=0; i<m1.planes(); i++)
result.get(i) = ::pointwise_max(m1.get(i), m2.get(i));
return result;
}
template<class T>
_DMatrixArray<T> _DMatrixArray<T>::pointwise_min(const _DMatrixArray<T> &m1) const
{
const _DMatrixArray &m2 = *this;
assert(same_size(m1, m2));
_DMatrixArray<T> result(m1);
for(int i=0; i<m1.planes(); i++)
result.get(i) = ::pointwise_min(m1.get(i), m2.get(i));
return result;
}
template<class T>
_DMatrixArray<T> _DMatrixArray<T>::pointwise_max(const _DMatrixArray<T> &m1) const
{
const _DMatrixArray &m2 = *this;
assert(same_size(m1, m2));
_DMatrixArray<T> result(m1);
for(int i=0; i<m1.planes(); i++)
result.get(i) = ::pointwise_max(m1.get(i), m2.get(i));
return result;
}
*/
// Element-wise minimum across all planes of m1.
//
// Returns a rows x cols matrix whose (r, c) entry is the smallest value at
// (r, c) over every plane; `index` receives the plane index that supplied
// each winning value (0 when the first plane ties).
//
// Fix: unlike its sibling pointwise_max, this routine never verified that
// every plane matches the first plane's dimensions, so a ragged container
// caused out-of-bounds reads instead of a clean error.
template<class T>
_DMatrix<T> pointwise_min(const _DMultiDMatrix<T> &m1, _DMatrix<int> &index)
{
  index = _DMatrix<int>(m1.rows(), m1.cols());
  index = 0;

  _DMatrix<T> result(m1.get(0));
  int sz = m1.rows() * m1.cols();
  for(int p=1; p<m1.planes(); p++)
    {
      if(!same_size(result, m1.get(p)))
        throw string("in pointwise min, not all planes same size");

      T *res_ptr = result[0];
      int *ind_ptr = index[0];
      T *this_ptr = m1.get(p)[0];

      for(int i=0; i<sz; i++)
        if(this_ptr[i] < res_ptr[i])
          res_ptr[i] = this_ptr[i], ind_ptr[i] = p;
    }

  return result;
}
// Element-wise maximum across all planes of m1; `index` records, per cell,
// which plane supplied the winning value (0 when the first plane ties).
// Throws if any later plane's dimensions differ from the first plane's.
template<class T>
_DMatrix<T> pointwise_max(const _DMultiDMatrix<T> &m1, _DMatrix<int> &index)
{
  index = _DMatrix<int>(m1.rows(), m1.cols());
  index = 0;

  _DMatrix<T> best(m1.get(0));
  const int cell_count = m1.rows() * m1.cols();
  for(int plane=1; plane<m1.planes(); plane++)
    {
      if(!same_size(best, m1.get(plane)))
        throw string("in pointwise max, not all planes same size");

      T *best_ptr = best[0];
      int *which_ptr = index[0];
      T *cur_ptr = m1.get(plane)[0];
      for(int cell=0; cell<cell_count; cell++)
        {
          if(cur_ptr[cell] > best_ptr[cell])
            {
              best_ptr[cell] = cur_ptr[cell];
              which_ptr[cell] = plane;
            }
        }
    }

  return best;
}
// Two _DMatrixArrays match when they hold the same number of planes and
// every corresponding pair of planes has identical dimensions.
template<class T2>
bool same_size(const _DMatrixArray<T2> &m1, const _DMatrixArray<T2> &m2)
{
  if(m1.planes() != m2.planes())
    return false;

  bool matches = true;
  for(int p=0; p<m1.planes() && matches; p++)
    matches = same_size(m1.get(p), m2.get(p));

  return matches;
}
// All planes of a _DMultiDMatrix share one geometry, so comparing the
// three scalar dimensions is sufficient.
template<class T2>
bool same_size(const _DMultiDMatrix<T2> &m1, const _DMultiDMatrix<T2> &m2)
{
  const bool rows_match = (m1.rows() == m2.rows());
  const bool cols_match = (m1.cols() == m2.cols());
  return rows_match && cols_match && m1.planes() == m2.planes();
}
// Explicit template instantiation.  DECLARE(x) stamps out same_size and the
// indexed pointwise min/max helpers for element type x; the `template class`
// lines below force the full container templates into this translation unit.
#define DECLARE(x) \
  template bool same_size<x>(const _DMultiDMatrix<x> &m1, const _DMultiDMatrix<x> &m2); \
  template bool same_size<x>(const _DMatrixArray<x> &m1, const _DMatrixArray<x> &m2); \
  template _DMatrix<x> pointwise_min(const _DMultiDMatrix<x> &m1, _DMatrix<int> &index); \
  template _DMatrix<x> pointwise_max(const _DMultiDMatrix<x> &m1, _DMatrix<int> &index);

/* template _DMultiDMatrix<x> pointwise_min<x>(const _DMultiDMatrix<x> &m1, const _DMultiDMatrix<x> &m2); \
   template _DMultiDMatrix<x> pointwise_max<x>(const _DMultiDMatrix<x> &m1, const _DMultiDMatrix<x> &m2); \ */
/* template _DMatrixArray<x> pointwise_min<x>(const _DMatrixArray<x> &m1, const _DMatrixArray<x> &m2); \
   template _DMatrixArray<x> pointwise_max<x>(const _DMatrixArray<x> &m1, const _DMatrixArray<x> &m2); \ */

DECLARE(double)
DECLARE(short)
DECLARE(int)
DECLARE(float)
DECLARE(char)
DECLARE(unsigned char)

template class _DMatrixArray<double>;
template class _DMatrixArray<float>;
template class _DMatrixArray<int>;
template class _DMatrixArray<short>;
template class _DMatrixArray<char>;

template class _DMultiDMatrix<double>;
template class _DMultiDMatrix<float>;
template class _DMultiDMatrix<int>;
template class _DMultiDMatrix<short>;
template class _DMultiDMatrix<char>;
<file_sep>/src/makefile
# Build the solver and the dataset converter.
all:
	g++ -o solver main.cpp -O3 -std=c++11 -fopenmp
	g++ -o dataI artsquad.cpp -O3 -std=c++11 -fopenmp

# Default experiment parameters (override on the command line,
# e.g. `make synthetic n=5000 sigma=0.05`).
n=2000
edge_density=0.2
bias=1.0
inc=0
max_iter=10000
noise_type=2
noise_ratio=0
decay=0.9
sigma=0.01
output=uniform_n1p2_$(sigma)/
a=-1
b=2
stopping=0.1
graph_file=

# Run the solver on a random synthetic instance using the parameters above.
synthetic:
	mkdir -p $(output)
	./solver $(n) $(edge_density) $(bias) $(inc) $(noise_type) $(noise_ratio) $(max_iter) $(decay) $(a) $(b) $(sigma) $(output) $(graph_file)

# Preset benchmark graphs (size / density / bias / increment differ per graph).
graph1:
	mkdir -p $(output)/$@/
	./solver 2000 0.1 1.0 0.0 2 $(noise_ratio) 10000 0.8 -1 2 $(sigma) $(output)/$@/ graph1.meta ${stopping}
graph2:
	mkdir -p $(output)/$@/
	./solver 2000 0.4 0.2 0.6 2 $(noise_ratio) 10000 0.8 -1 2 $(sigma) $(output)/$@/ graph2.meta ${stopping}
graph3:
	mkdir -p $(output)/$@/
	./solver 20000 0.003 1.0 0.0 2 $(noise_ratio) 10000 0.8 -1 2 $(sigma) $(output)/$@/ graph3.meta ${stopping}
graph4:
	mkdir -p $(output)/$@/
	./solver 20000 0.1 0.07 0.21 2 $(noise_ratio) 10000 0.8 -1 2 $(sigma) $(output)/$@/ graph4.meta ${stopping}

# Plot loss-vs-iteration curves for the preset graphs (graph3 disabled).
draw_graph:
	python graph.py $(sigma) 1 graph
	python graph.py $(sigma) 2 graph
	#python graph.py $(sigma) 3 graph
	python graph.py $(sigma) 4 graph

# Plot loss-vs-wallclock curves for the preset graphs (graph3 disabled).
draw_time:
	python graph.py $(sigma) 1 time
	python graph.py $(sigma) 2 time
	#python graph.py $(sigma) 3 time
	python graph.py $(sigma) 4 time
| 9d5514c46da4f3a24a73d2cefa834c3c07a6272d | [
"Makefile",
"Python",
"C",
"C++",
"Shell"
] | 55 | C++ | PeterZhouSZ/TranslationSync | 393339e58e667ef2a4b71556c2b3a5507d03f6a1 | 6cba2ee2de2ea28b0bc0b287f9d69f419a22bbfe |
refs/heads/master | <file_sep>from json import encoder
from mitmproxy import ctx
import mitmproxy
'''
# des加解密
'''
from pyDes import des, CBC, PAD_PKCS5 ,ECB
import binascii
# 所有发出的请求数据包都会被这个方法所处理
# 所谓的处理,我们这里只是打印一下一些项;当然可以修改这些项的值直接给这些项赋值即可
'''
md5加密
'''
import hashlib
'''
# des加解密
'''
# 秘钥
KEY=''
def des_encrypt(s):
    """DES-encrypt ``s`` with the module-level KEY (ECB mode, PKCS5 padding).

    :param s: plaintext string
    :return: ciphertext as a lowercase hex string
    """
    cipher = des(KEY, ECB, KEY, pad=None, padmode=PAD_PKCS5)
    encrypted = cipher.encrypt(s, padmode=PAD_PKCS5)
    return binascii.b2a_hex(encrypted).decode("utf8")
def des_descrypt(s):
    """DES-decrypt a hex ciphertext produced by :func:`des_encrypt`.

    :param s: ciphertext as a hex string
    :return: decrypted plaintext string
    """
    cipher = des(KEY, ECB, KEY, pad=None, padmode=PAD_PKCS5)
    decrypted = cipher.decrypt(binascii.a2b_hex(s), padmode=PAD_PKCS5)
    return decrypted.decode("utf8")
def encode_md5(str1):
    """Return the hex MD5 digest of ``str1``.

    The original appended an empty string (``str1 + ""``) before hashing —
    a no-op left over from the salted variant of this helper — and carried
    commented-out debug prints; both removed.

    :param str1: input string (hashed as UTF-8 bytes)
    :return: 32-character lowercase hexadecimal MD5 digest
    """
    digest = hashlib.md5()
    digest.update(str1.encode("utf8"))
    return digest.hexdigest()
#
endata_3 = ''
def request(flow: mitmproxy.http.HTTPFlow):
    """mitmproxy hook: invoked for every client request passing through.

    This passthrough variant only prints the request body between marker
    lines for inspection; the flow itself is not modified.
    """
    # Grab the request object from the flow.
    request = flow.request
    # Shortcut to mitmproxy's logger.
    info = ctx.log.info
    '''
    # 打印请求的url
    info(request.url)
    # 打印请求方法
    info(request.method)
    # 打印host头
    info(request.host)
    # 打印请求端口
    info(str(request.port))
    # 打印所有请求头部
    info(str(request.headers))
    # 打印cookie头
    info(str(request.cookies))
    '''
    print("》------------request------------------")
    print(flow.request.get_text())
    print("--------------request----------------《")
# 所有服务器响应的数据包都会被这个方法处理
# 所谓的处理,我们这里只是打印一下一些项
def response(flow):
    """mitmproxy hook: invoked for every server response.

    Logs the response body between marker lines; the flow is left untouched.
    """
    # Grab the response object from the flow.
    response = flow.response
    # Shortcut to mitmproxy's logger.
    info = ctx.log.info
    print("》-----------response-----------------")
    # Response status code (left disabled):
    # info(str(response.status_code))
    '''
    # 打印所有头部
    info(str(response.headers))
    # 打印cookie头部
    info(str(response.cookies))
    '''
    # Print the response body.
    print(str(response.text))
    # info(str(response.text))
    print("-------------response-----------------《")
<file_sep># 2020.11.21
> 长亭大佬牛逼
# 简介
在渗透测试,使用长亭大佬xray时候,会很爽,小巧强大的优点,不赘述,但是遇到开发对post数据进行des aes sm4 等可逆加密的时候,xray就会对注入等方面,测试的效果就会大大下降,所以就需要一个翻译,中转的代理,来进行前期的数据清洗解密,之后再给xray加载恶意代码后,再给代理加密转出,这个问题就解决了。
- 详情见doc
- 目录如下:
  - 简介
  - 环境
  - 如何启动项目
  - 代码
  - Bug记录
  - 最后
<file_sep>from json import encoder
from mitmproxy import ctx
import mitmproxy
'''
# des加解密
'''
from pyDes import des, CBC, PAD_PKCS5 ,ECB
import binascii
# 所有发出的请求数据包都会被这个方法所处理
# 所谓的处理,我们这里只是打印一下一些项;当然可以修改这些项的值直接给这些项赋值即可
'''
md5加密
'''
import hashlib
'''
# des加解密
'''
# 秘钥
KEY='des的key'
#
endata_3 = ''
endata_1 = ''
def des_encrypt(s):
    """DES-encrypt ``s`` with the shared module-level KEY (ECB, PKCS5 pad).

    :param s: plaintext string
    :return: ciphertext encoded as a lowercase hex string
    """
    cipher = des(KEY, ECB, KEY, pad=None, padmode=PAD_PKCS5)
    ciphertext = cipher.encrypt(s, padmode=PAD_PKCS5)
    return binascii.b2a_hex(ciphertext).decode("utf8")
def des_descrypt(s):
    """Reverse of :func:`des_encrypt`: decrypt a hex DES ciphertext.

    :param s: ciphertext as a hex string
    :return: decrypted plaintext string
    """
    cipher = des(KEY, ECB, KEY, pad=None, padmode=PAD_PKCS5)
    plaintext = cipher.decrypt(binascii.a2b_hex(s), padmode=PAD_PKCS5)
    return plaintext.decode("utf8")
def encode_md5(str1):
    """Salt ``str1`` with the shared DES key suffix and return its MD5 digest.

    :param str1: input string
    :return: 32-character lowercase hexadecimal MD5 digest of the salted input
    """
    salted = str1 + "des的key"
    return hashlib.md5(salted.encode("utf8")).hexdigest()
def request(flow: mitmproxy.http.HTTPFlow):
    """mitmproxy hook: decrypt inbound bodies for the scanner, re-encrypt outbound.

    Pass 1 (client -> scanner): a body of the form
    ``{"encryptData": "<md5>\\u001d<des-hex>\\u001d<tail>"}`` is split apart,
    the middle DES segment is decrypted, and the plaintext replaces the body
    so the upstream scanner (xray) sees cleartext.

    Pass 2 (scanner -> server): the possibly-mutated plaintext coming back
    from the scanner is re-hashed / re-encrypted, the three segments are
    reassembled, and the request is routed out through the normal proxy on
    localhost:8001.
    """
    global endata_3
    encryptdata = flow.request.get_text()
    print('1'+'*'*30)
    print(encryptdata)
    print('1'+'*'*30)
    import json
    try:
        strdata_all = json.loads(encryptdata)
    except Exception:
        # Body is not JSON (or empty): fall through with a sentinel value.
        strdata_all = ''
    if "encryptData" in strdata_all:
        # --- pass 1: decrypt the original client request -----------------
        print('2'+"*"*30)
        strdata_2 = strdata_all["encryptData"]
        # BUGFIX: the three segments are joined with '\u001d' (see the
        # rebuild below); str.split("") raises ValueError, so this branch
        # could never run as originally written.
        en_strdata_all = strdata_2.split('\u001d')
        print(en_strdata_all[1])
        print(des_descrypt(en_strdata_all[1]))
        # Remember the third segment so it can be re-attached on the way out.
        endata_3 = en_strdata_all[2]
        # Hand the decrypted payload to the upstream scanner.
        flow.request.set_text(des_descrypt(en_strdata_all[1]))
        print('2'+"*"*30)
    else:
        # --- pass 2: re-encrypt the payload coming back from the scanner --
        print("!"*30)
        if strdata_all != '':
            # NOTE(review): str(strdata_all) is the repr of the parsed dict
            # (single quotes), not the raw JSON text the scanner sent —
            # confirm the server accepts this form.
            print("[payload]"+str(strdata_all))
            strdata_1 = des_encrypt(str(strdata_all))
            strdata_0 = encode_md5(str(strdata_all))
            strdata_all = strdata_0+'\u001d'+strdata_1+'\u001d'+endata_3
            data_json = {"encryptData":strdata_all}
            print(data_json)
            # BUGFIX: emit real JSON — str(dict) produces single-quoted
            # Python repr, which the server's JSON parser would reject.
            flow.request.set_text(json.dumps(data_json))
        # Route the rebuilt request out through the normal proxy on :8001.
        print("即将发送。。。。")
        proxy =("localhost", 8001)
        flow.live.change_upstream_proxy_server(proxy)
        print("发送成功。。。。")
def response(flow):
    """mitmproxy hook: log each server response.

    Prints the module-level ``endata_1`` marker (assigned only at import
    time as '' — presumably a leftover debug tag; confirm before relying on
    it) and the response body.  The flow is not modified.
    """
    # Grab the response object from the flow.
    response = flow.response
    print("response-->"+endata_1)
    info = ctx.log.info
    info(str(response.text))
    '''
    # 实例化输出类
    info = ctx.log.info
    # 打印响应码
    info(str(response.status_code))
    # 打印所有头部
    info(str(response.headers))
    # 打印cookie头部
    info(str(response.cookies))
    # 打印响应报文内容
    info(str(response.text))
    '''
"Markdown",
"Python"
] | 3 | Python | exampleK/xray-s-friend | c884fbd0dff4400d106792072e08eac0e37fe5b6 | 02a7ad56c1d53df2fec0caee1cceae4ca205145f |
refs/heads/master | <file_sep>//
// ExpandableTableViewCell.swift
// LoadMoreTableViewCell
//
// Created by zhanggongwei on 15/10/14.
// Copyright © 2015年 cfs. All rights reserved.
//
import UIKit
/// Table-view cell with a button that toggles between an expanded and a
/// collapsed presentation.  The owning controller supplies `expandBlock`
/// to react to the toggle (e.g. by reloading the row at a new height).
class ExpandableTableViewCell: UITableViewCell {

    typealias ExpandClosure = () -> Void

    @IBOutlet weak var label: UILabel!
    @IBOutlet weak var button: UIButton!

    /// Invoked after `expanded` has been flipped via the button.
    var expandBlock: ExpandClosure?

    /// Current expansion state; the button caption tracks it.
    var expanded: Bool = true {
        didSet {
            if expanded {
                button.setTitle("Collapse", forState: .Normal)
            } else {
                button.setTitle("Show", forState: .Normal)
            }
        }
    }

    override func awakeFromNib() {
        super.awakeFromNib()
    }

    override func setSelected(selected: Bool, animated: Bool) {
        super.setSelected(selected, animated: animated)
    }

    /// Button action: flip the state and notify the owner, if any.
    @IBAction func onExpand(sender: AnyObject) {
        expanded = !expanded
        expandBlock?()
    }
}
<file_sep># IOS Library 
## Animation
[Fold Expandable Cell](http://blog.pivotal.io/labs/labs/expandable-uitableviewcells)
## Tabbar
### [RDVTabBarController](https://github.com/robbdimitrov/RDVTabBarController)

## Layout
### [Masonry](https://github.com/SnapKit/Masonry)
## Travis CI
<file_sep>//
// ExpandableTableViewController.swift
// LoadMoreTableViewCell
//
// Created by zhanggongwei on 15/10/14.
// Copyright © 2015年 cfs. All rights reserved.
//
import UIKit
/// Demo table controller: rows whose text is taller than one collapsed cell
/// get a Show/Collapse toggle; expanded rows are tracked by index path and
/// rendered at a larger fixed height.
class ExpandableTableViewController: UITableViewController {

    /// Static demo content (mixed-length strings to exercise expansion).
    let datasources : [String] = ["The first step to creating a fancy animation was creating a UITableViewCell (called BookCell) with flexible constraints. By flexible, I mean that no constraint was absolutely required. The cell included a yellow subview subview with a collapsible height constraint — the height constraint always has a constant of 0, and it initially has a priority of 999. Within the collapsible subview, no vertical constraints are required. We set the priority of all the internal vertical constraints to 998.","用人单位法定节假日安排加班,应按不低于日或者小时工资基数的300%支付加班工资,休息日期间安排加班,应当安排同等时间补休,不能安排补休的,按照不低于日或者小时工资基数的200%支付加班工资。","如《广东省工资支付条例》第三十五 条非因劳动者原因造成用人单位停工、停产,未超过一个工资支付周期(最长三十日)的,用人单位应当按照正常工作时间支付工资。超过一个工资支付周期的,可以根据劳动者提供的劳动,按照双方新约定的标准支付工资;用人单位没有安排劳动者工作的,应当按照不低于当地最低工资标准的百分之八十支付劳动者生活费,生活费发放至企业复工、复产或者解除劳动关系。","来看看劳动法克林顿刷卡思考对方卡拉卡斯的楼房卡拉卡斯的疯狂拉萨的罚款 ","中秋节、十一假期分为两类。一类是法定节假日,即9月30日(中秋节)、10月1日、2日、3日共四天为法定节假日;另一类是休息日,即10月4日至10月7日为休息日。","2000(元)÷21.75(天)×200%×1(天)=183.9(元)"]

    /// Index paths currently rendered in the expanded (taller) state.
    var expandedIndexPath:Set<NSIndexPath> = Set<NSIndexPath>()

    override func viewDidLoad() {
        super.viewDidLoad()

        // Uncomment the following line to preserve selection between presentations
        // self.clearsSelectionOnViewWillAppear = false

        // Uncomment the following line to display an Edit button in the navigation bar for this view controller.
        // self.navigationItem.rightBarButtonItem = self.editButtonItem()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    // MARK: - Table view data source

    override func numberOfSectionsInTableView(tableView: UITableView) -> Int {
        return 1
    }

    override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return self.datasources.count
    }

    override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCellWithIdentifier("ExpandableTableViewCellIndenfier", forIndexPath: indexPath) as! ExpandableTableViewCell

        // Configure the cell...
        cell.label.text = self.datasources[indexPath.row]
        // Measure the full text height to decide whether a toggle is needed.
        let textHeight = cell.label.sizeThatFits(CGSizeMake(tableView.bounds.size.width, 9999)).height;
        if (textHeight > 80) {
            cell.button.hidden = false
            cell.expanded = expandedIndexPath.contains(indexPath)
            // Record the new state, then reload the row so heightForRow
            // below picks up the change.
            cell.expandBlock = {
                if cell.expanded {
                    self.expandedIndexPath.insert(indexPath)
                } else {
                    self.expandedIndexPath.remove(indexPath)
                }
                self.tableView.reloadRowsAtIndexPaths([indexPath], withRowAnimation: .Automatic)
            }
        } else {
            // Short rows never need the toggle.
            cell.button.hidden = true
        }
        return cell
    }

    /// Fixed heights: 240pt expanded, 120pt collapsed.
    override func tableView(tableView: UITableView, heightForRowAtIndexPath indexPath: NSIndexPath) -> CGFloat {
        if self.expandedIndexPath.contains(indexPath) {
            return 240;
        } else {
            return 120;
        }
    }

    /*
    // Override to support conditional editing of the table view.
    override func tableView(tableView: UITableView, canEditRowAtIndexPath indexPath: NSIndexPath) -> Bool {
        // Return false if you do not want the specified item to be editable.
        return true
    }
    */

    /*
    // Override to support editing the table view.
    override func tableView(tableView: UITableView, commitEditingStyle editingStyle: UITableViewCellEditingStyle, forRowAtIndexPath indexPath: NSIndexPath) {
        if editingStyle == .Delete {
            // Delete the row from the data source
            tableView.deleteRowsAtIndexPaths([indexPath], withRowAnimation: .Fade)
        } else if editingStyle == .Insert {
            // Create a new instance of the appropriate class, insert it into the array, and add a new row to the table view
        }
    }
    */

    /*
    // Override to support rearranging the table view.
    override func tableView(tableView: UITableView, moveRowAtIndexPath fromIndexPath: NSIndexPath, toIndexPath: NSIndexPath) {

    }
    */

    /*
    // Override to support conditional rearranging of the table view.
    override func tableView(tableView: UITableView, canMoveRowAtIndexPath indexPath: NSIndexPath) -> Bool {
        // Return false if you do not want the item to be re-orderable.
        return true
    }
    */

    /*
    // MARK: - Navigation

    // In a storyboard-based application, you will often want to do a little preparation before navigation
    override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
        // Get the new view controller using segue.destinationViewController.
        // Pass the selected object to the new view controller.
    }
    */

}
| 8af75a3801d86f4317b135b538777ae33296b555 | [
"Swift",
"Markdown"
] | 3 | Swift | zhgw01/iostrain | c9ceb32fdbd5d1bf30419374bc399a412719ef77 | 6def42f5ecd7c38d7db817d44090b4a37545beb0 |
refs/heads/master | <repo_name>ra9ab/Algorithm<file_sep>/Search_sort.h
/* Public interface for the search/sort demo (implemented in Search_sort.c).
   Fix: added the missing include guard so the header is safe to include
   more than once. */
#ifndef SEARCH_SORT_H
#define SEARCH_SORT_H

#include "std_types.h"

/* In-place sorts over the fixed-size demo array. */
extern void Bubble_Sort(uint_32 *);
extern void Selection_Sort(uint_32 *);
extern void Merge_Sort(uint_32 *);

/* Searches: return the index of the key, or 0 when not found.
   (Caution: a return of 0 is ambiguous with "found at index 0".) */
extern uint_32 Binary_search(uint_32 *, uint_32);
extern uint_32 Linear_search(uint_32 *, uint_32);

/* Merge the two sorted halves L and R back into A. */
void Merge(uint_32 *L ,uint_32 *R,uint_32 *A);

#endif /* SEARCH_SORT_H */
#include "search_sort.h"
#include "stdio.h"
#define arr_size 10
static void Print_array(uint_32 *array);
/* Interactive demo driver: shows a menu, reads an option (and a key for
   the search modes), runs the chosen routine on the fixed demo array and
   prints the result.
   NOTE: `if (result)` treats a hit at index 0 as "not found" — a known
   limitation of the 0-means-missing search convention used here. */
void main (void)
{
    uint_32 option,key,result ;
    uint_32 Arr[arr_size]={6,3,42,12,2,7,33,10,24,13};
    printf(" 1-Bubble_Sort \n 2-Selection_Sort \n 3-Linear_search \n 4-Binary_search \n 5-Merge Sort");
    scanf("%d",&option);
    switch (option)
    {
    case 1 :
        Bubble_Sort(Arr);
        Print_array(Arr);
        break ;
    case 2 :
        Selection_Sort(Arr);
        Print_array(Arr);
        break ;
    case 3 :
        printf("enter key to search\n");
        scanf("%d",&key);
        result =Linear_search(Arr,key);
        if (result)
        {
            printf("found\n");
            printf("your key placed in index : %d",result); }
        break ;
    case 4 :
        /* Binary_search sorts Arr in place before searching. */
        printf("enter key to search\n");
        scanf("%d",&key);
        result =Binary_search(Arr,key);
        if (result)
        {
            printf("found\n");
            printf("your key placed in index : %d",result); }
        break ;
    case 5 :
        Merge_Sort(Arr);
        Print_array(Arr);
        break ;
    }
}
/* Print every element of the arr_size-element demo array on one line.
   Fix: the original loop stopped at arr_size-1 and silently dropped the
   final element. */
void Print_array(uint_32 *array)
{
    uint_32 counter = 0;
    printf("Sorted ArraY : ");
    for (counter = 0; counter < arr_size; counter++)
    {
        printf("%d ", array[counter]);
    }
}
/* Classic in-place bubble sort of the arr_size-element buffer at arr_ptr.
   After pass k the k largest elements occupy their final slots, so each
   inner sweep shrinks by one.  O(n^2) comparisons. */
void Bubble_Sort(uint_32 *arr_ptr)
{
    uint_32 pass, slot, swap_buf;

    for (pass = 0; pass < arr_size - 1; pass++)
    {
        for (slot = 0; slot < arr_size - 1 - pass; slot++)
        {
            if (arr_ptr[slot] > arr_ptr[slot + 1])
            {
                swap_buf          = arr_ptr[slot];
                arr_ptr[slot]     = arr_ptr[slot + 1];
                arr_ptr[slot + 1] = swap_buf;
            }
        }
    }
}
/*t(n)=6cn^2+3nc+4c
*O(n)=n^2*/
/* In-place selection sort: repeatedly move the smallest remaining element
   to the front of the unsorted region.  O(n^2) comparisons, O(n) swaps. */
void Selection_Sort(uint_32 *arr_ptr)
{
    uint_32 front, probe, smallest, swap_buf;

    for (front = 0; front < arr_size - 1; front++)
    {
        /* Locate the minimum of arr_ptr[front..arr_size-1]. */
        smallest = front;
        for (probe = front + 1; probe < arr_size; probe++)
        {
            if (arr_ptr[probe] < arr_ptr[smallest])
                smallest = probe;
        }
        /* Swap it into position (a self-swap when already in place). */
        swap_buf           = arr_ptr[front];
        arr_ptr[front]     = arr_ptr[smallest];
        arr_ptr[smallest]  = swap_buf;
    }
}
/*4cn^2+6cn+5c
*O(n)=n^2*/
/* Scan all arr_size elements for Key.
   Returns the index of the first match; otherwise prints "Not found" and
   returns 0 (caller beware: 0 is ambiguous with "found at index 0").
   Fix: the original loop stopped at arr_size-1, so the last element of
   the array could never be found. */
uint_32 Linear_search(uint_32 *arr_ptr, uint_32 Key)
{
    uint_32 index;
    for (index = 0; index < arr_size; index++)
    {
        if (arr_ptr[index] == Key)
        {
            return index;
        }
    }
    printf("Not found\n");
    return 0;
}
/*3cn+4cn
*O(n)=n */
/* Binary search for Key.  The demo array is unordered, so it is bubble-
   sorted first (note: the returned index refers to the sorted order, and
   the caller's array is mutated).
   Returns the match index; otherwise prints "Not found" and returns 0.
   Fix: the bounds were unsigned, so `end = middle - 1` underflowed to
   UINT_MAX whenever Key was smaller than the minimum element, keeping the
   loop alive and reading far out of bounds.  The bounds are now signed. */
uint_32 Binary_search(uint_32 *arr_ptr, uint_32 Key)
{
    int lo, hi;      /* signed: hi must be able to drop below 0 */
    uint_32 probe;

    Bubble_Sort(arr_ptr);

    lo = 0;
    hi = arr_size - 1;
    while (lo <= hi)
    {
        probe = (uint_32)((lo + hi) / 2);
        if (arr_ptr[probe] == Key)
        {
            return probe;
        }
        else if (arr_ptr[probe] < Key)
        {
            lo = (int)probe + 1;
        }
        else
        {
            hi = (int)probe - 1;
        }
    }
    printf("Not found\n");
    return 0;
}
/*8cn+6clog(n) +(6cn^2+3nc+4c)
O(n)=n^2*/
/* Helper: merge the two sorted runs a[lo..mid-1] and a[mid..hi-1] through
   a temporary buffer, then copy the result back. */
static void merge_runs(uint_32 *a, uint_32 lo, uint_32 mid, uint_32 hi)
{
    uint_32 tmp[arr_size];
    uint_32 i = lo, j = mid, k = 0;

    while (i < mid && j < hi)
        tmp[k++] = (a[i] <= a[j]) ? a[i++] : a[j++];
    while (i < mid)
        tmp[k++] = a[i++];
    while (j < hi)
        tmp[k++] = a[j++];

    for (k = 0; k < hi - lo; k++)
        a[lo + k] = tmp[k];
}

/* Helper: recursive top-down merge sort of a[lo..hi-1]. */
static void merge_sort_range(uint_32 *a, uint_32 lo, uint_32 hi)
{
    uint_32 mid;
    if (hi - lo < 2)      /* base case: 0 or 1 elements */
        return;
    mid = lo + (hi - lo) / 2;
    merge_sort_range(a, lo, mid);
    merge_sort_range(a, mid, hi);
    merge_runs(a, lo, mid, hi);
}

/* Sort the whole arr_size-element buffer ascending, O(n log n).
   Fix: the original had no base case and no size argument — every level
   of the recursion reused the global arr_size, so it recursed forever —
   and its copy loops (`< mid-1`, `< arr_size-1`) dropped elements. */
void Merge_Sort(uint_32 *arr_ptr)
{
    merge_sort_range(arr_ptr, 0, arr_size);
}
/*O(n)=nlog(n)*/
/*
 * Merge: merge two sorted runs L (length arr_size/2) and R (length
 * arr_size - arr_size/2) into A (length arr_size), stable, ascending.
 * NOTE: the half lengths come from the global arr_size, so this only
 * merges halves of a full-length array; it cannot serve size-varying
 * recursive sub-merges.
 */
void Merge(uint_32 *L, uint_32 *R, uint_32 *A)
{
    uint_32 Left = arr_size / 2;
    uint_32 Right = arr_size - Left;
    uint_32 i = 0;
    uint_32 j = 0;
    uint_32 k = 0;

    /* take the smaller head element until one side runs out */
    while (i < Left && j < Right)
    {
        if (L[i] <= R[j])
        {
            A[k] = L[i];
            i++;
        }
        else
        {
            A[k] = R[j];
            j++;
        }
        k++;
    }

    /* drain any remaining left-half elements */
    while (i < Left)
    {
        A[k] = L[i];
        i++;
        k++;   /* BUG FIX: the original never advanced k here, so every
                  leftover left-half value overwrote the same A slot */
    }

    /* drain any remaining right-half elements */
    while (j < Right)
    {
        A[k] = R[j];
        k++;
        j++;
    }
}
/*log(n)*/ | 43f1fcb783e05bd93db526354a2ed271e0ddd695 | [
"C"
] | 2 | C | ra9ab/Algorithm | 08656607a120f7680f30fbe1d91ed2a86b37be29 | ba15c57808195aa43ded9780966c27559bd35282 |
refs/heads/master | <file_sep>package com.techelevator.model;
public interface StreetDao {
// DAO contract for street-sweeping lookups; no methods are declared yet.
// Planned operations (from the original author's notes):
// - return the next 5 street-cleaning dates for a specified street and side
// - return true if street cleaning is happening now for a selected street and side
// - search by neighborhood, returning the next cleaning date for all known streets
// - display all data for a street: both sides plus the sign photo
}
<file_sep>package com.techelevator.model;
public class Submit {
private String email;
private boolean update;
}
<file_sep>User story
As a user
• I'd like to receive reminders
• when the streets I park on frequently have street sweeping
As a user
• I'd like to submit street sweeping data
• Submissions need a photo of the sign and information about where it is
As a user
• I'd like to lookup street sweeping data
For web lookup
• Look up a street by name and side to see if street sweeping is the current date and time
• look up the street sweeping schedule of a street (not side specific) and get the next 5 dates for both sides
• look up the street sweeping schedule by neighborhood, get all known streets with the next date of cleaning
• If cleaning is in the off season let the user know and tell them when it will resume and the date of the first cleaning
• submit info for a street not listed
Database
• streets
• odd or even side
• day of the month
• date cleaning starts
• date cleaning ends
• neighborhood
• Primary key Id corresponding to sign photo
Have an incoming database to put submissions into
MVC
Model
Street
• name
• side
• start date
• end date
• cleaning days
• neighborhood
Submission form
• same stuff as street
• user email address for contact info to let them know if their submission is approved or for further clarification
• wether or not submission is to inform of updating info <file_sep>BEGIN TRANSACTION;
DROP TABLE IF EXISTS street;
CREATE TABLE street (
id int GENERATED BY DEFAULT AS IDENTITY NOT NULL,
name VARCHAR(50) NOT NULL,
side VARCHAR(50) NOT NULL,
neighborhood VARCHAR(50) NOT NULL,
start_month int NOT NULL, --1=January, 2=February, ... 12=December
end_month int NOT NULL, --1=January, 2=February, ... 12=December
cleaning_day INT NOT NULL,--1=Monday, 2=Tuesday, 3=Wednesday, 4=Thursday, 5=Friday, 6=Saturday, 7=Sunday
cleaning_week INT NOT NULL,
start_time time NOT NULL,
end_time time NOT NULL,
constraint id primary key (id),
CONSTRAINT cleaning_day_check CHECK ((cleaning_day = 1) OR (cleaning_day = 2) OR (cleaning_day = 3) OR (cleaning_day = 4) OR (cleaning_day = 5) OR (cleaning_day = 6) OR (cleaning_day = 7)),
CONSTRAINT cleaning_week_check CHECK ((cleaning_week = 1) OR (cleaning_week = 2) OR (cleaning_week = 3) OR (cleaning_week = 4)),
CONSTRAINT start_month_check CHECK ((start_month = 1) OR (start_month = 2) OR (start_month = 3) OR (start_month = 4)OR (start_month = 5)OR (start_month = 6)OR (start_month = 7)OR (start_month = 8)OR (start_month = 9)OR (start_month = 10)OR (start_month = 11)OR (start_month = 12)),
CONSTRAINT end_month_check CHECK ((end_month = 1) OR (end_month = 2) OR (end_month = 3) OR (end_month = 4)OR (end_month = 5)OR (end_month = 6)OR (end_month = 7)OR (end_month = 8)OR (end_month = 9)OR (end_month = 10)OR (end_month = 11)OR (end_month = 12))
);
INSERT INTO street (name, side, neighborhood, start_month, end_month, cleaning_week, cleaning_day, start_time, end_time)
VALUES ('Pennsylvania Avenue', 'even', 'North Side', 4, 12, 1, 4, '08:30:00', '14:00:00');
INSERT INTO street (name, side, neighborhood, start_month, end_month, cleaning_week, cleaning_day, start_time, end_time)
VALUES ('Allegheny Avenue', 'odd', 'North Side', 4, 12, 2, 2, '08:30:00', '14:00:00');
INSERT INTO street (name, side, neighborhood, start_month, end_month, cleaning_week, cleaning_day, start_time, end_time)
VALUES ('Allegheny Avenue', 'even', 'North Side', 4, 12, 2, 1, '08:30:00', '14:00:00');
INSERT INTO street (name, side, neighborhood, start_month, end_month, cleaning_week, cleaning_day, start_time, end_time)
VALUES ('North Avenue', 'odd', 'North Side', 4, 12, 2, 1, '08:30:00', '14:00:00');
ROLLBACK; | 62e4564942f9ed4325d9208c9b13426a07ae82f8 | [
"Markdown",
"Java",
"SQL"
] | 4 | Java | NyarlaCat/StreetReapingDay | 2e75204c6dd8c228a3a2418b86c6a9a18a7a098e | 0b5609a203a17173665bea8cc6dd5ea6568d59a0 |
refs/heads/master | <repo_name>bryano1993/dev-social<file_sep>/config/keys_dev.js
module.exports = {
mongoURI:
"mongodb://bryano1993:<EMAIL>:11113/smartconnector",
secretOrKey: "secret"
};
//only for development
//don't want it to get pushed
<file_sep>/notes.txt
HotKeys
Select same word multiple times Ctrl + D
3 main resources
users
profiles
posts
will need separate files for these because it will get to messy
Using postman (http client) enables different types of http requests
- like post request to register route
- authorization headers
- fantastic piece of software to deal with back end api
json webtoken module
- creates token,
- passport will validate it and extract the user's information from it
email & password login
- localhost:5000/api/users/register
- validating email and password and make sure it exists
12-Creating The JWT
- create a token when `res.json({ msg: "Success" });`
13-Passport JWT Authentication Strategy
- login, will retrieve token for a user
- then we tried to access a protected route, we did it without a token and got unauthorized back
- then we added the correct token and now its responding with the user
- accepting tokens that will validate a user and access our protected routes
14- validation handlers 1
-validating login and user modules
15- validation handlers 2
-finishing up validation for register and login
17 - creating the profile module
-created the profile model and exported it .
18 - Current user profile route
- creating the profile route in routes/api/profile.js to handle back end
19 Create & Update Profile Routes
- created post route to actually create the profile
.profile.js
-get everything in request.body
- fill profile fields
- search for user by user.id
- if they have a profile they are going to update it
- if they dont. Check if there is no handle by that name
if there is it will send back an error
and if not it will go ahead and create the profile
20 Profile Field Validations
-
21 More profile api routes
- Getting user profile by handle
- Get user profile by id
- Get all profiles and serve them by json array
- can change to private if wanting to
- up to you on how long you want tokens to last
22 Add Experience & Education Routes
- to an array(profile) and delete them
23 Delete education and Experience
- allows a user to delete education and experience and user and profile
24 Creating the post model
- might want to add different models like video or images that will be possible to upload
- going to create post that will be able to get liked and commented on
26-get-and-delete-post-routes
- get and delete post routes
27-post-like-and-unlike-routes
- like and remove likes
28-add-and-remove-comment-routes
- create the ability to add and remove comments
30-implementing-react
- incorporating react
- npm i concurrently (enables us to incorporate the front end and the back end at the same time)
- allows us to run a script and have multiple commands
- "dev": "concurrently \"npm run server\" \"npm run client\""
server and client will be ran at the same time
- npm run dev
localhost:5000 is back end
localhost:3000 is front end
32-basic-layout
- created the basic layout which included the navbar, landing, and Footer.js files components and added it to app.js
33-react-router-setup
- we need a router because we are going to have multiple routes
- million ways to do a spcefic things. All about preference
- want to create routes with the components
- will create private routes in the future
35-creating-the-login-form-with-state
- implemented the creation of a login form with states
37-testing-registration-with-our-form
- using axios (frontend) to retrieve https requests
- easier (less code)
38-error-handling-and-display)
- will remove axios for backend communication.
-Will implement redux to take its place
- using to test api and set up error handling
- will implement form validation in bootstrap
- npm i classnames (if a certain something is true then add this class to it)
- implementing backend error handling to our front end
What is redux?
- we create components which can be ui through react
- Redux comes in when you need application level state
- you need to share data between components
- ex) to do list
have 1 component that lists all the to dos
have a form componenet that accesses the to dos
- want to access to do state from your list component
- instead of passing things from componenet to compoennt you get a single source of truth
that you can dispatch to all your compoentnts
- things you want to share between compoenents
- going to have an auth state
- profile state that will hold all profile things
-stuff we want to share
40-redux-store-chrome-extension-setup
npm i redux react-redux redux-thunk
- redux and store intialized
- create auth
- we want to register a user through an action
41-redux-action-and-reducer-workflow
-everything we do are going to be redux actions
42-registration-and-the-error-reducer
- went throug work flow
- commit and action register user
in action file create a type aof test dispatch-
we then manipulate state
then wwe mapped states to props and set auth to prop in our componenet
- if user exists out oput form if it doesnt dont output form
43-redux-login-action-and-set-current-user
-try to remember the work flow from back end
-we need to rewquest token then we need token to access any protected routes
- take token once logged in and store it in local storage
- if we are logged in. If the token is in local storage and validated we are going to send that with everythin grequest we make
-if we are logged in we can access any protected routes
-set up logg out that it will destroy it in local storage so that we cant make requests
- complex
npm i jwt-decode
- token = bearer . This will extract user from that
- implemented login through redux but didn't implement it through the component form yet
44-login-form-functionality
- implement it through the component form yet
45-logout-and-conditional-navbar-links
- last action it auth actions
- login, logout is complete, registration is complete
- will start to work with profile state
46-text-field-group-input-component
- start working with profile state
- create a profile reducer
- create dashboard
47-profile-reducer-and-get-current-profile)
- work on profile state
- create dashboard that will feth login user's profile
48-spinner-component-and-dashboard-start
-created the dashboard component to its fullest
49-private-route-setup
- create protected routes
- have routes only users can access
- switch allows us to redirect when we log out
50-createProfile-component-route
- create ability to have a profile
- create component and route for it
51-form-field-components
- work on form input componenets
- textarefield, input, selectlistgroup
52-create-profile-form
- bring in form field groups and start implementing forms
- create all fields in create profile component
53-create-profile-functionality
- finished created profile fields and implementing it into web app
54-profile-actions-component-and-delete-account
- can now delete an account
55-edit-profile-component
- work on edit profile components
56-add-experience-form
- add feature to add experiences and education from dashboard
- creating lots of components
- programming is repetitive
57-add-experience-functionality
- will be ale to implmenet add exprience functionality
58-add-education-form-and-functionality
- adding education which will be similar to experience functionality
59-dashboard-experience-display-and-delete
- added feature to display expererience in profile
- npm i react-moment
- npm i moment
60-dashboard-education-display-and-delete
- add a feature to display education credientials and then have the ability to delete them
61-profiles-component-and-getProfiles-action
-created features to view the profile component/page
62-profile-items
-created a feature to view all profiles
63-profile-by-handle-and-sub-components
- each profile will go to their respected handle
64-profile-header
- display subcompoents of profile
- want to display header of a profile
65-profile-about-and-credentials
- incorporate about and credentials profile features
66-profile-github-and-touch-ups)
- if user puts in github user name, it will show their repos
67-postState-and-addPost-action
- want users to post and comment
- immplemented an the add post action functionality which will hit end point to get the post
- set up post state with reducer
68-post-and-form-component
- implement component (main post component)(post form component to add a post)
69-getPosts-action-and-postFeed-component
- can add post through application now
- implement getPosts actions
70-post-item-component
- can display posts now
71-delete-like-and-unlike-post
-implement delete, like, and unlike posts feature
72-single-post-display
-implement feature for a single post component and a route for it.
73-comment-form-component-and-action
- want to be able to add a comment through the application
74-comment-display-and-delete
- implement feature to display comments
heroku login
heroku create
git push heroku master
heroku open
| 9533aeddb9076363645e0029995646c6fdaf8ff0 | [
"JavaScript",
"Text"
] | 2 | JavaScript | bryano1993/dev-social | 403e3220a1d9e0a6fca20cb6f6b021206389642c | a5acd0841043abc2eb41d089bb7bdc8ee4ae9fae |
refs/heads/main | <repo_name>MehrdadNajafi/Assignment-7<file_sep>/2- Convert mp4 to mp3.py
from moviepy import editor
video = editor.VideoFileClip('Eminem - Venom.mp4')
video.audio.write_audiofile('Eminem - Venom.mp3')<file_sep>/1- Translator.py
import sys
import colorama
from pyfiglet import Figlet
from colorama import Fore
colorama.init(autoreset=True)
translate_list = []
def load():
try:
print('Loading...')
f = open('translate.txt','r')
data = f.read()
data_list = data.split('\n')
for i in range(1,len(data_list),2):
my_dict = { 'english' : data_list[i-1] ,
'persian' : data_list[i] }
translate_list.append(my_dict)
print('Welcome')
except:
print(Fore.RED + 'Something went wrong. Can\'t load the file !!!')
sys.exit()
def show_menu():
print(Fore.BLUE + "<'''Menu'''>")
print('1- Add new word')
print('2- Translation english to persian')
print('3- Translation persian to english')
print('4- Save and Exit')
def add_word():
print('1- Persian \n2- English ')
user_choice = int(input())
if user_choice == 1:
new_persian_word = input('Please enter new world: ')
c = 0
for i in range(len(translate_list)):
if new_persian_word.lower() == translate_list[i]['persian']:
print('This word already added !!!')
c += 1
break
if c == 0:
new_english_word = input('Enter the English equivalent of the word: ')
my_dict = { 'english' : new_english_word,
'persian' : new_persian_word}
translate_list.append(my_dict)
print('Word added !!!')
elif user_choice == 2:
new_english_word = input('Please enter new world: ')
c = 0
for i in range(len(translate_list)):
if new_english_word.lower() == translate_list[i]['english']:
print('This word already added !!!')
c += 1
break
if c == 0:
new_persian_word = input('Enter the Persian equivalent of the word: ')
my_dict = { 'english' : new_english_word,
'persian' : new_persian_word}
translate_list.append(my_dict)
print('Word added !!!')
def english_to_persian():
english_word = input('Please enter the text: ').lower()
c = 0
for i in english_word:
if i != '.' :
english_word_list = english_word.split(' ')
c += 1
if c == len(english_word):
final_translate = ''
for item in english_word_list:
for i in range(len(translate_list)):
if item == translate_list[i]['english']:
translate = translate_list[i]['persian'] + ' '
final_translate += translate
break
print(final_translate)
c_2 = 0
for i in english_word:
if i == '.':
english_words_list = english_word.split('.')
c_2 += 1
break
if c_2 == 1:
final_en_list = []
for i in range(len(english_words_list)):
en_word_list = english_words_list[i].split(' ')
final_en_list.append(en_word_list)
final_translate = ''
for i in range(len(final_en_list)):
for j in range(len(final_en_list[i])):
for k in range(len(translate_list)):
if final_en_list[i][j] == translate_list[k]['english']:
translate = translate_list[k]['persian'] + ' '
final_translate += translate
break
print(final_translate)
def persian_to_english():
persian_word = input('Please enter your text: ')
c = 0
for i in persian_word:
if i != '.':
c += 1
if c == len(persian_word):
persian_word_list = persian_word.split(' ')
final_translate = ''
for i in range(len(persian_word_list)):
for j in range(len(translate_list)):
if persian_word_list[i] == translate_list[j]['persian']:
translate = translate_list[j]['english'] + ' '
final_translate += translate
break
print(final_translate)
c_2 = 0
for i in persian_word:
if i == '.':
c_2 += 1
break
if c_2 == 1 :
persian_word_list = persian_word.split('.')
final_persian_list = []
for i in range(len(persian_word_list)):
final_persian_list.append(persian_word_list[i].split(' '))
final_translate = ''
for i in range(len(final_persian_list)):
for item in final_persian_list[i]:
for j in range(len(translate_list)):
if item == translate_list[j]['persian']:
translate = translate_list[j]['english'] + ' '
final_translate += translate
break
print(final_translate)
def exit_program():
f = open('translate.txt', 'w')
final_str = ''
for i in range(len(translate_list)):
if i == (len(translate_list)-1):
str = translate_list[i]['english'] + '\n'
final_str += str
str = translate_list[i]['persian']
final_str += str
else:
str = translate_list[i]['english'] + '\n'
final_str += str
str = translate_list[i]['persian'] + '\n'
final_str += str
f.write(final_str)
f.close()
exit()
load()
f = Figlet(font='standard')
print(f.renderText('Translator'))
while True:
show_menu()
user_choice = int(input('Please choose a option: '))
if user_choice == 1:
add_word()
elif user_choice == 2:
english_to_persian()
elif user_choice == 3:
persian_to_english()
elif user_choice == 4:
exit_program() | 6d7d8e0480284c30d193560d628628473959d3f1 | [
"Python"
] | 2 | Python | MehrdadNajafi/Assignment-7 | f02e1e47dfc7fac597b944b3fe9436e1e747273f | 01eb730671f89f1a728d7e7179c2e2535045ac39 |
refs/heads/master | <repo_name>imxx/questions-react<file_sep>/src/reducers/reducer_questions.js
import _ from "lodash";
import { FETCH_QUESTIONS,
FETCH_QUESTION,
CREATE_QUESTION,
ANSWER_QUESTION,
UPDATE_ANSWERS,
REMOVE_QUESTION,
UPDATE_QUESTIONS } from "../actions/index";
const INITIAL_STATE = { questions: [], count: 0 };
export default function(state = INITIAL_STATE, action){
switch(action.type){
case FETCH_QUESTIONS:
return {
questions: action.payload.data.questions,
count: action.payload.data.count
};
case CREATE_QUESTION:
case FETCH_QUESTION:
return {
questions: [action.payload.data.question, ...state.questions],
count: action.payload.data.count
};
case ANSWER_QUESTION:
case UPDATE_ANSWERS:
let answData = (action.payload.data) ? action.payload.data.question : action.payload.question;
let answQuestions = state.questions;
let question = _.find(state.questions, (el) => {
return el._id == answData._id
});
if(question){
question.answers = answData.answers;
return { ...state, questions: [ ...answQuestions ] };
}else{
return state;
}
/*
case REMOVE_QUESTION:
let questions = state.questions;
let questionIndex = _.findIndex(state.questions, (el) => {
return el._id == action.payload.data.question._id
});
questions.splice(questionIndex,1);
if(action.payload.data.new_question) questions.push(action.payload.data.new_question);
return {
questions: [ ...questions ],
count: action.payload.data.count
};
case UPDATE_QUESTIONS:
let data = action.payload;
let questions = state.questions;
let questionIndex = _.findIndex(state.questions, (el) => {
return el._id == data.question._id
});
if(questionIndex >= 0){
questions.splice(questionIndex,1);
if(data.new_question) questions.push(data.new_question);
return {
questions: [ ...questions ],
count: data.count
};
}else{
return { ...state, count: data.count };
}
*/
case REMOVE_QUESTION:
case UPDATE_QUESTIONS:
console.log("Action: " , action);
let data = (action.payload.data) ? action.payload.data : action.payload;
let questions = state.questions;
let questionIndex = _.findIndex(state.questions, (el) => {
return el._id == data.question._id
});
console.log("State questions: ", state.questions);
console.log("QuestionIndex: ", questionIndex);
if(questionIndex >= 0){
questions.splice(questionIndex,1);
if(data.new_question) questions.push(data.new_question);
return {
questions: [ ...questions ],
count: data.count
};
}else{
return { ...state, count: data.count };
}
default:
return state;
}
}<file_sep>/src/components/AnswerButtonsPanel.js
import React, { Component } from "react";
import { answerQuestion } from "../actions/index";
import { connect } from 'react-redux';
export const englishAlphabet = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"];
export const colors = ["#ADD8E6","#F08080","#E0FFFF" , "#FAFAD2", "#90EE90", "#FFB6C1","#FFA07A", "#20B2AA", "#87CEFA"];
class AnswerButtonsPanel extends Component {
answerQuestion(answer){
this.props.answerQuestion(this.props.question, answer);
}
addButton(answerKey, answer, placeInArray){
const colors = [ "primary", "success", "info", "warning", "danger" ];
return (<button key={placeInArray} onClick={this.answerQuestion.bind(this, answerKey)}
className={`btn btn-${colors[placeInArray]} col-xs-12 answer-button`}
style={{ backgroundColor: colors[placeInArray] }}>
{englishAlphabet[placeInArray]}: {answer.answer}
</button>);
}
render(){
const answers = this.props.question.answers;
return (<div className="row">
{Object.keys(answers).map((key, i) => {
return this.addButton(key, answers[key], i);
})}
</div>);
}
}
export default AnswerButtonsPanel;
export default connect(null, {answerQuestion})(AnswerButtonsPanel);<file_sep>/src/components/Question.js
import React, { Component } from "react";
import { connect } from "react-redux";
import { bindActionCreators } from "redux";
import { removeOldAndReturnNew } from "../actions/index";
import Details from "./Details";
class Question extends Component {
state = {
detailsShown: false
}
toggleDetails(){
const shown = (this.state.detailsShown) ? false : true;
this.setState({ detailsShown: shown });
}
removeQuestion(){
const newQuestionPositionOffset = this.props.activePage * 10 - 1;
this.props.removeOldAndReturnNew(this.props.question._id, newQuestionPositionOffset);
}
render(){
return (<tr className="question-row">
<td>
<div>
<h4 className="question-title"
onClick={this.toggleDetails.bind(this)}>
{this.props.question.question}
</h4>
<i className="x-icon" title="Remove"
onClick={this.removeQuestion.bind(this)}></i>
</div>
<Details question={this.props.question} shown={this.state.detailsShown} />
</td>
</tr>);
}
}
function mapDispatchToProps(dispatch){
return { ...bindActionCreators({ removeOldAndReturnNew } , dispatch), dispatch };
}
//export default connect(null, mapDispatchToProps)(PostsIndex);
export default connect(null, mapDispatchToProps)(Question);<file_sep>/src/components/QuestionsList.js
import React, { Component } from "react";
import { connect } from "react-redux";
import { bindActionCreators } from "redux";
import { fetchQuestions } from "../actions/index";
import jshashes from "jshashes";
import ReactPaginate from "react-paginate";
import { Pagination } from "react-bootstrap";
import { browserHistory } from 'react-router'
import Question from "./Question";
class QuestionsList extends Component {
state = {
pageNum: 100,
activePage: parseInt(this.props.params.page) || 1
}
componentDidMount(){
if(this.props.params.page)
this.getQuestions({page: this.props.params.page});
else
this.getQuestions({page: 1});
}
handlePageClick(e, selectedEvent){
this.setState({ activePage: selectedEvent.eventKey }, () => {
browserHistory.push(`/page/${selectedEvent.eventKey}`);
this.getQuestions({page: selectedEvent.eventKey})
});
}
getQuestions(argObj){
let page = parseInt(argObj.page);
if(page <= 0) page = 1;
const offset = ( page - 1 ) * 10;
this.props.fetchQuestions(offset, 10);
}
render(){
if(!this.props.questions)
return <h1>Loading...</h1>;
return (
<div>
<table className="table table-hover questions-table">
<tbody>
{this.props.questions.map((q, i) => {
return <Question key={q._id} question={q} activePage={this.state.activePage} />;
})}
</tbody>
</table>
<div className={`questions-pagination`}>
<Pagination
prev
next
first
last
ellipsis
boundaryLinks
items={ Math.ceil(this.props.count / 10) }
maxButtons={5}
activePage={this.state.activePage}
onSelect={this.handlePageClick.bind(this)} />
</div>
</div>
);
}
}
function mapDispatchToProps(dispatch) {
//return { ...bindActionCreators({ fetchQuestions } , dispatch), dispatch };
return bindActionCreators({fetchQuestions}, dispatch);
}
function mapStateToProps(state) {
return {
questions: state.questions.questions,
count: state.questions.count
};
}
export default connect(mapStateToProps, mapDispatchToProps)(QuestionsList);<file_sep>/src/components/BarGraph.js
import React, { Component } from "react";
import ReactDOM from "react-dom";
import { BarChart } from "react-d3";
import { connect } from "react-redux";
import { englishAlphabet } from "./AnswerButtonsPanel";
class BarGraph extends Component {
barGraphData(answers) {
return Object.keys(answers).map(function(answer, i){
return {
label: englishAlphabet[i],
value: answers[answer].count
};
});
}
render(){
if(!this.props.shown || !this.props.width) return <div></div>;
const question = this.props.questions.filter((q) => q._id == this.props.question._id)[0];
return (
<div className={`question-graph`}>
<BarChart data={this.barGraphData(question.answers)}
height={this.props.width * .6 }
width={ this.props.width } />
</div>
);
}
}
function mapStateToProps(state) {
return { questions: state.questions.questions };
}
export default connect(mapStateToProps)(BarGraph);<file_sep>/src/components/PieGraph.js
import React, { Component } from "react";
import { PieChart } from "react-d3";
import { connect } from "react-redux";
import { englishAlphabet } from "./AnswerButtonsPanel";
class PieGraph extends Component {
pieGraphData(answers, totalSum) {
return Object.keys(answers).map(function(answer, i){
const value = (100 / totalSum * answers[answer].count).toFixed(2);
return {
label: englishAlphabet[i],
value: value
};
});
}
getTotalSum(answers){
let sum = 0;
Object.keys(answers).forEach(function(answer, i){
sum += answers[answer].count;
});
return sum;
}
render(){
if(!this.props.shown) return <div></div>;
const question = this.props.questions.filter((q) => q._id == this.props.question._id)[0];
return (
<div className={`question-graph`}>
<PieChart data={this.pieGraphData(question.answers, this.getTotalSum(question.answers))}
height={this.props.width * .6 }
width={ this.props.width }
radius={90}
innerRadius={25}
sectorBorderColor="white" />
</div>
);
}
}
function mapStateToProps(state) {
return { questions: state.questions.questions };
}
export default connect(mapStateToProps)(PieGraph);<file_sep>/src/index.js
import React from "react";
import ReactDOM from "react-dom";
import { Router, browserHistory } from "react-router";
import { Provider } from 'react-redux';
import { createStore, applyMiddleware } from 'redux';
import promise from "redux-promise";
import reducers from './reducers';
import routes from "./routes";
import { fetchState, handleServerSocketMessage } from "./actions/index";
import socketMiddleware from "./middlewares/socketMiddleware";
import io from "socket.io-client";
const socket = io("/");
import toastr from "toastr";
import toastrMiddleware from "./middlewares/toastrMiddleware";
const createStoreWithMiddleware = applyMiddleware(
promise,
socketMiddleware(socket),
toastrMiddleware(toastr)
)(createStore);
const store = createStoreWithMiddleware(reducers);
socket.on("server", data => {
console.log("On server, data: ", data);
store.dispatch(handleServerSocketMessage(data));
});
ReactDOM.render(
<Provider store={store}>
<Router history={browserHistory} routes={routes} />
</Provider>
, document.getElementById("react_app"));
<file_sep>/src/middlewares/toastrMiddleware.js
import { FETCH_QUESTION,
CREATE_QUESTION,
ANSWER_QUESTION,
UPDATE_ANSWERS } from "./../actions/index";
export default toastr => store => next => action => {
switch(action.type){
case FETCH_QUESTION:
case CREATE_QUESTION:
toastr.success(action.payload.data.question.question, 'New question was created.');
break;
case UPDATE_ANSWERS:
case ANSWER_QUESTION:
const payload = (action.payload.data) ? action.payload.data : action.payload;
toastr.success(`${payload.question.question} (${payload.answer})`, 'Question has been answered.');
}
return next(action);
}<file_sep>/src/components/Details.js
import React, { Component } from "react";
import ReactDOM from "react-dom";
import AnswerButtonsPanel from "./AnswerButtonsPanel";
import BarGraph from "./BarGraph";
import PieGraph from "./PieGraph";
class Details extends Component {
state = {
chart: "barchart",
widthOfChart: 0
}
selectChart(event){
this.setState({ chart: event.target.value });
}
componentDidUpdate(){
const el = ReactDOM.findDOMNode(this);
if(el.children.length && this.state.widthOfChart == 0){
const widthOfChart = el.lastChild.offsetWidth;
this.setState({widthOfChart});
}
}
render(){
if(!this.props.shown) return <div></div>;
return (
<div className={`question-details col-xs-12`}>
<div className="answers col-xs-5">
<AnswerButtonsPanel question={this.props.question} />
</div>
<div className="graph col-xs-7 graphs">
<select onChange={this.selectChart.bind(this)} value={this.state.chart} >
<option value="barchart">Barchart</option>
<option value="piechart">Piechart</option>
</select>
<BarGraph width={this.state.widthOfChart} question={this.props.question} shown={(this.state.chart == "barchart") ? true:false} />
<PieGraph width={this.state.widthOfChart} question={this.props.question} shown={(this.state.chart == "piechart") ? true:false} />
</div>
</div>
)
}
}
export default Details;<file_sep>/src/components/CreateQuestionModal.js
import React, {Component} from 'react';
import _ from "lodash";
import { Modal, Button } from "react-bootstrap";
import CreateQuestionForm from "./CreateQuestionForm";
class CreateQuestionModal extends Component {
state = {
answers: 2
}
addAnswerInput(){
this.setState({ answers: this.state.answers + 1 })
}
removeAnswerInput(){
if(this.state.answers < 3) return;
this.setState({ answers: this.state.answers - 1 })
}
render() {
return (
<Modal show={this.props.open} onHide={this.props.toggleModal}>
<Modal.Header closeButton>
<Modal.Title><h3>Add New Question</h3></Modal.Title>
</Modal.Header>
<Modal.Body>
<div className="form-configuration-panel">
<button onClick={this.addAnswerInput.bind(this)} className="btn btn-info">
<i className="fa fa-plus"></i> Add New Answer
</button>
<button onClick={this.removeAnswerInput.bind(this)} className={`btn btn-warning ${(this.state.answers < 3) ? 'hidden' : ''}`} >
<i className="fa fa-minus"></i> Remove Answer
</button>
</div>
<CreateQuestionForm toggleModal={this.props.toggleModal}
fields={_.times(this.state.answers, (n) => "answer_" + (n + 1) ).concat(["question"])} />
</Modal.Body>
<Modal.Footer>
<Button onClick={this.props.toggleModal}>Close</Button>
</Modal.Footer>
</Modal>
);
}
}
export default CreateQuestionModal;<file_sep>/src/components/App.js
import React, { Component } from "react";
import Header from "./Header";
import Footer from "./Footer";
import Details from "./Details";
export default class extends Component {
render(){
return (<div>
<Header />
<div className="row">
{React.cloneElement(this.props.children, { setQuestion: this.setQuestion } )}
</div>
<Footer />
</div>);
}
} | 1c36849e0726d9479a3be02d908413b3e54c76f2 | [
"JavaScript"
] | 11 | JavaScript | imxx/questions-react | 5d053f1b04897028b5ede5f69e82cc64d0b2501f | 9885a79a4967a166307c36af1150955f981a1227 |
refs/heads/main | <file_sep># HEAD
Source code for TKDE 2021 paper ["**Heterogeneous Information Network Embedding with Adversarial Disentangler**"](https://ieeexplore.ieee.org/document/9483653)
## Environment Settings
* python == 3.7.11
* torch == 1.8.0
## Parameter Settings
Please refer to the **yaml file** of the corresponding dataset.
- model
- vae: module architecture and training settings (e.g., learning rate) of the meta-path disentangler
- D_mp: module architecture and training settings (e.g., learning rate) of the meta-path discriminator
- D: module architecture and training settings (e.g., learning rate) of the semantic discriminator
- trainer
- lambda (loss weight settings)
- reconstruct: loss weight for reconstructing input node embedding
- kl: loss weight for Kullback-Leibler Divergence in the meta-path disentangler
- adv_mp_clf: loss weight for adversarial classification of the meta-path discriminator
- gp: loss weight for grad penalty
- d_adv: loss weight for the real/fake classifier
- d_clf: loss weight for the semantic classifier
## Files in the folder
~~~~
HEAD/
├── code/
│ ├── train_ACM.py: training the HEAD model on ACM
│ ├── train_Aminer.py: training the HEAD model on Aminer
│ ├── train_DBLP.py: training the HEAD model on DBLP
│ ├── train_Yelp.py: training the HEAD model on Yelp
│ ├── config/
│ │ ├── ACM.yaml: parameter settings for ACM
│ │ ├── Aminer.yaml: parameter settings for Aminer
│ │ ├── DBLP.yaml: parameter settings for DBLP
│ │ └── Yelp.yaml: parameter settings for Yelp
│ ├── evaluate/
│ │ ├── ACM_evaluate.py
│ │ ├── Aminer_evaluate.py
│ │ ├── DBLP_evaluate.py
│ │ └── Yelp_evaluate.py
│ ├── src/
│ │ ├── bi_model.py: implementation of two meta-paths
│ │ ├── tri_model.py: implementation of three meta-paths
│ │ ├── data.py
│ │ └── tri_model.py
├── datasets/
└── README.md
~~~~
## Basic Usage
~~~
python train_DBLP.py ./config/DBLP.yaml
~~~
## Hyper-parameter Tuning
The architectures of three main modules make a great difference. Besides, there are three key hyper-parameters: *lr*, *kl* and *gp*.
# Reference
```
@article{wang2021heterogeneous,
title={Heterogeneous Information Network Embedding with Adversarial Disentangler},
author={<NAME> and <NAME> and <NAME> <NAME> <NAME>},
journal={IEEE Transactions on Knowledge and Data Engineering},
year={2021},
publisher={IEEE}
}
```
<file_sep>import pickle
def LoadDataset(name, root, mp = None):
assert mp != None
data_root = root + name + '/' + mp +'_pre_train_embedding.p'
with open(data_root, 'rb') as data:
return pickle.load(data)
<file_sep>from torch.backends import cudnn
import sys
import yaml
import os
import shutil
from tensorboardX import SummaryWriter
from torch.utils.data import TensorDataset, DataLoader
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import warnings
from src.data import LoadDataset
from src.tri_model import LoadNN, LoadVAE
from src.util import calc_grad_penalty, vae_loss
from evaluate.Yelp_evaluate import Yelp_evaluation
# Ingore Warnings
warnings.filterwarnings('ignore')
# Experimental Setting
cudnn.benchmark = True
config_path = sys.argv[1]
config = yaml.load(open(config_path, 'r'))
exp_name = config['exp_setting']['exp_name']
input_dim = config['exp_setting']['input_dim']
n_nodes = config['exp_setting']['n_nodes']
trainer = config['trainer']
if trainer['save_checkpoint']:
model_path = config['exp_setting']['checkpoint_dir'] + exp_name + '/'
if not os.path.exists(model_path):
os.makedirs(model_path)
model_path = model_path + '{}'
if trainer['save_log']:
if os.path.exists(config['exp_setting']['log_dir'] + exp_name):
shutil.rmtree(config['exp_setting']['log_dir'] + exp_name)
writer = SummaryWriter(config['exp_setting']['log_dir'] + exp_name)
# Fix Seed
np.random.seed(config['exp_setting']['seed'])
torch.manual_seed(config['exp_setting']['seed'])
# Load Dataset
mp_1 = config['exp_setting']['mp_1']
mp_2 = config['exp_setting']['mp_2']
mp_3 = config['exp_setting']['mp_3']
data_root = config['exp_setting']['data_root']
batch_size = trainer['batch_size']
mp_1_data = LoadDataset('Yelp', data_root, mp = mp_1)
mp_2_data = LoadDataset('Yelp', data_root, mp = mp_2)
mp_3_data = LoadDataset('Yelp', data_root, mp = mp_3)
dataset = TensorDataset(torch.LongTensor(list(range(n_nodes))), torch.FloatTensor(mp_1_data), torch.FloatTensor(mp_2_data), torch.FloatTensor(mp_3_data))
data_loader = DataLoader(dataset, batch_size = batch_size, shuffle = True)
# Load Model
embed_dim = config['model']['vae']['encoder'][-1][1]
vae_lr = config['model']['vae']['lr']
vae_betas = tuple(config['model']['vae']['betas'])
d_lr = config['model']['D']['lr']
d_betas = tuple(config['model']['D']['betas'])
dmp_lr = config['model']['D_mp']['lr']
dmp_betas = tuple(config['model']['D_mp']['betas'])
vae = LoadVAE(config['model']['vae'], n_nodes, input_dim, embed_dim)
d = LoadNN(config['model']['D'], input_dim)
dmp = LoadNN(config['model']['D_mp'], embed_dim)
reconstruct_loss = nn.MSELoss()
clf_loss = nn.BCEWithLogitsLoss()
# Use CUDA
vae = vae.cuda()
d = d.cuda()
dmp = dmp.cuda()
reconstruct_loss = reconstruct_loss.cuda()
clf_loss = clf_loss.cuda()
# Optimizer
opt_vae = optim.Adam(list(vae.parameters()), lr = vae_lr, betas = vae_betas)
opt_d = optim.Adam(list(d.parameters()), lr = d_lr, betas = d_betas)
opt_dmp = optim.Adam(list(dmp.parameters()), lr = dmp_lr, betas = dmp_betas)
# Training
vae.train()
d.train()
dmp.train()
# Loss Weight Setting
loss_lambda = {}
for k in trainer['lambda'].keys():
init = trainer['lambda'][k]['init']
final = trainer['lambda'][k]['final']
step = trainer['lambda'][k]['step']
loss_lambda[k] = {}
loss_lambda[k]['cur'] = init
loss_lambda[k]['inc'] = (final - init) / step
loss_lambda[k]['final'] = final
# Training
global_step = 0
epoch = 0
cat_best_NMI = 0
cat_best_ARI = 0
cat_best_micro = 0
cat_best_macro = 0
while global_step < trainer['total_step']:
for batch_idx, mp_1_fea, mp_2_fea, mp_3_fea in data_loader:
input_fea = torch.cat([mp_1_fea.type(torch.FloatTensor),
mp_2_fea.type(torch.FloatTensor),
mp_3_fea.type(torch.FloatTensor)], dim = 0)
input_fea = Variable(input_fea.cuda(), requires_grad = False)
batch_idx = batch_idx.cuda()
length = batch_idx.size()[0]
# Meta-path Code Setting
mp_code = np.concatenate([np.repeat(np.array([[*[1], *[0], *[0]]]), length, axis = 0),
np.repeat(np.array([[*[0], *[1], *[0]]]), length, axis = 0),
np.repeat(np.array([[*[0], *[0], *[1]]]), length, axis = 0),], axis = 0)
mp_code = torch.FloatTensor(mp_code)
# Fake Code Setting
# Forword Translation Code: A -> B -> C -> A
forward_code = np.concatenate([np.repeat(np.array([[*[0], *[1], *[0]]]), length, axis = 0),
np.repeat(np.array([[*[0], *[0], *[1]]]), length, axis = 0),
np.repeat(np.array([[*[1], *[0], *[0]]]), length, axis = 0),], axis = 0)
forward_code = torch.FloatTensor(forward_code)
# Backword Translation Code: C -> B -> A -> C
backward_code = np.concatenate([np.repeat(np.array([[*[0], *[0], *[1]]]), length, axis = 0),
np.repeat(np.array([[*[1], *[0], *[0]]]), length, axis = 0),
np.repeat(np.array([[*[0], *[1], *[0]]]), length, axis = 0),], axis = 0)
backward_code = torch.FloatTensor(backward_code)
code = Variable(mp_code.cuda(), requires_grad = False)
invert_code = 1 - code
if global_step % 2 == 0:
trans_code = Variable(torch.FloatTensor(forward_code).cuda(), requires_grad = False)
else:
trans_code = Variable(torch.FloatTensor(backward_code).cuda(), requires_grad = False)
# Train Meta-path Discriminator
opt_dmp.zero_grad()
embedding = vae(input_fea, return_enc = True).detach()
code_pred = dmp(embedding)
dmp_loss = clf_loss(code_pred, code)
dmp_loss.backward()
opt_dmp.step()
# Training Semantic Discriminator
opt_d.zero_grad()
real_pred, real_code_pred = d(input_fea)
if global_step % 2 == 0:
fake_fea = vae(input_fea, batch_idx, insert_type = 'forward')[0].detach()
else:
fake_fea = vae(input_fea, batch_idx, insert_type = 'backward')[0].detach()
fake_pred = d(fake_fea)[0]
real_pred = real_pred.mean()
fake_pred = fake_pred.mean()
gp = loss_lambda['gp']['cur'] * calc_grad_penalty(d, input_fea.data, fake_fea.data)
real_code_pred_loss = clf_loss(real_code_pred, code)
d_loss = real_code_pred_loss + fake_pred - real_pred + gp
d_loss.backward()
opt_d.step()
# Train VAE
opt_vae.zero_grad()
# Reconstruction Phase
reconstruct_batch, mu, logvar = vae(input_fea, batch_idx, insert_type = 'truth')
mse, kl = vae_loss(reconstruct_batch, input_fea, mu, logvar, reconstruct_loss)
reconstruct_batch_loss = loss_lambda['reconstruct']['cur'] * mse + loss_lambda['kl']['cur'] * kl
reconstruct_batch_loss.backward()
# Meta-path Discriminator Adversarial Phase
embedding = vae(input_fea, return_enc = True)
code_pred = dmp(embedding)
adv_code_pred_loss = clf_loss(code_pred, invert_code)
adv_mp_clf_loss = loss_lambda['adv_mp_clf']['cur'] * adv_code_pred_loss
adv_mp_clf_loss.backward()
# Semantic Discriminator Adversarial Phase
embedding = vae(input_fea, return_enc = True).detach()
if global_step % 2 == 0:
fake_fea = vae.decode(embedding, batch_idx, insert_type = 'forward')
else:
fake_fea = vae.decode(embedding, batch_idx, insert_type = 'backward')
adv_d_loss, d_code_pred = d(fake_fea)
adv_d_loss = adv_d_loss.mean()
d_clf_loss = clf_loss(d_code_pred, trans_code)
d_loss = - loss_lambda['d_adv']['cur'] * adv_d_loss + loss_lambda['d_clf']['cur'] * d_clf_loss
d_loss.backward()
opt_vae.step()
# End of Step
print('Step ', global_step, end = '\r', flush = True)
global_step += 1
# Records
if trainer['save_log'] and (global_step % trainer['verbose_step'] == 0):
writer.add_scalar('MSE', mse.item(), global_step)
writer.add_scalar('KL', kl.item(), global_step)
writer.add_scalar('Gradient Penalty', gp.item(), global_step)
writer.add_scalars('Real_Fake Discriminator', {'Real':real_pred.item(),
'Fake':fake_pred.item()}, global_step)
writer.add_scalars('Meta-path Discriminator', {'Real':real_code_pred_loss.item(),
'Fake':d_clf_loss.item()}, global_step)
writer.add_scalars('Adversarial Meta-path Discriminator', {'Classifier':dmp_loss.item(),
'Adversarial Classifier':adv_mp_clf_loss.item()}, global_step)
# Update Lambda
for k in loss_lambda.keys():
if not loss_lambda[k]['cur'] > loss_lambda[k]['final']:
loss_lambda[k]['cur'] += loss_lambda[k]['inc']
# Test
epoch += 1
vae.eval()
mp1_inv_embedding_matrix = torch.empty(n_nodes, embed_dim).cuda()
mp2_inv_embedding_matrix = torch.empty(n_nodes, embed_dim).cuda()
mp3_inv_embedding_matrix = torch.empty(n_nodes, embed_dim).cuda()
mp1_spc_embedding_matrix, mp2_spc_embedding_matrix, mp3_spc_embedding_matrix = vae(return_mp_embedding = True)
for batch_idx, mp_1_fea, mp_2_fea, mp_3_fea in data_loader:
mp_1_fea = Variable(mp_1_fea.type(torch.FloatTensor).cuda(), requires_grad = False)
mp_2_fea = Variable(mp_2_fea.type(torch.FloatTensor).cuda(), requires_grad = False)
mp_3_fea = Variable(mp_3_fea.type(torch.FloatTensor).cuda(), requires_grad = False)
mp1_inv_embedding_matrix[batch_idx] = vae(mp_1_fea, return_enc = True)
mp2_inv_embedding_matrix[batch_idx] = vae(mp_2_fea, return_enc = True)
mp3_inv_embedding_matrix[batch_idx] = vae(mp_3_fea, return_enc = True)
cat_embedding_matrix = torch.cat((mp1_inv_embedding_matrix, mp2_inv_embedding_matrix, mp3_inv_embedding_matrix), dim = 1).cuda()
evaluation = Yelp_evaluation()
# Cluster
# print('>>*************** Cluster ***************<<')
# cat_NMI, cat_ARI = evaluation.evaluate_cluster(cat_embedding_matrix)
# print('<Epoch %d> CAT NMI = %.4f, ARI = %.4f' % (epoch, cat_NMI, cat_ARI))
# if cat_NMI > cat_best_NMI:
# cat_best_NMI = cat_NMI
# cat_best_ARI = cat_ARI
# if trainer['save_best_only']:
# torch.save(cat_embedding_matrix, model_path + '.cat.cluster.embedding')
# # if trainer['save_log']:
# # writer.add_scalars('NMI', {'Invariant Embedding':cat_NMI,
# # 'MP1 Specific Embedding':mp1_spc_NMI,
# # 'MP2 Specific Embedding':mp2_spc_NMI,
# # 'MP3 Specific Embedding':mp3_spc_NMI}, epoch)
# Classification
print('>>*************** Classification ***************<<')
cat_micro, cat_macro = evaluation.evaluate_clf(cat_embedding_matrix)
print('<Epoch %d> CAT Micro-F1 = %.4f, Macro-F1 = %.4f' % (epoch, cat_micro, cat_macro))
if cat_micro > cat_best_micro:
cat_best_micro = cat_micro
cat_best_macro = cat_macro
if trainer['save_best_only']:
torch.save(cat_embedding_matrix, model_path + '.cat.clf.embedding')
# print('\n<Cluster> CAT Best NMI = %.4f, Best ARI = %.4f' % (cat_best_NMI, cat_best_ARI))
print('\n<Classification> CAT Best Micro-F1 = %.4f, Best Macro-F1 = %.4f' % (cat_best_micro, cat_best_macro))
<file_sep>import torch
import torch.nn as nn
from torch.autograd import Variable
def LoadVAE(parameter, n_nodes, input_dim, embed_dim):
enc_list = []
for para in parameter['encoder']:
if para[0] == 'fc':
next_dim, bn, act, dropout = para[1:5]
act = get_act(act)
enc_list.append((para[0], (input_dim, next_dim, bn, act, dropout)))
input_dim = next_dim
else:
raise NameError('Unknown encoder layer type:' + para[0])
dec_list = []
for para in parameter['decoder']:
if para[0] == 'fc':
next_dim, bn, act, dropout, insert_code = para[1:6]
act = get_act(act)
dec_list.append((para[0], (input_dim, next_dim, bn, act, dropout), insert_code))
input_dim = next_dim
else:
raise NameError('Unknown decoder layer type:' + para[0])
return HEAD(enc_list, dec_list, n_nodes, embed_dim)
def LoadNN(parameter, input_dim):
dnet_list = []
for para in parameter['dnn']:
if para[0] == 'fc':
next_dim, bn, act, dropout = para[1:5]
act = get_act(act)
dnet_list.append((para[0], (input_dim, next_dim, bn, act, dropout)))
input_dim = next_dim
else:
raise NameError('Unknown nn layer type:' + para[0])
return Discriminator(dnet_list)
def get_act(name):
if name == 'LeakyReLU':
return nn.LeakyReLU(0.2)
elif name == 'ReLU':
return nn.ReLU()
elif name == 'Tanh':
return nn.Tanh()
elif name == '':
return None
else:
raise NameError('Unknown activation:' + name)
class HEAD(nn.Module):
def __init__(self, enc_list, dec_list, n_nodes, embed_dim):
super(HEAD, self).__init__()
# Meta-path Specific Embedding
self.mp_1_embedding = nn.Embedding(n_nodes, embed_dim)
self.mp_2_embedding = nn.Embedding(n_nodes, embed_dim)
# Encoder
self.enc_layers = []
for l in range(len(enc_list)):
self.enc_layers.append(enc_list[l][0])
if enc_list[l][0] == 'fc':
embed_in, embed_out, norm, act, dropout = enc_list[l][1]
if l == len(enc_list) - 1:
setattr(self, 'enc_mu', FC(embed_in, embed_out, norm, act, dropout))
setattr(self, 'enc_logvar', FC(embed_in, embed_out, norm, act, dropout))
else:
setattr(self, 'enc_' + str(l), FC(embed_in, embed_out, norm, act, dropout))
else:
raise ValueError('Unreconized layer type')
# Decoder
self.dec_layers = []
for l in range(len(dec_list)):
self.dec_layers.append((dec_list[l][0], dec_list[l][2]))
if dec_list[l][0] == 'fc':
embed_in, embed_out, norm, act, dropout = dec_list[l][1]
if dec_list[l][2]:
embed_in += embed_dim
setattr(self, 'dec_' + str(l), FC(embed_in, embed_out, norm, act, dropout))
else:
raise ValueError('Unreconized layer type')
self.apply(weights_init)
def encode(self, x):
for l in range(len(self.enc_layers) - 1):
if self.enc_layers[l] == 'fc':
batch_size = x.size()[0]
x = x.view(batch_size, -1)
x = getattr(self, 'enc_' + str(l))(x)
if self.enc_layers[-1] == 'fc':
batch_size = x.size()[0]
x = x.view(batch_size, -1)
mu = getattr(self, 'enc_mu')(x)
logvar = getattr(self, 'enc_logvar')(x)
return mu, logvar
def decode(self, z, insert_code_idx, insert_type):
if insert_code_idx is not None:
# Meta-path Code Setting
if insert_type == True:
insert_code = torch.cat([self.mp_1_embedding(insert_code_idx),
self.mp_2_embedding(insert_code_idx)], dim = 0)
# Fake Code Setting
else:
insert_code = torch.cat([self.mp_2_embedding(insert_code_idx),
self.mp_1_embedding(insert_code_idx)], dim = 0)
for l in range(len(self.dec_layers)):
if (insert_code is not None) and (self.dec_layers[l][1]):
z = torch.cat([z, insert_code], dim = 1)
z = getattr(self, 'dec_' + str(l))(z)
return z
def reparameterize(self, mu, logvar):
if self.training:
std = logvar.mul(0.5).exp_()
eps = Variable(std.data.new(std.size()).normal_(1))
return eps.mul(std).add_(mu)
else:
return mu
def forward(self, x = None, insert_code_idx = None, insert_type = True, return_enc = False, return_mp_embedding = False):
if return_mp_embedding:
return self.mp_1_embedding.weight, self.mp_2_embedding.weight
batch_size = x.size()[0]
x = x.view(batch_size, -1)
mu, logvar = self.encode(x)
z = self.reparameterize(mu, logvar)
if return_enc:
return z
return self.decode(z, insert_code_idx, insert_type), mu, logvar
def FC(embed_in, embed_out, norm = 'bn', activation = None, dropout = None):
layers = []
layers.append(nn.Linear(embed_in, embed_out))
if dropout is not None:
if dropout > 0:
layers.append(nn.Dropout(dropout))
if norm == 'bn':
layers.append(nn.BatchNorm1d(embed_out))
if activation is not None:
layers.append(activation)
return nn.Sequential(*layers)
def weights_init(model):
classname = model.__class__.__name__
if classname.find('BatchNorm') != -1:
model.weight.data.normal_(0.0, 0.02)
model.bias.data.fill_(0)
class Discriminator(nn.Module):
def __init__(self, layer_list):
super(Discriminator, self).__init__()
self.layer_list = []
for l in range(len(layer_list) - 1):
self.layer_list.append(layer_list[l][0])
if layer_list[l][0] == 'fc':
embed_in, embed_out, norm, act, dropout = layer_list[l][1]
setattr(self, 'layer_' + str(l), FC(embed_in, embed_out, norm, act, dropout))
else:
raise ValueError('Unreconized layer type')
self.layer_list.append(layer_list[-1][0])
embed_in, embed_out, norm, act, _ = layer_list[-1][1]
if not isinstance(embed_out, list):
embed_out = [embed_out]
self.output_amount = len(embed_out)
for idx, d in enumerate(embed_out):
setattr(self, 'layer_out_' + str(idx), FC(embed_in, d, norm, act, 0))
self.apply(weights_init)
def forward(self, x):
for l in range(len(self.layer_list) - 1):
x = getattr(self, 'layer_' + str(l))(x)
output = []
for d in range(self.output_amount):
output.append(getattr(self, 'layer_out_' + str(d))(x))
if self.output_amount == 1:
return output[0]
else:
return tuple(output)
<file_sep>import torch
from torch.autograd import Variable, grad
def calc_grad_penalty(dnet, real_data, fake_data, use_gpu = True, dec_output = 2):
alpha = torch.rand(real_data.shape[0], 1)
alpha = alpha.expand(real_data.size())
if use_gpu:
alpha = alpha.cuda()
interpolates = alpha * real_data + ((1 - alpha) * fake_data)
if use_gpu:
interpolates = interpolates.cuda()
interpolates = Variable(interpolates, requires_grad = True)
if dec_output == 2:
disc_interpolates, _ = dnet(interpolates)
else:
disc_interpolates = dnet(interpolates)
grads = grad(outputs = disc_interpolates, inputs = interpolates,
grad_outputs = torch.ones(disc_interpolates.size()).cuda(),
create_graph = True, retain_graph = True, only_inputs = True)[0]
grad_penalty = ((grads.norm(2, dim = 1) - 1) ** 2).mean()
return grad_penalty
def vae_loss(reconstruct_x, x, mu, logvar, reconstruct_loss):
loss = reconstruct_loss(reconstruct_x, x)
KLD = - 0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
return loss, KLD
<file_sep>import pickle
import numpy as np
from sklearn.cluster import KMeans
from sklearn.metrics import normalized_mutual_info_score, adjusted_rand_score
from sklearn.metrics import f1_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
import torch
import pickle
import heapq
import math
class Yelp_evaluation():
def __init__(self):
# Load Business Label
self.business_label = {}
data = pickle.load(open('../datasets/Yelp/business_label.p', 'rb'))
for item in range(len(data)):
(id, label) = data[item]
self.business_label[id] = label
def evaluate_cluster(self, embedding_matrix):
embedding_list = embedding_matrix.tolist()
X = []
Y = []
for p in self.business_label:
X.append(embedding_list[p])
Y.append(self.business_label[p])
Y_pred = KMeans(4).fit(np.array(X)).predict(X)
nmi = normalized_mutual_info_score(np.array(Y), Y_pred)
ari = adjusted_rand_score(np.array(Y), Y_pred)
return nmi, ari
def evaluate_clf(self, embedding_matrix):
embedding_list = embedding_matrix.tolist()
X = []
Y = []
for p in self.business_label:
X.append(embedding_list[p])
Y.append(self.business_label[p])
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.2, random_state = 0)
LR = LogisticRegression()
LR.fit(X_train, Y_train)
Y_pred = LR.predict(X_test)
micro_f1 = f1_score(Y_test, Y_pred, average = 'micro')
macro_f1 = f1_score(Y_test, Y_pred, average = 'macro')
return micro_f1, macro_f1
<file_sep>import pickle
import numpy as np
from sklearn.cluster import KMeans
from sklearn.metrics import normalized_mutual_info_score, adjusted_rand_score
from sklearn.metrics import f1_score, accuracy_score, roc_auc_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
import torch
class DBLP_evaluation():
def __init__(self):
# Load Author Label
self.author_label = {}
data = pickle.load(open('../datasets/DBLP/author_label.p', 'rb'))
for item in range(len(data)):
(id, label) = data[item]
self.author_label[id] = label
def evaluate_cluster(self, embedding_matrix):
embedding_list = embedding_matrix.tolist()
X = []
Y = []
for p in self.author_label:
X.append(embedding_list[p])
Y.append(self.author_label[p])
Y_pred = KMeans(4).fit(np.array(X)).predict(X)
nmi = normalized_mutual_info_score(np.array(Y), Y_pred)
ari = adjusted_rand_score(np.array(Y), Y_pred)
return nmi, ari
def evaluate_clf(self, embedding_matrix):
embedding_list = embedding_matrix.tolist()
X = []
Y = []
for p in self.author_label:
X.append(embedding_list[p])
Y.append(self.author_label[p])
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.2, random_state = 0)
LR = LogisticRegression()
LR.fit(X_train, Y_train)
Y_pred = LR.predict(X_test)
micro_f1 = f1_score(Y_test, Y_pred, average = 'micro')
macro_f1 = f1_score(Y_test, Y_pred, average = 'macro')
return micro_f1, macro_f1
| 09d919918170227fbb1636a798083a835a3a6b91 | [
"Markdown",
"Python"
] | 7 | Markdown | BUPT-GAMMA/Heterogeneous-Information-Network-Embedding-with-Adversarial-Disentangler | f2a46dcb3dd5d9ce8ce28bcd2918959bb8fb9d43 | 08a27e3f9ace44d50ddd939e3f3bda99479fb1fe |
refs/heads/master | <repo_name>pioenerio/cli<file_sep>/migrate/migrate.go
package migrate
import (
"database/sql"
"github.com/dnote/cli/infra"
"github.com/dnote/cli/log"
"github.com/pkg/errors"
)
type migration struct {
name string
sql string
}
var migrations = []migration{}
func initSchema(db *sql.DB) (int, error) {
schemaVersion := 0
_, err := db.Exec("INSERT INTO system (key, value) VALUES (? ,?)", "schema", schemaVersion)
if err != nil {
return schemaVersion, errors.Wrap(err, "inserting schema")
}
return schemaVersion, nil
}
func getSchema(db *sql.DB) (int, error) {
var ret int
err := db.QueryRow("SELECT value FROM system where key = ?", "schema").Scan(&ret)
if err == sql.ErrNoRows {
ret, err = initSchema(db)
if err != nil {
return ret, errors.Wrap(err, "initializing schema")
}
} else if err != nil {
return ret, errors.Wrap(err, "querying schema")
}
return ret, nil
}
func execute(ctx infra.DnoteCtx, nextSchema int, m migration) error {
log.Debug("running migration %s\n", m.name)
tx, err := ctx.DB.Begin()
if err != nil {
return errors.Wrap(err, "beginning a transaction")
}
_, err = tx.Exec(m.sql)
if err != nil {
tx.Rollback()
return errors.Wrap(err, "running sql")
}
_, err = tx.Exec("UPDATE system SET value = ? WHERE key = ?", nextSchema, "schema")
if err != nil {
tx.Rollback()
return errors.Wrap(err, "incrementing schema")
}
tx.Commit()
return nil
}
// Run performs unrun migrations
func Run(ctx infra.DnoteCtx) error {
db := ctx.DB
schema, err := getSchema(db)
if err != nil {
return errors.Wrap(err, "getting the current schema")
}
log.Debug("current schema %d\n", schema)
if schema == len(migrations) {
return nil
}
toRun := migrations[schema:]
for idx, m := range toRun {
nextSchema := schema + idx + 1
if err := execute(ctx, nextSchema, m); err != nil {
return errors.Wrapf(err, "running migration %s", m.name)
}
}
return nil
}
<file_sep>/migrate/migrate_test.go
package migrate
import (
"testing"
)
func TestExecute(t *testing.T) {
}
| 4cdb4600736f74b56ff6b0ff8bcd4968b8ad54d0 | [
"Go"
] | 2 | Go | pioenerio/cli | e7940229cf69912f5f6bbe79e4d796f683f318f8 | c7f267feeb5c71bb7222d4fba1c90c568240d5a8 |
refs/heads/master | <file_sep>#ifndef TARGET_H
#define TARGET_H
#include "Location.h"
#include "enumVar.h"
#include "DataCamera.h"
#include "I2c.h"
#include <math.h>
#include <stdbool.h>
#include <vector>
// One target on the playing field: its located grid position, whether it
// has been hit, and the camera measurement used to locate it.
class Target{
    private:
        Location targetLocation;        // NOTE(review): appears unused in the visible code; targetLocationArray holds the position — confirm before removing
        DataCamera dataCamera;          // latest camera measurement (horizontal offset / hit flag)
        Location targetLocationArray;   // located tile position; (-1,-1) until the target has been found
        bool targetHit;                 // true once this target has been hit
        int locArr[2];                  // scratch buffer returned by GetTargetLocation(): {x, y}
    public:
        Target();                       // starts the position at (-1,-1) ("not located")
        void SetHit(bool Hit);
        bool GetHit();
        // Returns a pointer to {x, y} in locArr; valid until the next call.
        int* GetTargetLocation();
        void SetTargetLocation(int x, int y);
        // Computes and stores the target's tile position from the robot pose
        // (curX, curY, heading — the unnamed DirNouse parameter), the measured
        // distance to the target, and the tile size used for conversion.
        void CalculateTargetLocation(int curX, int curY, DirNouse, int distance, int tileSize);
};
#endif<file_sep>#ifndef ENUM_H
#define ENUM_H
// Colour identifier of a target.
typedef enum {
    green, blue, red
}TargetID;
// High-level mission state.
typedef enum{
    ONE, TWO, THREE
}State;
// Drive commands for the motor controller; values start at 3
// (NOTE(review): presumably to avoid clashing with other command bytes — confirm against the I2C protocol).
typedef enum{
    LEFT90 = 3, RIGHT90, LEFT , RIGHT, FORWARD, STOP, BACK, TURN360
}DirDrive;
// Compass heading of the robot's nose, listed clockwise.
typedef enum{
    NORTH, EAST, SOUTH, WEST
}DirNouse;
// Dimensions of the playing-field grid, in tiles (row-major maps use
// index = x + y * WIDTH).
#define WIDTH 5
#define HEIGHT 5
#endif<file_sep>#include "../header/Map.h"
// Builds a WIDTH x HEIGHT occupancy grid with every tile UNKNOWN and
// reserves one Target slot per expected target.
// NOTE(review): the calloc'd grid is never freed in the visible code —
// confirm a destructor exists elsewhere.
Map::Map(Motor *mot, int amountTarget){
    motor = mot;
    amountTargets = amountTarget;
    target.resize(amountTargets);
    // Allocate the grid; the zero-fill from calloc is immediately
    // overwritten, so UNKNOWN need not equal 0.
    map = (int*)calloc(HEIGHT*WIDTH, sizeof(int));
    for(int tile = 0; tile < HEIGHT*WIDTH; tile++){
        map[tile] = UNKNOWN;
    }
}
// Copies target id's (x, y) tile position into the member buffer locArr
// and returns a pointer to it (locArr[0] = x, locArr[1] = y).
// The result is valid until the next call on this Map.
int* Map::GetTargetLocation(int id){
    int* pos = target.at(id).GetTargetLocation();
    locArr[0] = pos[0];
    locArr[1] = pos[1];
    return locArr;
}
// Returns whether target id has already been hit
// (throws std::out_of_range for an invalid id, via vector::at).
bool Map::GetTargetHit(int id){
    return target.at(id).GetHit();
}
// Delegates localisation of target id to the Target itself, passing the
// robot's current tile (x, y), its heading, the measured distance, and
// TILE_SIZE so the distance can be converted into tiles.
void Map::CalculateTargetLocation(int id, int x, int y, DirNouse dir, int distance){
    target.at(id).CalculateTargetLocation(x,y,dir,distance,TILE_SIZE);
}
// Triggers a fresh read of the distance sensors and returns a pointer to
// the three measured values (SetMap interprets indices 0/1/2 as the
// left/front/right sensors relative to the heading).
int* Map::GetDistanceArray(){
    ds.ReadDistanceValue();
    return ds.GetDistance();
}
/**
 * Updates the occupancy grid from the three distance-sensor readings.
 *
 * dir          - current heading of the robot's nose.
 * addrDistance - pointer to three raw distance readings; index 0/1/2 are
 *                the left/front/right sensors relative to the heading
 *                (e.g. NORTH: 0 = west, 1 = north, 2 = east).
 * x, y         - pointers to the robot's current tile coordinates.
 *
 * Marks located targets as TARGET, the tile each sensor hit as CLOSED, and
 * every tile between the robot and that wall as OPEN. Returns a pointer to
 * arrayDistanceValues, which after the call holds each reading reduced
 * modulo TILE_SIZE (the remainder within the last tile).
 *
 * Fixes relative to the previous revision (made consistent with the other
 * three direction branches):
 *  - WEST sensor A (south): OPEN tiles were painted north (-i*WIDTH).
 *  - WEST sensor C (north): CLOSED/OPEN were written south (+WIDTH).
 *  - NORTH sensor A used "> 0" for the column bound where the sibling
 *    branches use ">= 0", so a wall in column 0 was never recorded.
 */
int* Map::SetMap(DirNouse dir, int* addrDistance, int *x,int *y){
    //set targets in map
    for(int i=0;i<amountTargets;i++){
        if(*GetTargetLocation(i) != -1){
            // tx/ty (renamed from x/y) so the function parameters are not shadowed
            int *tx = GetTargetLocation(i);
            int *ty = tx+1;
            *(map+(*ty*WIDTH)+*tx) = TARGET;
        }
    }
    //get all three distance values
    for(uint8_t i=0;i<3;i++){
        arrayDistanceValues[i] = *(addrDistance+i);
    }
    //convert ds distance to amount of tiles;
    // Each loop counts whole tiles in the reading (at least 1) and leaves
    // the remainder within the last tile in arrayDistanceValues[i].
    int tileDistanceA=1, tileDistanceB=1, tileDistanceC=1;//three var to store amount tiles to block
    bool state= true;
    while(state){
        if(arrayDistanceValues[0] > TILE_SIZE){tileDistanceA ++;arrayDistanceValues[0]-=TILE_SIZE;}
        else state = false;
    }
    state = true;
    while(state){
        if(arrayDistanceValues[1] > TILE_SIZE){tileDistanceB ++;arrayDistanceValues[1]-=TILE_SIZE;}
        else state = false;
    }
    state = true;
    while(state){
        if(arrayDistanceValues[2] > TILE_SIZE){tileDistanceC ++;arrayDistanceValues[2]-=TILE_SIZE;}
        else state = false;
    }
    //get current location (row-major index of the robot's tile)
    int numberInMap = *x+(*y * WIDTH);
    std::cout << "cur x: " << *x << " cur y: "<< *y<<std::endl;
    std::cout << "number is map :" << numberInMap << std::endl;
    //determine which ds is map location
    //TODO dynamic field = update map = ,
    //tiles between cur pos & block = OPEn
    //place value in map
    // Per heading: for each sensor, mark the tile the sensor hit as CLOSED
    // (guarded against leaving the grid / wrapping a row), then paint the
    // tiles between the robot and that wall OPEN.
    int i = 1;
    printf("tileA:%d, tileB:%d, tileC:%d\n", tileDistanceA, tileDistanceB, tileDistanceC);
    if(dir == NORTH){//0= west, 1 = north, 2 = east
        if((numberInMap-tileDistanceA>= 0) && (numberInMap-tileDistanceA < WIDTH*HEIGHT)){//stay in map memory
            if(*x - tileDistanceA >= 0 ){//prevent going out of field (>= 0: column 0 is valid)
                *(map+numberInMap-tileDistanceA) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceA - i > 0){
            *(map+numberInMap-i)= OPEN;
            i++;
        }
        if((numberInMap-(tileDistanceB*WIDTH) >= 0) && (numberInMap-(tileDistanceB*WIDTH) < WIDTH*HEIGHT)){
            *(map+numberInMap-(tileDistanceB*WIDTH)) = CLOSED;
        }
        i=1;
        while(tileDistanceB - i > 0){
            *(map+numberInMap-i*WIDTH)= OPEN;
            i++;
        }
        if((numberInMap+tileDistanceC >= 0) && (numberInMap+tileDistanceC < WIDTH*HEIGHT)){
            if(*x + tileDistanceC < WIDTH ){//prevent going out of field
                *(map+numberInMap+tileDistanceC) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceC - i > 0){
            *(map+numberInMap+i)= OPEN;
            i++;
        }
    }
    if(dir == EAST){//0 = north, 1 = east, 2 = south
        if((numberInMap-(tileDistanceA*WIDTH) >= 0) && (numberInMap-(tileDistanceA*WIDTH) < WIDTH*HEIGHT)){
            *(map+numberInMap-(tileDistanceA*WIDTH)) = CLOSED;
        }
        i=1;
        while(tileDistanceA - i > 0){
            *(map+numberInMap-i*WIDTH)= OPEN;
            i++;
        }
        if((numberInMap+tileDistanceB >= 0 ) && (numberInMap+tileDistanceB < WIDTH*HEIGHT)){
            int numberInRow = *x;
            if(numberInRow+tileDistanceB < WIDTH){
                *(map+numberInMap+tileDistanceB) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceB - i > 0){
            *(map+numberInMap+i)= OPEN;
            i++;
        }
        if((numberInMap+(tileDistanceC*WIDTH) >= 0) && (numberInMap+(tileDistanceC*WIDTH) < WIDTH*HEIGHT)){
            *(map+numberInMap+(tileDistanceC*WIDTH)) = CLOSED;
        }
        i=1;
        while(tileDistanceC - i > 0){
            *(map+numberInMap+i*WIDTH)= OPEN;
            i++;
        }
    }
    if(dir == SOUTH){//0 = east, 1 = south, 2 = west
        if((numberInMap+tileDistanceA < WIDTH*HEIGHT) && (numberInMap+tileDistanceA > 0)){
            if(*x+tileDistanceA < WIDTH){
                *(map+numberInMap+tileDistanceA) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceA - i > 0){
            *(map+numberInMap+i)= OPEN;
            i++;
        }
        if((numberInMap+(tileDistanceB*WIDTH) < WIDTH*HEIGHT) && (numberInMap+(tileDistanceB*WIDTH) >=0 )){
            *(map+numberInMap+(tileDistanceB*WIDTH)) = CLOSED;
        }
        i=1;
        while(tileDistanceB - i > 0){
            *(map+numberInMap+i*WIDTH)= OPEN;
            i++;
        }
        if((numberInMap-tileDistanceC < WIDTH*HEIGHT) && numberInMap-tileDistanceC >=0){
            if(*x -tileDistanceC >=0){
                *(map+numberInMap-tileDistanceC) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceC - i > 0){
            *(map+numberInMap-i)= OPEN;
            i++;
        }
    }
    if(dir == WEST){//0 = south, 1 = west, 2 = north
        if((numberInMap+(tileDistanceA*WIDTH) < WIDTH*HEIGHT) && (numberInMap+(tileDistanceA*WIDTH) >=0)){
            *(map+numberInMap+(tileDistanceA*WIDTH)) = CLOSED;
        }
        i=1;
        while(tileDistanceA - i > 0){
            *(map+numberInMap+i*WIDTH)= OPEN;// fixed: sensor A looks south, OPEN tiles go +WIDTH
            i++;
        }
        if((numberInMap-tileDistanceB < WIDTH*HEIGHT) && (numberInMap-tileDistanceB >= 0)){
            if(*x-tileDistanceB >=0){
                *(map+numberInMap-tileDistanceB) = CLOSED;
            }
        }
        i=1;
        while(tileDistanceB - i > 0){
            *(map+numberInMap-i)= OPEN;
            i++;
        }
        if((numberInMap-(tileDistanceC*WIDTH) < WIDTH* HEIGHT) && (numberInMap-(tileDistanceC*WIDTH) >= 0)){
            *(map+numberInMap-(tileDistanceC*WIDTH)) = CLOSED;// fixed: sensor C looks north, wall is at -WIDTH
        }
        i=1;
        while(tileDistanceC - i > 0){
            *(map+numberInMap-i*WIDTH)= OPEN;// fixed: OPEN tiles toward the north
            i++;
        }
    }
    //place map in map.txt for debug
    std::ofstream myfile;// only used by the disabled debug dump below
    /*myfile.open ("../map.txt");
    if(myfile.is_open()){
        for(uint8_t i=0;i< HEIGHT;i++){
            for(uint8_t j=0;j< WIDTH;j++){
                myfile << *(map+(i*WIDTH)+j);
                //myfile << " ";
            }
            myfile << "\n";
        }
    }
    myfile.close();*/
    // NOTE(review): unconditionally marks tile (0,0) OPEN — presumably the
    // start tile; confirm this is intentional.
    *map = OPEN;
    // Debug print of the whole grid, one row per line.
    for(uint8_t i=0;i< HEIGHT;i++){
        for(uint8_t j=0;j< WIDTH;j++){
            std::cout << *(map+(i*WIDTH)+j);
            //myfile << " ";
        }
        std::cout << "\n";
    }
    return &arrayDistanceValues[0];
}
// Exposes the raw occupancy grid (row-major, WIDTH * HEIGHT ints).
int* Map::GetMap(){
    return map;
}
// Returns true when the two grids differ in at least one tile,
// false when all WIDTH*HEIGHT entries are equal.
bool Map::CheckDifference(int* oldMap, int *newMap){
    for(int idx = 0; idx < WIDTH*HEIGHT; idx++){
        if(oldMap[idx] != newMap[idx]){
            return true;
        }
    }
    return false;
}<file_sep>#ifndef DATACAMERA_H
#define DATACAMERA_H
#include "enumVar.h"
// Camera measurement for a target: the horizontal offset of the target in
// the image (negative = left of centre, positive = right, as interpreted
// by Target::CalculateTargetLocation) and a hit flag.
// Member functions are defined elsewhere.
class DataCamera{
    private:
        int offset;   // horizontal offset; sign encodes left/right of centre
        bool hit;     // hit flag reported by the camera
    public:
        void SetOffset(int);
        int GetOffset();
        void SetHit(bool h);
        bool GetHit();
};
#endif<file_sep>#include "../header/Target.h"
// New targets start unlocated: position (-1,-1) means "not yet found".
Target::Target(){
    SetTargetLocation(-1,-1);
}
// Records whether this target has been hit.
void Target::SetHit(bool hit){
    targetHit = hit;
}
// Returns whether this target has been hit.
bool Target::GetHit(){
    return targetHit;
}
// Snapshots the stored Location into locArr ({x, y}) and returns a pointer
// to it; (-1,-1) means the target has not been located yet. The result is
// valid until the next call on this Target.
int* Target::GetTargetLocation(){
    int xPos = targetLocationArray.GetLocationX();
    int yPos = targetLocationArray.GetLocationY();
    locArr[0] = xPos;
    locArr[1] = yPos;
    return locArr;
}
// Store the target's grid cell; (-1, -1) means "not located yet".
void Target::SetTargetLocation(int x, int y){
    targetLocationArray.SetLocation(x,y);
}
// Locate the target on the grid: rotate the turret until the camera reports
// the target centred, read back the turned angle from the microcontroller,
// then convert the polar reading (angle, distance) into a tile position
// relative to (curX, curY) and the robot's heading `dirNouse`.
//
// BUG FIXES vs the original:
//  * `(int)sin(angle)*distance` cast sin() itself to int (almost always 0)
//    before multiplying; the product is now computed first.
//  * sin/cos take radians, while the angle is compared against 90/180 and is
//    assumed to arrive in degrees from the device (TODO confirm units); it is
//    converted before the trig calls.
//  * most quadrant branches used the X tile offset for the Y coordinate
//    (copy-paste); the per-direction rotation below reproduces the signs the
//    original quadrant branches intended, with the correct magnitudes.
//  * the SOUTH branch tested `angle == -180` where `angle >= -180` was meant.
void Target::CalculateTargetLocation(int curX, int curY, DirNouse dirNouse, int distance, int tileSize){
    I2C i2c;
    // Rotate the turret until the camera reports the target centred (offset == 0).
    int offset = dataCamera.GetOffset();
    while(offset != 0){
        offset = dataCamera.GetOffset();
        if(offset < 0){ // target left of centre -> command a turn
            i2c.OpenBus();
            i2c.WriteBytes(0x00);
            i2c.CloseBus();
        }
        if(offset > 0){ // target right of centre -> command a turn
            i2c.OpenBus();
            i2c.WriteBytes(0x00);
            i2c.CloseBus();
        }
    }
    // Ask the microcontroller how far we turned (command 11).
    i2c.OpenBus();
    i2c.WriteBytes(11);
    int angle = i2c.ReadBytes();
    i2c.CloseBus();
    // Polar -> tile offsets: sinTiles points sideways (positive to the
    // robot's right), cosTiles points straight ahead.
    const double rad = angle * 3.14159265358979323846 / 180.0;
    int sinTiles = (int)(sin(rad) * distance) / tileSize;
    int cosTiles = (int)(cos(rad) * distance) / tileSize;
    printf("tilesY as: %d, tilesX as: %d\n", cosTiles, sinTiles);
    printf("angle: %d\n", angle);
    // Rotate the (sideways, ahead) offsets into map coordinates
    // (x grows EAST, y grows SOUTH).
    int dx = 0, dy = 0;
    if(dirNouse == NORTH){
        dx = sinTiles;
        dy = -cosTiles;
    }
    if(dirNouse == EAST){
        dx = cosTiles;
        dy = sinTiles;
    }
    if(dirNouse == SOUTH){
        dx = -sinTiles;
        dy = cosTiles;
    }
    if(dirNouse == WEST){
        dx = -cosTiles;
        dy = -sinTiles;
    }
    targetLocationArray.SetLocation(curX + dx, curY + dy);
}<file_sep>#ifndef MAP_H
#define MAP_H
#define TILE_SIZE 30 //tile size in cm
#define UNKNOWN 0 // cell state: not yet observed
#define CLOSED 1  // cell state: blocked by an obstacle
#define OPEN 2    // cell state: known to be free
#define TARGET 3  // cell state: contains a target
#include <vector>
#include "Target.h"
#include "DistanceSensor.h"
#include "Motor.h"
#include <stdint.h>
#include <iostream>
#include <fstream>
// Occupancy grid of the arena plus the targets in it. Cells are stored
// row-major in a WIDTH*HEIGHT int array holding the states above.
class Map{
private:
    int *map;                    // row-major WIDTH*HEIGHT grid of cell states
    Motor *motor;                // not owned by Map
    std::vector <Target> target; // one entry per expected target
    DistanceSensor ds;
    int arrayDistanceValues[3];  // scratch copy of the last sensor readings
    int locArr[2];               // scratch buffer returned by GetTargetLocation
    int amountTargets;
public:
    Map(Motor *motor,int amountTarget);
    //map
    int* GetMap();
    // Update the grid from the sensor readings in `addr`, given the robot's
    // heading `dir` and its current cell (*x, *y).
    int* SetMap(DirNouse dir, int* addr, int*x, int* y);
    int* GetDistanceArray();
    // True when the two grids differ in at least one cell.
    bool CheckDifference(int*oldMap , int*newMap);
    //target
    int* GetTargetLocation(int id);
    bool GetTargetHit(int id);
    void CalculateTargetLocation(int id, int x, int y, DirNouse dir, int distance);
};
#endif<file_sep>#include "header/Map.h"
#include "header/Route.h"
#include "header/Motor.h"
#include "header/I2c.h"
#include "header/enumVar.h"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include <wiringPiI2C.h>
#include <cmath>
#include <iostream>
#include <fstream>
#include <thread>
//#define MANUAL
#define AUTOMATIC
#define AMOUNT_LOCATIONS 4
using namespace cv;
using namespace std;
#define DEVICE_ID 0x08
#define MAX_RES_WIDTH 640
#define MAX_RES_HEIGHT 320
Mat src_gray;
Mat img;
int thresh = 100;
RNG rng(12345);
/*
Done: turn 360 degrees every 3 tiles
Done: if the destination cell is blocked, take the cell before it as destination
TODO: distinguish cells: unknown, free, blocked and target
TODO: add states -> stop driving once a target is recognised
TODO: 2 threads -> global variable for the state, then check the state
TODO: request the angle ?? -180/180 is not possible, is it?
      maybe send two values: 0 & 1 followed by the value (1 = positive, 0 = negative)
*/
Point2f thresh_callback(vector<vector<Point>> contourVar, Mat cannyVar);
void swap(float *xp, float *yp);
void selectionSort(vector<Point2f> arr, int n);
void pathfinding();
int openCV();
//global state:
bool targetDetected =false;
int main() {
std::thread path(pathfinding);
//std::thread cv(openCV);
path.join();
//cv.join();
return 0;
}
// Vision loop: grab frames from camera 0, run edge detection + contour
// grouping (thresh_callback) to find the target, and send steering commands
// over I2C until the target sits in the centre band [300, 340] of the image.
// Returns -1 when the I2C setup fails; otherwise loops forever.
int openCV(){
    // string path1 = "/home/jeroen/Downloads/targetBYR.jpg";
    // string path2 = "/home/jeroen/Downloads/multi_targets.jpg";
    // Mat src = imread( path1 );
    // Setup I2C communication
    int fd = wiringPiI2CSetup(DEVICE_ID);
    uint8_t data_to_send;
    uint8_t lastData; // NOTE(review): never written; only used by the commented-out dedup below
    if (fd == -1) {
        std::cout << "Failed to init I2C communication.\n";
        return -1;
    }
    std::cout << "I2C communication successfully setup.\n";
    VideoCapture cap(0);
    while(true){
        cap.read(img);
        cvtColor( img, src_gray, COLOR_BGR2GRAY );
        blur( src_gray, src_gray, Size(3,3) );
        Mat canny_output;
        Canny( src_gray, canny_output, thresh, thresh*2 );
        vector<vector<Point>> contours;
        findContours( canny_output, contours, RETR_TREE, CHAIN_APPROX_SIMPLE );
        //recievePackage = thresh_callback();
        // NOTE(review): all three Points are re-created every iteration, so
        // previousTarget never holds the previous frame's value, and the
        // ||-condition below is always true (presumably && was intended), so
        // the "confirmed" check is currently a no-op.
        Point2f target, prePreviousTarget, previousTarget;
        previousTarget.x = target.x;
        target = thresh_callback(contours, canny_output);
        if(target.x != 0 && (target.x < (previousTarget.x + 5) || target.x > (previousTarget.x - 5))){
            cout << "Target confirmed! Center position = "<< target << endl;
        }
        if(target.x != 0){
            // Steer toward the centre band; left/right are swapped because
            // the image is mirrored.
            if(target.x < 300 && target.x != 0){
                cout << "Go left" << endl;
                data_to_send = 6; // Go right because mirrored
                wiringPiI2CWrite(fd, data_to_send);
            }else if(target.x > 340){
                cout << "Go right" << endl;
                data_to_send = 5; // Go left because mirrored
                wiringPiI2CWrite(fd, data_to_send);
            }else{
                cout << "Fire!!!!!" << endl;
                data_to_send = 8; // stop driving
                wiringPiI2CWrite(fd, data_to_send);
            }
        }
        // if(data_to_send != lastData){
        // wiringPiI2CWrite(fd, data_to_send);
        // }
        imshow( "Source", img );
        cout << "********" << endl;
        waitKey(1);
    }
}
// Navigation loop (runs on its own thread): keep the occupancy grid up to
// date from the distance sensors, (re)plan a route to the current corner
// destination, drive along it, and advance to the next corner once that
// corner's target has been hit.
void pathfinding(){
    Motor motor;
    Map map(&motor,AMOUNT_LOCATIONS);
    //variable to count down for a 360 turn
    unsigned char turnCounter=2;
    // The four corner cells, visited in this order.
    int finishCoordinates[][2] = {
        {WIDTH-1, 0},//right top corner
        {WIDTH-1,HEIGHT-1},//right under corner
        {0, HEIGHT-1},//left under corner
        {0,0}//top left corner
    };
    int routeCounter=0;// index of the corner currently being driven to
    Route route;
    DirNouse dirNouse = WEST;
    //init maps
    int mapNew[HEIGHT][WIDTH];
    int mapOld[HEIGHT][WIDTH];
    int *ptrMap;
    // Current location: GetCurrentLocation() returns int[2] = {x, y}.
    int *curLocationX = motor.GetCurrentLocation();
    int *curLocationY = curLocationX+1;
    //update distance sensor
    int *distanceSensorArray = map.GetDistanceArray();
    //set map
    map.SetMap(dirNouse, distanceSensorArray, curLocationX, curLocationY);
    ptrMap = map.GetMap();
    // Copy the row-major grid into both the "new" and "old" snapshots.
    for(int i =0;i<HEIGHT;i++){
        for(int j=0;j<WIDTH;j++){
            mapNew[i][j] = mapOld[i][j] = *(ptrMap+(WIDTH*i)+j);
        }
    }
    //calculate route
    route.SetRoute(&mapNew[0][0], 0,0, finishCoordinates[0][0], finishCoordinates[0][1]);
    //print routeArray;
    route.PrintRoute();
    while(1){
        //update the distanceSensor//
        dirNouse = motor.GetCurrentDirection();
        std::cout << "dirNouse: " << dirNouse << std::endl;
        distanceSensorArray = map.GetDistanceArray();
        //std::cout << "read:: " << +i2c.ReadBytes() << std::endl;
        //
        //update map
        curLocationX = motor.GetCurrentLocation();
        curLocationY = curLocationX+1;
        map.SetMap(dirNouse, distanceSensorArray, curLocationX, curLocationY);
        ptrMap = map.GetMap();
        for(int i =0;i<HEIGHT;i++){
            for(int j=0;j<WIDTH;j++){
                mapNew[i][j]= *(ptrMap+(WIDTH*i)+j);
            }
        }
        bool mapChanged = map.CheckDifference(&mapNew[0][0], &mapOld[0][0]);
        // Replan only when the grid changed since the last iteration.
        if(mapChanged){
            std::cout << "new route" << std::endl;
            for(int i =0;i<HEIGHT;i++){
                for(int j=0;j<WIDTH;j++){
                    mapOld[i][j]= mapNew[i][j];
                }
            }
            route.SetRoute(&mapOld[0][0], *curLocationX, *curLocationY,
            finishCoordinates[routeCounter][0], finishCoordinates[routeCounter][1]);
        }else{
            std::cout << "same route" << std::endl;
        }
        route.PrintRoute();
        // MANUAL: type the robot's pose in by hand for debugging.
        #ifdef MANUAL
        int x,y, ownDir;
        std::cout << "\nType x:";
        std::cin >> x;
        std::cout << "\nType y: ";
        std::cin >> y;
        std::cout << "\nType direction: ";
        std::cin >> ownDir;
        motor.SetCurrentLocation(x,y);
        motor.SetCurrentDirection((DirNouse)ownDir);
        sleep(2);
        #endif
        #ifdef AUTOMATIC
        motor.CalculateCurrentLocationWithRoute( route.GetRoute(), route.GetSize());
        std::cout << "cur direction" << motor.GetCurrentDirection()<<std::endl;
        std::cout << "cur location" << *motor.GetCurrentLocation() << "," << *(motor.GetCurrentLocation()+1) << std::endl;
        #endif
        // Full scan turn every third iteration (post-decrement: fires when
        // turnCounter has counted 2 -> 1 -> 0).
        if(turnCounter--==0){
            motor.Drive(TURN360);
            turnCounter = 2;
        }
        if(targetDetected){// target spotted: locate it from the pose + front distance sensor
            int *distance = map.GetDistanceArray();
            map.CalculateTargetLocation(routeCounter,*curLocationX,*curLocationY,dirNouse,*(distance+1));//front sensor = index 1
        }
        // Advance to the next corner once the current target is hit.
        // NOTE(review): this guard lets routeCounter reach AMOUNT_LOCATIONS,
        // after which finishCoordinates[routeCounter] above indexes out of
        // bounds; it should presumably be `routeCounter < AMOUNT_LOCATIONS-1`.
        if(map.GetTargetHit(routeCounter) && routeCounter < AMOUNT_LOCATIONS){
            routeCounter++;
        }
    }
}
// Detect the target from the pre-computed contours: approximate each
// contour, take its centroid, group centroids sharing (almost) the same x,
// and treat a sufficiently large group of concentric contours as a target.
// Returns the centre of the chosen target (intended: the one closest to the
// image centre) as a Point2f.
//
// NOTE(review): several latent defects are flagged inline; the code is left
// untouched here because the grouping logic is order-sensitive.
Point2f thresh_callback(vector<vector<Point>> contourVar, Mat cannyVar)
{
    vector<vector<Point>> contours = contourVar;
    Mat canny_output = cannyVar;
    vector<vector<Point>> contours_poly( contours.size() );
    vector<Rect> boundRect( contours.size() );
    vector<Point2f>centers( contours.size() );
    vector<float>radius( contours.size() );
    vector<Vec4i> hierarchy;
    vector<Point2f> mc(contours.size());
    vector<Moments> mu(contours.size());
    // Approximate contours, when contours are closed
    for( int i = 0; i < contours.size(); i++ )
    {
        approxPolyDP( contours[i], contours_poly[i], 3, true );
        // boundRect[i] = boundingRect( contours_poly[i] );
        // minEnclosingCircle( contours_poly[i], centers[i], radius[i] );
    }
    // Black Mat used to show the detected contours to the user.
    Mat drawing = Mat::zeros( canny_output.size(), CV_8UC3 );
    // get the moments of closed contours
    for( int i = 0; i<contours.size(); i++ ){
        mu[i] = moments( contours_poly[i], false );
    }
    //!!!!!!!!!!!!! check matchShapes() -> https://docs.opencv.org/master/d3/dc0/group__imgproc__shape.html#ga556a180f43cab22649c23ada36a8a139
    // Centroid of each contour (m10/m00, m01/m00); degenerate contours with
    // m00 == 0 produce NaN and are mapped to (0, 0), ignored by the grouping.
    for( int i = 0; i<contours.size(); i++){
        mc[i] = Point2f(round(mu[i].m10/mu[i].m00), round(mu[i].m01/mu[i].m00));
        if(isnan(mc[i].x)){
            // cout << mc[i] << endl;
            mc[i].x = 0;
            mc[i].y = 0;
        }
    }
    //Sort mc for grouping
    // NOTE(review): selectionSort takes its vector BY VALUE, so `mc` is not
    // actually sorted here — the grouping below runs on the unsorted order.
    int nCounter = contours.size();
    selectionSort(mc, nCounter);
    // Group centroids whose x differs by less than 2 pixels.
    // NOTE(review): `mcGroup` and `j` are accumulated into without being
    // initialised, and the loop reads mc[i + 1] one element past the end on
    // the last iteration.
    Point2f mcGroup[contours.size()];
    int j[contours.size()];
    int groupCounter = 0;
    for(int i = 0; i < contours.size(); i++){
        if(mc[i].x < (mc[i + 1].x + 2) && mc[i].x > (mc[i + 1].x - 2) && mc[i].x != 0){
            mcGroup[groupCounter] += mc[i + 1];
            j[groupCounter]+= 1;
        } else {
            groupCounter++;
        }
    }
    // To Do: make buffer 5 pixels wide
    // Average each group of 11..19 members into one candidate centre.
    Point2f totalGroup[contours.size()];
    for (int i = 0; i < groupCounter; i++) {
        if(j[i] > 10 && j[i] < 20){
            totalGroup[i].x = 0;
            totalGroup[i].y = 0;
            if(mcGroup[i].x != 0 || mcGroup[i].y != 0){
                totalGroup[i].x = round(mcGroup[i].x / j[i]);
                totalGroup[i].y = round(mcGroup[i].y / j[i]);
                // cout << "totalGroup = " << totalGroup[i] << "j[i] = " << j[i] << endl;
            }
        }
    }
    //bool target;
    Point2f targetGroup[contours.size()];
    int targetCounter = 0;
    for(int i = 0; i < groupCounter; i++){
        if(j[i] > 6 && totalGroup[i].y > 50){ // if group is > 6 and ignore top 50 rows pixels
            //target = true;
            targetGroup[targetCounter] = totalGroup[i];
            targetCounter++;
            // boundRect[i] = boundingRect( contours_poly[i] );
        } else{
            //target = false;
        }
    }
    static int sendPackage[4];
    // NOTE(review): reads targetGroup[targetCounter], one past the last
    // written element.
    for(int i = 0; i < (targetCounter +1); i++){
        if(targetGroup[i].x > 50){
            //cout << "Target confirmed! Center position = "<< targetGroup[i] << "Target [" << (i+1) <<"]" << endl;
        }
    }
    // Signed distance of each target from the image centre (x = 320).
    // NOTE(review): target1/target2 stay uninitialised when fewer targets
    // were found yet target2 is read unconditionally at the end (undefined
    // behaviour), and in the two-target case the second `else` assigns
    // target1 where target2 was clearly intended.
    int target1, target2;
    if(targetCounter > 1){
        if(targetGroup[0].x < 320){
            target1 = 320 - targetGroup[0].x;
        }
        else{
            target1 = (-1*(320 - targetGroup[0].x));
        }
        if(targetGroup[1].x < 320){
            target2 = 320 - targetGroup[1].x;
        }else{
            target1 = (-1*(320 - targetGroup[1].x));
        }
    }else if(targetCounter == 1){
        if(targetGroup[0].x < 320){
            target1 = 320 - targetGroup[0].x;
        }else{
            target1 = (-1*(320 - targetGroup[0].x));
        }
    }
    // draw contours
    for( int i = 0; i<contours.size(); i++ )
    {
        Scalar color = Scalar(167,151,0); // B G R values
        drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, Point());
        // rectangle( img, boundRect[i].tl(), boundRect[i].br(), color, 2 );
        circle( img, targetGroup[i], 4, color, -1, 8, 0 );
    }
    imshow( "Contours", drawing );
    // Prefer the target closest to the image centre.
    if(target2 == 0){
        return targetGroup[0];
    }else if(target2 < target1){
        return targetGroup[1];
    }else{
        return targetGroup[0];
    }
}
//Swap function for arrays
// Swap the two floats addressed by xp and yp.
// BUG FIX: the temporary was declared `int`, which silently truncated the
// fractional part of *xp on every swap; it is now a float.
void swap(float *xp, float *yp)
{
    float temp = *xp;
    *xp = *yp;
    *yp = temp;
}
//sorting vector<point2f> on x value
// Selection-sort a list of points by ascending x (each point's y follows its x).
//
// NOTE(review): `arr` is taken BY VALUE, so only the local copy is sorted
// and the caller's vector is left untouched — yet the call site in
// thresh_callback relies on `mc` being sorted afterwards. Passing by
// reference would fix this, but the forward declaration near the top of the
// file must be changed in the same commit.
// NOTE(review): the element swap goes through swap(float*, float*), whose
// int temporary truncated fractional coordinates in the original version.
void selectionSort(vector<Point2f> arr, int n)
{
    int i, j, min_idx;
    // One by one move boundary of unsorted subarray
    for (i = 0; i < n-1; i++)
    {
        // Find the minimum element in unsorted array
        min_idx = i;
        for (j = i+1; j < n; j++)
            if (arr[j].x < arr[min_idx].x)
                min_idx = j;
        // Swap the found minimum element with the first element
        swap(&arr[min_idx].x, &arr[i].x);
        swap(&arr[min_idx].y, &arr[i].y);
    }
}
<file_sep># CMAKE generated file: DO NOT EDIT!
# Generated by "Unix Makefiles" Generator, CMake Version 3.16
# Default target executed when no arguments are given to make.
default_target: all
.PHONY : default_target
# Allow only one "make -f Makefile2" at a time, but pass parallelism.
.NOTPARALLEL:
#=============================================================================
# Special targets provided by cmake.
# Disable implicit rules so canonical targets will work.
.SUFFIXES:
# Remove some rules from gmake that .SUFFIXES does not remove.
SUFFIXES =
.SUFFIXES: .hpux_make_needs_suffix_list
# Suppress display of executed commands.
$(VERBOSE).SILENT:
# A target that is always out of date.
cmake_force:
.PHONY : cmake_force
#=============================================================================
# Set environment variables for the build.
# The shell in which to execute make rules.
SHELL = /bin/sh
# The CMake executable.
CMAKE_COMMAND = /usr/bin/cmake
# The command to remove a file.
RM = /usr/bin/cmake -E remove -f
# Escaping for special characters.
EQUALS = =
# The top-level source directory on which CMake was run.
CMAKE_SOURCE_DIR = /home/pi/RobotRPiArduino
# The top-level build directory on which CMake was run.
CMAKE_BINARY_DIR = /home/pi/RobotRPiArduino
#=============================================================================
# Targets provided globally by CMake.
# Special rule for the target rebuild_cache
rebuild_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..."
/usr/bin/cmake -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)
.PHONY : rebuild_cache
# Special rule for the target rebuild_cache
rebuild_cache/fast: rebuild_cache
.PHONY : rebuild_cache/fast
# Special rule for the target edit_cache
edit_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..."
/usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available.
.PHONY : edit_cache
# Special rule for the target edit_cache
edit_cache/fast: edit_cache
.PHONY : edit_cache/fast
# The main all target
all: cmake_check_build_system
$(CMAKE_COMMAND) -E cmake_progress_start /home/pi/RobotRPiArduino/CMakeFiles /home/pi/RobotRPiArduino/CMakeFiles/progress.marks
$(MAKE) -f CMakeFiles/Makefile2 all
$(CMAKE_COMMAND) -E cmake_progress_start /home/pi/RobotRPiArduino/CMakeFiles 0
.PHONY : all
# The main clean target
clean:
$(MAKE) -f CMakeFiles/Makefile2 clean
.PHONY : clean
# The main clean target
clean/fast: clean
.PHONY : clean/fast
# Prepare targets for installation.
preinstall: all
$(MAKE) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall
# Prepare targets for installation.
preinstall/fast:
$(MAKE) -f CMakeFiles/Makefile2 preinstall
.PHONY : preinstall/fast
# clear depends
depend:
$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1
.PHONY : depend
#=============================================================================
# Target rules for targets named DisplayImage
# Build rule for target.
DisplayImage: cmake_check_build_system
$(MAKE) -f CMakeFiles/Makefile2 DisplayImage
.PHONY : DisplayImage
# fast build rule for target.
DisplayImage/fast:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/build
.PHONY : DisplayImage/fast
main.o: main.cpp.o
.PHONY : main.o
# target to build an object file
main.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/main.cpp.o
.PHONY : main.cpp.o
main.i: main.cpp.i
.PHONY : main.i
# target to preprocess a source file
main.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/main.cpp.i
.PHONY : main.cpp.i
main.s: main.cpp.s
.PHONY : main.s
# target to generate assembly for a file
main.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/main.cpp.s
.PHONY : main.cpp.s
src/DataCamera.o: src/DataCamera.cpp.o
.PHONY : src/DataCamera.o
# target to build an object file
src/DataCamera.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DataCamera.cpp.o
.PHONY : src/DataCamera.cpp.o
src/DataCamera.i: src/DataCamera.cpp.i
.PHONY : src/DataCamera.i
# target to preprocess a source file
src/DataCamera.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DataCamera.cpp.i
.PHONY : src/DataCamera.cpp.i
src/DataCamera.s: src/DataCamera.cpp.s
.PHONY : src/DataCamera.s
# target to generate assembly for a file
src/DataCamera.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DataCamera.cpp.s
.PHONY : src/DataCamera.cpp.s
src/DistanceSensor.o: src/DistanceSensor.cpp.o
.PHONY : src/DistanceSensor.o
# target to build an object file
src/DistanceSensor.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DistanceSensor.cpp.o
.PHONY : src/DistanceSensor.cpp.o
src/DistanceSensor.i: src/DistanceSensor.cpp.i
.PHONY : src/DistanceSensor.i
# target to preprocess a source file
src/DistanceSensor.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DistanceSensor.cpp.i
.PHONY : src/DistanceSensor.cpp.i
src/DistanceSensor.s: src/DistanceSensor.cpp.s
.PHONY : src/DistanceSensor.s
# target to generate assembly for a file
src/DistanceSensor.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/DistanceSensor.cpp.s
.PHONY : src/DistanceSensor.cpp.s
src/I2C.o: src/I2C.cpp.o
.PHONY : src/I2C.o
# target to build an object file
src/I2C.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/I2C.cpp.o
.PHONY : src/I2C.cpp.o
src/I2C.i: src/I2C.cpp.i
.PHONY : src/I2C.i
# target to preprocess a source file
src/I2C.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/I2C.cpp.i
.PHONY : src/I2C.cpp.i
src/I2C.s: src/I2C.cpp.s
.PHONY : src/I2C.s
# target to generate assembly for a file
src/I2C.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/I2C.cpp.s
.PHONY : src/I2C.cpp.s
src/Location.o: src/Location.cpp.o
.PHONY : src/Location.o
# target to build an object file
src/Location.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Location.cpp.o
.PHONY : src/Location.cpp.o
src/Location.i: src/Location.cpp.i
.PHONY : src/Location.i
# target to preprocess a source file
src/Location.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Location.cpp.i
.PHONY : src/Location.cpp.i
src/Location.s: src/Location.cpp.s
.PHONY : src/Location.s
# target to generate assembly for a file
src/Location.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Location.cpp.s
.PHONY : src/Location.cpp.s
src/Map.o: src/Map.cpp.o
.PHONY : src/Map.o
# target to build an object file
src/Map.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Map.cpp.o
.PHONY : src/Map.cpp.o
src/Map.i: src/Map.cpp.i
.PHONY : src/Map.i
# target to preprocess a source file
src/Map.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Map.cpp.i
.PHONY : src/Map.cpp.i
src/Map.s: src/Map.cpp.s
.PHONY : src/Map.s
# target to generate assembly for a file
src/Map.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Map.cpp.s
.PHONY : src/Map.cpp.s
src/Motor.o: src/Motor.cpp.o
.PHONY : src/Motor.o
# target to build an object file
src/Motor.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Motor.cpp.o
.PHONY : src/Motor.cpp.o
src/Motor.i: src/Motor.cpp.i
.PHONY : src/Motor.i
# target to preprocess a source file
src/Motor.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Motor.cpp.i
.PHONY : src/Motor.cpp.i
src/Motor.s: src/Motor.cpp.s
.PHONY : src/Motor.s
# target to generate assembly for a file
src/Motor.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Motor.cpp.s
.PHONY : src/Motor.cpp.s
src/Route.o: src/Route.cpp.o
.PHONY : src/Route.o
# target to build an object file
src/Route.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Route.cpp.o
.PHONY : src/Route.cpp.o
src/Route.i: src/Route.cpp.i
.PHONY : src/Route.i
# target to preprocess a source file
src/Route.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Route.cpp.i
.PHONY : src/Route.cpp.i
src/Route.s: src/Route.cpp.s
.PHONY : src/Route.s
# target to generate assembly for a file
src/Route.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Route.cpp.s
.PHONY : src/Route.cpp.s
src/Target.o: src/Target.cpp.o
.PHONY : src/Target.o
# target to build an object file
src/Target.cpp.o:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Target.cpp.o
.PHONY : src/Target.cpp.o
src/Target.i: src/Target.cpp.i
.PHONY : src/Target.i
# target to preprocess a source file
src/Target.cpp.i:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Target.cpp.i
.PHONY : src/Target.cpp.i
src/Target.s: src/Target.cpp.s
.PHONY : src/Target.s
# target to generate assembly for a file
src/Target.cpp.s:
$(MAKE) -f CMakeFiles/DisplayImage.dir/build.make CMakeFiles/DisplayImage.dir/src/Target.cpp.s
.PHONY : src/Target.cpp.s
# Help Target
help:
@echo "The following are some of the valid targets for this Makefile:"
@echo "... all (the default if no target is provided)"
@echo "... clean"
@echo "... depend"
@echo "... rebuild_cache"
@echo "... DisplayImage"
@echo "... edit_cache"
@echo "... main.o"
@echo "... main.i"
@echo "... main.s"
@echo "... src/DataCamera.o"
@echo "... src/DataCamera.i"
@echo "... src/DataCamera.s"
@echo "... src/DistanceSensor.o"
@echo "... src/DistanceSensor.i"
@echo "... src/DistanceSensor.s"
@echo "... src/I2C.o"
@echo "... src/I2C.i"
@echo "... src/I2C.s"
@echo "... src/Location.o"
@echo "... src/Location.i"
@echo "... src/Location.s"
@echo "... src/Map.o"
@echo "... src/Map.i"
@echo "... src/Map.s"
@echo "... src/Motor.o"
@echo "... src/Motor.i"
@echo "... src/Motor.s"
@echo "... src/Route.o"
@echo "... src/Route.i"
@echo "... src/Route.s"
@echo "... src/Target.o"
@echo "... src/Target.i"
@echo "... src/Target.s"
.PHONY : help
#=============================================================================
# Special targets to cleanup operation of make.
# Special rule to run CMake to check the build system integrity.
# No rule that depends on this can have commands that come from listfiles
# because they might be regenerated.
cmake_check_build_system:
$(CMAKE_COMMAND) -S$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0
.PHONY : cmake_check_build_system
<file_sep>#ifndef DistanceSensor_H
#define DistanceSensor_H // BUG FIX: was DistanceSensor_h (lower-case h), which never matched the #ifndef above, so the include guard was ineffective against double inclusion
#include "enumVar.h"
#include "I2c.h"
// Wrapper around the three distance sensors read over I2C.
class DistanceSensor{
private:
    I2C i2c;
    // Latest readings; index meaning is set by ReadDistanceValue
    // (presumably 0/1/2 = left/front/right — confirm against the firmware).
    int distance[3];
public:
    int* GetDistance();
    void SetDistance(int d, int i);
    void ReadDistanceValue();
};
#endif<file_sep># The set of languages for which implicit dependencies are needed:
set(CMAKE_DEPENDS_LANGUAGES
"CXX"
)
# The set of files for implicit dependencies of each language:
set(CMAKE_DEPENDS_CHECK_CXX
"/home/pi/RobotRPiArduino/main.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/main.cpp.o"
"/home/pi/RobotRPiArduino/src/DataCamera.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/DataCamera.cpp.o"
"/home/pi/RobotRPiArduino/src/DistanceSensor.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/DistanceSensor.cpp.o"
"/home/pi/RobotRPiArduino/src/I2C.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/I2C.cpp.o"
"/home/pi/RobotRPiArduino/src/Location.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/Location.cpp.o"
"/home/pi/RobotRPiArduino/src/Map.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/Map.cpp.o"
"/home/pi/RobotRPiArduino/src/Motor.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/Motor.cpp.o"
"/home/pi/RobotRPiArduino/src/Route.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/Route.cpp.o"
"/home/pi/RobotRPiArduino/src/Target.cpp" "/home/pi/RobotRPiArduino/CMakeFiles/DisplayImage.dir/src/Target.cpp.o"
)
set(CMAKE_CXX_COMPILER_ID "GNU")
# The include file search paths:
set(CMAKE_CXX_TARGET_INCLUDE_PATH
"/usr/include/opencv"
)
# Targets to which this target links.
set(CMAKE_TARGET_LINKED_INFO_FILES
)
# Fortran module output directory.
set(CMAKE_Fortran_TARGET_MODULE_DIR "")
<file_sep>#include "../header/Motor.h"
// Start the robot at cell (0, 0), facing EAST, with the route step counter
// reset.
Motor::Motor(){
    direction = EAST;
    counter = 0;
    currentLocation.SetLocation(0, 0);
}
// Send one drive command (a DirDrive value) to the motor controller over
// I2C, opening and closing the bus around the single write.
void Motor::Drive(DirDrive dir){
    i2c.OpenBus();
    i2c.WriteBytes(dir);
    i2c.CloseBus();
}
// Advance one tile in the direction the robot is facing, or turn right when
// blocked. `dir` is the current heading; `distanceArr` holds the three
// distance readings, index 1 being the front sensor.
//
// BUG FIX: after Drive(RIGHT90) the stored heading must be what a clockwise
// quarter turn actually produces. The EAST and SOUTH branches already follow
// that convention (EAST->SOUTH, SOUTH->WEST); the NORTH and WEST branches
// wrongly stored WEST and are corrected to EAST and NORTH respectively.
void Motor::CalculateCurrentLocation(DirNouse dir, int* distanceArr){
    int newX = *(GetCurrentLocation());
    int newY = *(GetCurrentLocation()+1);
    std::cout << "newX, newY: " << +newX<< +newY << std::endl;
    std::cout << "Dir is: " << dir << std::endl;
    if(dir == NORTH){
        // Blocked in front (wall closer than a tile) or at the top edge.
        if(*(distanceArr+1) < TILE_SIZE-5 || newY == 0){
            Drive(RIGHT90);
            direction = EAST; // right turn from NORTH faces EAST (was WEST)
        }else{
            Drive(FORWARD);
            currentLocation.SetLocation(newX, newY-1);
        }
    }
    else if(dir == EAST){
        if(*(distanceArr+1)< TILE_SIZE-5 || newX == WIDTH-1){
            Drive(RIGHT90);
            direction = SOUTH;
        }else{
            Drive(FORWARD);
            currentLocation.SetLocation(newX+1, newY);
        }
    }
    else if(dir == SOUTH){
        if(*(distanceArr+1)< TILE_SIZE-5 || newY == HEIGHT-1){
            Drive(RIGHT90);
            direction = WEST;
        }else{
            Drive(FORWARD);
            currentLocation.SetLocation(newX, newY+1);
        }
    }
    else if(dir == WEST){
        if(*(distanceArr+1)< TILE_SIZE-5 || newX == 0){
            Drive(RIGHT90);
            direction = NORTH; // right turn from WEST faces NORTH (was WEST)
        }else{
            Drive(FORWARD);
            currentLocation.SetLocation(newX-1, newY);
        }
    }
    //small correction v1
    /*if(*distanceArr < 5){
        Drive(RIGHT);
    }
    if(*(distanceArr+2) < 5){
        Drive(LEFT);
    }*/
}
// Drive the robot along `array`, a flat route of (x, y) cell pairs with
// `size` ints in total, turning as needed before each one-tile move and
// updating the tracked pose after every step.
//
// NOTE(review): `counter` is a member variable, so the loop position is
// shared with the rest of Motor.
// NOTE(review): TURN360 is issued where a 180-degree about-face is needed;
// the command name suggests a full turn — confirm against the firmware.
// NOTE(review): in the newY < oldY case while facing SOUTH the robot drives
// BACK without turning, yet `direction` is set to NORTH afterwards even
// though the robot still physically faces SOUTH.
void Motor::CalculateCurrentLocationWithRoute(int *array, int size){
    int newX, newY;
    int oldX;
    int oldY;
    for(counter = 0;counter<size;counter+=2){
        newX = *(array+counter);
        newY = *(array+counter+1);
        oldX = *(GetCurrentLocation());
        oldY = *(GetCurrentLocation()+1);
        /* start::
        ------------------------------------------
        | > |
        | |
        | \/ |
        | |
        | |
        | |
        | |
        -------------------------------------------
        */
        if(newX > oldX){// next cell lies to the EAST
            if(direction == NORTH){
                Drive(RIGHT90);
                Drive(FORWARD);
            }
            if(direction == EAST){
                Drive(FORWARD);
            }
            if(direction == SOUTH){
                Drive(LEFT90);
                Drive(FORWARD);
            }
            if(direction == WEST){
                Drive(TURN360);
                Drive(FORWARD);
            }
            direction = EAST;
        }
        if(newY > oldY){// next cell lies to the SOUTH
            if(direction == NORTH){
                Drive(TURN360);
                Drive(FORWARD);
            }
            if(direction == EAST){
                Drive(RIGHT90);
                Drive(FORWARD);
            }
            if(direction == SOUTH){
                Drive(FORWARD);
            }
            if(direction == WEST){
                Drive(LEFT90);
                Drive(FORWARD);
            }
            direction = SOUTH;
        }
        if(newX < oldX){// next cell lies to the WEST
            if(direction == NORTH){
                Drive(LEFT90);
                Drive(FORWARD);
            }
            if(direction == EAST){
                Drive(TURN360);
                Drive(FORWARD);
            }
            if(direction == SOUTH){
                Drive(RIGHT90);
                Drive(FORWARD);
            }
            if(direction == WEST){
                Drive(FORWARD);
            }
            direction = WEST;
        }
        if(newY < oldY){// next cell lies to the NORTH
            if(direction == NORTH){
                Drive(FORWARD);
            }
            if(direction == EAST){
                Drive(LEFT90);
                Drive(FORWARD);
            }
            if(direction == SOUTH){
                Drive(BACK);
            }
            if(direction == WEST){
                Drive(RIGHT90);
                Drive(FORWARD);
            }
            direction = NORTH;
        }
        currentLocation.SetLocation(newX, newY);
        sleep(1);
    }
}
// Write the robot's (x, y) cell into the member scratch buffer and return
// it: element 0 = x, element 1 = y. Valid until the next call.
int* Motor::GetCurrentLocation(){
    arr[1] = currentLocation.GetLocationY();
    arr[0] = currentLocation.GetLocationX();
    return arr;
}
// Overwrite the tracked grid position (used by the MANUAL debug mode in main.cpp).
void Motor::SetCurrentLocation(int x, int y){
    currentLocation.SetLocation(x,y);
}
// Heading the robot is currently tracked as facing.
DirNouse Motor::GetCurrentDirection(){
    return direction;
}
// Overwrite the tracked heading (used by the MANUAL debug mode in main.cpp).
void Motor::SetCurrentDirection(DirNouse dir){
    direction = dir;
}<file_sep>#include "../header/DataCamera.h"
// Store the target's pixel offset from the image centre.
void DataCamera::SetOffset(int s){
    offset = s;
}
// Last stored pixel offset of the target from the image centre.
int DataCamera::GetOffset(){
    return offset;
}
// Record whether the target has been hit.
void DataCamera::SetHit(bool h){
    hit = h;
}
// Last stored hit flag.
bool DataCamera::GetHit(){
    return hit;
}
| 63a48a7c0b2bb192fae65188d7d9c3af869cd8f9 | [
"C",
"CMake",
"Makefile",
"C++"
] | 12 | C++ | olavmulder/RobotRPiArduino | b3efe00842a9ff417446151cc19c489a07aa5c87 | f08e59d92b31aebe8b8915b35056dace86d196fd |
refs/heads/master | <repo_name>jersi2695/checkout-microservices<file_sep>/commons-service/src/main/java/com/hiberus/commons/entities/logistic/SentOrder.java
package com.hiberus.commons.entities.logistic;
import java.io.Serializable;
import java.time.LocalDate;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "sent_orders")
public class SentOrder implements Serializable{
private static final int DAYS_DISPATCH = 4;
private static final long serialVersionUID = -2816196505856099344L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private long id;
private long orderId;
private LocalDate dateCreated = LocalDate.now();
private LocalDate dateDispatch = LocalDate.now().plusDays(DAYS_DISPATCH);
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public long getOrderId() {
return orderId;
}
public void setOrderId(long orderId) {
this.orderId = orderId;
}
public LocalDate getDateCreated() {
return dateCreated;
}
public void setDateCreated(LocalDate dateCreated) {
this.dateCreated = dateCreated;
}
public LocalDate getDateDispatch() {
return dateDispatch;
}
public void setDateDispatch(LocalDate dateDispatch) {
this.dateDispatch = dateDispatch;
}
}
<file_sep>/bill-service/src/main/java/com/hiberus/bill/services/BillService.java
package com.hiberus.bill.services;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.hiberus.bill.dao.BillDao;
import com.hiberus.bill.exceptions.BillServiceException;
import com.hiberus.commons.entities.bill.Bill;
import com.hiberus.commons.entities.order.Order;
/**
 * Service that turns an {@link Order} into a persisted {@link Bill}.
 */
@Service
public class BillService implements IBillService {

	@Autowired
	private BillDao billDao;

	/**
	 * Creates and persists a bill for the given order. The total is the sum
	 * of cost * quantity over the order's products.
	 *
	 * @param order the order to bill
	 * @return the persisted bill
	 * @throws BillServiceException if the total could not be computed or the
	 *         bill could not be saved; the exception carries the order id
	 *         (0 when the order itself was null)
	 */
	@Override
	public Bill createBill(Order order) throws BillServiceException{
		try {
			double total = order.getProducts().stream().map(x -> x.getCost() * x.getQuantity()).reduce(0.0, Double::sum);
			Bill bill = new Bill();
			bill.setOrderId(order.getId());
			bill.setTotal(total);
			return billDao.save(bill);
		}catch (Exception e) {
			throw new BillServiceException(e.getMessage(), order != null ? order.getId() : 0);
		}
	}

	/**
	 * Looks up the bill stored for the given order id; the behaviour for an
	 * unknown id is whatever BillDao returns (presumably null — confirm).
	 */
	public Bill findByOrderId(long orderId) {
		return billDao.findByOrderId(orderId);
	} // stray ';' that followed this method body removed
}
<file_sep>/README.md
# Microservices Test
## Installation
Move to the folder `/commons-service` and execute `mvn install`.
Then, go to each service folder and execute `mvn clean package` to create a jar and a Docker image for the service with the help of the dockerfile-maven-plugin.
After building the projects and creating the images, use the `docker-compose.yml` file in the root of the repository and execute `docker-compose up` to start the containers.
You can see the documentation for the checkout service in this url `http://localhost:8090/checkout/swagger-ui.html`
## Design
The solution has 4 main microservices:
- `order-service` stores the order and the products associated with it.
- `bill-service` creates a bill from an order and calculates the total for the order.
- `logistic-service` creates a sent order, storing the orderId and calculating the dispatch date — by default 4 days after the sent order is created.
- `checkout-service` manages all the operations for checkout, and uses the other microservices to execute the business logic.
There are two more general services:
- `eureka-server` works as a naming and discovery server, allowing services to communicate without knowing each other's exact URLs.
- `zuul-server` works as a gateway; it exposes the `checkout-service` publicly while keeping the other services hidden.
<file_sep>/checkout-service/src/main/java/com/hiberus/checkout/clients/IOrderClientRest.java
package com.hiberus.checkout.clients;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import com.hiberus.commons.entities.order.Order;
/**
 * Feign client for the order microservice, resolved through Eureka by its
 * registered name "order-service".
 */
@FeignClient(name = "order-service")
public interface IOrderClientRest {
    /** Persists a new order via POST /api/create and returns it with its generated id. */
    @PostMapping("/api/create")
    public Order create(@RequestBody Order order);
}
<file_sep>/order-service/src/main/java/com/hiberus/order/controller/OrderController.java
package com.hiberus.order.controller;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PatchMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ResponseStatusException;

import com.hiberus.commons.entities.order.Order;
import com.hiberus.order.services.IOrderService;

import io.swagger.annotations.Api;
@RestController
@RequestMapping("api")
@Api(value = "Orders microservice", description = "This API has a CRUD for orders")
public class OrderController {

    @Autowired
    private IOrderService orderService;

    /** Lists every stored order. */
    @GetMapping("/list")
    public List<Order> list() {
        return orderService.findAll();
    }

    /**
     * Returns the order with the given id.
     * Responds 404 for an unknown id; the previous bare {@code Optional.get()}
     * threw NoSuchElementException and surfaced as a 500.
     */
    @GetMapping("/detail/{id}")
    public Order detail(@PathVariable(name = "id") Long id) {
        return orderService.findById(id)
                .orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));
    }

    /** Persists a new order; responds 201 with the stored entity. */
    @PostMapping("/create")
    @ResponseStatus(HttpStatus.CREATED)
    public Order create(@RequestBody Order order) {
        return orderService.save(order);
    }

    /** Deletes an order; responds 204 regardless of prior existence. */
    @DeleteMapping("/delete/{id}")
    @ResponseStatus(HttpStatus.NO_CONTENT)
    public void delete(@PathVariable Long id) {
        orderService.delete(id);
    }

    /**
     * Updates only the status of an existing order; 404 when the id is
     * unknown. The 201 response code is kept for backward compatibility.
     */
    @PatchMapping("/edit/{id}")
    @ResponseStatus(HttpStatus.CREATED)
    public Order edit(@RequestBody Order order, @PathVariable Long id) {
        Order oldOrder = orderService.findById(id)
                .orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));
        oldOrder.setStatus(order.getStatus());
        return orderService.save(oldOrder);
    }
}
<file_sep>/checkout-service/src/main/java/com/hiberus/checkout/exception/CheckoutServiceException.java
package com.hiberus.checkout.exception;
import com.hiberus.commons.entities.order.Order;
/**
 * Raised when the checkout orchestration fails; carries the order that was
 * being processed so callers can report or compensate.
 */
public class CheckoutServiceException extends Exception {

    private static final long serialVersionUID = 9171781663495659633L;

    /** Immutable: the order is fixed at construction time. */
    private final Order order;

    public CheckoutServiceException(String errorMessage, Order order) {
        super(errorMessage);
        this.order = order;
    }

    public Order getOrder() {
        return order;
    }
}
<file_sep>/checkout-service/src/main/java/com/hiberus/checkout/clients/IBillClientRest.java
package com.hiberus.checkout.clients;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import com.hiberus.commons.entities.bill.Bill;
import com.hiberus.commons.entities.order.Order;
/**
 * Feign client for the bill microservice, resolved through Eureka by its
 * registered name "bill-service".
 */
@FeignClient(name = "bill-service")
public interface IBillClientRest {
    /** Asks the bill service to generate and persist a bill for the given order. */
    @PostMapping("/api/generate-bill")
    public Bill create(@RequestBody Order order);
}
<file_sep>/checkout-service/src/main/resources/application.properties
spring.application.name=checkout-service
server.port=${PORT:8003}
eureka.client.service-url.defaultZone=http://${EUREKA-SERVER:localhost}:8761/eureka
eureka.instance.instance-id=${spring.application.name}:${spring.application.instance_id:${random.value}}<file_sep>/logistic-service/src/main/resources/application.properties
spring.application.name=logistic-service
server.port=${PORT:8002}
eureka.client.service-url.defaultZone=http://${EUREKA-SERVER:localhost}:8761/eureka
eureka.instance.instance-id=${spring.application.name}:${spring.application.instance_id:${random.value}}
spring.h2.console.enabled=true
<file_sep>/bill-service/src/main/java/com/hiberus/bill/services/IBillService.java
package com.hiberus.bill.services;
import com.hiberus.bill.exceptions.BillServiceException;
import com.hiberus.commons.entities.bill.Bill;
import com.hiberus.commons.entities.order.Order;
public interface IBillService {
    /** Creates and persists a bill for the given order. */
    Bill createBill(Order order) throws BillServiceException;
    /** Returns the bill for an order id, or null when none exists (see BillController.detail). */
    Bill findByOrderId(long id);
}
<file_sep>/bill-service/src/main/java/com/hiberus/bill/exceptions/BillServiceException.java
package com.hiberus.bill.exceptions;
/**
 * Raised when bill creation fails; carries the id of the offending order
 * (0 when the order was unavailable).
 */
public class BillServiceException extends Exception {

    private static final long serialVersionUID = 7628904850295106428L;

    private long orderId;

    public BillServiceException(String errorMessage, long orderId) {
        super(errorMessage);
        this.orderId = orderId;
    }

    public long getOrderId() {
        return orderId;
    }

    public void setOrderId(long orderId) {
        this.orderId = orderId;
    }
}
<file_sep>/bill-service/src/main/java/com/hiberus/bill/controllers/BillController.java
package com.hiberus.bill.controllers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.hiberus.bill.services.IBillService;
import com.hiberus.commons.entities.bill.Bill;
import com.hiberus.commons.entities.order.Order;
import io.swagger.annotations.Api;
@RestController
@RequestMapping("api")
@Api(value = "Bill microservice", description = "This API generate a bill for an order")
public class BillController {

    @Autowired
    private IBillService billService;

    /**
     * Generates and persists a bill for the given order.
     *
     * Responds 201 on success and 500 on failure. The previous revision
     * annotated the method with {@code @ResponseStatus(CREATED)} but returned
     * {@code ResponseEntity.ok}, so clients actually received 200: the
     * ResponseEntity status overrides the annotation. The body now sets the
     * intended 201 explicitly.
     */
    @PostMapping("/generate-bill")
    public ResponseEntity<Bill> create(@RequestBody Order order) {
        try {
            return ResponseEntity.status(HttpStatus.CREATED).body(billService.createBill(order));
        } catch (Exception e) {
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }

    /** Fetches the bill for an order id: 200 with the bill, 404 if absent, 500 on error. */
    @GetMapping("/billByOrderId/{id}")
    public ResponseEntity<Bill> detail(@PathVariable(name = "id") Long id) {
        try {
            Bill bill = billService.findByOrderId(id);
            if (bill != null) {
                return ResponseEntity.ok(bill);
            }
            return ResponseEntity.status(HttpStatus.NOT_FOUND).build();
        } catch (Exception e) {
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
        }
    }
}
<file_sep>/checkout-service/src/main/java/com/hiberus/checkout/services/CheckoutService.java
package com.hiberus.checkout.services;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.hiberus.checkout.clients.IBillClientRest;
import com.hiberus.checkout.clients.ILogisticClientRest;
import com.hiberus.checkout.clients.IOrderClientRest;
import com.hiberus.checkout.exception.CheckoutServiceException;
import com.hiberus.checkout.wrapper.CheckoutWrapper;
import com.hiberus.commons.entities.bill.Bill;
import com.hiberus.commons.entities.logistic.SentOrder;
import com.hiberus.commons.entities.order.Order;
@Service
public class CheckoutService implements ICheckoutService{
    // Feign clients resolved by Eureka service name.
    @Autowired
    private IOrderClientRest orderClient;
    @Autowired
    private IBillClientRest billClient;
    @Autowired
    private ILogisticClientRest logisticClient;
    /**
     * Orchestrates a checkout: persists the order, generates its bill and
     * schedules the shipment, returning the three results together.
     *
     * NOTE(review): the three remote calls are not transactional -- if billing
     * or logistics fails after the order was created, the stored order is left
     * behind with no compensation. Confirm whether that is acceptable.
     *
     * @throws CheckoutServiceException wrapping any downstream failure; it
     *         carries the order in whatever state it had reached.
     */
    @Override
    public CheckoutWrapper checkoutOrder(Order order) throws CheckoutServiceException {
        try {
            order = orderClient.create(order);
            Bill bill = billClient.create(order);
            SentOrder sentOrder = new SentOrder();
            sentOrder.setOrderId(order.getId());
            sentOrder = logisticClient.createSentOrder(sentOrder);
            return new CheckoutWrapper(order, bill, sentOrder);
        }catch (Exception e) {
            throw new CheckoutServiceException(e.getMessage(), order);
        }
    }
}
<file_sep>/order-service/src/main/resources/application.properties
spring.application.name=order-service
server.port=${PORT:8000}
eureka.client.service-url.defaultZone=http://${EUREKA-SERVER:localhost}:8761/eureka
eureka.instance.instance-id=${spring.application.name}:${spring.application.instance_id:${random.value}}
spring.h2.console.enabled=true<file_sep>/commons-service/src/main/java/com/hiberus/commons/entities/bill/Bill.java
package com.hiberus.commons.entities.bill;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity persisted in table {@code bills}: the invoice generated for an
 * order, holding the computed total.
 */
@Entity
@Table(name = "bills")
public class Bill implements Serializable{
    private static final long serialVersionUID = 3744785053498090692L;
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private long id;
    /** Id of the billed order (cross-service reference, no FK). */
    private long orderId;
    /** Sum of cost * quantity over the order's products (see BillService). */
    private double total;
    public long getId() {
        return id;
    }
    public void setId(long id) {
        this.id = id;
    }
    public long getOrderId() {
        return orderId;
    }
    public void setOrderId(long orderId) {
        this.orderId = orderId;
    }
    public double getTotal() {
        return total;
    }
    public void setTotal(double total) {
        this.total = total;
    }
}
<file_sep>/zuul-server/src/main/resources/application.properties
spring.application.name=zuul-server
server.port=8090
eureka.client.service-url.defaultZone=http://${EUREKA-SERVER:localhost}:8761/eureka
zuul.routes.checkout.service-id=checkout-service
zuul.routes.checkout.path=/checkout/**
| fe17671a264157d94bbafcd6eac9255e3dfea56d | [
"Markdown",
"Java",
"INI"
] | 16 | Java | jersi2695/checkout-microservices | f427d4a257c12b26e145e973d9df1c7c65cf12ab | 82fda97f15e5073f9e7cbbc837148dab2abcfb64 |
refs/heads/master | <repo_name>HACS-workshop/hacspec<file_sep>/tests/vrf_test.py
from specs.vrf import *
from sys import exit
def main(x: int) -> None:
    """Smoke test: prove and then verify a VRF proof for the empty message."""
    secret_key = bytes.from_hex('9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60')
    public_key = ECP2OS(point_mul(secret_key, g_ed25519))
    message = bytes.from_hex('')
    nonce = felem(nat(896921))
    proof = ECVRF_prove(message, public_key, secret_key, nonce)
    if ECVRF_verify(public_key, proof, message):
        print('VRF Test verification succeeded')
    else:
        print('VRF Test verification failed')
        exit(1)

main(0)
<file_sep>/specs/elligator2.py
from curve25519 import *
from speclib import *
from sha2 import sha256
# Montgomery form of Curve25519: b*y^2 = x^3 + a*x^2 + x, with a = 486662.
a25519 = felem(486662)
b25519 = felem(1)
# u: fixed non-square element of GF(2^255-19) required by the Elligator 2 map
# (2 is a non-residue since p = 5 mod 8).
u25519 = felem(2)
def f_25519(x:felem_t) -> felem_t:
    """Right-hand side of the Montgomery curve equation: x^3 + a*x^2 + x."""
    x_squared = fsqr(x)
    cubic_term = fmul(x, x_squared)
    quadratic_term = fmul(a25519, x_squared)
    return fadd(cubic_term, fadd(quadratic_term, x))
def hash2curve25519(alpha:vlbytes) -> point_t :
    # Elligator 2-style map: hash alpha with SHA-256 and map the digest to a
    # field element intended as an x-coordinate on Curve25519.
    # NOTE(review): the annotation says point_t, but the function returns a
    # single field element (the x-coordinate only) -- confirm intended type.
    r = felem(bytes.to_nat_be(sha256(alpha)))
    # d = -A / (1 + u*r^2): the candidate x-coordinate.
    d = felem(p25519 - fmul(a25519,finv(fadd(1,fmul(u25519,fsqr(r))))))
    # Euler criterion: e == 1 iff f(d) is a non-zero quadratic residue.
    e = fexp(f_25519(d),((p25519 - 1)//2))
    if e != 1:
        # NOTE(review): the Elligator 2 reference uses -d - A in the
        # non-residue branch; this computes d - A. Confirm against the spec.
        return fsub(d,a25519)
    else:
        return d
<file_sep>/specs/frodo.py
from lib.speclib import *
from specs.sha3 import *
from math import floor
from specs.aes import aes128_encrypt_block
variant_gen_t = refine(str, lambda x: x == 'AES128' or x == 'CSHAKE128')
variant_frodo_kem_t = refine(str, lambda x: x == 'FrodoKEM-640' or x == 'FrodoKEM-976')
@typechecked
def cshake128_frodo(input_b:bytes_t, cstm:uint16_t, outputByteLen:nat) -> \
        refine(vlbytes_t, lambda x: array.length(x) == outputByteLen):
    # Domain-separated cSHAKE128 as used by FrodoKEM: the 16-bit customization
    # value cstm is folded into the first Keccak state word before absorbing.
    inputByteLen = array.length(input_b)
    s = array.create(25, uint64(0))
    # Precomputed encoding of the cSHAKE128 prefix, combined with cstm.
    s[0] = uint64(0x10010001a801) | (uint64(cstm) << 48)
    s = state_permute(s)
    # Rate 168 bytes = SHAKE128; 0x04 is the cSHAKE domain-separation suffix.
    s = absorb(s, 168, inputByteLen, input_b, uint8(0x04))
    output = squeeze(s, 168, outputByteLen)
    return output
@typechecked
def cshake256_frodo(input_b:bytes_t, cstm:uint16_t, outputByteLen:nat) -> \
        refine(vlbytes_t, lambda x: array.length(x) == outputByteLen):
    # Domain-separated cSHAKE256 (rate 136) counterpart of cshake128_frodo,
    # used by the FrodoKEM-976 parameter set.
    inputByteLen = array.length(input_b)
    s = array.create(25, uint64(0))
    # Precomputed encoding of the cSHAKE256 prefix, combined with cstm.
    s[0] = uint64(0x100100018801) | (uint64(cstm) << 48)
    s = state_permute(s)
    # Rate 136 bytes = SHAKE256; 0x04 is the cSHAKE domain-separation suffix.
    s = absorb(s, 136, inputByteLen, input_b, uint8(0x04))
    output = squeeze(s, 136, outputByteLen)
    return output
@typechecked
def Frodo(frodo_kem:variant_frodo_kem_t, gen_a:variant_gen_t):
    # Instantiates FrodoKEM for one parameter set and returns the triple
    # (crypto_kem_keypair, crypto_kem_enc, crypto_kem_dec). gen_a selects how
    # the public matrix A is expanded: 'AES128' or 'CSHAKE128'.
    if (frodo_kem == 'FrodoKEM-640'):
        params_n = 640
        params_logq = 15
        params_extracted_bits = 2
        crypto_bytes = 16
        cdf_table_len = 12
        # Cumulative distribution table for the discrete error sampler.
        cdf_table = array([4727, 13584, 20864, 26113, 29434, 31278, 32176, 32560, 32704, 32751, 32764, 32767])
        cshake_frodo = cshake128_frodo
    else:
        params_n = 976
        params_logq = 16
        params_extracted_bits = 3
        crypto_bytes = 24
        cdf_table_len = 11
        cdf_table = array([5638, 15915, 23689, 28571, 31116, 32217, 32613, 32731, 32760, 32766, 32767])
        cshake_frodo = cshake256_frodo
    bytes_seed_a = 16
    params_nbar = 8
    params_q = 2 ** params_logq
    # Bytes of plaintext carried by one encapsulation.
    bytes_mu = (params_extracted_bits * params_nbar * params_nbar) // 8
    crypto_publickeybytes = bytes_seed_a + (params_logq * params_n * params_nbar) // 8
    crypto_secretkeybytes = crypto_bytes + crypto_publickeybytes #+2 * params_n * params_nbar
    crypto_ciphertextbytes = ((params_nbar * params_n + params_nbar * params_nbar) * params_logq) // 8 + crypto_bytes
    # Elements of Z_q.
    zqelem_t = natmod_t(params_q)
    @typechecked
    def zqelem(n:nat):
        return natmod(n, params_q)
    zqmatrix_t = matrix_t(zqelem_t, params_n, params_n)
    @typechecked
    def frodo_key_encode(a:bytes_t, b:nat) -> zqmatrix_t:
        # Encodes b-bit chunks of a into an nbar x nbar matrix, scaling each
        # chunk into the top bits of Z_q (multiply by q / 2^b).
        a = bytes.to_uintn_le(a)
        res = matrix.create(params_nbar, params_nbar, zqelem(0))
        for i in range(params_nbar):
            for j in range(params_nbar):
                k = uintn.to_int(a[(i*params_nbar+j)*b:(i*params_nbar+j+1)*b])
                res[i][j] = zqelem(k * (params_q // (2 ** b)))
        return res
    @typechecked
    def frodo_key_decode(a:zqmatrix_t, b:nat) -> bytes_t:
        # Inverse of frodo_key_encode: rounds each matrix entry back to the
        # nearest b-bit value.
        res = uintn(0, params_nbar*params_nbar*b)
        for i in range(params_nbar):
            for j in range(params_nbar):
                k = floor(natmod.to_int(a[i][j]) * (2 ** b) / params_q + 1/2)
                res = uintn.set_bits(res,(i*params_nbar+j)*b,(i*params_nbar+j+1)*b, uintn(k, b))
        return bytes.from_uintn_le(res)
    @typechecked
    def frodo_pack(n1:nat, n2:nat, a:zqmatrix_t, d:nat) -> bytes_t:
        # Serializes an n1 x n2 matrix at d bits per entry; the bit-reversals
        # fix the endianness of each packed field.
        res = uintn(0, n1*n2*d)
        for i in range(n1):
            for j in range(n2):
                res = uintn.set_bits(res,(i*n2+j)*d,(i*n2+j+1)*d, uintn.reverse(uintn(a[i][j], d)))
        return bytes.from_uintn_be(uintn.reverse(res))
    @typechecked
    def frodo_unpack(n1:nat, n2:nat, b:bytes_t, d:nat) -> zqmatrix_t:
        # Inverse of frodo_pack.
        b = uintn.reverse(bytes.to_uintn_be(b))
        res = matrix.create(n1, n2, zqelem(0))
        for i in range(n1):
            for j in range(n2):
                res[i][j] = zqelem(uintn.reverse(b[(i*n2+j)*d:(i*n2+j+1)*d]))
        return res
    @typechecked
    def frodo_sample(r:uint16_t) -> zqelem_t:
        # Samples one error value by inverting the CDF table on the top 15
        # bits of r; the low bit of r selects the sign.
        t = uintn.to_int(r >> 1)
        e = 0
        r0 = uintn.to_int(r & uint16(0x01))
        for z in range(cdf_table_len - 1):
            if (t > cdf_table[z]):
                e += 1
        e = ((-1) ** r0) * e
        return zqelem(e)
    @typechecked
    def frodo_sample_matrix(n1:nat, n2:nat, seed:bytes_t, ctr:uint16_t) -> zqmatrix_t:
        # Expands seed with cSHAKE (domain-separated by ctr) and samples each
        # entry in row-major order, two stream bytes per entry.
        r = cshake_frodo(seed, ctr, n1 * n2 * 2)
        res = matrix.create(n1, n2, zqelem(0))
        for i in range(n1):
            for j in range(n2):
                # NOTE(review): this slice spans 4 bytes; presumably
                # bytes.to_uint16_le consumes only the first two -- confirm.
                res[i][j] = frodo_sample(bytes.to_uint16_le(r[2*(i * n2 + j):2*(i * n2 + j + 2)]))
        return res
    @typechecked
    def frodo_sample_matrix_tr(n1:nat, n2:nat, seed:bytes_t, ctr:uint16_t) -> zqmatrix_t:
        # Same as frodo_sample_matrix but consumes the random stream in
        # column-major (transposed) order.
        r = cshake_frodo(seed, ctr, n1 * n2 * 2)
        res = matrix.create(n1, n2, zqelem(0))
        for i in range(n1):
            for j in range(n2):
                res[i][j] = frodo_sample(bytes.to_uint16_le(r[2*(j * n1 + i):2*(j * n1 + i + 2)]))
        return res
    @typechecked
    def frodo_gen_matrix_cshake(n:nat, seed:bytes_t) -> zqmatrix_t:
        # Expands the public matrix A row by row with cSHAKE128, using
        # customization value 256 + row index for domain separation.
        res = matrix.create(n, n, zqelem(0))
        for i in range(n):
            res_i = cshake128_frodo(seed, uint16(256 + i), n * 2)
            for j in range(n):
                res[i][j] = zqelem(bytes.to_uint16_le(res_i[(j * 2):(j * 2 + 2)]))
        return res
    @typechecked
    def frodo_gen_matrix_aes(n:nat, seed:bytes_t) -> zqmatrix_t:
        # Expands A with AES128: each block encrypts the (row, column) pair
        # under the seed and yields 8 consecutive 16-bit entries.
        res = matrix.create(n, n, zqelem(0))
        tmp = array.create(8, uint16(0))
        for i in range(n):
            for j in range(0, n, 8):
                tmp[0] = uint16(i)
                tmp[1] = uint16(j)
                res_i = aes128_encrypt_block(seed, bytes.from_uint16s_le(tmp))
                for k in range(8):
                    res[i][j+k] = zqelem(bytes.to_uint16_le(res_i[k*2:(k+1)*2]))
        return res
    if (gen_a == 'AES128'):
        frodo_gen_matrix = frodo_gen_matrix_aes
    else:
        frodo_gen_matrix = frodo_gen_matrix_cshake
    @typechecked
    def crypto_kem_keypair(coins:bytes_t(2*crypto_bytes+bytes_seed_a)) -> \
            tuple2 (bytes_t(crypto_publickeybytes), tuple2 (bytes_t(crypto_secretkeybytes), zqmatrix_t)):
        # Key generation: pk = (seed_a, B = A*S + E); the secret key keeps the
        # rejection value s, the pk, and the secret matrix S.
        s, x = bytes.split(coins, crypto_bytes)
        seed_e, z = bytes.split(x, crypto_bytes)
        seed_a = cshake_frodo(z, uint16(0), bytes_seed_a)
        a_matrix = frodo_gen_matrix(params_n, seed_a)
        s_matrix = frodo_sample_matrix_tr(params_n, params_nbar, seed_e, uint16(1))
        e_matrix = frodo_sample_matrix(params_n, params_nbar, seed_e, uint16(2))
        b_matrix = a_matrix @ s_matrix + e_matrix
        b = frodo_pack(params_n, params_nbar, b_matrix, params_logq)
        pk = bytes.concat(seed_a, b)
        sk = bytes.concat(s, pk)
        return (pk, (sk, s_matrix))
    @typechecked
    def crypto_kem_enc(coins:bytes_t(bytes_mu), pk:bytes_t(crypto_publickeybytes)) -> \
            tuple2 (bytes_t(crypto_ciphertextbytes), bytes_t(crypto_bytes)):
        # Encapsulation: derives (seed_e, k, d) by hashing pk || coins,
        # encrypts coins under pk, and hashes ciphertext || k || d into the
        # shared secret ss.
        seed_a, b = bytes.split(pk, bytes_seed_a)
        g = cshake_frodo(bytes.concat(pk, coins), uint16(3), 3 * crypto_bytes)
        seed_e, x = bytes.split(g, crypto_bytes)
        k, d = bytes.split(x, crypto_bytes)
        sp_matrix = frodo_sample_matrix(params_nbar, params_n, seed_e, uint16(4))
        ep_matrix = frodo_sample_matrix(params_nbar, params_n, seed_e, uint16(5))
        a_matrix = frodo_gen_matrix(params_n, seed_a)
        bp_matrix = sp_matrix @ a_matrix + ep_matrix
        c1 = frodo_pack(params_nbar, params_n, bp_matrix, params_logq)
        epp_matrix = frodo_sample_matrix(params_nbar, params_nbar, seed_e, uint16(6))
        b_matrix = frodo_unpack(params_n, params_nbar, b, params_logq)
        v_matrix = sp_matrix @ b_matrix + epp_matrix
        mu_encode = frodo_key_encode(coins, params_extracted_bits)
        c_matrix = v_matrix + mu_encode
        c2 = frodo_pack(params_nbar, params_nbar, c_matrix, params_logq)
        ss_init = bytes.concat(c1, bytes.concat(c2, bytes.concat(k, d)))
        ss = cshake_frodo(ss_init, uint16(7), crypto_bytes)
        ct = bytes.concat(c1, bytes.concat(c2, d))
        return (ct, ss)
    @typechecked
    def crypto_kem_dec(ct:bytes_t(crypto_ciphertextbytes),
                       sk:tuple2 (bytes_t(crypto_secretkeybytes), zqmatrix_t)) -> bytes_t(crypto_bytes):
        # Decapsulation with a Fujisaki-Okamoto-style check: decrypts, then
        # re-encrypts the recovered message; on mismatch the shared secret is
        # derived from the secret value s instead of kp (implicit rejection).
        c1Len = (params_logq * params_n * params_nbar) // 8
        c2Len = (params_logq * params_nbar * params_nbar) // 8
        c1, x = bytes.split(ct, c1Len)
        c2, d = bytes.split(x, c2Len)
        sk1, s_matrix = sk
        s, pk = bytes.split(sk1, crypto_bytes)
        seed_a, b = bytes.split(pk, bytes_seed_a)
        bp_matrix = frodo_unpack(params_nbar, params_n, c1, params_logq)
        c_matrix = frodo_unpack(params_nbar, params_nbar, c2, params_logq)
        # Decrypt: M = C - B' * S, then round back to the message bits.
        m_matrix = c_matrix - bp_matrix @ s_matrix
        mu_decode = frodo_key_decode(m_matrix, params_extracted_bits)
        g = cshake_frodo(bytes.concat(pk, mu_decode), uint16(3), 3 * crypto_bytes)
        seed_ep, x = bytes.split(g, crypto_bytes)
        kp, dp = bytes.split(x, crypto_bytes)
        # Re-encrypt mu_decode exactly as crypto_kem_enc would.
        sp_matrix = frodo_sample_matrix(params_nbar, params_n, seed_ep, uint16(4))
        ep_matrix = frodo_sample_matrix(params_nbar, params_n, seed_ep, uint16(5))
        a_matrix = frodo_gen_matrix(params_n, seed_a)
        bpp_matrix = sp_matrix @ a_matrix + ep_matrix
        epp_matrix = frodo_sample_matrix(params_nbar, params_nbar, seed_ep, uint16(6))
        b_matrix = frodo_unpack(params_n, params_nbar, b, params_logq)
        v_matrix = sp_matrix @ b_matrix + epp_matrix
        mu_encode = frodo_key_encode(mu_decode, params_extracted_bits)
        cp_matrix = v_matrix + mu_encode
        ss_init = bytes.concat(c1, c2)
        if (d == dp and bp_matrix == bpp_matrix and c_matrix == cp_matrix):
            ss_init = bytes.concat(ss_init, bytes.concat(kp, d))
        else:
            ss_init = bytes.concat(ss_init, bytes.concat(s, d))
        ss = cshake_frodo(ss_init, uint16(7), crypto_bytes)
        return ss
    return (crypto_kem_keypair, crypto_kem_enc, crypto_kem_dec)
<file_sep>/build/hacspec/tests/speclib_test.py
#!/usr/bin/python3
from hacspec.speclib import *
from sys import exit
# Tests for speclib
def test_bytes_from_hex():
    # from_hex, copy, and from_ints must all yield the same byte sequence.
    b: bytes_t = bytes.from_hex("b8cdb147973dea2ec7")
    b2: bytes_t = array.copy(b)
    b3: bytes_t = bytes.from_ints([0xb8, 0xcd, 0xb1, 0x47, 0x97, 0x3d, 0xea, 0x2e, 0xc7])
    if b != b2 or b != b3:
        # Fixed: the failure report printed the literal 3 instead of b3.
        print("got " + str(b2))
        print("got " + str(b3))
        print("expected " + str(b))
        exit(1)
    print("test_bytes_from_hex success!")
def test_concat():
    """array.concat of two byte arrays equals their element-wise concatenation."""
    left: bytes_t = bytes.from_ints([0x01, 0x02, 0x03, 0x04, 0x05])
    right: bytes_t = bytes.from_ints([0x06, 0x07, 0x08, 0x09, 0x0A])
    expected: bytes_t = bytes.from_ints([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A])
    joined: bytes_t = array.concat(left, right)
    if joined != expected:
        print("got " + str(joined))
        print("expected " + str(expected))
        exit(1)
    print("test_concat success!")
def test_bytes():
    """Two byte arrays built from identical int lists must compare equal."""
    first: bytes_t = bytes.from_ints([0x01, 0x02, 0x03, 0x04, 0x05])
    second: bytes_t = bytes.from_ints([0x01, 0x02, 0x03, 0x04, 0x05])
    if first != second:
        print("got " + str(first))
        print("expected " + str(second))
        exit(1)
    print("test_bytes success!")
def test_2d_arrays():
    # Exercises vlvector_t (variable-length vector of byte arrays): an
    # instance filled by index assignment must equal one constructed from a
    # literal list, and two independently-mutated instances must be equal.
    my_array_t = vlvector_t(bytes)
    x = my_array_t.create(2, bytes([]))
    x[0] = bytes.from_ints([0x01, 0x02, 0x03])
    x[1] = bytes.from_ints([0x04, 0x05])
    y = my_array_t([bytes.from_ints([0x01, 0x02, 0x03]), bytes.from_ints([0x04, 0x05])])
    z = my_array_t.create(2, bytes([]))
    z[0] = bytes.from_ints([0x01, 0x02, 0x03])
    z[1] = bytes.from_ints([0x04, 0x05])
    if x != y:
        print("got " + str(x))
        print("expected " + str(y))
        exit(1)
    if x != z:
        print("got " + str(x))
        print("expected " + str(z))
        exit(1)
    print("test_2d_arrays success!")
def main():
    """Run every speclib self-test in order; a failing test exits the process."""
    for check in (test_bytes, test_2d_arrays, test_concat, test_bytes_from_hex):
        check()

if __name__ == "__main__":
    main()
<file_sep>/tests/kyber_test.py
from lib.speclib import *
from specs.kyber import Kyber #crypto_kem_keypair, crypto_kem_enc, crypto_kem_dec
import json
from tests.testlib import print_dot, exit
def main():
    # Load the KAT vectors; 'with' guarantees the file handle is closed
    # (the previous revision leaked it).
    with open('tests/test_vectors/kyber_test_vectors.json') as file:
        kyber_test_vectors = json.load(file)
    print_dot()
    for i in range(len(kyber_test_vectors)):
        kyber_k = kyber_test_vectors[i]['kyber_k']
        kyber_eta = kyber_test_vectors[i]['kyber_eta']
        keypaircoins = bytes.from_hex(kyber_test_vectors[i]['keypaircoins'])
        coins = bytes.from_hex(kyber_test_vectors[i]['coins'])
        msgcoins = bytes.from_hex(kyber_test_vectors[i]['msgcoins'])
        pk_expected = bytes.from_hex(kyber_test_vectors[i]['pk_expected'])
        sk_expected = bytes.from_hex(kyber_test_vectors[i]['sk_expected'])
        ct_expected = bytes.from_hex(kyber_test_vectors[i]['ct_expected'])
        ss_expected = bytes.from_hex(kyber_test_vectors[i]['ss_expected'])
        (crypto_kem_keypair, crypto_kem_enc, crypto_kem_dec) = Kyber(kyber_k, kyber_eta)
        pk, sk = crypto_kem_keypair(keypaircoins, coins)
        ct, ss1 = crypto_kem_enc(pk, msgcoins)
        ss2 = crypto_kem_dec(ct, sk)
        # sk is deliberately not compared with sk_expected: this spec keeps
        # the secret key in INV-NTT representation.
        if (ss1 == ss2 and ss1 == ss_expected and ct == ct_expected and pk == pk_expected):
            print("Kyber Test "+str(i)+" successful!")
        else:
            print("Kyber Test failed!")
            print("Computed shared secret 1: " + str(ss1))
            print("Computed shared secret 2: " + str(ss2))
            print("Expected shared secret: " + str(ss_expected))
            exit(1)
    exit(0)

main()
<file_sep>/build/hacspec/tests/check_test_fail.py
# Testing spec checker. Negative tests
from hacspec.speclib import *
@typechecked
def fail_lists() -> None:
    # Negative test: a plain Python list literal is not a hacspec type, so
    # the spec checker is expected to reject this assignment.
    y = [0, 1] # This should fail
<file_sep>/compiler/Readme.md
# Compilers
hacspec can be compiled to F\*.
Compilers for other formal languages should follow.
## Spec Checker
The base for the F\* compiler is an AST generated from the hacspec.
This also allows us to perform rigorous (type) checking on specs.
Run
make
to build the spec checker.
## F\*
hacspec compiles to F\*. Run
make
to build the F\* compiler.
The following targets exist for the compiler to generate F\* and check specs
from `../specs`.
make -C fstar-compiler/specs
make -C fstar-compiler/specs check
make -C fstar-compiler/specs tests
Note that this requires `HACL_HOME` to point to a copy of [HACL\*](https://github.com/mitls/hacl-star/) and `FSTAR_HOME` to a copy of [F\*](https://github.com/FStarLang/FStar).
<file_sep>/specs/kyber.py
#!/usr/bin/python3
from lib.speclib import *
from specs.sha3 import *
from math import floor
# Module rank k selects the security level (Kyber512/768/1024); eta is the
# centered-binomial noise parameter tied to each rank.
variant_k_t = refine(nat_t, lambda x: x == 2 or x == 3 or x == 4)
variant_eta_t = refine(nat_t, lambda x: x == 5 or x == 4 or x == 3)

kyber_max_iter : int = 1 << 32   # loop bound for rejection sampling in genAij_hat
kyber_q : int = 7681             # modulus of Z_q
kyber_n : int = 256              # polynomial degree
kyber_dt : int = 11              # compression bits for the public vector t
kyber_du : int = 11              # compression bits for ciphertext part u
kyber_dv : int = 3               # compression bits for ciphertext part v
kyber_polycompressedbytes : int = 96
kyber_polybytes : int = 416
kyber_symbytes : int = 32        # seed / hash size in bytes

# Derived sizes, parameterized by the module rank k.
kyber_polyveccompressedbytes : FunctionType = lambda kyber_k: kyber_k * 352
kyber_polyvecbytes : FunctionType = lambda kyber_k: kyber_k * kyber_polybytes
kyber_indcpa_publickeybytes : FunctionType = lambda kyber_k: kyber_polyveccompressedbytes(kyber_k) + kyber_symbytes
kyber_indcpa_secretkeybytes : FunctionType = kyber_polyvecbytes
kyber_indcpa_bytes : FunctionType = lambda kyber_k: kyber_polyveccompressedbytes(kyber_k) + kyber_polycompressedbytes
kyber_publickeybytes : FunctionType = kyber_indcpa_publickeybytes
kyber_secretkeybytes : FunctionType = lambda kyber_k: kyber_indcpa_secretkeybytes(kyber_k) + kyber_indcpa_publickeybytes(kyber_k) + 2 * kyber_symbytes
kyber_ciphertextbytes : FunctionType = kyber_indcpa_bytes

# Fixed-size byte type for seeds and symmetric values.
symbytes_t = bytes_t(kyber_symbytes)
@typechecked
def kyber_publickey_t(kyber_k:variant_k_t) -> bytes_t:
    # Byte-array type of a Kyber public key for module rank kyber_k.
    return bytes_t(kyber_publickeybytes(kyber_k))
@typechecked
def kyber_secretkey_t(kyber_k:variant_k_t) -> bytes_t:
    # Byte-array type of a Kyber secret key for module rank kyber_k.
    return bytes_t(kyber_secretkeybytes(kyber_k))
@typechecked
def kyber_ciphertext_t(kyber_k:variant_k_t) -> bytes_t:
    # Byte-array type of a Kyber ciphertext for module rank kyber_k.
    return bytes_t(kyber_ciphertextbytes(kyber_k))
@typechecked
def Kyber(kyber_k:variant_k_t,kyber_eta:variant_eta_t) \
-> tuple_t(FunctionType, FunctionType, FunctionType):
zqelem_t = natmod_t(kyber_q)
@typechecked
def zqelem(n:nat_t) -> natmod_t:
return natmod(n,kyber_q)
zqpoly_t = vector_t(zqelem_t,kyber_n)
zqpolyvec_t = vector_t(zqpoly_t,kyber_k)
zqpolymatrix_t = vector_t(zqpolyvec_t,kyber_k)
@typechecked
def zqpoly(a:array_t(zqelem_t,kyber_n)) -> zqpoly_t:
return vector(a,zqelem(0))
zqpoly0: vlarray_t = vector.create(kyber_n,zqelem(0))
omega : natmod_t = zqelem(3844)
psi : natmod_t = zqelem(62)
n_inv : natmod_t = zqelem(7651)
psi_inv : natmod_t = zqelem(1115)
omega_inv : natmod_t = zqelem(6584)
@typechecked
def zqpoly_mul(p:zqpoly_t, q:zqpoly_t) -> zqpoly_t:
s : vector_t = vector.poly_mul(p,q,zqelem(0))
low : vector_t
high : vector_t
low,high = vector.split(s,kyber_n)
r : zqpoly_t = low - high
return r
@typechecked
def zqpolyvec_dot(p:zqpolyvec_t, q:zqpolyvec_t) -> zqpoly_t:
t: vector_t = vector.create(kyber_n, zqelem(0))
for i in range(kyber_k):
t += zqpoly_mul(p[i], q[i])
return(t)
@typechecked
def zqpolymatrix_dot(p:zqpolymatrix_t, q:zqpolyvec_t) -> zqpolyvec_t:
t: vector_t = vector.create(kyber_k, vector.create(kyber_n, zqelem(0)))
for i in range(kyber_k):
t[i] = zqpolyvec_dot(p[i],q)
return(t)
@typechecked
def decode(l:nat_t) -> FunctionType: # Callable[[bytes_t], zqpoly_t]
@typechecked
def _decode(b:bytes_t) -> zqpoly_t:
beta : uintn_t = bytes.to_uintn_le(b)
res: vector_t = vector.create(kyber_n, zqelem(0))
for i in range(kyber_n):
res[i] = zqelem(uintn.to_int(beta[i*l:(i+1)*l]))
return res
return _decode
@typechecked
def encode(l:nat_t) -> FunctionType: # Callable[[zqpoly_t],bytes_t]:
@typechecked
def _encode(p:zqpoly_t) -> bytes_t:
beta : uintn_t = uintn(0,256*l)
for i in range(kyber_n):
beta = uintn.set_bits(beta,i*l,(i+1)*l,
uintn(natmod.to_nat(p[i]), l))
return bytes.from_uintn_le(beta)
return _encode
@typechecked
def compress(d:int) -> FunctionType: # Callable[[zqelem_t],zqelem_t]:
@typechecked
def _compress(x:zqelem_t) -> zqelem_t:
x : int = natmod.to_int(x)
d2 : int = 2 ** d
res : int = speclib.floor(d2 / kyber_q * x + 1 /2)
return zqelem(res % d2)
return _compress
@typechecked
def decompress(d:int) -> FunctionType: # Callable[[zqelem_t],zqelem_t]:
@typechecked
def _decompress(x:zqelem_t) -> zqelem_t:
x : int = natmod.to_int(x)
d2 : int = 2 ** d
res : int = speclib.floor(kyber_q / d2 * x + 1/2)
return zqelem(res)
return _decompress
@typechecked
def decode_decompress(d:int) -> FunctionType:
@typechecked
def _decode_decompress(b:bytes_t) -> vector_t:
return array.map(decompress(d), decode(d)(b))
return _decode_decompress
@typechecked
def compress_encode(d:int) -> FunctionType:
@typechecked
def _compress_encode(b:zqpoly_t) -> bytes_t:
return encode(d)(array.map(compress(d), b))
return _compress_encode
@typechecked
def msg_to_poly(m:symbytes_t) -> zqpoly_t:
return decode_decompress(1)(m)
@typechecked
def poly_to_msg(p:zqpoly_t) -> symbytes_t:
return compress_encode(1)(p)
@typechecked
def pack_sk(sk:zqpolyvec_t) -> bytes_t(kyber_indcpa_secretkeybytes(kyber_k)):
#encode_13(sk mod+ q)
return bytes.concat_blocks(array.map(encode(13), sk), bytes.empty())
@typechecked
def unpack_sk(packedsk:bytes_t(kyber_indcpa_secretkeybytes(kyber_k))) -> zqpolyvec_t:
#decode_13(sk)
res : array_t(array_t)
last : array_t
res, last = bytes.split_blocks(packedsk, kyber_polybytes)
res = array.map(decode(13), res)
return res
@typechecked
def pack_pk(pk:zqpolyvec_t, seed:symbytes_t) -> bytes_t(kyber_indcpa_publickeybytes(kyber_k)):
#(encode_dt(compress_q(t, dt)) || seed)
return bytes.concat_blocks(array.map(compress_encode(kyber_dt), pk), seed)
@typechecked
def unpack_pk(packedpk:bytes_t(kyber_indcpa_publickeybytes(kyber_k))) -> tuple_t (zqpolyvec_t, symbytes_t):
#decompress_q(decode_dt(pk), dt)
res : array_t(array_t)
seed : array_t
res, seed = bytes.split_blocks(packedpk, 352)
pk : array_t = array.map(decode_decompress(kyber_dt), res)
return (pk, seed)
@typechecked
def pack_ciphertext(b:zqpolyvec_t, v:zqpoly_t) -> bytes_t(kyber_indcpa_bytes(kyber_k)):
#(encode_du(compress_q(b, du)) || encode_dv(compress_q(v, dv)))
return bytes.concat_blocks(array.map(compress_encode(kyber_du), b), compress_encode(kyber_dv)(v))
@typechecked
def unpack_ciphertext(c:bytes_t(kyber_indcpa_bytes(kyber_k))) -> tuple_t (zqpolyvec_t, zqpoly_t):
#(decompress_q(decode_du(c), du), decompress_q(decode_dv(c_v), dv))
u1 : array_t(array_t)
v1 : array_t
u1, v1 = bytes.split_blocks(c, 352)
u : array_t = array.map(decode_decompress(kyber_du), u1)
v : zqpoly_t = decode_decompress(kyber_dv)(v1)
return (u, v)
@typechecked
def cbd(buf:bytes_t(kyber_eta * kyber_n // 4)) -> zqpoly_t:
beta : uintn_t = bytes.to_uintn_le(buf)
res : vector_t = vector.create(kyber_n, zqelem(0))
for i in range(kyber_n):
a : int = uintn.bit_count(beta[2 * i * kyber_eta: (2 * i + 1) * kyber_eta])
b : int = uintn.bit_count(beta[(2 * i + 1) * kyber_eta:(2 * i + 2) * kyber_eta])
res[i] = zqelem(a - b)
return res
#cbd(prf(seed, nonce)), prf = shake256
@typechecked
def zqpoly_getnoise(seed:symbytes_t) -> FunctionType: # Callable[[int],zqpoly_t]:
@typechecked
def _getnoise(nonce:int) -> zqpoly_t:
extseed : vlbytes_t = bytes.concat(seed, bytes.singleton(uint8(nonce)))
buf : vlbytes_t = shake256(kyber_symbytes + 1, extseed, kyber_eta * kyber_n // 4)
return cbd(buf)
return _getnoise
@typechecked
# Absorb the input into a fresh Keccak state using the SHAKE-128 rate
# (168 bytes) and domain-separation byte 0x1F.
def shake128_absorb(inputByteLen:size_nat_t,
input_b:vlbytes_t) -> state_t:
s: array_t = array.create(25, uint64(0))
return absorb(s,168, inputByteLen, input_b, uint8(0x1F))
@typechecked
# Squeeze `outputByteLen` bytes out of an absorbed SHAKE-128 state.
def shake128_squeeze(s:state_t,
outputByteLen:size_nat_t) -> vlbytes_t:
return squeeze(s, 168, outputByteLen)
#parse(xof(p || a || b)), xof = shake128
@typechecked
# Rejection-sample one polynomial of the matrix A (in the NTT domain) from
# SHAKE-128(seed || a || b): reads 13-bit candidates two bytes at a time and
# keeps those < q, squeezing more output when the buffer runs out.
def genAij_hat(seed:symbytes_t, a:uint8_t, b:uint8_t) -> zqpoly_t:
shake128_rate : int = 168
res: vector_t = vector.create(kyber_n, zqelem(0))
extseed : vlbytes_t = bytes.concat(bytes.concat(seed, bytes.singleton(a)), bytes.singleton(b))
maxnblocks : int = 4
nblocks : int = maxnblocks
state : state_t = shake128_absorb(kyber_symbytes + 2, extseed)
buf : vlbytes_t = shake128_squeeze(state, shake128_rate * nblocks)
i : int = 0
j : int = 0
# kyber_max_iter bounds the rejection-sampling loop so it always terminates.
for ctr in range(kyber_max_iter):
d : int = uintn.to_int(buf[i]) + 256 * uintn.to_int(buf[i + 1])
# Keep only the low 13 bits of the candidate (q < 2^13).
d = d % (2**13)
if (d < kyber_q):
res[j] = zqelem(d)
j = j + 1
i = i + 2
# Refill the buffer one block at a time once the initial 4 blocks are used.
if (i > shake128_rate * nblocks - 2):
nblocks = 1
buf = shake128_squeeze(state, shake128_rate * nblocks)
i = 0
if j == kyber_n:
break
return res
@typechecked
# Returns a closure (a, b) -> A[a][b] bound to `seed`. genAij_hat produces the
# polynomial in the (bit-reversed) NTT domain; this wrapper applies a bit
# reversal and an inverse NTT so the caller gets a normal-domain polynomial.
def genAij(seed:symbytes_t) -> FunctionType: # Callable[[int,int],zqpoly_t]:
@typechecked
# Schoolbook inverse NTT over Z_q using precomputed omega_inv/psi_inv/n_inv
# (declared elsewhere in this spec). O(n^2) on purpose -- this is a spec.
def zqpoly_invntt(p:zqpoly_t) -> zqpoly_t:
np: vector_t = vector.create(kyber_n, zqelem(0))
for i in range(kyber_n):
for j in range(kyber_n):
np[i] += (p[j] * (omega_inv ** (i * j)))
np[i] *= n_inv * (psi_inv ** i)
return np
@typechecked
# Permute coefficients by reversing the bits of each index (8-bit reversal).
def zqpoly_bit_reverse(p:zqpoly_t) -> zqpoly_t:
return vector.createi(kyber_n, zqelem(0), lambda i: p[int(uintn.reverse(uint8(i)))])
@typechecked
def _genAij(a:int, b:int) -> zqpoly_t:
return zqpoly_invntt(zqpoly_bit_reverse(genAij_hat(seed, uint8(a), uint8(b))))
return _genAij
@typechecked
# IND-CPA key generation: expand coins into (rho, sigma), derive the matrix A
# from rho, sample secret s and error e from sigma, and compute t = A.s + e.
def kyber_cpapke_keypair(coins:symbytes_t) -> \
tuple_t (bytes_t(kyber_indcpa_publickeybytes(kyber_k)), bytes_t(kyber_indcpa_secretkeybytes(kyber_k))):
rhosigma : vlbytes_t = sha3_512(kyber_symbytes, coins)
rho : vlbytes_t
sigma : vlbytes_t
rho,sigma = bytes.split(rhosigma,kyber_symbytes)
# Indices are swapped (j, i): this builds the transpose of the matrix that
# encryption builds with genAij(rho) directly, matching the A / A^T split
# between key generation and encryption in the Kyber spec.
A = matrix.createi(kyber_k, kyber_k, lambda i,j: genAij(rho)(j,i))
s = vector.createi(kyber_k, zqpoly0, zqpoly_getnoise(sigma))
# Error vector uses nonces kyber_k..2*kyber_k-1 so it is independent of s.
e = vector.createi(kyber_k, zqpoly0, lambda i: zqpoly_getnoise(sigma)(kyber_k + i))
t : zqpolyvec_t = zqpolymatrix_dot(A,s) + e
sk : bytes_t = pack_sk(s)
pk : bytes_t = pack_pk(t, rho)
return (pk, sk)
@typechecked
# IND-CPA encryption of a 32-byte message m under packedpk with explicit
# randomness `coins`: c = (u, v) with u = A^T.r + e1, v = t.r + e2 + encode(m).
def kyber_cpapke_encrypt(m:symbytes_t,
packedpk:bytes_t(kyber_indcpa_publickeybytes(kyber_k)),
coins:symbytes_t) -> \
bytes_t(kyber_indcpa_bytes(kyber_k)):
t : zqpolyvec_t
rho : symbytes_t
t, rho = unpack_pk(packedpk)
At = matrix.createi(kyber_k, kyber_k, genAij(rho))
r = vector.createi(kyber_k, zqpoly0, zqpoly_getnoise(coins))
e1 = vector.createi(kyber_k, zqpoly0, lambda i: zqpoly_getnoise(coins)(kyber_k+i))
# e2 uses nonce 2*kyber_k, distinct from those used for r and e1.
e2 : zqpoly_t = zqpoly_getnoise(coins)(kyber_k + kyber_k)
u : zqpolyvec_t = zqpolymatrix_dot(At,r) + e1
v : zqpoly_t = zqpolyvec_dot(t,r) + e2 + msg_to_poly(m)
c : bytes_t = pack_ciphertext(u, v)
return c
@typechecked
# IND-CPA decryption: recover m from d = v - s.u (noise is rounded away
# by poly_to_msg).
def kyber_cpapke_decrypt(c:bytes_t(kyber_indcpa_bytes(kyber_k)),
sk:bytes_t(kyber_indcpa_secretkeybytes(kyber_k))) -> symbytes_t:
u : zqpolyvec_t
v : zqpoly_t
u, v = unpack_ciphertext(c)
s : zqpolyvec_t = unpack_sk(sk)
d : zqpoly_t = zqpolyvec_dot(s,u)
d = v - d
msg : symbytes_t = poly_to_msg(d)
return msg
@typechecked
# KEM key generation (Fujisaki-Okamoto transform): the secret key stores the
# CPA secret key, the public key, H(pk), and a random z used for implicit
# rejection in decapsulation.
def crypto_kem_keypair(keypaircoins:symbytes_t, coins:symbytes_t) -> \
tuple_t (kyber_publickey_t(kyber_k), kyber_secretkey_t(kyber_k)):
pk : bytes_t
sk1 : bytes_t
pk, sk1 = kyber_cpapke_keypair(keypaircoins)
sk : bytes_t = bytes.concat(sk1, bytes.concat(pk, bytes.concat(sha3_256(kyber_publickeybytes(kyber_k), pk), coins)))
return (pk, sk)
@typechecked
# KEM encapsulation: derive (K-bar, r) = G(H(m) || H(pk)), encrypt H(m)
# under pk with randomness r, and hash K-bar together with H(ct) into the
# shared secret.
def crypto_kem_enc(pk:kyber_publickey_t(kyber_k),msgcoins:symbytes_t) -> \
tuple_t (kyber_ciphertext_t(kyber_k), symbytes_t):
buf : bytes_t = bytes.concat(sha3_256(kyber_symbytes, msgcoins),
sha3_256(kyber_publickeybytes(kyber_k), pk))
kr : vlbytes_t = sha3_512(2 * kyber_symbytes, buf)
ct : bytes_t = kyber_cpapke_encrypt(buf[0:kyber_symbytes], pk, kr[kyber_symbytes:(2*kyber_symbytes)])
# Replace the second half of kr with H(ct) before deriving the secret.
kr[kyber_symbytes:(2*kyber_symbytes)] = sha3_256(kyber_ciphertextbytes(kyber_k), ct)
ss : vlbytes_t = sha3_256(2*kyber_symbytes, kr)
return (ct, ss)
@typechecked
# KEM decapsulation: decrypt, re-encrypt deterministically, and compare with
# the received ciphertext. On mismatch the stored random z (kr_) replaces
# K-bar, yielding a pseudorandom secret (implicit rejection).
def crypto_kem_dec(ct:kyber_ciphertext_t(kyber_k), sk:kyber_secretkey_t(kyber_k)) -> \
symbytes_t:
sk1 : bytes_t
x : bytes_t
pk : bytes_t
sk2 : bytes_t
kr_ : bytes_t
# Unpack sk = (cpa_sk || pk || H(pk) || z); sk2 is H(pk), kr_ is z.
sk1,x = bytes.split(sk,kyber_indcpa_secretkeybytes(kyber_k))
pk,x = bytes.split(x,kyber_indcpa_publickeybytes(kyber_k))
sk2,kr_ = bytes.split(x,kyber_symbytes)
buf : bytes_t = bytes.concat(kyber_cpapke_decrypt(ct, sk1), sk2)
kr : vlbytes_t = sha3_512(2 * kyber_symbytes, buf)
cmp1 : bytes_t = kyber_cpapke_encrypt(buf[0:kyber_symbytes], pk, kr[kyber_symbytes:(2 * kyber_symbytes)])
kr[kyber_symbytes:(2 * kyber_symbytes)] = sha3_256(kyber_ciphertextbytes(kyber_k), ct)
# NOTE(review): this comparison is not constant-time; acceptable in a spec,
# but an implementation must compare in constant time.
if (cmp1 != ct):
kr[0:kyber_symbytes] = kr_
ss : vlbytes_t = sha3_256(2 * kyber_symbytes, kr)
return ss
return (crypto_kem_keypair,crypto_kem_enc,crypto_kem_dec)
<file_sep>/specs/blake2.py
from lib.speclib import *
# variant 0 selects BLAKE2s, variant 1 selects BLAKE2b; out_size is the
# digest length in bytes (at most 32 here).
variant_t,variant = refine(nat_t, lambda x: x == 0 or x == 1)
out_size_t,out_size = refine(nat_t, lambda x: x <= 32)
@typechecked
# Upper 64 bits of a 128-bit counter (BLAKE2b offset counter high word).
def highbits_128(x:uint128_t) -> uint64_t:
return uint64(x >> 64)
@typechecked
# Upper 32 bits of a 64-bit counter (BLAKE2s offset counter high word).
def highbits_64(x:uint64_t) -> uint32_t:
return uint32(x >> 32)
@typechecked
# Factory returning a BLAKE2 hash function specialized for variant v:
# v == 1 -> BLAKE2b (64-bit words), otherwise BLAKE2s (32-bit words).
# All variant-dependent parameters are bound as locals below and captured
# by the nested functions.
def blake2(v:variant_t) -> FunctionType:
if v == 1:
# BLAKE2b parameters: 64-bit words, 12 rounds, 128-byte blocks.
bits_in_word: int = 64
rounds_in_f: int = 12
block_bytes: int = 128
_R1: int = 32
_R2: int = 24
_R3: int = 16
_R4: int = 63
working_vector_t = array_t(uint64_t, 16)
hash_vector_t = array_t(uint64_t, 8)
index_t = range_t(0, 16)
_IV: hash_vector_t = hash_vector_t([
uint64(0x6A09E667F3BCC908), uint64(0xBB67AE8584CAA73B),
uint64(0x3C6EF372FE94F82B), uint64(0xA54FF53A5F1D36F1),
uint64(0x510E527FADE682D1), uint64(0x9B05688C2B3E6C1F),
uint64(0x1F83D9ABFB41BD6B), uint64(0x5BE0CD19137E2179)
])
to_word: FunctionType = uint64
word_t = uint64_t
minus_one: uint64_t = uint64(0xFFFFFFFFFFFFFFFF)
data_internal_t,data_internal = refine(vlbytes_t, lambda x: array.length(
x) < 2 ** 64 and (array.length(x) % block_bytes == 0))
key_t,key = refine(vlbytes_t, lambda x: array.length(x) <= 64)
key_size_t,key_size = refine(nat_t, lambda x: x <= 64)
to_words_le: FunctionType = bytes.to_uint64s_le
from_words_le: FunctionType = bytes.from_uint64s_le
low_bits: FunctionType = to_word
high_bits: FunctionType = highbits_128
double_word_t: FunctionType = uint128_t
to_double_word: FunctionType = uint128
max_size_t: int = 2**64 - 1
data_t,data = refine(vlbytes_t, lambda x: bytes.length(x)
< max_size_t - 2 * block_bytes)
else:
# BLAKE2s parameters: 32-bit words, 10 rounds, 64-byte blocks.
bits_in_word = 32
rounds_in_f = 10
block_bytes = 64
_R1 = 16
_R2 = 12
_R3 = 8
_R4 = 7
working_vector_t = array_t(uint32_t, 16)
hash_vector_t = array_t(uint32_t, 8)
index_t = range_t(0, 16)
_IV = hash_vector_t([
uint32(0x6A09E667), uint32(0xBB67AE85),
uint32(0x3C6EF372), uint32(0xA54FF53A),
uint32(0x510E527F), uint32(0x9B05688C),
uint32(0x1F83D9AB), uint32(0x5BE0CD19)
])
to_word = uint32
word_t = uint32_t
minus_one = uint32(0xFFFFFFFF)
data_internal_t,data_internal = refine(vlbytes_t, lambda x: array.length(
x) < 2 ** 64 and (array.length(x) % block_bytes == 0))
key_t,key = refine(vlbytes_t, lambda x: array.length(x) <= 32)
key_size_t,key_size = refine(nat_t, lambda x: x <= 32)
to_words_le = bytes.to_uint32s_le
from_words_le = bytes.from_uint32s_le
low_bits = to_word
high_bits = highbits_64
double_word_t = uint64_t
to_double_word = uint64
max_size_t = 2**32 - 1
data_t,data = refine(vlbytes_t, lambda x: bytes.length(x)
< max_size_t - 2 * block_bytes)
# Message-word permutation schedule: 12 rows of 16 indices. Rows 10 and 11
# repeat rows 0 and 1 and are only reached by BLAKE2b's 12 rounds.
sigma_t = array_t(index_t, 16 * 12)
_SIGMA: sigma_t = sigma_t([
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3,
11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4,
7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8,
9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13,
2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9,
12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11,
13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10,
6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5,
10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3
])
@typechecked
# The BLAKE2 G mixing function: mixes message words x and y into the four
# working-vector entries a, b, c, d using the variant's rotation constants.
# Operates on a copy; the input vector is not mutated.
def _G(v: working_vector_t, a: index_t, b: index_t, c: index_t, d: index_t, x: word_t, y: word_t) -> working_vector_t:
v_: working_vector_t = array.copy(v)
v_[a] = v_[a] + v_[b] + x
v_[d] = uintn.rotate_right(v_[d] ^ v_[a], _R1)
v_[c] = v_[c] + v_[d]
v_[b] = uintn.rotate_right(v_[b] ^ v_[c], _R2)
v_[a] = v_[a] + v_[b] + y
v_[d] = uintn.rotate_right(v_[d] ^ v_[a], _R3)
v_[c] = v_[c] + v_[d]
v_[b] = uintn.rotate_right(v_[b] ^ v_[c], _R4)
return v_
@typechecked
# The BLAKE2 compression function F: compresses one message block m into the
# hash state h; t is the byte-offset counter and flag marks the final block.
def _F(h: hash_vector_t, m: working_vector_t, t: double_word_t, flag: bool) -> hash_vector_t:
v: array_t = array.create(16, to_word(0))
v[0:8] = h
v[8:16] = _IV
# Fold the offset counter into v[12..13]; invert v[14] on the final block.
v[12] = v[12] ^ low_bits(t)
v[13] = v[13] ^ high_bits(t)
if flag == True:
v[14] = v[14] ^ minus_one
for i in range(rounds_in_f):
s: sigma_t = _SIGMA[i * 16:(i + 1) * 16]
v = _G(v, 0, 4, 8, 12, m[s[0]], m[s[1]])
v = _G(v, 1, 5, 9, 13, m[s[2]], m[s[3]])
v = _G(v, 2, 6, 10, 14, m[s[4]], m[s[5]])
v = _G(v, 3, 7, 11, 15, m[s[6]], m[s[7]])
v = _G(v, 0, 5, 10, 15, m[s[8]], m[s[9]])
v = _G(v, 1, 6, 11, 12, m[s[10]], m[s[11]])
v = _G(v, 2, 7, 8, 13, m[s[12]], m[s[13]])
v = _G(v, 3, 4, 9, 14, m[s[14]], m[s[15]])
for i in range(8):
h[i] = h[i] ^ v[i] ^ v[i + 8]
return h
# Core loop over whole blocks of already-padded data. input_bytes is the
# unpadded message length used for the final-block counter; kk is the key
# length (0 if unkeyed) and nn the requested digest length in bytes.
@contract3(lambda data, input_bytes, kk, nn: True,
lambda data, input_bytes, kk, nn, res: array.length(res) == nn)
@typechecked
def blake2_internal(data: data_internal_t, input_bytes: double_word_t, kk: key_size_t, nn: out_size_t) \
-> vlbytes_t:
h = array.copy(_IV)
# Parameter-block word: fanout/depth 0x0101, key length, digest length.
h[0] = h[0] ^ to_word(0x01010000) ^ (to_word(kk) << 8) ^ to_word(nn)
data_blocks: array_t = array.length(data) // block_bytes
if data_blocks > 1:
for i in range(data_blocks - 1):
h = _F(h, to_words_le(
data[block_bytes * i:block_bytes * (i + 1)]), to_double_word((i + 1) * block_bytes), False)
# NOTE(review): if data_blocks == 0 (empty unkeyed message) the slices
# below are empty -- confirm the empty-message case is handled as the
# RFC 7693 single zero block, or excluded by the refinement on data.
if kk == 0:
h = _F(h, to_words_le(
data[block_bytes * (data_blocks - 1):block_bytes * data_blocks]), input_bytes, True)
else:
# Keyed hashing: the key block was prepended, so the final counter
# covers one extra block.
h = _F(h, to_words_le(
data[block_bytes * (data_blocks - 1):block_bytes * data_blocks]), input_bytes + to_double_word(block_bytes), True)
return from_words_le(h)[0:nn]
# Public entry point (intentionally shadows the enclosing factory name; it is
# the value the factory returns). Zero-pads data to a whole number of blocks
# and, when a key is given, prepends it as one zero-padded block.
@contract3(lambda data, key, nn: True, lambda data, key, nn, res: array.length(res) == nn)
@typechecked
def blake2(data: data_t, key: key_t, nn: out_size_t) -> vlbytes_t:
ll: int = array.length(data)
kk: int = array.length(key)
data_blocks: int = (ll - 1) // block_bytes + 1
padded_data_length: int = data_blocks * block_bytes
if kk == 0:
padded_data = bytes(array.create(padded_data_length, uint8(0)))
padded_data[0:ll] = data
else:
padded_data = bytes(array.create(padded_data_length + block_bytes, uint8(0)))
padded_data[0:kk] = key
padded_data[block_bytes:block_bytes+ll] = data
return blake2_internal(padded_data, to_double_word(ll), key_size(nat(kk)), nn)
return blake2
# Concrete instantiations: blake2s (32-bit variant), blake2b (64-bit variant).
blake2s: FunctionType = blake2(variant(nat(0)))
blake2b: FunctionType = blake2(variant(nat(1)))
<file_sep>/specs/curve448.py
#!/usr/bin/python3
from lib.speclib import *
# The Curve448 field prime: 2^448 - 2^224 - 1.
p448 : int = 2 ** 448 - 2 ** 224 - 1
felem_t = natmod_t(p448)
@typechecked
# Embed a natural number into the field GF(p448).
def to_felem(x: nat_t) -> felem_t:
return natmod(x, p448)
@typechecked
# Field inversion via Fermat's little theorem: x^(p-2) mod p.
def finv(x: felem_t) -> felem_t:
return x ** (p448 - 2)
# Projective x-only point (X, Z) as used by the Montgomery ladder.
point_t = tuple_t(felem_t, felem_t)
@typechecked
def point(a: nat_t, b: nat_t) -> point_t:
return to_felem(a), to_felem(b)
scalar_t = uintn_t(448)
@typechecked
def to_scalar(n:nat_t) -> scalar_t:
return uintn(n, 448)
# X448 wire format: 56-byte little-endian scalars and u-coordinates.
serialized_point_t = bytes_t(56)
serialized_scalar_t = bytes_t(56)
@typechecked
# RFC 7748 X448 scalar clamping: clear the two low bits of byte 0 and set
# the top bit of byte 55, then interpret little-endian.
def decodeScalar(s: serialized_scalar_t) -> scalar_t:
k: serialized_scalar_t = bytes.copy(s)
k[0] &= uint8(252)
k[55] |= uint8(128)
return to_scalar(bytes.to_nat_le(k))
@typechecked
# Decode a little-endian u-coordinate, reduce mod p448, and lift it to the
# projective representation (u, 1).
def decodePoint(u: serialized_point_t) -> point_t:
b : nat_t = bytes.to_nat_le(u)
return point((b % (2 ** 448)) % p448, 1)
@typechecked
# Project (X, Z) back to the affine u-coordinate X/Z and serialize it as
# 56 little-endian bytes. Here y holds the Z coordinate of the pair.
def encodePoint(p: point_t) -> serialized_point_t:
x:felem_t
y:felem_t
(x,y) = p
b : int = natmod.to_int(x * finv(y))
return bytes.from_nat_le(b, 56)
@typechecked
# One Montgomery-ladder step: given the base point q and the pair (nq, nqp1)
# representing nP and (n+1)P, return (2nP, (2n+1)P). 39081 is the Curve448
# constant (A - 2) / 4 from RFC 7748.
def point_add_and_double(q: point_t, nq: point_t, nqp1: point_t) -> tuple_t(point_t, point_t):
x_1 : felem_t
x_2 : felem_t
x_3 : felem_t
z_1 : felem_t
z_2 : felem_t
z_3 : felem_t
(x_1, z_1) = q
(x_2, z_2) = nq
(x_3, z_3) = nqp1
a : felem_t = x_2 + z_2
aa : felem_t = a ** 2
b : felem_t = x_2 - z_2
bb : felem_t = b ** 2
e : felem_t = aa - bb
c : felem_t = x_3 + z_3
d : felem_t = x_3 - z_3
da : felem_t = d * a
cb : felem_t = c * b
# Differential addition of nq and nqp1 (difference is the base point q).
x_3 : felem_t = (da + cb) ** 2
z_3 : felem_t = x_1 * ((da - cb) ** 2)
# Doubling of nq.
x_2 : felem_t = aa * bb
z_2 : felem_t = e * (aa + (to_felem(39081) * e))
return ((x_2, z_2), (x_3, z_3))
@typechecked
# Constant-structure Montgomery ladder: scan the 448 scalar bits from most
# to least significant, swapping the roles of (p0, p1) according to each bit.
def montgomery_ladder(k: scalar_t, init: point_t) -> point_t:
p0: point_t = point(1, 0)
p1: point_t = init
for i in range(448):
if k[447-i] == bit(1):
(p1, p0) = point_add_and_double(init, p1, p0)
else:
(p0, p1) = point_add_and_double(init, p0, p1)
return(p0)
@typechecked
# X448 scalar multiplication on serialized inputs: clamp the scalar, decode
# the u-coordinate, run the ladder, and re-serialize the result.
def scalarmult(s: serialized_scalar_t, p: serialized_point_t) -> serialized_point_t:
s_ : scalar_t = decodeScalar(s)
p_ : point_t = decodePoint(p)
r : point_t = montgomery_ladder(s_, p_)
return encodePoint(r)
<file_sep>/archive/spec-checker/haskell/examples/salsa20-func.py
def rotate(x, n):
    """Rotate the 32-bit word x left by n bits (0 <= n < 32)."""
    x &= 0xffffffff
    return ((x << n) | (x >> (32 - n))) & 0xffffffff


def step(s0, i, j, k, r):
    """Return a copy of state s0 with s0[i] XORed with rotate(s0[j] + s0[k], r).

    This is the elementary operation of the Salsa20 quarterround.

    Fixes two defects in the original:
    - the rotate argument referenced an undefined name ``s`` (``s[j] + s[k]``),
      which raised NameError at runtime; it should read from the state;
    - ``s1 = s0`` aliased the caller's list instead of copying it, so the
      caller's state was mutated in place.
    """
    s1 = list(s0)  # copy so the caller's state is left untouched
    s1[i] ^= rotate(s1[j] + s1[k], r)
    return s1
def quarterround(s0, i0, i1, i2, i3):
    """Salsa20 quarterround on state indices (i0, i1, i2, i3).

    Applies the four step operations with rotation amounts 7, 9, 13, 18,
    each feeding its result into the next.
    """
    result = step(s0, i1, i0, i3, 7)
    result = step(result, i2, i1, i0, 9)
    result = step(result, i3, i2, i1, 13)
    result = step(result, i0, i3, i2, 18)
    return result
def rowround(s0):
    """Apply a quarterround to each of the four rows of the 4x4 Salsa20 state.

    Fixes a defect in the original: the computed state was dropped (the
    function had no return statement and implicitly returned None).
    """
    s1 = quarterround(s0, 0, 1, 2, 3)
    s2 = quarterround(s1, 5, 6, 7, 4)
    s3 = quarterround(s2, 10, 11, 8, 9)
    s4 = quarterround(s3, 15, 12, 13, 14)
    return s4
def columnround(s0):
    """Salsa20 columnround: a quarterround down each of the four columns."""
    result = s0
    for (a, b, c, d) in [(0, 4, 8, 12), (5, 9, 13, 1), (10, 14, 2, 6), (15, 3, 7, 11)]:
        result = quarterround(result, a, b, c, d)
    return result


def doubleround(s0):
    """One Salsa20 doubleround: a columnround followed by a rowround."""
    return rowround(columnround(s0))
# def hsalsa20(n,k):
# n=''.join([chr(n[i]) for i in range(16)])
# n = struct.unpack('<4I', n)
# k=''.join([chr(k[i]) for i in range(32)])
# k = struct.unpack('<8I', k)
# s = [0] * 16
# s[::5] = struct.unpack('<4I', 'expand 32-byte k')
# s[1:5] = k[:4]
# s[6:10] = n
# s[11:15] = k[4:]
# for i in range(10): s = doubleround(s)
# s = [s[i] for i in [0,5,10,15,6,7,8,9]]
# return struct.pack('<8I',*s)<file_sep>/archive/formal-models/fips-180-4-sha-2/NIST.FIPS.180-4-August-2015.md
**FIPS PUB 180-4 **
**FEDERAL INFORMATION PROCESSING STANDARDS PUBLICATION**
**Secure Hash Standard (SHS)**
**CATEGORY: COMPUTER SECURITY SUBCATEGORY: CRYPTOGRAPHY**
Information Technology Laboratory
National Institute of Standards and Technology
Gaithersburg, MD 20899-8900
This publication is available free of charge from:
<http://dx.doi.org/10.6028/NIST.FIPS.180-4>
August 2015
<!-- {width="1.7409722222222221in" height="1.8055555555555556in"} -->
**U.S. Department of Commerce**
*<NAME>, Secretary*
**National Institute of Standards and Technology**
*<NAME>, Under Secretary for Standards and Technology and
Director*
FOREWORD
The Federal Information Processing Standards Publication Series of the
National Institute of Standards and Technology (NIST) is the official
series of publications relating to standards and guidelines adopted and
promulgated under the provisions of the Federal Information Security
Management Act (FISMA) of 2002.
Comments concerning FIPS publications are welcomed and should be
addressed to the Director, Information Technology Laboratory, National
Institute of Standards and Technology, 100 Bureau Drive, Stop 8900,
Gaithersburg, MD 20899-8900.
<NAME>, Director
Information Technology Laboratory
**Abstract**
This standard specifies hash algorithms that can be used to generate
digests of messages. The digests are used to detect whether messages
have been changed since the digests were generated.
*Key words*: computer security, cryptography, message digest, hash
function, hash algorithm, Federal Information Processing Standards,
Secure Hash Standard.
**Federal Information**
**Processing Standards Publication 180-4**
August 2015
**Announcing the**
**SECURE HASH STANDARD**
Federal Information Processing Standards Publications (FIPS PUBS) are
issued by the National Institute of Standards and Technology (NIST)
after approval by the Secretary of Commerce pursuant to Section 5131 of
the Information Technology Management Reform Act of 1996 (Public Law
104-106), and the Computer Security Act of 1987 (Public Law 100-235).
1. **Name of Standard**: Secure Hash Standard (SHS) (FIPS PUB 180-4).
2. **Category of Standard**: Computer Security Standard, Cryptography.
3. **Explanation**: This Standard specifies secure hash algorithms -
SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA-512/224 and
SHA-512/256 - for computing a condensed representation of electronic
data (message). When a message of any length less than 2^64^ bits
(for SHA-1, SHA-224 and SHA-256) or less than 2^128^ bits (for
SHA-384, SHA-512, SHA-512/224 and SHA-512/256) is input to a hash
algorithm, the result is an output called a message digest. The
message digests range in length from 160 to 512 bits, depending on
the algorithm. Secure hash algorithms are typically used with other
cryptographic algorithms, such as digital signature algorithms and
keyed-hash message authentication codes, or in the generation of
random numbers (bits).
The hash algorithms specified in this Standard are called secure
because, for a given algorithm, it is computationally infeasible 1) to
find a message that corresponds to a given message digest, or 2) to find
two different messages that produce the same message digest. Any change
to a message will, with a very high probability, result in a different
message digest. This will result in a verification failure when the
secure hash algorithm is used with a digital signature algorithm or a
keyed-hash message authentication algorithm.
This Standard supersedes FIPS 180-3 \[FIPS 180-3\].
1. **Approving Authority**: Secretary of Commerce.
2. **Maintenance Agency**: U.S. Department of Commerce, National
Institute of Standards and Technology (NIST), Information Technology
Laboratory (ITL).
**6. Applicability**: This Standard is applicable to all Federal
departments and agencies for the protection of sensitive unclassified
information that is not subject to Title 10 United States Code Section
2315 (10 USC 2315) and that is not within a national security system as
defined in Title 40 United States Code Section 11103(a)(1) (40 USC
11103(a)(1)). Either this Standard or Federal Information Processing
Standard (FIPS) 202 must be implemented wherever a secure hash algorithm
is required for Federal applications, including as a component within
other cryptographic algorithms and protocols. This Standard may be
adopted and used by non-Federal Government organizations.
**7. Specifications**: Federal Information Processing Standard (FIPS)
180-4, Secure Hash Standard (SHS) (affixed).
**8. Implementations:** The secure hash algorithms specified herein may
be implemented in software, firmware, hardware or any combination
thereof. Only algorithm implementations that are validated by NIST will
be considered as complying with this standard. Information about the
validation program can be obtained at
<http://csrc.nist.gov/groups/STM/index.html>.
**9. Implementation Schedule**: Guidance regarding the testing and
validation to FIPS 180-4 and its relationship to FIPS 140-2 can be found
in IG 1.10 of the Implementation Guidance for FIPS PUB 140-2 and the
Cryptographic Module Validation Program at
<http://csrc.nist.gov/groups/STM/cmvp/index.html>.
**10. Patents**: Implementations of the secure hash algorithms in this
standard may be covered by U.S. or foreign patents.
**11. Export Control**: Certain cryptographic devices and technical data
regarding them are\
subject to Federal export controls. Exports of cryptographic modules
implementing this standard and technical data regarding them must comply
with these Federal regulations and be licensed by the Bureau of Export
Administration of the U.S. Department of Commerce. Information about
export regulations is available at: <http://www.bis.doc.gov/index.htm>.
**12. Qualifications:** While it is the intent of this Standard to
specify general security requirements for generating a message digest,
conformance to this Standard does not assure that a particular
implementation is secure. The responsible authority in each agency or
department shall assure that an overall implementation provides an
acceptable level of security. This Standard will be reviewed every five
years in order to assess its adequacy.
**13. Waiver Procedure:** The Federal Information Security Management
Act (FISMA) does not allow for waivers to a FIPS that is made mandatory
by the Secretary of Commerce.
**14. Where to Obtain Copies of the Standard**: This publication is
available electronically by accessing
<http://csrc.nist.gov/publications/>. Other computer security
publications are available at the same web site.
**Federal Information**
**Processing Standards Publication 180-4**
**Specifications for the**
**SECURE HASH STANDARD**
**Table of Contents**
1\. INTRODUCTION 3
2\. DEFINITIONS 4
2.1 Glossary of Terms and Acronyms 4
2.2 Algorithm Parameters, Symbols, and Terms 4
2.2.1 Parameters 4
2.2.2 Symbols and Operations 5
3\. NOTATION AND CONVENTIONS 7
3.1 Bit Strings and Integers 7
3.2 Operations on Words 8
4\. FUNCTIONS AND CONSTANTS 10
4.1 Functions 10
4.1.1 SHA-1 Functions 10
4.1.2 SHA-224 and SHA-256 Functions 10
4.1.3 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 Functions 11
4.2 Constants 11
4.2.1 SHA-1 Constants 11
4.2.2 SHA-224 and SHA-256 Constants 11
4.2.3 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 Constants 12
5\. PREPROCESSING 13
5.1 Padding the Message 13
5.1.1 SHA-1, SHA-224 and SHA-256 13
5.1.2 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 13
5.2 Parsing the Message 14
5.2.1 SHA-1, SHA-224 and SHA-256 14
5.2.2 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 14
5.3 Setting the Initial Hash Value (*H*^(0)^) 14
5.3.1 SHA-1 14
5.3.2 SHA-224 14
5.3.3 SHA-256 15
5.3.4 SHA-384 15
5.3.5 SHA-512 15
5.3.6 SHA-512/t 16
6\. SECURE HASH ALGORITHMS 18
6.1 SHA-1 18
6.1.1 SHA-1 Preprocessing 18
6.1.2 SHA-1 Hash Computation 18
6.1.3 Alternate Method for Computing a SHA-1 Message Digest 20
6.2 SHA-256 21
6.2.1 SHA-256 Preprocessing 22
6.2.2 SHA-256 Hash Computation 22
6.3 SHA-224 23
6.4 SHA-512 24
6.4.1 SHA-512 Preprocessing 24
6.4.2 SHA-512 Hash Computation 24
6.5 SHA-384 26
6.6 SHA-512/224 26
6.7 SHA-512/256 26
7\. TRUNCATION OF A MESSAGE DIGEST 27
APPENDIX A: Additional Information 28
A.1 Security of the Secure Hash Algorithms 28
A.2 Implementation Notes 28
A.3 Object Identifiers 28
APPENDIX B: REFERENCES 29
APPENDIX C: Technical Changes from FIPS 180-3 30
ERRATUM 31
1. INTRODUCTION
===============
This Standard specifies secure hash algorithms, SHA-1, SHA-224, SHA-256,
SHA-384, SHA-512, SHA-512/224 and SHA-512/256. All of the algorithms are
iterative, one-way hash functions that can process a message to produce
a condensed representation called a *message digest*. These algorithms
enable the determination of a message’s integrity: any change to the
message will, with a very high probability, result in a different
message digest. This property is useful in the generation and
verification of digital signatures and message authentication codes, and
in the generation of random numbers or bits.
Each algorithm can be described in two stages: preprocessing and hash
computation. Preprocessing involves padding a message, parsing the
padded message into *m*-bit blocks, and setting initialization values to
be used in the hash computation. The hash computation generates a
*message schedule* from the padded message and uses that schedule, along
with functions, constants, and word operations to iteratively generate a
series of hash values. The final hash value generated by the hash
computation is used to determine the message digest.
The algorithms differ most significantly in the security strengths that
are provided for the data being hashed. The security strengths of these
hash functions and the system as a whole when each of them is used with
other cryptographic algorithms, such as digital signature algorithms and
keyed-hash message authentication codes, can be found in \[SP 800-57\]
and \[SP 800-107\].
Additionally, the algorithms differ in terms of the size of the blocks
and words of data that are used during hashing or message digest sizes.
Figure 1 presents the basic properties of these hash algorithms.
-------------------------------------------------------------------------------------------------
**Algorithm** **Message Size** **Block Size** **Word Size** **Message Digest Size**
<!--- **(bits)** **(bits)** **(bits)** **(bits)** --->
------------------- ------------------ ---------------- --------------- -------------------------
> **SHA-1** < 2^64^ 512 32 160
> **SHA-224** < 2^64^ 512 32 224
> **SHA-256** < 2^64^ 512 32 256
> **SHA-384** < 2^128^ 1024 64 384
> **SHA-512** < 2^128^ 1024 64 512
> **SHA-512/224** < 2^128^ 1024 64 224
> **SHA-512/256** < 2^128^ 1024 64 256
-------------------------------------------------------------------------------------------------
Figure 1: Secure Hash Algorithm Properties
2. DEFINITIONS
==============
2.1 Glossary of Terms and Acronyms
----------------------------------
> Bit A binary digit having a value of 0 or 1.
>
> Byte A group of eight bits.
>
> FIPS Federal Information Processing Standard.
>
> NIST National Institute of Standards and Technology.
>
> SHA Secure Hash Algorithm.
>
> SP Special Publication
>
> Word A group of either 32 bits (4 bytes) or 64 bits (8 bytes),
> depending on the secure hash algorithm.
2.2 Algorithm Parameters, Symbols, and Terms
--------------------------------------------
### 2.2.1 Parameters
The following parameters are used in the secure hash algorithm
specifications in this Standard.
> *a, b, c, …, h* Working variables that are the *w*-bit words used in
> the computation of the hash values, *H*^(*i*)^.
>
> The *i*^th^ hash value. *H*^(0)^ is the *initial* hash value;
> *H*^(*N*)^ is the *final* hash value and is used to determine the
> message digest.
>
> The *j*^th^ word of the *i*^th^ hash value, where is the left-most
> word of hash value *i*.
>
> *K~t~* Constant value to be used for the iteration *t* of the hash
> computation.
>
> *k* Number of zeroes appended to a message during the padding step.
>
> Length of the message, *M*, in bits.
>
> *m* Number of bits in a message block, *M^(i)^*.
>
> *M* Message to be hashed.
>
> *M^(i)^* Message block *i*, with a size of *m* bits.
>
> The *j*^th^ word of the *i*^th^ message block, where is the left-most
> word of message block *i*.
>
> *n* Number of bits to be rotated or shifted when a word is operated
> upon.
>
> *N* Number of blocks in the padded message.
>
> *T* Temporary *w*-bit word used in the hash computation.
>
> *w* Number of bits in a word.
>
> *W~t~* The *t*^th^ *w*-bit word of the message schedule.
```cryptol
import Cryptol::Extras
```
### 2.2.2 Symbols and Operations
The following symbols are used in the secure hash algorithm
specifications; each operates on *w*-bit words.
> ∧ Bitwise AND operation.
```cryptol
// (&&) : {a} a -> a -> a
```
>
> ∨ Bitwise OR (“inclusive-OR”) operation.
>
```cryptol
// (||) : {a} a -> a -> a
```
> ⊕ Bitwise XOR (“exclusive-OR”) operation.
>
```cryptol
// (^) : {a} a -> a -> a
```
> ¬ Bitwise complement operation.
```cryptol
// (~) : {a} a -> a
```
>
> + Addition modulo 2*^w^*.
>
```cryptol
// (+) : {a} (Arith a) => a -> a -> a
```
> << Left-shift operation, where *x* << *n* is obtained by
> discarding the left-most *n* bits of the word *x* and then padding the
> result with *n* zeroes on the right.
>
```cryptol
// (<<) : {a, b, c} (fin b) => [a]c -> [b] -> [a]c
```
> >> Right-shift operation, where *x* >> *n* is obtained by
> discarding the right-most *n* bits of the word *x* and then padding
> the result with *n* zeroes on the left.
>
```cryptol
// (>>) : {a, b, c} (fin b) => [a]c -> [b] -> [a]c
```
> The following operations are used in the secure hash algorithm
> specifications:
>
> ***ROTL* *^n^*(*x*)** The *rotate left* (circular left shift)
> operation, where *x* is a *w*-bit word and *n* is an integer with
> 0 ≤ *n* < *w*, is defined by *ROTL ^n^*(*x*) = (*x* << *n*) ∨
> (*x* >> *w* - *n*).
```cryptol
// ROTL^n(x): circular left shift; Cryptol's <<< operator wraps, so no
// explicit OR of two shifts is needed.
ROTL (n : [8]) x = x <<< n
// ROTL agrees with the FIPS shift/OR formulation whenever n < w; the
// leading ~(n < width x) makes the equation vacuously true otherwise.
ROTLDef n x = ~(n < width x) || (ROTL n x == ((x << n) || (x >> ((width x) - n))))
// Provable instance of ROTLDef at each word size used by the SHA family.
property ROTLDefs n (x8 : [8]) (x16 : [16]) (x32 : [32]) (x64 : [64]) =
    ROTLDef n x8 &&
    ROTLDef n x16 &&
    ROTLDef n x32 &&
    ROTLDef n x64
```
>
> ***ROTR ^n^*(*x*)** The *rotate right* (circular right shift)
> operation, where *x* is a *w*-bit word and *n* is an integer with
> 0 ≤ *n* < *w*, is defined by *ROTR* *^n^*(*x*) = (*x* >> *n*) ∨
> (*x* << *w* - *n*).
```cryptol
// ROTR^n(x): circular right shift via Cryptol's wrapping >>> operator.
ROTR (n : [8]) x = x >>> n
// ROTR agrees with the FIPS shift/OR formulation whenever n < w.
ROTRDef n x = ~(n < width x) || (ROTR n x == ((x >> n) || (x << ((width x) - n))))
// Provable instance of ROTRDef at each word size used by the SHA family.
property ROTRDefs n (x8 : [8]) (x16 : [16]) (x32 : [32]) (x64 : [64]) =
    ROTRDef n x8 && ROTRDef n x16 && ROTRDef n x32 && ROTRDef n x64
```
>
> ***SHR ^n^*(*x*)** The *right shift* operation, where *x* is a *w*-bit
> word and *n* is an integer with 0 *n* < *w*, is defined by *SHR
> ^n^*(*x*)=*x* >> *n*.
```cryptol
// SHR^n(x): logical right shift of a w-bit word, discarding the
// right-most n bits and zero-filling on the left (FIPS 180-4, Sec. 2.2.2).
SHR : {a, b, c} (fin b) => [b] -> [a]c -> [a]c
SHR count word = word >> count
```
3. NOTATION AND CONVENTIONS
===========================
3.1 Bit Strings and Integers
----------------------------
The following terminology related to bit strings and integers will be
used.
1. A *hex digit* is an element of the set {0, 1,…, 9, a,…, f}. A hex
> digit is the representation of a 4-bit string. For example, the
> hex digit “7” represents the 4-bit string “0111”, and the hex
> digit “a” represents the 4-bit string “1010”.
2. A *word* is a *w*-bit string that may be represented as a sequence
> of hex digits. To convert a word to hex digits, each 4-bit string
> is converted to its hex digit equivalent, as described in (1)
> above. For example, the 32-bit string
1010 0001 0000 0011 1111 1110 0010 0011
> can be expressed as “a103fe23”, and the 64-bit string
1010 0001 0000 0011 1111 1110 0010 0011
0011 0010 1110 1111 0011 0000 0001 1010
> can be expressed as “a103fe2332ef301a”.
>
> *Throughout this specification, the “big-endian” convention is used
> when expressing both 32- and 64-bit words, so that within each word,
> the most significant bit is stored in the left-most bit position.*
1. An *integer* may be represented as a word or pair of words. A word
> representation of the message length, , in bits, is required for
> the padding techniques of Sec. 5.1.
> An integer between 0 and 2^32^-1 *inclusive* may be represented as a
> 32-bit word. The least significant four bits of the integer are
> represented by the right-most hex digit of the word representation.
> For example, the integer 291=2^8^ + 2^5^ + 2^1^ + 2^0^=256+32+2+1 is
> represented by the hex word “00000123”.
>
> The same holds true for an integer between 0 and 2^64^-1 *inclusive*,
> which may be represented as a 64-bit word.
>
> If *Z* is an integer, 0 *Z* < 2^64^, then *Z*=2^32^*X* + *Y*, where
> 0 *X* < 2^32^ and 0 *Y* < 2^32^. Since *X* and *Y* can be
> represented as 32-bit words *x* and *y*, respectively, the integer *Z*
> can be represented as the pair of words (*x*, *y*). This property is
> used for SHA-1, SHA-224 and SHA-256.
>
> If *Z* is an integer, 0 *Z* < 2^128^, then *Z*=2^64^*X* + *Y*,
> where 0 *X* < 2^64^ and 0 *Y* < 2^64^. Since *X* and *Y* can be
> represented as 64-bit words *x* and *y*, respectively, the integer *Z*
> can be represented as the pair of words (*x*, *y*). This property is
> used for SHA-384, SHA-512, SHA-512/224 and SHA-512/256.
1. For the secure hash algorithms, the size of the *message block* -
> *m* bits - depends on the algorithm.
<!-- -->
a) For **SHA-1, SHA-224** and **SHA-256**, each message block has **512
> bits**, which are represented as a sequence of sixteen **32-bit
> words**.
b) For **SHA-384**, **SHA-512, SHA-512/224** and **SHA-512/256** each
> message block has **1024 bits**, which are represented as a
> sequence of sixteen **64-bit words**.
3.2 Operations on Words
-----------------------
The following operations are applied to *w*-bit words in all five secure
hash algorithms. SHA-1, SHA-224 and SHA-256 operate on 32-bit words
(*w*=32), and SHA-384, SHA-512, SHA-512/224 and SHA-512/256 operate on
64-bit words (*w*=64).
1. Bitwise *logical* word operations: , , , and (see Sec. 2.2.2).
2. Addition modulo 2*^w^*.
> The operation *x* + *y* is defined as follows. The words *x* and *y*
> represent integers *X* and *Y*, where 0 *X* < 2*^w^* and 0 *Y* <
> 2*^w^*. For positive integers *U* and *V*, let be the remainder upon
> dividing *U* by *V*. Compute
>
> *Z*=( *X* + *Y* ) mod 2*^w^*.
>
> Then 0 *Z* < 2*^w^*. Convert the integer *Z* to a word, *z*, and
> define *z=x* + *y*.
1. The *right shift* operation ***SHR ^n^*(*x*)**, where *x* is a
> *w*-bit word and *n* is an integer with 0 *n* < *w*, is defined
> by
> *SHR ^n^*(*x*)=*x* >> *n*.
>
> This operation is used in the SHA-224, SHA-256, SHA-384, SHA-512,
> SHA-512/224 and SHA-512/256 algorithms.
1. The *rotate right* (circular right shift) operation ***ROTR
> ^n^*(*x*)**, where *x* is a *w*-bit word and *n* is an integer
> with 0 *n* < *w*, is defined by
> *ROTR* *^n^*(*x*)=(*x* >> *n*) (*x* << *w* - *n*).
>
> Thus, *ROTR* *^n^*(*x*) is equivalent to a circular shift (rotation)
> of *x* by *n* positions to the right.
>
> This operation is used by the SHA-224, SHA-256, SHA-384, SHA-512,
> SHA-512/224 and SHA-512/256 algorithms.
5. The *rotate left* (circular left shift) operation, ***ROTL*
> *^n^*(*x*)**, where *x* is a *w*-bit word and *n* is an integer
> with 0 *n* < *w*, is defined by
> *ROTL ^n^*(*x*)=(*x* << *n*) (*x* >> *w* - *n*).
>
> Thus, *ROTL ^n^*(*x*) is equivalent to a circular shift (rotation) of
> *x* by *n* positions to the left.
>
> This operation is used only in the SHA-1 algorithm.
5. Note the following equivalence relationships, where *w* is fixed in
> each relationship:
> *ROTL ^n^*(*x*) ≡ *ROTR ^w-n^*(*x*)
```cryptol
// A left rotation by n equals a right rotation by w - n (same w).
ROTLREquiv n x = ROTL n x == ROTR (width x - n) x
// Check the equivalence at each word size used by the SHA family.
property ROTLREquivs n (x8 : [8]) (x16 : [16]) (x32 : [32]) (x64 : [64]) =
    ROTLREquiv n x8 && ROTLREquiv n x16 && ROTLREquiv n x32 && ROTLREquiv n x64
```
>
> *ROTR ^n^*(*x*) ≡ *ROTL ^w-n^*(*x*)
```cryptol
// A right rotation by n equals a left rotation by w - n (same w).
ROTRLEquiv n x = ROTR n x == ROTL (width x - n) x
// Check the equivalence at each word size used by the SHA family.
property ROTRLEquivs n (x8 : [8]) (x16 : [16]) (x32 : [32]) (x64 : [64]) =
    ROTRLEquiv n x8 && ROTRLEquiv n x16 && ROTRLEquiv n x32 && ROTRLEquiv n x64
```
4. FUNCTIONS AND CONSTANTS
==========================
4.1 Functions
-------------
This section defines the functions that are used by each of the
algorithms. Although the SHA-224, SHA-256, SHA-384,SHA-512, SHA-512/224
and SHA-512/256 algorithms all use similar functions, their descriptions
are separated into sections for SHA-224 and SHA-256 (Sec. 4.1.2) and for
SHA-384, SHA-512, SHA-512/224 and SHA-512/256 (Sec. 4.1.3), since the
input and output for these functions are words of different sizes. Each
of the algorithms include *Ch*(*x*, *y*, *z*) and *Maj*(*x*, *y*, *z*)
functions; the exclusive-OR operation () in these functions may be
replaced by a bitwise OR operation (∨) and produce identical results.
### 4.1.1 SHA-1 Functions
SHA-1 uses a sequence of logical functions, *f*~0~, *f*~1~,…, *f*~79~.
Each function *f*~t~, where 0 *t* 79, operates on three 32-bit words,
*x*, *y*, and *z*, and produces a 32-bit word as output. The function
*f~t~* (*x*, *y*, *z*) is defined as follows:
*Ch*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (¬*x* ∧ *z*) 0 ≤ *t* ≤ 19
*Parity*(*x*, *y*, *z*) = *x* ⊕ *y* ⊕ *z* 20 ≤ *t* ≤ 39
*f~t~* (*x*, *y*, *z*) = (4.1)
*Maj*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (*x* ∧ *z*) ⊕ (*y* ∧ *z*) 40 ≤ *t* ≤ 59
*Parity*(*x*, *y*, *z*) = *x* ⊕ *y* ⊕ *z* 60 ≤ *t* ≤ 79.
```cryptol
// Ch ("choose"): picks y's bit where x has a 1 and z's bit where x has a 0.
Ch : {a} a -> a -> a -> a
Ch x y z = (x && y) ^ (~x && z)
// Parity: three-way XOR; used for SHA-1 rounds 20-39 and 60-79.
Parity : {a} a -> a -> a -> a
Parity x y z = x ^ y ^ z
// Maj ("majority"): bitwise majority vote of the three inputs.
Maj : {a} a -> a -> a -> a
Maj x y z = (x && y) ^ (x && z) ^ (y && z)
// OR-based variants; Sec. 4.1 notes that replacing XOR with OR in these
// functions produces identical results.
Ch' : {a} a -> a -> a -> a
Ch' x y z = (x && y) || (~x && z)
Maj' : {a} a -> a -> a -> a
Maj' x y z = (x && y) || (x && z) || (y && z)
ChXorOrEquiv x y z = Ch x y z == Ch' x y z
property ChXorOrEquiv32 (x : [32]) (y : [32]) (z : [32]) = ChXorOrEquiv x y z
// f_t: selects the SHA-1 round function for round t (FIPS Eq. 4.1).
f : [8] -> [32] -> [32] -> [32] -> [32]
f t = if t <= 19 then Ch
      else if t <= 39 then Parity
      else if t <= 59 then Maj
      else if t <= 79 then Parity
      else error "f: t is out of range"
```
### 4.1.2 SHA-224 and SHA-256 Functions
SHA-224 and SHA-256 both use six logical functions, where *each function
operates on 32-bit words*, which are represented as *x*, *y*, and *z*.
The result of each function is a new 32-bit word.
*Ch*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (¬*x* ∧ *z*) (4.2)
*Maj*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (*x* ∧ *z*) ⊕ (*y* ∧ *z*) (4.3)
*Σ~0~^256^*(*x*) = *ROTR* ^2^(*x*) ⊕ *ROTR* ^13^(*x*) ⊕ *ROTR* ^22^(*x*) (4.4)
*Σ~1~^256^*(*x*) = *ROTR* ^6^(*x*) ⊕ *ROTR* ^11^(*x*) ⊕ *ROTR* ^25^(*x*) (4.5)
*σ~0~^256^*(*x*) = *ROTR* ^7^(*x*) ⊕ *ROTR* ^18^(*x*) ⊕ *SHR* ^3^(*x*) (4.6)
*σ~1~^256^*(*x*) = *ROTR* ^17^(*x*) ⊕ *ROTR* ^19^(*x*) ⊕ *SHR* ^10^(*x*) (4.7)
```cryptol
// The four SHA-224/SHA-256 logical word functions (FIPS Eqs. 4.4-4.7):
// uppercase Sigma (S_*) mixes rotations only; lowercase sigma (s_*)
// replaces the last rotation with a plain right shift. Written directly
// with Cryptol's wrapping rotate (>>>) and shift (>>) operators; the
// shift amounts are annotated [8] to match ROTR/SHR usage elsewhere.
S_0_256 w = (w >>> ( 2 : [8])) ^ (w >>> (13 : [8])) ^ (w >>> (22 : [8]))
S_1_256 w = (w >>> ( 6 : [8])) ^ (w >>> (11 : [8])) ^ (w >>> (25 : [8]))
s_0_256 w = (w >>> ( 7 : [8])) ^ (w >>> (18 : [8])) ^ (w >>  ( 3 : [8]))
s_1_256 w = (w >>> (17 : [8])) ^ (w >>> (19 : [8])) ^ (w >>  (10 : [8]))
```
### 4.1.3 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 Functions
SHA-384, SHA-512, SHA-512/224 and SHA-512/256 use six logical functions,
where *each function operates on 64-bit words*, which are represented as
*x*, *y*, and *z*. The result of each function is a new 64-bit word.
*Ch*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (¬*x* ∧ *z*) (4.8)
*Maj*(*x*, *y*, *z*) = (*x* ∧ *y*) ⊕ (*x* ∧ *z*) ⊕ (*y* ∧ *z*) (4.9)
*Σ~0~^512^*(*x*) = *ROTR* ^28^(*x*) ⊕ *ROTR* ^34^(*x*) ⊕ *ROTR* ^39^(*x*) (4.10)
*Σ~1~^512^*(*x*) = *ROTR* ^14^(*x*) ⊕ *ROTR* ^18^(*x*) ⊕ *ROTR* ^41^(*x*) (4.11)
*σ~0~^512^*(*x*) = *ROTR* ^1^(*x*) ⊕ *ROTR* ^8^(*x*) ⊕ *SHR* ^7^(*x*) (4.12)
*σ~1~^512^*(*x*) = *ROTR* ^19^(*x*) ⊕ *ROTR* ^61^(*x*) ⊕ *SHR* ^6^(*x*) (4.13)
```cryptol
// The XOR/OR equivalence of Ch also holds at the 64-bit word size.
property ChXorOrEquiv64 (x : [64]) (y : [64]) (z : [64]) = ChXorOrEquiv x y z
// SHA-384/512 Sigma and sigma functions over 64-bit words (Eqs. 4.10-4.13).
S_0_512 x = ROTR 28 x ^ ROTR 34 x ^ ROTR 39 x
S_1_512 x = ROTR 14 x ^ ROTR 18 x ^ ROTR 41 x
s_0_512 x = ROTR 1 x ^ ROTR 8 x ^ SHR 7 x
s_1_512 x = ROTR 19 x ^ ROTR 61 x ^ SHR 6 x
```
4.2 Constants
-------------
### 4.2.1 SHA-1 Constants
SHA-1 uses a sequence of eighty constant 32-bit words, *K*~0~, *K*~1~,…,
*K*~79~, which are given by
5a827999 0 *t* 19
6ed9eba1 20 *t* 39
*K~t~* = (4.14)
8f1bbcdc 40 *t* 59
ca62c1d6 60 *t* 79
```cryptol
// SHA-1 round constants K_t (FIPS Eq. 4.14): one fixed 32-bit constant
// for each 20-round span, indexed by round number 0..79.
sha1_K : [80][32]
sha1_K = [ 0x5a827999 | t <- [ 0 .. 19] ] #
         [ 0x6ed9eba1 | t <- [20 .. 39] ] #
         [ 0x8f1bbcdc | t <- [40 .. 59] ] #
         [ 0xca62c1d6 | t <- [60 .. 79] ]
```
### 4.2.2 SHA-224 and SHA-256 Constants
SHA-224 and SHA-256 use the same sequence of sixty-four constant 32-bit
words, . These words represent the first thirty-two bits of the
fractional parts of the cube roots of the first sixty-four prime
numbers. In hex, these constant words are (from left to right)
> 428a2f98 71374491 b5c0fbcf e9b5dba5 3956c25b 59f111f1 923f82a4
> ab1c5ed5
>
> d807aa98 12835b01 243185be 550c7dc3 72be5d74 80deb1fe 9bdc06a7
> c19bf174
>
> e49b69c1 efbe4786 0fc19dc6 240ca1cc 2de92c6f 4a7484aa 5cb0a9dc
> 76f988da
>
> 983e5152 a831c66d b00327c8 bf597fc7 c6e00bf3 d5a79147 06ca6351
> 14292967
>
> 27b70a85 2e1b2138 4d2c6dfc 53380d13 650a7354 766a0abb 81c2c92e
> 92722c85
>
> a2bfe8a1 a81a664b c24b8b70 c76c51a3 d192e819 d6990624 f40e3585
> 106aa070
>
> 19a4c116 1e376c08 2748774c 34b0bcb5 391c0cb3 4ed8aa4a 5b9cca4f
> 682e6ff3
>
> 748f82ee 78a5636f 84c87814 8cc70208 90befffa a4506ceb bef9a3f7
> c67178f2
```cryptol
// SHA-224/SHA-256 round constants: the first 32 bits of the fractional
// parts of the cube roots of the first 64 primes (Sec. 4.2.2).
K32 : [64][32]
K32 = [ 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
        0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
        0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
        0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
        0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
        0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
        0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
        0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 ]
```
### 4.2.3 SHA-384, SHA-512, SHA-512/224 and SHA-512/256 Constants
SHA-384, SHA-512, SHA-512/224 and SHA-512/256 use the same sequence of
eighty constant 64-bit words, . These words represent the first
sixty-four bits of the fractional parts of the cube roots of the first
eighty prime numbers. In hex, these constant words are (from left to
right)
> 428a2f98d728ae22 7137449123ef65cd b5c0fbcfec4d3b2f e9b5dba58189dbbc
>
> 3956c25bf348b538 59f111f1b605d019 923f82a4af194f9b ab1c5ed5da6d8118
>
> d807aa98a3030242 12835b0145706fbe 243185be4ee4b28c 550c7dc3d5ffb4e2
>
> 72be5d74f27b896f 80deb1fe3b1696b1 9bdc06a725c71235 c19bf174cf692694
>
> e49b69c19ef14ad2 efbe4786384f25e3 0fc19dc68b8cd5b5 240ca1cc77ac9c65
>
> 2de92c6f592b0275 4a7484aa6ea6e483 5cb0a9dcbd41fbd4 76f988da831153b5
>
> 983e5152ee66dfab a831c66d2db43210 b00327c898fb213f bf597fc7beef0ee4
>
> c6e00bf33da88fc2 d5a79147930aa725 06ca6351e003826f 142929670a0e6e70
>
> 27b70a8546d22ffc 2e1b21385c26c926 4d2c6dfc5ac42aed 53380d139d95b3df
>
> 650a73548baf63de 766a0abb3c77b2a8 81c2c92e47edaee6 92722c851482353b
>
> a2bfe8a14cf10364 a81a664bbc423001 c24b8b70d0f89791 c76c51a30654be30
>
> d192e819d6ef5218 d69906245565a910 f40e35855771202a 106aa07032bbd1b8
>
> 19a4c116b8d2d0c8 1e376c085141ab53 2748774cdf8eeb99 34b0bcb5e19b48a8
>
> 391c0cb3c5c95a63 4ed8aa4ae3418acb 5b9cca4f7763e373 682e6ff3d6b2b8a3
>
> 748f82ee5defb2fc 78a5636f43172f60 84c87814a1f0ab72 8cc702081a6439ec
>
> 90befffa23631e28 a4506cebde82bde9 bef9a3f7b2c67915 c67178f2e372532b
>
> ca273eceea26619c d186b8c721c0c207 eada7dd6cde0eb1e f57d4f7fee6ed178
>
> 06f067aa72176fba 0a637dc5a2c898a6 113f9804bef90dae 1b710b35131c471b
>
> 28db77f523047d84 32caab7b40c72493 3c9ebe0a15c9bebc 431d67c49c100d4c
>
> 4cc5d4becb3e42b6 597f299cfc657e2a 5fcb6fab3ad6faec 6c44198c4a475817
```cryptol
// SHA-384/512 (and SHA-512/t) round constants: the first 64 bits of the
// fractional parts of the cube roots of the first 80 primes (Sec. 4.2.3).
K64 : [80][64]
K64 = [ 0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,
        0x3956c25bf348b538, 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118,
        0xd807aa98a3030242, 0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
        0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235, 0xc19bf174cf692694,
        0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
        0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
        0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4,
        0xc6e00bf33da88fc2, 0xd5a79147930aa725, 0x06ca6351e003826f, 0x142929670a0e6e70,
        0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
        0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,
        0xa2bfe8a14cf10364, 0xa81a664bbc423001, 0xc24b8b70d0f89791, 0xc76c51a30654be30,
        0xd192e819d6ef5218, 0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8,
        0x19a4c116b8d2d0c8, 0x1e376c085141ab53, 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8,
        0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3,
        0x748f82ee5defb2fc, 0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
        0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915, 0xc67178f2e372532b,
        0xca273eceea26619c, 0xd186b8c721c0c207, 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178,
        0x06f067aa72176fba, 0x0a637dc5a2c898a6, 0x113f9804bef90dae, 0x1b710b35131c471b,
        0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c,
        0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, 0x5fcb6fab3ad6faec, 0x6c44198c4a475817 ]
```
5. PREPROCESSING
================
Preprocessing consists of three steps: padding the message, *M* (Sec.
5.1), parsing the message into message blocks (Sec. 5.2), and setting
the initial hash value, *H*^(0)^ (Sec. 5.3).
5.1 Padding the Message
-----------------------
The purpose of this padding is to ensure that the padded message is a
multiple of 512 or 1024 bits, depending on the algorithm. Padding can be
inserted before hash computation begins on a message, or at any other
time during the hash computation prior to processing the block(s) that
will contain the padding.
### 5.1.1 SHA-1, SHA-224 and SHA-256
Suppose that the length of the message, *M*, is bits. Append the bit “1”
to the end of the message, followed by *k* zero bits, where *k* is the
smallest, non-negative solution to the equation . Then append the 64-bit
block that is equal to the number expressed using a binary
representation. For example, the (8-bit ASCII) message “**abc**” has
length , so the message is padded with a one bit, then zero bits, and
then the message length, to become the 512-bit padded message
423 64
01100001 01100010 01100011 1 00…00 00…011000
“**a**” “**b**” “**c**”
The length of the padded message should now be a multiple of 512 bits.
```cryptol
// Pad per Sec. 5.1.1: append a single 1 bit, then the minimum number of
// 0 bits, then the 64-bit message length, so the result is a multiple of
// 512 bits. The padded length is computed entirely at the type level.
pad512 : {msgLen}
         (fin msgLen,
          64 >= width msgLen) =>
         [msgLen] -> [msgLen + 65 + (512 - (msgLen + 65) % 512) % 512]
pad512 msg = msg # [True] # (zero:[padding]) # (`msgLen:[64])
    where type contentLen = msgLen + 65
          type padding = (512 - contentLen % 512) % 512
```
### 5.1.2 SHA-384, SHA-512, SHA-512/224 and SHA-512/256
Suppose the length of the message *M*, in bits, is bits. Append the bit
“1” to the end of the message, followed by *k* zero bits, where *k* is
the smallest non-negative solution to the equation . Then append the
128-bit block that is equal to the number expressed using a binary
representation. For example, the (8-bit ASCII) message “**abc**” has
length , so the message is padded with a one bit, then zero bits, and
then the message length, to become the 1024-bit padded message
871 128
01100001 01100010 01100011 1 00…00 00…011000
“**a**” “**b**” “**c**”
The length of the padded message should now be a multiple of 1024 bits.
```cryptol
// Pad per Sec. 5.1.2: append a single 1 bit, then the minimum number of
// 0 bits, then the 128-bit message length, so the result is a multiple
// of 1024 bits (used by the 64-bit-word SHA variants).
pad1024 : {msgLen}
          (fin msgLen,
           128 >= width msgLen) =>
          [msgLen] -> [msgLen + 129 + (1024 - (msgLen + 129) % 1024) % 1024]
pad1024 msg = msg # [True] # (zero:[padding]) # (`msgLen:[128])
    where type contentLen = msgLen + 129
          type padding = (1024 - contentLen % 1024) % 1024
```
5.2 Parsing the Message
-----------------------
The message and its padding must be parsed into *N* *m*-bit blocks.
### 5.2.1 SHA-1, SHA-224 and SHA-256
For SHA-1, SHA-224 and SHA-256, the message and its padding are parsed
into *N* 512-bit blocks, *M*^(1)^, *M*^(2)^,…, *M*^(*N*)^. Since the 512
bits of the input block may be expressed as sixteen 32-bit words, the
first 32 bits of message block *i* are denoted , the next 32 bits are ,
and so on up to .
```cryptol
// Split an already-padded message into 512-bit blocks (Sec. 5.2.1).
parse512 : {blocks} [512 * blocks] -> [blocks][512]
parse512 = split
// Pad and parse in one step; the blocks constraint pins the block count
// implied by the padding arithmetic of pad512.
padparse512 : {msgLen,blocks}
              ( fin msgLen,
                64 >= width msgLen,
                blocks == (msgLen + 65 + 511) / 512) =>
              [msgLen] -> [blocks][512]
padparse512 M = parse512 (pad512 M)
```
### 5.2.2 SHA-384, SHA-512, SHA-512/224 and SHA-512/256
For SHA-384, SHA-512, SHA-512/224 and SHA-512/256, the message and its
padding are parsed into *N* 1024-bit blocks, *M*^(1)^, *M*^(2)^,…,
*M*^(*N*)^. Since the 1024 bits of the input block may be expressed as
sixteen 64-bit words, the first 64 bits of message block *i* are denoted
, the next 64 bits are , and so on up to .
```cryptol
// Split an already-padded message into 1024-bit blocks (Sec. 5.2.2).
parse1024 : {blocks} [1024 * blocks] -> [blocks][1024]
parse1024 = split
// Pad and parse in one step for the 64-bit-word SHA variants.
padparse1024 : {msgLen,blocks}
               ( fin msgLen,
                 128 >= width msgLen,
                 blocks == (msgLen + 129 + 1023) / 1024 ) =>
               [msgLen] -> [blocks][1024]
padparse1024 M = parse1024 (pad1024 M)
```
5.3 Setting the Initial Hash Value (*H*^(0)^)
---------------------------------------------
Before hash computation begins for each of the secure hash algorithms,
the initial hash value, *H*^(0)^, must be set. The size and number of
words in *H*^(0)^ depends on the message digest size.
### 5.3.1 SHA-1
For SHA-1, the initial hash value, *H*^(0)^, shall consist of the
following five 32-bit words, in hex:
= 67452301
= efcdab89
= 98badcfe
= 10325476
= c3d2e1f0
```cryptol
// SHA-1 initial hash value H^(0) (Sec. 5.3.1): five fixed 32-bit words.
sha1_H0 : [5][32]
sha1_H0 = [ 0x67452301,
            0xefcdab89,
            0x98badcfe,
            0x10325476,
            0xc3d2e1f0 ]
```
### 5.3.2 SHA-224
For SHA-224, the initial hash value, *H*^(0)^, shall consist of the
following eight 32-bit words, in hex:
= c1059ed8
= 367cd507
= 3070dd17
= f70e5939
= ffc00b31
= 68581511
= 64f98fa7
= befa4fa4
```cryptol
// SHA-224 initial hash value H^(0) (Sec. 5.3.2): eight 32-bit words.
sha224_H0 : [8][32]
sha224_H0 = [ 0xc1059ed8,
              0x367cd507,
              0x3070dd17,
              0xf70e5939,
              0xffc00b31,
              0x68581511,
              0x64f98fa7,
              0xbefa4fa4 ]
```
### 5.3.3 SHA-256
For SHA-256, the initial hash value, *H*^(0)^, shall consist of the
following eight 32-bit words, in hex:
= 6a09e667
= bb67ae85
= 3c6ef372
= a54ff53a
= 510e527f
= 9b05688c
= 1f83d9ab
= 5be0cd19
```cryptol
// SHA-256 initial hash value H^(0) (Sec. 5.3.3): the first 32 bits of
// the fractional parts of the square roots of the first eight primes.
sha256_H0 : [8][32]
sha256_H0 = [ 0x6a09e667,
              0xbb67ae85,
              0x3c6ef372,
              0xa54ff53a,
              0x510e527f,
              0x9b05688c,
              0x1f83d9ab,
              0x5be0cd19 ]
```
These words were obtained by taking the first thirty-two bits of the
fractional parts of the square roots of the first eight prime numbers.
### 5.3.4 SHA-384
For SHA-384, the initial hash value, *H*^(0)^, shall consist of the
following eight 64-bit words, in hex:
= cbbb9d5dc1059ed8
= 629a292a367cd507
= 9159015a3070dd17
= 152fecd8f70e5939
= 67332667ffc00b31
= 8eb44a8768581511
= db0c2e0d64f98fa7
= 47b5481dbefa4fa4
```cryptol
// SHA-384 initial hash value H^(0) (Sec. 5.3.4): the first 64 bits of the
// fractional parts of the square roots of the 9th through 16th primes.
sha384_H0 : [8][64]
sha384_H0 = [ 0xcbbb9d5dc1059ed8,
              0x629a292a367cd507,
              0x9159015a3070dd17,
              0x152fecd8f70e5939,
              0x67332667ffc00b31,
              0x8eb44a8768581511,
              0xdb0c2e0d64f98fa7,
              0x47b5481dbefa4fa4 ]
```
These words were obtained by taking the first sixty-four bits of the
fractional parts of the square roots of the ninth through sixteenth
prime numbers.
### 5.3.5 SHA-512
For SHA-512, the initial hash value, *H*^(0)^, shall consist of the
following eight 64-bit words, in hex:
= 6a09e667f3bcc908
= bb67ae8584caa73b
= 3c6ef372fe94f82b
= a54ff53a5f1d36f1
= 510e527fade682d1
= 9b05688c2b3e6c1f
= 1f83d9abfb41bd6b
= 5be0cd19137e2179
```cryptol
// SHA-512 initial hash value H^(0) (Sec. 5.3.5): the first 64 bits of the
// fractional parts of the square roots of the first eight primes.
sha512_H0 : [8][64]
sha512_H0 = [ 0x6a09e667f3bcc908,
              0xbb67ae8584caa73b,
              0x3c6ef372fe94f82b,
              0xa54ff53a5f1d36f1,
              0x510e527fade682d1,
              0x9b05688c2b3e6c1f,
              0x1f83d9abfb41bd6b,
              0x5be0cd19137e2179 ]
```
These words were obtained by taking the first sixty-four bits of the
fractional parts of the square roots of the first eight prime numbers.
### 5.3.6 SHA-512 / *t*
"SHA-512 / *t*" is the general name for a *t*-bit hash function based on
SHA-512 whose output is truncated to *t* bits. Each hash function
requires a distinct initial hash value. This section provides a
procedure for determining the initial value for SHA-512/ *t* for a given
value of *t*.
For SHA-512 / *t*, *t* is any positive integer without a leading zero such
that *t* < 512, and *t* is not 384. For example: *t* is 256, but not
0256, and “SHA-512 / *t*” is “SHA-512/256” (an 11 character long ASCII
string), which is equivalent to 53 48 41 2D 35 31 32 2F 32 35 36 in
hexadecimal.
The initial hash value for SHA-512 / *t*, for a given value of *t*, shall
be generated by the SHA-512 / *t* IV Generation Function below.
*SHA-512/t IV Generation Function*
(begin:)
Denote *H*^(0)′^ to be the initial hash value of SHA-512 as specified in
Section 5.3.5 above.
Denote *H*^(0)′′^ to be the initial hash value computed below.
*H*^(0)^ is the IV for SHA-512 / *t*.
For *i* = 0 to 7
> {
>
> *H~i~*^(0)′′^ = *H~i~*^(0)′^ a5a5a5a5a5a5a5a5(in hex).
>
> }
*H*^(0)^ = SHA-512 (“SHA-512 / *t*”) using *H*^(0)′′^ as the IV, where *t*
is the specific truncation value.
(end.)
```cryptol
// SHA-512/t IV generation, step 1 (Sec. 5.3.6): H^(0)'' is the SHA-512
// IV with every word XORed against the constant a5a5a5a5a5a5a5a5.
sha512t_H_internal : [8][64]
sha512t_H_internal = [ h ^ 0xa5a5a5a5a5a5a5a5 | h <- sha512_H0]
```
SHA-512/224 (*t* = 224) and SHA-512/256 (*t* = 256) are **approved**
hash algorithms. Other SHA-512 / *t* hash algorithms with different *t*
values may be specified in \[SP 800-107\] in the future as the need
arises. Below are the IVs for SHA-512/224 and SHA-512/256.
#### 5.3.6.1 SHA-512/224
For SHA-512/224, the initial hash value, *H*^(0)^, shall consist of the
following eight 64-bit words, in hex:
= 8C3D37C819544DA2
= 73E1996689DCD4D6
= 1DFAB7AE32FF9C82
= 679DD514582F9FCF
= 0F6D2B697BD44DA8
= 77E36F7304C48942
= 3F9D85A86A1D36C8
= 1112E6AD91D692A1
```cryptol
// SHA-512/224 initial hash value (Sec. 5.3.6.1), as published in FIPS 180-4.
sha512_224_H0 : [8][64]
sha512_224_H0 = [ 0x8C3D37C819544DA2,
                  0x73E1996689DCD4D6,
                  0x1DFAB7AE32FF9C82,
                  0x679DD514582F9FCF,
                  0x0F6D2B697BD44DA8,
                  0x77E36F7304C48942,
                  0x3F9D85A86A1D36C8,
                  0x1112E6AD91D692A1 ]
// The published IV equals SHA-512 of the ASCII string "SHA-512/224"
// computed with the modified IV H^(0)'' (the IV Generation Function).
property sha512_224_H0s_equiv =
    sha512_224_H0 == split (sha512t "SHA-512/224" sha512t_H_internal)
```
These words were obtained by executing the *SHA-512/t IV Generation
Function* with *t* = 224.
#### 5.3.6.2 SHA-512/256
For SHA-512/256, the initial hash value, *H*^(0)^, shall consist of the
following eight 64-bit words, in hex:
= 22312194FC2BF72C
= 9F555FA3C84C64C2
= 2393B86B6F53B151
= 963877195940EABD
= 96283EE2A88EFFE3
= BE5E1E2553863992
= 2B0199FC2C85B8AA
= 0EB72DDC81C52CA2
```cryptol
// SHA-512/256 initial hash value (Sec. 5.3.6.2), as published in FIPS 180-4.
sha512_256_H0 : [8][64]
sha512_256_H0 = [ 0x22312194FC2BF72C,
                  0x9F555FA3C84C64C2,
                  0x2393B86B6F53B151,
                  0x963877195940EABD,
                  0x96283EE2A88EFFE3,
                  0xBE5E1E2553863992,
                  0x2B0199FC2C85B8AA,
                  0x0EB72DDC81C52CA2 ]
// The published IV equals SHA-512 of the ASCII string "SHA-512/256"
// computed with the modified IV H^(0)'' (the IV Generation Function).
property sha512_256_H0s_equiv =
    sha512_256_H0 == split (sha512t "SHA-512/256" sha512t_H_internal)
```
These words were obtained by executing the *SHA-512/t IV Generation
Function* with *t* = 256.
6. SECURE HASH ALGORITHMS
=========================
In the following sections, the hash algorithms are not described in
ascending order of size. SHA-256 is described before SHA-224 because the
specification for SHA-224 is identical to SHA-256, except that different
initial hash values are used, and the final hash value is truncated to
224 bits for SHA-224. The same is true for SHA-512, SHA-384, SHA-512/224
and SHA-512/256, except that the final hash value is truncated to 224
bits for SHA-512/224, 256 bits for SHA-512/256 or 384 bits for SHA-384.
For each of the secure hash algorithms, there may exist alternate
computation methods that yield identical results; one example is the
alternative SHA-1 computation described in Sec. 6.1.3. Such alternate
methods may be implemented in conformance to this standard.
6.1 SHA-1
---------
SHA-1 may be used to hash a message, *M*, having a length of bits, where
. The algorithm uses 1) a message schedule of eighty 32-bit words, 2)
five working variables of 32 bits each, and 3) a hash value of five
32-bit words. The final result of SHA-1 is a 160-bit message digest.
The words of the message schedule are labeled *W*~0~, *W*~1~,…, *W*~79~.
The five working variables are labeled ***a***, ***b***, ***c***,
***d***, and ***e***. The words of the hash value are labeled , which
will hold the initial hash value, *H*^(0)^, replaced by each successive
intermediate hash value (after each message block is processed),
*H*^(*i*)^, and ending with the final hash value, *H*^(*N*)^. SHA-1 also
uses a single temporary word, *T*.
### 6.1.1 SHA-1 Preprocessing
1. Set the initial hash value, *H*^(0)^, as specified in Sec. 5.3.1.
2. The message is padded and parsed as specified in Section 5.
### 6.1.2 SHA-1 Hash Computation
The SHA-1 hash computation uses functions and constants previously
defined in Sec. 4.1.1 and Sec. 4.2.1, respectively. Addition (+) is
performed modulo 2^32^.
Each message block, *M*^(1)^, *M*^(2)^, …, *M*^(*N*)^, is processed in
order, using the following steps:
> For *i*=1 to *N*:
>
> {
1. Prepare the message schedule, {*W~t~*}:
> =
>
> *ROTL*^1^()
```cryptol
// SHA-1 message schedule (Sec. 6.1.2, step 1): words 0..15 are the block
// itself; each word 16..79 is a 1-bit left rotation of the XOR of four
// earlier schedule words. W is defined self-referentially, which Cryptol
// resolves lazily.
sha1_W : [16][32] -> [80][32]
sha1_W Mblock = W
    where W = Mblock # [ ROTL 1 (W@(t - 3) ^ W@(t - 8) ^ W@(t - 14) ^ W@(t-16)) | t <- [16 .. 79] ]
```
1. Initialize the five working variables, ***a***, ***b***, ***c***,
> ***d***, and ***e***, with the (*i*-1)^st^ hash value:
2. For *t*=0 to 79:
> {
>
> }
```cryptol
// T = ROTL^5(a) + f_t(b,c,d) + e + K_t + W_t (Sec. 6.1.2, step 3);
// addition is modulo 2^32 by the [32] word type.
sha1_T : [80][32] -> [8] -> [5][32] -> [32]
sha1_T W t abcde = (ROTL 5 a) + (f t b c d) + e + (sha1_K @ t) + (W @ t)
    where [a, b, c, d, e] = abcde
// One SHA-1 round step. The state carries the T for the *current* round
// in front of a..e; this helper rotates the working variables and
// computes the next round's T from the freshly rotated variables.
sha1_helper : [16][32] -> [80][32] -> [8] -> [6][32] -> [6][32]
sha1_helper Mblock W t Tabcde = [ T', a', b', c', d', e' ]
    where [T, a, b, c, d, e] = Tabcde
          T' = sha1_T W t [a', b', c', d', e']
          e' = d
          d' = c
          c' = ROTL 30 b
          b' = a
          a' = T
```
```cryptol
// Run all 80 SHA-1 rounds over one 16-word message block, returning the
// final working variables a..e. The seed state prepends the round-0 T;
// drop`{1} removes that leading T slot from the final state.
sha1_block : [16][32] -> [5][32] -> [5][32]
sha1_block Mblock abcde = drop`{1}(Tabcdes ! 0)
    where W = sha1_W Mblock
          sha1_h = sha1_helper Mblock W
          Tabcde0 = [sha1_T W 0 abcde] # abcde
          Tabcdes = [ Tabcde0 ] # [ sha1_h (t+1) (Tabcdes @ t) | t <- [ 0 .. 79 ] ]
```
```cryptol
// Another (cleaner) specification of sha1_block processing: each working
// variable is a stream indexed by round number; position 80 of each
// stream holds that variable's value after the final round.
sha1_block' : [16][32] -> [5][32] -> [5][32]
sha1_block' Mblock [a0, b0, c0, d0, e0] =
    [as@80, bs@80, cs@80, ds@80, es@80]
    where
      W = sha1_W Mblock
      Ts = [ sha1_T W t [a, b, c, d, e]
           | a <- as | b <- bs | c <- cs | d <- ds | e <- es
           | t <- [0..79]
           ]
      es = [e0] # ds
      ds = [d0] # cs
      cs = [c0] # [ ROTL 30 b | b <- bs ]
      bs = [b0] # as
      as = [a0] # Ts
// The round-by-round and stream-based formulations agree on all inputs.
property sha1_blocks_equiv Mblock H =
    sha1_block Mblock H == sha1_block' Mblock H
```
1. Compute the *i*^th^ intermediate hash value *H*^(*i*)^:
> }
```cryptol
// Step 4 of Sec. 6.1.2: fold the block result into the running hash
// value, word-wise, modulo 2^32.
sha1_H : [5][32] -> [5][32] -> [5][32]
sha1_H H abcde = zipWith (+) abcde H
// Process one 512-bit block: compress with sha1_block, then accumulate.
sha1_Hblock : [5][32] -> [512] -> [5][32]
sha1_Hblock H Mblock = sha1_H H (sha1_block (split Mblock) H)
// Same, using the stream-based compression function.
sha1_Hblock' : [5][32] -> [512] -> [5][32]
sha1_Hblock' H Mblock = sha1_H H (sha1_block' (split Mblock) H)
property sha1_Hblocks_equiv Mblock H =
    sha1_Hblock Mblock H == sha1_Hblock' Mblock H
```
After repeating steps one through four a total of *N* times (i.e., after
processing *M^(N)^*), the resulting 160-bit message digest of the
message, *M*, is
```cryptol
// Fold sha1_Hblock over every parsed block, starting from the fixed IV;
// the final intermediate hash H^(N) is the 160-bit digest.
sha1parsed : {blocks} (fin blocks) => [blocks][512] -> [160]
sha1parsed Mparsed = join (Hs ! 0)
    where Hs = [sha1_H0] # [ sha1_Hblock H Mblock | H <- Hs | Mblock <- Mparsed]
// SHA-1 of a byte string: pad, parse, and hash (message < 2^64 bits).
sha1 : {n} (width (8*n) <= 64) => [n][8] -> [160]
sha1 M = sha1parsed (padparse512 (join M))
```
### 6.1.3 Alternate Method for Computing a SHA-1 Message Digest
The SHA-1 hash computation method described in Sec. 6.1.2 assumes that
the message schedule *W*~0~, *W*~1~,…, *W*~79~ is implemented as an
array of eighty 32-bit words. This is efficient from the standpoint of
the minimization of execution time, since the addresses of *W~t~*~-3~,…,
W~*t*-16~ in step (2) of Sec. 6.1.2 are easily computed.
However, if memory is limited, an alternative is to regard {*W~t~*} as a
circular queue that may be implemented using an array of sixteen 32-bit
words, *W*~0~, *W*~1~,…, *W*~15~. The alternate method that is described
in this section yields the same message digest as the SHA-1 computation
method described in Sec. 6.1.2. Although this alternate method saves
sixty-four 32-bit words of storage, it is likely to lengthen the
execution time due to the increased complexity of the address
computations for the {*W~t~*} in step (3).
For this alternate SHA-1 method, let *MASK*=0000000f (in hex). As in
Sec. 6.1.1, addition is performed modulo 2^32^. Assuming that the
preprocessing as described in Sec. 6.1.1 has been performed, the
processing of *M*^(*i*)^ is as follows:
> For *i*=1 to *N*:
>
> {
1. For *t*=0 to 15:
> {
>
> }
1. Initialize the five working variables, ***a***, ***b***, ***c***,
> ***d***, and ***e***, with the (*i*-1)^st^ hash value:
2. For *t*=0 to 79:
> {
>
> If then
>
> {
>
> }
>
> }
```cryptol
// Round function T for the alternate method (Sec. 6.1.3): identical to
// sha1_T but takes the current schedule word Ws directly instead of
// indexing into a precomputed 80-word schedule.
sha1_T_alt : [32] -> [8] -> [5][32] -> [32]
sha1_T_alt Ws t abcde = (ROTL 5 a) + (f t b c d) + e + (sha1_K @ t) + Ws
    where [a, b, c, d, e] = abcde
// One round of the memory-saving alternate method: the schedule is a
// 16-word circular queue W, indexed modulo 16 via MASK. Returns the
// updated queue alongside the rotated working variables.
sha1_helper_alt : [16][32] -> [8] -> [6][32] -> ([16][32], [6][32])
sha1_helper_alt W t Tabcde = (W', [ T', a', b', c', d', e' ])
    where [T, a, b, c, d, e] = Tabcde
          MASK = 0x0000000f
          s = ((zero:[24]) # t) && MASK
          Ws = if t >= 16 then
                 ROTL 1 (W @ ((s + 13) && MASK) ^
                         W @ ((s + 8) && MASK) ^
                         W @ ((s + 2) && MASK) ^
                         W @ s)
               else W @ t
          W' = [ if s == i then Ws else W @ i | i <- [ 0 .. 15 ] ]
          T' = sha1_T_alt Ws t [a', b', c', d', e']
          e' = d
          d' = c
          c' = ROTL 30 b
          b' = a
          a' = T
```
```cryptol
// Alternate (circular-queue) SHA-1 block compression: threads the
// 16-word queue through all 80 rounds; .1 projects the variable state
// from the (queue, state) pair, and drop`{1} removes the leading T slot.
sha1_block_alt : [16][32] -> [5][32] -> [5][32]
sha1_block_alt Mblock abcde = drop`{1} (WTabcdes ! 0).1
    where W = Mblock
          Tabcde0 = [sha1_T_alt (W @ 0) 0 abcde] # abcde
          WTabcdes = [ (W, Tabcde0) ] #
                     [ sha1_helper_alt Wt (t+1) Tabcde
                       where (Wt, Tabcde) = WTabcdes @ t
                     | t <- [ 0 .. 79 ] ]
// The alternate method matches the standard block compression.
property sha1_block_alt_equiv Mblock H =
    sha1_block Mblock H == sha1_block_alt Mblock H
```
1. Compute the *i*^th^ intermediate hash value *H*^(*i*)^:
```cryptol
// Process one 512-bit block with the alternate compression, then add
// the result into the running hash value.
sha1_Hblock_alt : [5][32] -> [512] -> [5][32]
sha1_Hblock_alt H Mblock = sha1_H H (sha1_block_alt (split Mblock) H)
property sha1_Hblock_alt_equiv Mblock H =
    sha1_Hblock Mblock H == sha1_Hblock_alt Mblock H
```
> }
After repeating steps one through four a total of *N* times (i.e., after
processing *M^(N)^*), the resulting 160-bit message digest of the
message, *M*, is
```cryptol
// Full SHA-1 via the alternate (circular-queue) method of Sec. 6.1.3.
sha1parsed_alt : {blocks} (fin blocks) => [blocks][512] -> [160]
sha1parsed_alt Mparsed = join (Hs ! 0)
    where Hs = [sha1_H0] # [ sha1_Hblock_alt H Mblock | H <- Hs | Mblock <- Mparsed]
sha1_alt : {n} (width (8*n) <= 64) => [n][8] -> [160]
sha1_alt M = sha1parsed_alt (padparse512 (join M))
// Polymorphic in message length, so this can only be checked per
// instantiation, not proved once and for all.
// (Can only check, instead of prove)
property sha1_alt_equiv M = sha1 M == sha1_alt M
```
6.2 SHA-256
-----------
SHA-256 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^64^. The algorithm uses 1) a message schedule of sixty-four 32-bit
words, 2) eight working variables of 32 bits each, and 3) a hash value
of eight 32-bit words. The final result of SHA-256 is a 256-bit message
digest.
The words of the message schedule are labeled *W*~0~, *W*~1~,…, *W*~63~.
The eight working variables are labeled ***a***, ***b***, ***c***,
***d***, ***e***, ***f***, ***g***, and ***h***. The words of the hash
value are labeled , which will hold the initial hash value, *H*^(0)^,
replaced by each successive intermediate hash value (after each message
block is processed), *H*^(*i*)^, and ending with the final hash value,
*H*^(*N*)^. SHA-256 also uses two temporary words, *T*~1~ and *T*~2~.
### 6.2.1 SHA-256 Preprocessing
1. Set the initial hash value, *H*^(0)^, as specified in Sec. 5.3.3.
2. The message is padded and parsed as specified in Section 5.
### 6.2.2 SHA-256 Hash Computation
The SHA-256 hash computation uses functions and constants previously
defined in Sec. 4.1.2 and Sec. 4.2.2, respectively. Addition (+) is
performed modulo 2^32^.
Each message block, *M*^(1)^, *M*^(2)^, …, *M*^(*N*)^, is processed in
order, using the following steps:
> For *i*=1 to *N*:
>
> {
1. Prepare the message schedule, {*W~t~*}:
> *W~t~* = *M~t~^(i)^* for 0 ≤ *t* ≤ 15; *W~t~* = σ~1~^256^(*W~t-2~*) + *W~t-7~* + σ~0~^256^(*W~t-15~*) + *W~t-16~* for 16 ≤ *t* ≤ 63
```cryptol
sha256_W : [16][32] -> [64][32]
sha256_W Mblock = W
where W = Mblock # [ s_1_256 (W @ (t - 2)) +
W @ (t - 7) +
s_0_256 (W @ (t - 15)) +
W @ (t - 16)
| t <- [ 16 .. 63 ] ]
```
1. Initialize the eight working variables, ***a***, ***b***, ***c***,
> ***d***, ***e***, ***f***, ***g***, and ***h***, with the
> (*i*-1)^st^ hash value:
2. For *t*=0 to 63:
> {
>
> }
```cryptol
sha256_T1 : [64][32] -> [8] -> [32] -> [32] -> [32] -> [32] -> [32]
sha256_T1 W t e f g h = h + (S_1_256 e) + (Ch e f g) + (K32 @ t) + (W @ t)
sha256_T2 : [32] -> [32] -> [32] -> [32]
sha256_T2 a b c = (S_0_256 a) + (Maj a b c)
sha256_helper : [16][32] -> [64][32] -> [8] -> [10][32] -> [10][32]
sha256_helper Mblock W t T1T2abcdefgh = [ T1', T2', a', b', c', d', e', f', g', h' ]
where [T1, T2, a, b, c, d, e, f, g, h] = T1T2abcdefgh
T1' = sha256_T1 W t e' f' g' h'
T2' = sha256_T2 a' b' c'
h' = g
g' = f
f' = e
e' = d + T1
d' = c
c' = b
b' = a
a' = T1 + T2
```
```cryptol
sha256_block : [16][32] -> [8][32] -> [8][32]
sha256_block Mblock abcdefgh = drop`{2}(T1T2abcdefghs ! 0)
where W = sha256_W Mblock
sha256_h = sha256_helper Mblock W
T1T2abcdefgh0 = [sha256_T1 W 0 e f g h] # [sha256_T2 a b c] # abcdefgh
where [a, b, c, d, e, f, g, h] = abcdefgh
T1T2abcdefghs = [ T1T2abcdefgh0 ] #
[ sha256_h (t+1) (T1T2abcdefghs @ t)
| t <- [ 0 .. 63 ] ]
```
```cryptol
// Another (cleaner) specification of sha256_block processing
sha256_block' : [16][32] -> [8][32] -> [8][32]
sha256_block' Mblock abcdefgh =
[as@64, bs@64, cs@64, ds@64, es@64, fs@64, gs@64, hs@64]
where
[ a, b, c, d, e, f, g, h ] = abcdefgh
W = sha256_W Mblock
T1 = [ sha256_T1 W t e f g h | h <- hs | e <- es | f <- fs | g <- gs | t <- [ 0 .. 63 ] ]
T2 = [ sha256_T2 a b c | a <- as | b <- bs | c <- cs]
hs = [h] # gs
gs = [g] # fs
fs = [f] # es
es = [e] # [d + t1 | d <- ds | t1 <- T1]
ds = [d] # cs
cs = [c] # bs
bs = [b] # as
as = [a] # [t1 + t2 | t1 <- T1 | t2 <- T2]
property sha256_blocks_equiv Mblock H =
sha256_block Mblock H == sha256_block' Mblock H
```
1. Compute the *i*^th^ intermediate hash value *H*^(*i*)^:
> }
```cryptol
sha256_H : [8][32] -> [8][32] -> [8][32]
sha256_H H abcdefgh = zipWith (+) abcdefgh H
sha256_Hblock : [8][32] -> [512] -> [8][32]
sha256_Hblock H Mblock = sha256_H H (sha256_block (split Mblock) H)
sha256_Hblock' : [8][32] -> [512] -> [8][32]
sha256_Hblock' H Mblock = sha256_H H (sha256_block' (split Mblock) H)
property sha256_Hblocks_equiv Mblock H =
sha256_Hblock Mblock H == sha256_Hblock' Mblock H
```
After repeating steps one through four a total of *N* times (i.e., after
processing *M^(N)^*), the resulting 256-bit message digest of the
message, *M*, is
```cryptol
sha256parsed : {blocks} (fin blocks) => [8][32] -> [blocks][512] -> [256]
sha256parsed H0 Mparsed = join (Hs ! 0)
where Hs = [H0] # [ sha256_Hblock H Mblock | H <- Hs | Mblock <- Mparsed]
sha256 : {n} (width (8*n) <= 64) => [n][8] -> [256]
sha256 M = sha256parsed sha256_H0 (padparse512 (join M))
```
6.3 SHA-224
-----------
SHA-224 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^64^. The function is defined in the exact same manner as SHA-256
(Section 6.2), with the following two exceptions:
> 1\. The initial hash value, *H*^(0)^, shall be set as specified in Sec.
> 5.3.2; and
2\. The 224-bit message digest is obtained by truncating the final hash
value, *H*(*N)*, to its left-most 224 bits:
```cryptol
sha224 : {n} (width (8*n) <= 64) => [n][8] -> [224]
sha224 M = take`{224} (sha256parsed sha224_H0 (padparse512 (join M)))
```
6.4 SHA-512
-----------
SHA-512 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^128^. The algorithm uses 1) a message schedule of eighty 64-bit words,
2) eight working variables of 64 bits each, and 3) a hash value of eight
64-bit words. The final result of SHA-512 is a 512-bit message digest.
The words of the message schedule are labeled *W*~0~, *W*~1~,…, *W*~79~.
The eight working variables are labeled ***a***, ***b***, ***c***,
***d***, ***e***, ***f***, ***g***, and ***h***. The words of the hash
value are labeled , which will hold the initial hash value, *H*^(0)^,
replaced by each successive intermediate hash value (after each message
block is processed), *H*^(*i*)^, and ending with the final hash value,
*H*^(*N*)^. SHA-512 also uses two temporary words, *T*~1~ and *T*~2~.
### 6.4.1 SHA-512 Preprocessing
1. Set the initial hash value, *H*^(0)^, as specified in Sec. 5.3.5.
2. The message is padded and parsed as specified in Section 5.
### 6.4.2 SHA-512 Hash Computation
The SHA-512 hash computation uses functions and constants previously
defined in Sec. 4.1.3 and Sec. 4.2.3, respectively. Addition (+) is
performed modulo 2^64^.
Each message block, *M*^(1)^, *M*^(2)^, …, *M*^(*N*)^, is processed in
order, using the following steps:
> For *i*=1 to *N*:
>
> {
1. Prepare the message schedule, {*W~t~*}:
> *W~t~* = *M~t~^(i)^* for 0 ≤ *t* ≤ 15; *W~t~* = σ~1~^512^(*W~t-2~*) + *W~t-7~* + σ~0~^512^(*W~t-15~*) + *W~t-16~* for 16 ≤ *t* ≤ 79
```cryptol
sha512_W : [16][64] -> [80][64]
sha512_W Mblock = W
where W = Mblock # [ s_1_512 (W @ (t - 2)) +
W @ (t - 7) +
s_0_512 (W @ (t - 15)) +
W @ (t - 16)
| t <- [ 16 .. 79 ] ]
```
1. Initialize the eight working variables, ***a***, ***b***, ***c***,
> ***d***, ***e***, ***f***, ***g***, and ***h***, with the
> (*i*-1)^st^ hash value:
2. For *t*=0 to 79:
> {
>
> }
```cryptol
sha512_T1 : [80][64] -> [8] -> [64] -> [64] -> [64] -> [64] -> [64]
sha512_T1 W t e f g h = h + (S_1_512 e) + (Ch e f g) + (K64 @ t) + (W @ t)
sha512_T2 : [64] -> [64] -> [64] -> [64]
sha512_T2 a b c = (S_0_512 a) + (Maj a b c)
sha512_helper : [16][64] -> [80][64] -> [8] -> [10][64] -> [10][64]
sha512_helper Mblock W t T1T2abcdefgh = [ T1', T2', a', b', c', d', e', f', g', h' ]
where [T1, T2, a, b, c, d, e, f, g, h] = T1T2abcdefgh
T1' = sha512_T1 W t e' f' g' h'
T2' = sha512_T2 a' b' c'
h' = g
g' = f
f' = e
e' = d + T1
d' = c
c' = b
b' = a
a' = T1 + T2
```
```cryptol
sha512_block : [16][64] -> [8][64] -> [8][64]
sha512_block Mblock abcdefgh = drop`{2}(T1T2abcdefghs ! 0)
where W = sha512_W Mblock
sha512_h = sha512_helper Mblock W
T1T2abcdefgh0 = [sha512_T1 W 0 e f g h] # [sha512_T2 a b c] # abcdefgh
where [a, b, c, d, e, f, g, h] = abcdefgh
T1T2abcdefghs = [ T1T2abcdefgh0 ] #
[ sha512_h (t+1) (T1T2abcdefghs @ t)
| t <- [ 0 .. 79 ] ]
```
```cryptol
// Another (cleaner) specification of sha256_block processing
sha512_block' : [16][64] -> [8][64] -> [8][64]
sha512_block' Mblock abcdefgh =
[as@80, bs@80, cs@80, ds@80, es@80, fs@80, gs@80, hs@80]
where
[ a, b, c, d, e, f, g, h ] = abcdefgh
W = sha512_W Mblock
T1 = [ sha512_T1 W t e f g h | h <- hs | e <- es | f <- fs | g <- gs | t <- [ 0 .. 79 ] ]
T2 = [ sha512_T2 a b c | a <- as | b <- bs | c <- cs]
hs = [h] # gs
gs = [g] # fs
fs = [f] # es
es = [e] # [d + t1 | d <- ds | t1 <- T1]
ds = [d] # cs
cs = [c] # bs
bs = [b] # as
as = [a] # [t1 + t2 | t1 <- T1 | t2 <- T2]
property sha512_blocks_equiv Mblock H =
sha512_block Mblock H == sha512_block' Mblock H
```
1. Compute the *i*^th^ intermediate hash value *H*^(*i*)^:
> }
```cryptol
sha512_H : [8][64] -> [8][64] -> [8][64]
sha512_H H abcdefgh = zipWith (+) abcdefgh H
sha512_Hblock : [8][64] -> [1024] -> [8][64]
sha512_Hblock H Mblock = sha512_H H (sha512_block (split Mblock) H)
sha512_Hblock' : [8][64] -> [1024] -> [8][64]
sha512_Hblock' H Mblock = sha512_H H (sha512_block' (split Mblock) H)
property sha512_Hblocks_equiv Mblock H =
sha512_Hblock Mblock H == sha512_Hblock' Mblock H
```
After repeating steps one through four a total of *N* times (i.e., after
processing *M^(N)^*), the resulting 512-bit message digest of the
message, *M*, is
```cryptol
sha512parsed : {blocks} (fin blocks) => [8][64] -> [blocks][1024] -> [512]
sha512parsed H0 Mparsed = join (Hs ! 0)
where Hs = [H0] # [ sha512_Hblock H Mblock | H <- Hs | Mblock <- Mparsed]
sha512t : {n} (width (8*n) <= 64) => [n][8] -> [8][64] -> [512]
sha512t M IV = sha512parsed IV (padparse1024 (join M))
sha512 : {n} (width (8*n) <= 64) => [n][8] -> [512]
sha512 M = sha512t M sha512_H0
```
6.5 SHA-384
-----------
SHA-384 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^128^. The algorithm is defined in the exact same manner as SHA-512
(Sec. 6.4), with the following two exceptions:
1. The initial hash value, *H*^(0)^, shall be set as specified in Sec.
> 5.3.4; and
2. The 384-bit message digest is obtained by truncating the final hash
> value, *H*^(*N*)^, to its left-most 384 bits:
```cryptol
sha384 : {n} (width (8*n) <= 64) => [n][8] -> [384]
sha384 M = take`{384} (sha512parsed sha384_H0 (padparse1024 (join M)))
```
6.6 SHA-512/224
---------------
SHA-512/224 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^128^. The algorithm is defined in the exact same manner as SHA-512
(Sec. 6.4), with the following two exceptions:
1. The initial hash value, *H*^(0)^, shall be set as specified in Sec.
5.3.6.1; and
2. The 224-bit message digest is obtained by truncating the final hash
> value, *H*^(*N*)^, to its left-most 224 bits.
```cryptol
sha512_224 : {n} (width (8*n) <= 64) => [n][8] -> [224]
sha512_224 M = take`{224} (sha512parsed sha512_224_H0 (padparse1024 (join M)))
```
6.7 SHA-512/256
---------------
SHA-512/256 may be used to hash a message, *M*, having a length of ℓ bits,
where 0 ≤ ℓ < 2^128^. The algorithm is defined in the exact same manner as SHA-512
(Sec. 6.4), with the following two exceptions:
1. The initial hash value, *H*^(0)^, shall be set as specified in Sec.
5.3.6.2; and
2. The 256-bit message digest is obtained by truncating the final hash
value, *H*^(*N*)^, to its left-most 256 bits.
```cryptol
sha512_256 : {n} (width (8*n) <= 64) => [n][8] -> [256]
sha512_256 M = take`{256} (sha512parsed sha512_256_H0 (padparse1024 (join M)))
```
7. TRUNCATION OF A MESSAGE DIGEST
=================================
Some application may require a hash function with a message digest
length different than those provided by the hash functions in this
Standard. In such cases, a truncated message digest may be used, whereby
a hash function with a larger message digest length is applied to the
data to be hashed, and the resulting message digest is truncated by
selecting an appropriate number of the leftmost bits. For guidelines on
choosing the length of the truncated message digest and information
about its security implications for the cryptographic application that
uses it, see SP 800-107 \[SP 800-107\].
**\
**APPENDIX A: Additional Information
====================================
A.1 Security of the Secure Hash Algorithms
------------------------------------------
The security of the five hash algorithms, SHA-1, SHA-224, SHA-256,
SHA-384, SHA-512, SHA-512/224 and SHA-512/256 is discussed in \[SP
800-107\].
A.2 Implementation Notes
-------------------------
Examples of SHA-1, SHA-224, SHA-256, SHA-384, SHA-512, SHA-512/224 and
SHA-512/256 are available at
<http://csrc.nist.gov/groups/ST/toolkit/examples.html>.
A.3 Object Identifiers
----------------------
Object identifiers (OIDs) for the SHA-1, SHA-224, SHA-256, SHA-384,
SHA-512, SHA-512/224 and SHA-512/256 algorithms are posted at
<http://csrc.nist.gov/groups/ST/crypto_apps_infra/csor/algorithms.html>.
\
APPENDIX B: REFERENCES
======================
\[FIPS 180-3\] NIST, Federal Information Processing Standards
Publication 180-3, *Secure Hash Standards (SHS)*, October 2008.
\[SP 800-57\] NIST Special Publication (SP) 800-57, Part 1,
*Recommendation for Key Management: General*, (Draft) May 2011.
\[SP 800-107\] NIST Special Publication (SP) 800-107, *Recommendation
for Applications Using Approved Hash Algorithms*, (Revised), (Draft)
September 2011.
APPENDIX C: Technical Changes from FIPS 180-3
=============================================
1. In FIPS 180-3, padding was inserted before hash computation begins.
FIPS 180-4 removed this restriction. Padding can be inserted before
hash computation begins or at any other time during the hash
computation prior to processing the message block(s) containing
the padding.
2. FIPS 180-4 adds two additional algorithms: SHA-512/224 and
SHA-512/256 to the Standard and the method for determining the
initial value for SHA-512 / *t* for a given value of *t*.
\
<span id="_Toc261698857" class="anchor"><span id="_Toc261699161" class="anchor"></span></span>ERRATUM
=====================================================================================================
The following change has been incorporated into FIPS 180-4, as of the
date indicated in the table.
**DATE** **TYPE** **CHANGE** **PAGE NUMBER**
---------- ----------- ---------------------------------- --------------------------------
  5/9/2014   Editorial   Change “*t* < 79” to “*t* ≤ 79”     Page 10, Section 4.1.1, Line 1
```cryptol
// Byte-oriented wrappers around the per-block compression functions:
// each takes the message block as [n][8] bytes (instead of a joined
// bit-vector) so external tools can drive all variants uniformly.
gen_sha1_Hblock : [5][32] -> [64][8] -> [5][32]
gen_sha1_Hblock H Mblock = sha1_Hblock H (join Mblock)
gen_sha1_Hblock' : [5][32] -> [64][8] -> [5][32]
gen_sha1_Hblock' H Mblock = sha1_Hblock' H (join Mblock)
gen_sha1_Hblock_alt : [5][32] -> [64][8] -> [5][32]
gen_sha1_Hblock_alt H Mblock = sha1_Hblock_alt H (join Mblock)
gen_sha256_Hblock : [8][32] -> [64][8] -> [8][32]
gen_sha256_Hblock H Mblock = sha256_Hblock H (join Mblock)
gen_sha256_Hblock' : [8][32] -> [64][8] -> [8][32]
gen_sha256_Hblock' H Mblock = sha256_Hblock' H (join Mblock)
gen_sha512_Hblock : [8][64] -> [128][8] -> [8][64]
gen_sha512_Hblock H Mblock = sha512_Hblock H (join Mblock)
gen_sha512_Hblock' : [8][64] -> [128][8] -> [8][64]
gen_sha512_Hblock' H Mblock = sha512_Hblock' H (join Mblock)
```<file_sep>/specs/aes.py
#!/usr/bin/python3
from lib.speclib import *
# --- AES-128 parameters and speclib type aliases ---
# The state is a flat 16-byte block in column-major order:
# byte r of column c lives at index 4*c + r (see shiftRow/mixColumn below).
blocksize: int = 16
block_t = bytes_t(16)
# A possibly-partial final block: at most `blocksize` bytes.
subblock_t = refine(vlbytes_t, lambda x: array.length(x) <= blocksize)
rowindex_t = range_t(0,4)
expindex_t = range_t(0,44)
word_t = bytes_t(4)
key_t = bytes_t(16)
nonce_t = bytes_t(12)
bytes_144_t = bytes_t(144)
bytes_176_t = bytes_t(176)
index_t = range_t(0,16)
rotval_t = range_t(1,32)
state_t = array_t(uint32_t,16)
sbox_t = array_t(uint8_t,256)
# The AES S-box lookup table (substitution layer), indexed by byte value.
sbox : sbox_t = array([
uint8(0x63), uint8(0x7C), uint8(0x77), uint8(0x7B), uint8(0xF2), uint8(0x6B), uint8(0x6F), uint8(0xC5), uint8(0x30), uint8(0x01), uint8(0x67), uint8(0x2B), uint8(0xFE), uint8(0xD7), uint8(0xAB), uint8(0x76),
uint8(0xCA), uint8(0x82), uint8(0xC9), uint8(0x7D), uint8(0xFA), uint8(0x59), uint8(0x47), uint8(0xF0), uint8(0xAD), uint8(0xD4), uint8(0xA2), uint8(0xAF), uint8(0x9C), uint8(0xA4), uint8(0x72), uint8(0xC0),
uint8(0xB7), uint8(0xFD), uint8(0x93), uint8(0x26), uint8(0x36), uint8(0x3F), uint8(0xF7), uint8(0xCC), uint8(0x34), uint8(0xA5), uint8(0xE5), uint8(0xF1), uint8(0x71), uint8(0xD8), uint8(0x31), uint8(0x15),
uint8(0x04), uint8(0xC7), uint8(0x23), uint8(0xC3), uint8(0x18), uint8(0x96), uint8(0x05), uint8(0x9A), uint8(0x07), uint8(0x12), uint8(0x80), uint8(0xE2), uint8(0xEB), uint8(0x27), uint8(0xB2), uint8(0x75),
uint8(0x09), uint8(0x83), uint8(0x2C), uint8(0x1A), uint8(0x1B), uint8(0x6E), uint8(0x5A), uint8(0xA0), uint8(0x52), uint8(0x3B), uint8(0xD6), uint8(0xB3), uint8(0x29), uint8(0xE3), uint8(0x2F), uint8(0x84),
uint8(0x53), uint8(0xD1), uint8(0x00), uint8(0xED), uint8(0x20), uint8(0xFC), uint8(0xB1), uint8(0x5B), uint8(0x6A), uint8(0xCB), uint8(0xBE), uint8(0x39), uint8(0x4A), uint8(0x4C), uint8(0x58), uint8(0xCF),
uint8(0xD0), uint8(0xEF), uint8(0xAA), uint8(0xFB), uint8(0x43), uint8(0x4D), uint8(0x33), uint8(0x85), uint8(0x45), uint8(0xF9), uint8(0x02), uint8(0x7F), uint8(0x50), uint8(0x3C), uint8(0x9F), uint8(0xA8),
uint8(0x51), uint8(0xA3), uint8(0x40), uint8(0x8F), uint8(0x92), uint8(0x9D), uint8(0x38), uint8(0xF5), uint8(0xBC), uint8(0xB6), uint8(0xDA), uint8(0x21), uint8(0x10), uint8(0xFF), uint8(0xF3), uint8(0xD2),
uint8(0xCD), uint8(0x0C), uint8(0x13), uint8(0xEC), uint8(0x5F), uint8(0x97), uint8(0x44), uint8(0x17), uint8(0xC4), uint8(0xA7), uint8(0x7E), uint8(0x3D), uint8(0x64), uint8(0x5D), uint8(0x19), uint8(0x73),
uint8(0x60), uint8(0x81), uint8(0x4F), uint8(0xDC), uint8(0x22), uint8(0x2A), uint8(0x90), uint8(0x88), uint8(0x46), uint8(0xEE), uint8(0xB8), uint8(0x14), uint8(0xDE), uint8(0x5E), uint8(0x0B), uint8(0xDB),
uint8(0xE0), uint8(0x32), uint8(0x3A), uint8(0x0A), uint8(0x49), uint8(0x06), uint8(0x24), uint8(0x5C), uint8(0xC2), uint8(0xD3), uint8(0xAC), uint8(0x62), uint8(0x91), uint8(0x95), uint8(0xE4), uint8(0x79),
uint8(0xE7), uint8(0xC8), uint8(0x37), uint8(0x6D), uint8(0x8D), uint8(0xD5), uint8(0x4E), uint8(0xA9), uint8(0x6C), uint8(0x56), uint8(0xF4), uint8(0xEA), uint8(0x65), uint8(0x7A), uint8(0xAE), uint8(0x08),
uint8(0xBA), uint8(0x78), uint8(0x25), uint8(0x2E), uint8(0x1C), uint8(0xA6), uint8(0xB4), uint8(0xC6), uint8(0xE8), uint8(0xDD), uint8(0x74), uint8(0x1F), uint8(0x4B), uint8(0xBD), uint8(0x8B), uint8(0x8A),
uint8(0x70), uint8(0x3E), uint8(0xB5), uint8(0x66), uint8(0x48), uint8(0x03), uint8(0xF6), uint8(0x0E), uint8(0x61), uint8(0x35), uint8(0x57), uint8(0xB9), uint8(0x86), uint8(0xC1), uint8(0x1D), uint8(0x9E),
uint8(0xE1), uint8(0xF8), uint8(0x98), uint8(0x11), uint8(0x69), uint8(0xD9), uint8(0x8E), uint8(0x94), uint8(0x9B), uint8(0x1E), uint8(0x87), uint8(0xE9), uint8(0xCE), uint8(0x55), uint8(0x28), uint8(0xDF),
uint8(0x8C), uint8(0xA1), uint8(0x89), uint8(0x0D), uint8(0xBF), uint8(0xE6), uint8(0x42), uint8(0x68), uint8(0x41), uint8(0x99), uint8(0x2D), uint8(0x0F), uint8(0xB0), uint8(0x54), uint8(0xBB), uint8(0x16)
])
# SubBytes step: apply the S-box to every byte of the state.
# (hacspec subset: '#' comments only; docstrings would add AST nodes.)
@typechecked
def subBytes(state:block_t) -> block_t:
    # Work on a copy so the caller's state is not mutated.
    st : block_t = bytes(array.copy(state))
    for i in range(16):
        st[i] = sbox[uintn.to_int(state[i])]
    return st
# Rotate row `i` of the column-major state left by `shift` columns.
# Byte r of column c sits at flat index 4*c + r, hence the 4*((shift+k)%4)
# source offsets below.
@typechecked
def shiftRow(i:rowindex_t,shift:rowindex_t,state:block_t) -> block_t:
    out : block_t = bytes(array.copy(state))
    out[i] = state[i + (4 * (shift % 4))]
    out[i+4] = state[i + (4 * ((shift + 1) % 4))]
    out[i+8] = state[i + (4 * ((shift + 2) % 4))]
    out[i+12] = state[i + (4 * ((shift + 3) % 4))]
    return out
# ShiftRows step: row 0 is unchanged; rows 1..3 rotate left by 1..3.
@typechecked
def shiftRows(state:block_t) -> block_t:
    state : block_t = shiftRow(1,1,state)
    state = shiftRow(2,2,state)
    state = shiftRow(3,3,state)
    return state
# Multiply a field element by x in GF(2^8): shift left, then conditionally
# reduce by 0x1b when the former top bit was set.
@typechecked
def xtime(x:uint8_t) -> uint8_t:
    x1 : uint8_t = x << 1
    x7 : uint8_t = x >> 7
    x71 : uint8_t = x7 & uint8(1)
    x711b : uint8_t = x71 * uint8(0x1b)
    return x1 ^ x711b
# MixColumns on a single column `c` (bytes 4c..4c+3), using the standard
# xtime-based formulation of the column transform.
@typechecked
def mixColumn(c:rowindex_t,state:block_t) -> block_t:
    i0 : nat_t = 4 * c
    s0 : uint8_t = state[i0]
    s1 : uint8_t = state[i0+1]
    s2 : uint8_t = state[i0+2]
    s3 : uint8_t = state[i0+3]
    st : block_t = bytes(array.copy(state))
    # tmp = s0 ^ s1 ^ s2 ^ s3 is shared by all four output bytes.
    tmp: uint8_t = s0 ^ s1 ^ s2 ^ s3
    st[i0] = s0 ^ tmp ^ (xtime (s0 ^ s1))
    st[i0+1] = s1 ^ tmp ^ (xtime (s1 ^ s2))
    st[i0+2] = s2 ^ tmp ^ (xtime (s2 ^ s3))
    st[i0+3] = s3 ^ tmp ^ (xtime (s3 ^ s0))
    return st
# MixColumns step: apply the column transform to all four columns.
@typechecked
def mixColumns(state:block_t) -> block_t:
    state : block_t = mixColumn(0,state)
    state = mixColumn(1,state)
    state = mixColumn(2,state)
    state = mixColumn(3,state)
    return state
# AddRoundKey step: XOR a 16-byte round key into the state.
@typechecked
def addRoundKey(state:block_t,key:block_t) -> block_t:
    out : block_t = bytes(array.copy(state))
    for i in range(16):
        out[i] ^= key[i]
    return out
# One full AES round: SubBytes, ShiftRows, MixColumns, AddRoundKey.
@typechecked
def aes_enc(state:block_t,round_key:block_t) -> block_t:
    state : block_t = subBytes(state)
    state = shiftRows(state)
    state = mixColumns(state)
    state = addRoundKey(state,round_key)
    return state
# Final AES round: like aes_enc but without MixColumns.
@typechecked
def aes_enc_last(state:block_t,round_key:block_t) -> block_t:
    state : block_t = subBytes(state)
    state = shiftRows(state)
    state = addRoundKey(state,round_key)
    return state
# The nine middle rounds of AES-128; `key` holds round keys 1..9 (9*16 bytes).
@typechecked
def rounds(state:block_t,key:bytes_144_t) -> block_t:
    out : block_t = bytes(array.copy(state))
    for i in range(9):
        out = aes_enc(out,key[16*i:16*i+16])
    return out
# AES-128 block encryption over an already-expanded 176-byte key schedule:
# initial AddRoundKey (round key 0), nine full rounds, final round (key 10).
@typechecked
def block_cipher(input:block_t,key:bytes_176_t) -> block_t:
    state : block_t = bytes(array.copy(input))
    k0 : block_t = key[0:16]
    k : bytes_144_t = key[16:10*16]
    kn : block_t = key[10*16:11*16]
    state = addRoundKey(state,k0)
    state = rounds(state,k)
    state = aes_enc_last(state,kn)
    return state
# RotWord: rotate a 4-byte word left by one byte (for key expansion).
@typechecked
def rotate_word(w:word_t) -> word_t:
    out : word_t = bytes(array.copy(w))
    out[0] = w[1]
    out[1] = w[2]
    out[2] = w[3]
    out[3] = w[0]
    return out
# SubWord: apply the S-box to each byte of a 4-byte word (for key expansion).
@typechecked
def sub_word(w:word_t) -> word_t:
    out : word_t = bytes(array.copy(w))
    out[0] = sbox[uintn.to_int(w[0])]
    out[1] = sbox[uintn.to_int(w[1])]
    out[2] = sbox[uintn.to_int(w[2])]
    out[3] = sbox[uintn.to_int(w[3])]
    return out
# Round-constant table for key expansion; rcon[i//4] is XORed into the first
# byte of every fourth word (index 0 = 0x8d is a placeholder, never used).
rcon_t = bytes_t(11)
rcon : rcon_t = array([uint8(0x8d), uint8(0x01), uint8(0x02), uint8(0x04), uint8(0x08), uint8(0x10), uint8(0x20), uint8(0x40), uint8(0x80), uint8(0x1b), uint8(0x36)])
# Key-expansion core: SubWord(RotWord(w)) with the round constant XORed
# into the first byte.
@typechecked
def aes_keygen_assist(w:word_t,rcon:uint8_t) -> word_t:
    k : word_t = rotate_word(w)
    k = sub_word(k)
    k[0] ^= rcon
    return k
# Derive expanded-key word i from word i-4 (`w0`) and word i-1 (`w1`).
@typechecked
def key_expansion_word(w0:word_t, w1:word_t, i:expindex_t) -> word_t:
    k : word_t = bytes(array.copy(w1))
    # Every fourth word runs the keygen-assist transform first.
    if i % 4 == 0:
        k = aes_keygen_assist(k,rcon[i//4])
    # Note: this loop variable shadows the parameter `i` (harmless: `i` is
    # not read afterwards).
    for i in range(4):
        k[i] ^= w0[i]
    return k
# AES-128 key expansion: 16-byte key -> 176-byte schedule (11 round keys).
# Words 0..3 are the key itself; words 4..43 are derived iteratively.
@typechecked
def key_expansion(key:block_t) -> bytes_t(176):
    key_ex : bytes_176_t = bytes(array.create(11*16,uint8(0)))
    key_ex[0:16] = key
    i : nat_t = 0
    for j in range(40):
        i = j + 4
        # word[i] = key_expansion_word(word[i-4], word[i-1], i)
        key_ex[4*i:4*i+4] = key_expansion_word(key_ex[4*i-16:4*i-12],key_ex[4*i-4:4*i],i)
    return key_ex
# Encrypt one 16-byte block under a raw 16-byte key (expands the key first).
@typechecked
def aes128_encrypt_block(k:key_t,input:bytes_t(16)) -> block_t:
    key_ex : bytes_176_t = key_expansion(k)
    out : block_t = block_cipher(input,key_ex)
    return out
# CTR keystream block: encrypt nonce (12 bytes) || counter (32-bit
# big-endian) under key k.
@typechecked
def aes128_ctr_keyblock(k:key_t,n:nonce_t,c:uint32_t) -> block_t:
    input : block_t = bytes(array.create(16,uint8(0)))
    input[0:12] = n
    input[12:16] = bytes.from_uint32_be(c)
    return aes128_encrypt_block(k,input)
# Many ways of extending this to CTR
# This version: use first-order CTR function specific to AES128 with a loop
# XOR a full 16-byte keystream block into a 16-byte data block.
@typechecked
def xor_block(block:block_t, keyblock:block_t) -> block_t:
    out : block_t = bytes.copy(block)
    for i in range(blocksize):
        out[i] ^= keyblock[i]
    return out
# AES-128-CTR keystream XOR over an arbitrary-length message, starting at
# the given counter value (counter increments by 1 per 16-byte block).
@typechecked
def aes128_counter_mode(key: key_t, nonce: nonce_t, counter: uint32_t, msg:vlbytes_t) -> vlbytes_t:
    blocks : vlarray_t(block_t)
    last : subblock_t
    blocks, last = array.split_blocks(msg, blocksize)
    keyblock : block_t = array.create(blocksize, uint8(0))
    last_block : block_t = array.create(blocksize, uint8(0))
    ctr : uint32_t = counter
    for i in range(array.length(blocks)):
        keyblock = aes128_ctr_keyblock(key, nonce, ctr)
        blocks[i] = xor_block(blocks[i], keyblock)
        ctr += uint32(1)
    # Handle the (possibly empty) trailing partial block: pad to 16 bytes,
    # XOR, then truncate back to the original length.
    keyblock = aes128_ctr_keyblock(key, nonce, ctr)
    last_block[0:array.length(last)] = last
    last_block = xor_block(last_block, keyblock)
    last = last_block[0:array.length(last)]
    return array.concat_blocks(blocks, last)
# CTR encryption (identical to decryption: keystream XOR is its own inverse).
@typechecked
def aes128_encrypt(key: key_t, nonce: nonce_t, counter: uint32_t, msg:vlbytes_t) -> vlbytes_t:
    return aes128_counter_mode(key,nonce,counter,msg)
# CTR decryption: same operation as encryption.
@typechecked
def aes128_decrypt(key: key_t, nonce: nonce_t, counter: uint32_t, msg:vlbytes_t) -> vlbytes_t:
    return aes128_counter_mode(key,nonce,counter,msg)
<file_sep>/tests/gf128_test.py
from specs.gf128 import *
from sys import exit
import json
def main(x: int) -> None:
    """Run the GF128 (GMAC) JSON test vectors; exits with status 1 on the
    first failing vector.

    :param x: unused; kept so the existing ``main(0)`` call keeps working.
    """
    # Context manager closes the vector file (the original leaked the handle).
    with open('tests/test_vectors/gf128_test_vectors.json') as file:
        gf128_test_vectors = json.load(file)
    # enumerate() instead of index arithmetic over the vector list.
    for i, vector in enumerate(gf128_test_vectors):
        msg = bytes.from_hex(vector['input'])
        k = bytes.from_hex(vector['key'])
        expected = bytes.from_hex(vector['output'])
        computed = gmac(msg,k)
        if (computed == expected):
            print("GF128 Test ",i," passed.")
        else:
            print("GF128 Test ",i," failed:")
            print("expected mac:",expected)
            print("computed mac:",computed)
            exit(1)
if __name__ == "__main__":
    main(0)
<file_sep>/tests/testlib.py
import sys
import threading
# Handle to the currently scheduled progress timer (None until print_dot runs).
t = None


def print_dot():
    """Print a progress dot to stderr and re-arm a 1-second repeating timer."""
    global t
    print(".", end="", file=sys.stderr)
    sys.stderr.flush()
    # Cancel any previously scheduled timer before arming a new one.
    if t:
        t.cancel()
    t = threading.Timer(1, print_dot)
    t.daemon = True
    t.start()


def exit(r):
    """Cancel the progress timer (if any) and terminate with status ``r``.

    Shadows the builtin ``exit`` on purpose so test scripts can call it
    directly.
    """
    # Bug fix: exit() may be called before print_dot() ever armed a timer,
    # in which case t is still None and the unconditional t.cancel() of the
    # original raised AttributeError instead of exiting.
    if t:
        t.cancel()
    sys.exit(r)
<file_sep>/tests/sha3_test.py
from lib.speclib import *
from specs.sha3 import shake128, shake256, sha3_224, sha3_256, sha3_384, sha3_512
from sys import exit
import json
def sha3_test():
    """Check sha3_224/256/384/512 against the shared JSON vectors; exits
    with status 1 on the first mismatch."""
    # Context manager closes the vector file (the original leaked the handle).
    with open('tests/test_vectors/sha3_test_vectors.json') as file:
        sha3_test_vectors = json.load(file)
    for i, vector in enumerate(sha3_test_vectors):
        msg = bytes.from_hex(vector['msg'])
        expected224 = vector['expected224']
        expected256 = vector['expected256']
        expected384 = vector['expected384']
        expected512 = vector['expected512']
        d224 = bytes.to_hex(sha3_224(array.length(msg), msg))
        d256 = bytes.to_hex(sha3_256(array.length(msg), msg))
        d384 = bytes.to_hex(sha3_384(array.length(msg), msg))
        d512 = bytes.to_hex(sha3_512(array.length(msg), msg))
        if (expected224 == d224 and expected256 == d256 and expected384 == d384 and expected512 == d512):
            print("SHA-3 (224/256/384/512) Test " + str(i) + " successful!")
        else:
            print("SHA3 Test failed!")
            print("Computed: "+d224)
            print("Expected: "+expected224)
            print("Computed: "+d256)
            print("Expected: "+expected256)
            print("Computed: "+d384)
            print("Expected: "+expected384)
            print("Computed: "+d512)
            print("Expected: "+expected512)
            exit(1)
def shake128_test():
    """Check shake128 (16-byte output) against its JSON test vectors; exits
    with status 1 on the first mismatch."""
    # Context manager closes the vector file (the original leaked the handle).
    with open('tests/test_vectors/shake128_test_vectors.json') as file:
        shake128_test_vectors = json.load(file)
    for i, vector in enumerate(shake128_test_vectors):
        msg = bytes.from_hex(vector['msg'])
        output = vector['output']
        res = bytes.to_hex(shake128(array.length(msg), msg, 16))
        if (output == res):
            print("SHAKE128 Test " + str(i) + " successful!")
        else:
            print("SHAKE128 Test failed!")
            print("Computed: "+res)
            print("Expected: "+output)
            exit(1)
def shake256_test():
    """Check shake256 (32-byte output) against its JSON test vectors; exits
    with status 1 on the first mismatch."""
    # Context manager closes the vector file (the original leaked the handle).
    with open('tests/test_vectors/shake256_test_vectors.json') as file:
        shake256_test_vectors = json.load(file)
    for i, vector in enumerate(shake256_test_vectors):
        msg = bytes.from_hex(vector['msg'])
        output = vector['output']
        res = bytes.to_hex(shake256(array.length(msg), msg, 32))
        if (output == res):
            print("SHAKE256 Test " + str(i) + " successful!")
        else:
            print("SHAKE256 Test failed!")
            print("Computed: "+res)
            print("Expected: "+output)
            exit(1)
def main():
    """Run every SHA-3/SHAKE test suite in order."""
    for suite in (sha3_test, shake128_test, shake256_test):
        suite()
if __name__ == '__main__':
    main()
<file_sep>/compiler/fstar-compiler/specs/Makefile
# Extract the Python hacspec specs to F*, lax-typecheck them, and build/run
# their OCaml test executables. Override FSTAR_HOME/HACL_HOME as needed.
FSTAR_HOME?=../../../../FStar
HACL_HOME?=../../../../hacl-star
FSTAR=$(FSTAR_HOME)/bin/fstar.exe --include $(HACL_HOME)/lib --include $(HACL_HOME)/lib/fst --expose_interfaces
SPECS=aes.fst gf128.fst chacha20.fst poly1305.fst curve25519.fst curve448.fst
BROKEN_SPECS=blake2.fst
LIBS = Lib.IntTypes.fst Lib.RawIntTypes.fst Lib.Sequence.fst Lib.ByteSequence.fst
LIBS_FILES= $(addprefix $(HACL_HOME)/lib/fst/, $(LIBS)) speclib.fst
LIBS_ML = Lib_IntTypes.ml Lib_RawIntTypes.ml Lib_Sequence.ml Lib_ByteSequence.ml Speclib.ml
LIBS_EX = --extract_module Speclib --extract_module Lib.IntTypes --extract_module Lib.RawIntTypes --extract_module Lib.Sequence --extract_module Lib.ByteSequence
include $(FSTAR_HOME)/ulib/ml/Makefile.include
SHELL:=/bin/bash
all: $(SPECS)
tests: $(SPECS:.fst=.exe)
# Not all specs pass right now.
check: chacha20.fst.checked poly1305.fst.checked gf128.fst.checked
.SECONDARY: %-ml
.PRECIOUS: %-ml
# Translate a Python spec to F* with the to_fstar compiler, then re-indent.
%.fst: ../../to_fstar.native ../../../specs/%.py speclib.fst
	../../to_fstar.native ../../../specs/$*.py > $*_pre.fst
	$(FSTAR) --indent $*_pre.fst > $*.fst
	rm $*_pre.fst
# Lax (admit-smt) typecheck only.
%.fst.lax: %.fst
	$(FSTAR) --lax $*.fst
# Extract a spec (plus the support libs) to OCaml in a per-spec directory.
%-ml: $(LIBS_FILES) %.fst
	mkdir -p $*-ml
	$(FSTAR) --lax --codegen OCaml $(LIBS_EX) --extract_module $* --odir $*-ml $(LIBS_FILES) $^
	touch $*-ml
# CAP capitalizes the stem to match the extracted OCaml module name.
%.exe: CAP = $(shell echo "$*" | sed 's/.*/\u&/')
%.exe: $(LIBS_FILES) %-ml tests/%_test.ml
	cp tests/testutil.ml $*-ml/
	cp tests/$*_test.ml $*-ml/
	$(OCAMLOPT) -w -8 -w -20 -g -I $*-ml $(addprefix $*-ml/, $(LIBS_ML)) $*-ml/$(CAP).ml $*-ml/testutil.ml $*-ml/$*_test.ml -o $*.exe
	./$*.exe
# Full (non-lax) verification.
%.fst.checked: %.fst
	$(FSTAR) $*.fst
clean:
	rm -rf $(SPECS) *_pre.fst *~ *-ml
<file_sep>/specs/aead_aes128gcm.py
#!/usr/bin/python3
from lib.speclib import *
from specs.aes import aes128_ctr_keyblock, aes128_encrypt, aes128_decrypt, xor_block
from specs.gf128 import gmac
# AEAD (AES-128-GCM style) type aliases: 128-bit key/tag/block, 96-bit nonce.
key_t = bytes_t(16)
nonce_t = bytes_t(12)
tag_t = bytes_t(16)
block_t = bytes_t(16)
# Build the MAC input: AAD zero-padded to a 16-byte boundary, then msg
# zero-padded likewise, then the bit lengths of AAD and msg as two 64-bit
# big-endian values. Returns (total length, buffer).
@typechecked
def padded_aad_msg(aad:vlbytes_t,msg:vlbytes_t) -> tuple_t(int,vlbytes_t):
    laad : int = array.length(aad)
    lmsg : int = array.length(msg)
    pad_aad : int = laad
    if laad % 16 > 0:
        pad_aad = laad + (16 - (laad % 16))
    pad_msg : int = lmsg
    if lmsg % 16 > 0:
        pad_msg = lmsg + (16 - (lmsg % 16))
    to_mac : vlbytes_t = bytes(array.create(pad_aad + pad_msg + 16,uint8(0)))
    to_mac[0:laad] = aad
    to_mac[pad_aad:pad_aad+lmsg] = msg
    to_mac[pad_aad+pad_msg:pad_aad+pad_msg+8] = bytes.from_uint64_be(uint64(laad * 8))
    to_mac[pad_aad+pad_msg+8:pad_aad+pad_msg+16] = bytes.from_uint64_be(uint64(lmsg * 8))
    return pad_aad+pad_msg+16, to_mac
# AEAD encryption: the GHASH key is AES_k(zero nonce, counter 0), the tag
# mask is AES_k(nonce, counter 1), and the payload is CTR-encrypted starting
# at counter 2. Returns (ciphertext, tag).
@typechecked
def aead_aes128gcm_encrypt(key:key_t,nonce:nonce_t,aad:vlbytes_t,msg:vlbytes_t) -> tuple_t(vlbytes_t,tag_t):
    nonce0 : vlbytes_t = bytes(array.create(12,uint8(0)))
    mac_key : block_t = aes128_ctr_keyblock(key,nonce0,uint32(0))
    tag_mix : block_t = aes128_ctr_keyblock(key,nonce,uint32(1))
    ciphertext : vlbytes_t = aes128_encrypt(key,nonce,uint32(2),msg)
    to_mac : vlbytes_t
    l: int # TODO: unused variable but _ is not supported by the compiler yet.
    l, to_mac = padded_aad_msg(aad,ciphertext)
    # Tag = GMAC(padded AAD || C || lengths) XOR the tag mask.
    mac : tag_t = gmac(to_mac,mac_key)
    mac = xor_block(mac,tag_mix)
    return ciphertext, mac
# AEAD decryption: recompute the tag over (aad, ciphertext) exactly as in
# encryption; only decrypt when it matches, otherwise fail().
@typechecked
def aead_aes128gcm_decrypt(key:key_t,nonce:nonce_t,aad:vlbytes_t,
                           ciphertext:vlbytes_t,tag:tag_t) -> vlbytes_t:
    nonce0 : vlbytes_t = bytes(array.create(12,uint8(0)))
    mac_key : block_t = aes128_ctr_keyblock(key,nonce0,uint32(0))
    tag_mix : block_t = aes128_ctr_keyblock(key,nonce,uint32(1))
    to_mac : vlbytes_t
    l: int # TODO: unused variable but _ is not supported by the compiler yet.
    l, to_mac = padded_aad_msg(aad,ciphertext)
    mac : tag_t = gmac(to_mac,mac_key)
    mac = xor_block(mac,tag_mix)
    # NOTE(review): spec-level comparison; not constant time.
    if mac == tag:
        msg : vlbytes_t = aes128_decrypt(key,nonce,uint32(2),ciphertext)
        return msg
    else:
        fail("mac failed")
<file_sep>/specs/rsapss.py
#!/usr/bin/python3
from lib.speclib import *
from specs.sha2 import sha256
# Size bounds and hash parameters for the RSA-PSS spec.
max_size_t : int = 2 ** 32 - 1
# size_nat_t: naturals that fit a 32-bit size; size_nat is its constructor.
size_nat_t,size_nat = refine(nat_t, lambda x: x <= max_size_t)
# SHA-256 accepts messages shorter than 2^61 bytes (2^64 bits).
max_input_len_sha256 : nat_t = nat(2 ** 61)
# SHA-256 digest length in bytes.
hLen : nat_t = nat(32)
# Ceiling division: number of m-sized blocks needed to cover x bytes.
@contract3(lambda x, m: x > 0 and m > 0,
           lambda x, m, res: res > 0 and x <= m * res)
@typechecked
def blocks(x: size_nat_t, m: size_nat_t) -> size_nat_t:
    return size_nat(nat((x - 1) // m + 1))
# Byte-wise XOR of two equal-length byte strings (length taken from b1).
@typechecked
def xor_bytes(b1: vlbytes_t, b2: vlbytes_t) -> vlbytes_t:
    res: vlbytes_t = bytes.copy(b1)
    for i in range(array.length(b1)):
        res[i] ^= b2[i]
    return res
# Byte-string equality (spec-level; not constant time).
@typechecked
def eq_bytes(b1: vlbytes_t, b2: vlbytes_t) -> bool:
    return (b1 == b2)
# SHA-256 wrapper with the input-length precondition made explicit.
@contract3(lambda msg: array.length(msg) < max_input_len_sha256,
           lambda msg, res: True)
@typechecked
def hash_sha256(msg: vlbytes_t) -> bytes_t(hLen):
    return sha256(msg)
# Mask Generation Function
# MGF1 with SHA-256: expands mgfseed into maskLen bytes by hashing
# seed || counter (32-bit big-endian) for counter = 0,1,... and
# concatenating the digests, then truncating.
@contract3(lambda mgfseed, maskLen: maskLen < (2 ** 32) * hLen and maskLen > 0
           and array.length(mgfseed) + 4 < max_size_t,
           lambda mgfseed, maskLen, res: array.length(res) == maskLen)
@typechecked
def mgf_sha256(mgfseed: vlbytes_t, maskLen: size_nat_t) -> vlbytes_t:
    counter_max : size_nat_t = blocks(maskLen, hLen)
    accLen : size_nat_t = counter_max * hLen
    acc: vlbytes_t = array.create(accLen, uint8(0))
    mgfseedLen : int = array.length(mgfseed)
    # Reusable buffer holding seed || 4-byte counter.
    mgfseed_counter: vlbytes_t = array.create(mgfseedLen + 4, uint8(0))
    mgfseed_counter[0:mgfseedLen] = mgfseed
    for i in range(counter_max):
        c : bytes_t = bytes.from_uint32_be(uint32(i))
        mgfseed_counter[mgfseedLen:(mgfseedLen + 4)] = c
        mHash : bytes_t = hash_sha256(mgfseed_counter)
        acc[(hLen * i):(hLen * i + hLen)] = mHash
    return acc[0:maskLen]
# Convert functions
@typechecked
def os2ip(b: vlbytes_t) -> nat_t:
    """Octet string to non-negative integer, big-endian (RFC 8017, 4.2)."""
    return bytes.to_nat_be(b)
@typechecked
def i2osp(n: nat_t) -> vlbytes_t:
    """Non-negative integer to big-endian octet string (RFC 8017, 4.1)."""
    return bytes.from_nat_be(nat(n))
# RSA-PSS
# Public key: modulus n and public exponent e.
rsa_pubkey_t = tuple_t(nat_t, nat_t) # (n, e)
# Private key: the public key plus the private exponent d.
rsa_privkey_t = tuple_t(rsa_pubkey_t, nat_t) # ((n, e), d)
@contract3(lambda salt, msg, emBits: array.length(msg) < max_input_len_sha256 and
           hLen + array.length(salt) + nat(2) <= blocks(emBits, nat(8)) and
           array.length(salt) + hLen + nat(8) < max_size_t and emBits > 0,
           lambda salt, msg, emBits, res: array.length(res) == blocks(emBits, nat(8)))
@typechecked
def pss_encode(salt: vlbytes_t, msg: vlbytes_t, emBits: size_nat_t) -> vlbytes_t:
    """EMSA-PSS encoding with SHA-256 (RFC 8017, 9.1.1).

    Produces the emLen-byte encoded message
    em = maskedDB || H(8*0x00 || H(msg) || salt) || 0xbc.
    """
    sLen : int = array.length(salt)
    emLen : size_nat_t = blocks(emBits, size_nat(nat(8)))
    msBits : size_nat_t = emBits % 8
    mHash : bytes_t = hash_sha256(msg)
    m1Len : size_nat_t = 8 + hLen + sLen
    # m1 = [8 * 0x00; mHash; salt]
    m1: vlbytes_t = array.create(m1Len, uint8(0))
    m1[8:(8 + hLen)] = mHash
    m1[(8 + hLen):m1Len] = salt
    # Annotation fixed: hash_sha256 returns a digest, not a size_nat.
    m1Hash : bytes_t = hash_sha256(m1)
    dbLen : size_nat_t = size_nat(nat(emLen - hLen - 1))
    # db = [0x00; ..; 0x00; 0x01; salt]
    db: vlbytes_t = array.create(dbLen, uint8(0))
    last_before_salt : size_nat_t = dbLen - sLen - 1
    db[last_before_salt] = uint8(0x01)
    db[(last_before_salt + 1):dbLen] = salt
    dbMask : vlbytes_t = mgf_sha256(m1Hash, dbLen)
    maskedDB : vlbytes_t = xor_bytes(db, dbMask)
    if msBits > 0:
        # Clear the bits above emBits in the leading byte.
        maskedDB[0] = maskedDB[0] & (uint8(0xff) >> (8 - msBits))
    em: vlbytes_t = array.create(emLen, uint8(0))
    # em = [maskedDB; m1Hash; 0xbc]
    em[0:dbLen] = maskedDB
    em[dbLen:(dbLen + hLen)] = m1Hash
    em[emLen - 1] = uint8(0xbc)
    return em
@contract3(lambda sLen, msg, em, emBits: array.length(msg) < max_input_len_sha256 and
           array.length(em) == blocks(emBits, nat(8)) and emBits > 0 and
           sLen + hLen + 8 < max_size_t,
           lambda sLen, msg, em, emBits, res: True)
@typechecked
def pss_verify(sLen: size_nat_t, msg: vlbytes_t, em: vlbytes_t, emBits: size_nat_t) -> bool:
    """EMSA-PSS verification with SHA-256 (RFC 8017, 9.1.2).

    Checks the 0xbc trailer, unmasks DB, validates the zero padding and
    0x01 separator, then recomputes and compares the salted hash.
    """
    emLen : size_nat_t = blocks(emBits, nat(8))
    msBits : size_nat_t = emBits % 8
    # Annotation fixed: hash_sha256 returns a digest, not a size_nat.
    mHash : bytes_t = hash_sha256(msg)
    em_0 : uint8_t
    if msBits > 0:
        # Bits above emBits in the leading byte must be zero.
        em_0 = em[0] & (uint8(0xff) << msBits)
    else:
        em_0 = uint8(0)
    res : bool
    if emLen < sLen + hLen + nat(2):
        res = False
    else:
        if not (em[emLen - 1] == uint8(0xbc) and em_0 == uint8(0)):
            res = False
        else:
            dbLen : size_nat_t = size_nat(nat(emLen - hLen - 1))
            # Annotation fixed: this slice is a byte string, not a uint8.
            maskedDB : vlbytes_t = em[0:dbLen]
            m1Hash : vlbytes_t = em[dbLen:(dbLen + hLen)]
            dbMask : vlbytes_t = mgf_sha256(m1Hash, dbLen)
            db : vlbytes_t = xor_bytes(maskedDB, dbMask)
            if msBits > 0:
                db[0] = db[0] & (uint8(0xff) >> (8 - msBits))
            padLen : size_nat_t = emLen - sLen - hLen - 1
            # Expected padding: zeros terminated by a single 0x01 byte.
            pad2: vlbytes_t = array.create(padLen, uint8(0))
            pad2[padLen - 1] = uint8(0x01)
            pad : vlbytes_t = db[0:padLen]
            salt : vlbytes_t = db[padLen:(padLen + sLen)]
            if (eq_bytes(pad, pad2)):
                m1Len : size_nat_t = 8 + hLen + sLen
                m1: vlbytes_t = array.create(m1Len, uint8(0))
                m1[8:(8 + hLen)] = mHash
                m1[(8 + hLen):m1Len] = salt
                m1Hash0 : vlbytes_t = hash_sha256(m1)
                res = eq_bytes(m1Hash, m1Hash0)
            else:
                res = False
    return res
@contract3(lambda modBits, skey, salt, msg:
           array.length(msg) < max_input_len_sha256 and modBits > nat(1) and
           array.length(salt) + hLen + nat(8) < max_size_t and array.length(salt) +
           hLen + nat(2) <= blocks(nat(modBits - nat(1)), nat(8)),
           lambda modBits, skey, salt, msg, res: array.length(res) == blocks(modBits, nat(8)))
@typechecked
def rsapss_sign(modBits: size_nat_t, skey: rsa_privkey_t, salt: vlbytes_t, msg: vlbytes_t) -> vlbytes_t:
    """RSASSA-PSS signing: PSS-encode the message, then s = em^d mod n."""
    pkey : rsa_pubkey_t
    d : nat_t
    n : nat_t
    e : nat_t
    (pkey, d) = skey
    (n, e) = pkey
    # Ring of integers modulo the RSA modulus n.
    felem_t = natmod_t(n)
    em : vlbytes_t = pss_encode(salt, msg, nat(modBits - 1))
    m : nat_t = os2ip(em)
    mmod: felem_t = natmod(m,n)
    # The RSA private operation: m^d mod n.
    smod: felem_t = mmod ** d
    s : nat_t = natmod.to_int(smod)
    sgnt : vlbytes_t = i2osp(s)
    return sgnt
@contract3(lambda modBits, pkey, sLen, msg, sgnt:
           array.length(msg) < max_input_len_sha256 and
           modBits > 1 and array.length(sgnt) == blocks(modBits, nat(8)) and
           sLen + hLen + nat(8) < max_size_t,
           lambda modBits, pkey, sLen, msg, sgnt, res: True)
@typechecked
def rsapss_verify(modBits: size_nat_t, pkey: rsa_pubkey_t, sLen: size_nat_t, msg: vlbytes_t, sgnt: vlbytes_t) -> bool:
    """RSASSA-PSS verification: em = s^e mod n, then PSS-verify em.

    Rejects immediately when the signature integer is not below n.
    """
    n : nat_t
    e : nat_t
    (n, e) = pkey
    # Ring of integers modulo the RSA modulus n.
    felem_t = natmod_t(n)
    s : nat_t = os2ip(sgnt)
    smod: felem_t = natmod(s,n)
    mmod: felem_t
    m: nat_t
    em: vlbytes_t
    res : bool
    if s < n:
        # The RSA public operation: s^e mod n.
        mmod = smod ** e
        m = natmod.to_int(mmod)
        em = i2osp(m)
        res = pss_verify(sLen, msg, em, size_nat(nat(modBits - 1)))
    else:
        res = False
    return res
<file_sep>/docker/travis-compiler/setup-root.sh
#!/usr/bin/env bash
# Provision the Travis compiler container (run as root): install system
# packages required to build the hacspec compiler and its OCaml toolchain.
set -v -e -x
# Update packages.
export DEBIAN_FRONTEND=noninteractive
apt-get -qq update
# Build essentials plus OCaml/opam prerequisites.
apt-get install -y libssl-dev libsqlite3-dev g++ gcc m4 make pkg-config python \
    libgmp3-dev cmake curl libtool-bin autoconf wget locales unzip \
    git
# opam requires a UTF-8 locale.
locale-gen en_US.UTF-8
dpkg-reconfigure locales
# Install opam (OCaml package manager) into /usr/local/bin.
wget https://raw.github.com/ocaml/opam/master/shell/opam_installer.sh -O - | sh -s /usr/local/bin
# Trim the image size.
apt-get autoremove -y
apt-get clean
apt-get autoclean
<file_sep>/tests/blake2_test.py
from specs.blake2 import *
from sys import exit
import json
def main(x:int) -> None:
    """Run the Blake2b/Blake2s JSON test vectors; exit(1) on any mismatch.

    Fix: use context managers so both vector files are closed
    deterministically (the original leaked the file handles).
    """
    with open('tests/test_vectors/blake2b_test_vectors.json') as file:
        blake2b_test_vectors = json.load(file)
    with open('tests/test_vectors/blake2s_test_vectors.json') as file:
        blake2s_test_vectors = json.load(file)
    for i, vec in enumerate(blake2b_test_vectors):
        data = bytes.from_hex(vec['data'])
        key = bytes.from_hex(vec['key'])
        nn = nat(vec['nn'])
        expected = bytes.from_hex(vec['output'])
        computed = blake2b(data,key,nn)
        if computed == expected:
            print("Blake2b Test {} passed.".format(i+1))
        else:
            print("Blake2b Test {} failed.".format(i+1))
            print("expected ciphertext:",bytes.to_hex(expected))
            print("computed ciphertext:",bytes.to_hex(computed))
            exit(1)
    for i, vec in enumerate(blake2s_test_vectors):
        data = bytes.from_hex(vec['data'])
        key = bytes.from_hex(vec['key'])
        nn = out_size_t(nat(vec['nn']))
        expected = bytes.from_hex(vec['output'])
        computed = blake2s(data,key,nn)
        if computed == expected:
            print("Blake2s Test {} passed.".format(i+1))
        else:
            print("Blake2s Test {} failed.".format(i+1))
            print("expected ciphertext:",bytes.to_hex(expected))
            print("computed ciphertext:",bytes.to_hex(computed))
            exit(1)

if __name__ == "__main__":
    main(0)
<file_sep>/tests/ed25519_test.py
from specs.ed25519 import *
import json
from sys import exit
def main (x: int) -> None :
    """Run the RFC 8032 Ed25519 test vector 0 plus the JSON vector suite.

    Fixes: (1) the per-vector verification of the expected signature used
    (pk0, msg0) from test 0 instead of the current vector's (pk, msg);
    (2) the vector file is now closed via a context manager.
    """
    with open('tests/test_vectors/ed25519_test_vectors.json') as file:
        ed25519_test_vectors = json.load(file)
    # RFC 7539 Test Vectors
    pk0 = bytes.from_hex('d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a')
    sk0 = bytes.from_hex('9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60')
    msg0 = bytes.from_hex('')
    sig_expected = bytes.from_hex('e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b')
    sig_computed = sign(sk0,msg0)
    verified = verify(pk0,msg0,sig_computed)
    if (sig_expected == sig_computed):
        print('Ed25519 Test 0 signature succeeded')
    else:
        print('Ed25519 Test 0 signature failed')
        print('expected: '+str(sig_expected))
        print('computed: '+str(sig_computed))
        exit(1)
    if (verified):
        print('Ed25519 Test 0 verification succeeded')
    else:
        print('Ed25519 Test 0 verification failed')
        exit(1)
    verified = verify(pk0,msg0,sig_expected)
    if (verified):
        print('Ed25519 Test 0 verification_expected succeeded')
    else:
        print('Ed25519 Test 0 verification_expected failed')
        exit(1)
    for i in range(len(ed25519_test_vectors)):
        pk = bytes.from_hex(ed25519_test_vectors[i]['public'])
        sk = bytes.from_hex(ed25519_test_vectors[i]['private'])
        msg = bytes.from_hex(ed25519_test_vectors[i]['input'])
        sig_expected = bytes.from_hex(ed25519_test_vectors[i]['output'])
        sig_computed = sign(sk,msg)
        verified = verify(pk,msg,sig_computed)
        if (sig_expected == sig_computed):
            print('Ed25519 Test ',i,' signature succeeded')
        else:
            print('Ed25519 Test ',i,' signature failed')
            print('expected: '+str(sig_expected))
            print('computed: '+str(sig_computed))
            exit(1)
        if (verified):
            print('Ed25519 Test ',i,' verification succeeded')
        else:
            print('Ed25519 Test ',i,' verification failed')
            exit(1)
        # Bug fix: verify the expected signature against THIS vector's key
        # and message (the original re-checked vector 0 here).
        verified = verify(pk,msg,sig_expected)
        if (verified):
            print('Ed25519 Test ',i,' verification_expected succeeded')
        else:
            print('Ed25519 Test ',i,' verification_expected failed')
            exit(1)

if __name__ == '__main__':
    main(0)
<file_sep>/README.md
⚠️ This repository is outdated. Please check out https://github.com/hacspec/hacspec for further development of hacspec.
---
# hacspec
hacspec is a proposal for a new specification language for crypto primitives that is succinct, that is easy to read and implement, and that lends itself to formal verification.
hacspec aims to formalize the pseudocode used in crypto standards by proposing a formal syntax that can be checked for simple errors. hacspec specifications can then be tested against test vectors specified in a common syntax.
hacspec specifications can also be compiled to cryptol, coq, F\*, easycrypt, and hence can be used as the basis for formal proofs of functional correctness, cryptographic security, and side-channel resistance.
# status
[](https://travis-ci.org/HACS-workshop/hacspec)
[](https://gitter.im/hacspec/Lobby)
This project is still in relatively early stages.
[The wiki](https://github.com/HACS-workshop/hacspec/wiki) contains an overview of the hacspec architecture as well as its current state.
An overview of the current state of hacspec can be found in this [blog post](https://franziskuskiefer.de/post/hacspec2/).
For more details please see the [SSR paper](https://github.com/HACS-workshop/hacspec/blob/master/doc/hacspec-ssr18-paper.pdf) containing the hacspec language definition.
## development
The master branch holds a stable version of hacspec.
Development happens on the [dev branch](https://github.com/HACS-workshop/hacspec/tree/dev).
Please file pull requests against that branch.
## compiler
See [compiler](compiler/) for details.
# How to use
To use hacspec in your project install the hacspec python package as follows.
## Installation via pip
hacspec is distributed as a [pip package](https://pypi.org/project/hacspec/)
pip install hacspec
To install the hacspec package from its source clone this repository and run
make -C build install
Now you can use the speclib in your python code with
from hacspec.speclib import *
The package also provides a tool to check hacspec files for correctness
hacspec-check <your-hacspec>
See the `example` directory for a spec using the hacspec python package.
## Development
When working on hacspec itself installation is not necessary.
The makefile has three main targets
make run // disabled type checker
make check // check hacspec compliance
make test // run tests with type checker enabled
make compiler // run the full spec checker and compiler (this requires ocaml)
to run or check specs.
# contact
Discussions are happening on the [mailing list](https://moderncrypto.org/mailman/listinfo/hacspec).
Chat with us on [gitter](https://gitter.im/hacspec/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link).
<file_sep>/tests/curve448_test.py
from specs.curve448 import *
import json
from sys import exit
def main (x: int) -> None :
    """Run the Curve448 JSON test vectors; exit(1) on any mismatch.

    Fix: the vector file is now closed via a context manager
    (the original leaked the file handle).
    """
    with open('tests/test_vectors/curve448_test_vectors.json') as file:
        curve448_test_vectors = json.load(file)
    for i in range(len(curve448_test_vectors)):
        s = bytes.from_hex(curve448_test_vectors[i]['private'])
        p = bytes.from_hex(curve448_test_vectors[i]['public'])
        expected = bytes.from_hex(curve448_test_vectors[i]['result'])
        valid = curve448_test_vectors[i]['valid']
        computed = scalarmult(s, p)
        # A vector passes when the result matches and it is marked valid,
        # or mismatches and is marked invalid.
        if (computed == expected and valid):
            print("Curve448 Test ", i, " passed.")
        elif (not(computed == expected) and not valid):
            print("Curve448 Test ", i, " passed.")
        else:
            print("Curve448 Test ", i, " failed:")
            print("expected: ", expected)
            print("computed: ", computed)
            exit(1)

main(0)
<file_sep>/specs/vrf.py
from lib.speclib import *
from specs.curve25519 import *
from specs.sha2 import *
from specs.ed25519 import *
# Half the byte-length of a compressed point: proofs are 5*n = 80 bytes,
# split as gamma (2n) || c (n) || s (2n).
n: int = 16
@typechecked
def normalise_point(s:extended_point_t) -> extended_point_t:
    """Scale an extended point by 1/z so that z = 1 (affine representative)."""
    (x,y,z,t) = s
    return extended_point(fmul(x, finv(z)), fmul(y, finv(z)), felem(nat(1)), fmul(t, finv (z)))
@typechecked
def OS2ECP(s:serialized_point_t) -> Union[extended_point_t, None]:
    """Octet string to curve point; None when decompression fails."""
    return point_decompress(s)
@typechecked
def ECP2OS(p:extended_point_t) -> serialized_point_t :
    """Curve point to its compressed octet-string encoding."""
    return point_compress(p)
@typechecked
def I2OSP(value: nat) -> bytes_t(32):
    # Encodes little-endian, matching this spec's convention.
    # NOTE(review): RFC 8017's I2OSP is big-endian — confirm intended.
    return bytes.from_nat_le(nat(value))
@typechecked
def OS2IP(s: serialized_scalar_t) -> felem_t:
    """Little-endian octet string to integer."""
    return bytes.to_nat_le(s)
@typechecked
def hash (msg:vlbytes_t) -> serialized_point_t:
    # SHA-256 yields 32 bytes, the size of a compressed point.
    # NOTE(review): the name shadows Python's builtin hash().
    return sha256(msg)
# Order of the prime-order subgroup of Curve25519 (the scalar L of RFC 8032).
curveOrder = (7237005577332262213973186563042994240857116359379907606001950938285454250989)
# Curve25519 cofactor.
cofactor = (8)
@typechecked
def ECVRF_hash_to_curve(ctr: nat, pub: serialized_point_t, input: bytes_t(uint32)) -> Union[extended_point_t, None]:
    """Try-and-increment hash-to-curve: SHA-256(input || pub || ctr).

    Recurses with ctr+1 until the digest decompresses as a point;
    returns None when ctr reaches the group order without success.
    """
    tmp = array.create(array.length(input)+64,uint8(0))
    lenMessage = array.length(input)
    tmp[:lenMessage] = input
    tmp[lenMessage:lenMessage + 32] = pub
    tmp[lenMessage + 32:] = I2OSP(ctr)
    hashed = hash(tmp)
    possiblePoint = OS2ECP(hashed)
    if possiblePoint is None:
        if ctr == curveOrder:
            return None
        else:
            return ECVRF_hash_to_curve(ctr + 1, pub, input)
    else:
        # Multiply by the cofactor (8) to land in the prime-order subgroup.
        p2 = point_mul(I2OSP(8), possiblePoint)
        return p2
@typechecked
def ECVRF_decode_proof(pi: bytes_t(5*n)) -> Union[Tuple[extended_point_t, bytes_t(n), bytes_t(2*n)], None]:
    """Split the 80-byte proof into (gamma, c, s); None when gamma is invalid."""
    gamma = pi[:2*n]
    c = pi[2*n: 3*n]
    s = pi[3*n: 5*n]
    if OS2ECP(gamma) is None:
        return None
    else:
        # NOTE(review): gamma is decompressed twice; the first result
        # could be reused.
        return (OS2ECP(gamma), c, s)
@typechecked
def ECVRF_hash_points (g: extended_point_t, h: extended_point_t, pub: extended_point_t, gamma: extended_point_t, gk: extended_point_t, hk: extended_point_t) -> felem_t:
    """Challenge derivation: hash the compressed encodings of six points."""
    tmp = array.create(32*6, uint8(0))
    tmp[0:32] = ECP2OS(g)
    tmp[32:64] = ECP2OS(h)
    tmp[64:96] = ECP2OS(pub)
    tmp[96:128] = ECP2OS(gamma)
    tmp[128:160] = ECP2OS(gk)
    tmp[160:192] = ECP2OS(hk)
    # Interpret the digest little-endian and reduce into the field.
    hashed = bytes.to_nat_le(hash(tmp))
    result = to_felem(hashed)
    return result
@typechecked
def ECVRF_prove (input: bytes_t(uint32), pub: serialized_point_t, priv: serialized_scalar_t, k:felem_t) -> Union[(bytes_t(5*n)), None]:
    """Create an 80-byte ECVRF proof gamma || c || s for `input`.

    `k` is the per-proof nonce. Returns None when the public key does
    not decode or hash-to-curve fails.
    """
    ap = point_decompress(pub)
    if ap is None:
        # Bug fix: the declared return type is Union[bytes, None]; the
        # original returned False here.
        return None
    h = ECVRF_hash_to_curve(0, pub, input)
    if h is None:
        return None
    gamma = point_mul(priv, h)
    kPrime = I2OSP(k)
    gk = point_mul(kPrime, g_ed25519)
    hk = point_mul(kPrime, h)
    c = ECVRF_hash_points(g_ed25519, h, ap, gamma, gk, hk)
    # Truncate the challenge to its upper 16 bytes (n = 16).
    c = OS2IP((I2OSP(c))[16:32])
    cPrime = c * OS2IP(priv)
    # s = k - c * priv (mod L)
    s = (k - cPrime + curveOrder) % curveOrder
    tmp = array.create(80, uint8(0))
    tmp[0:32] = ECP2OS(gamma)
    # NOTE(review): I2OSP returns 32 bytes while this slice takes 16 —
    # confirm speclib's slice-assignment semantics.
    tmp[32:48] = I2OSP(c)
    tmp[48:80] = I2OSP(s)
    return tmp
@typechecked
def ECVRF_proof_to_hash (pi: bytes_t(5*n)) -> Union[(bytes_t(2*n)), None]:
    """Derive the VRF output from a proof: SHA-256 of the encoded gamma."""
    decoded = ECVRF_decode_proof(pi)
    if decoded is None:
        # Bug fix: the original unpacked the result unconditionally and
        # raised TypeError on an invalid proof instead of returning None.
        return None
    (gamma, c, s) = decoded
    h = hash(ECP2OS(gamma))
    # NOTE(review): `h` is already a 32-byte digest; routing it through
    # I2OSP (which expects a nat) looks wrong — confirm whether `h`
    # should be returned directly.
    return I2OSP(h)
@typechecked
def ECVRF_verify (pub: serialized_point_t, pi: bytes_t(5*n), input: bytes_t(uint32)) -> bool:
    """Verify an 80-byte ECVRF proof `pi` for `input` under key `pub`.

    Fixes: the original evaluated the undefined lowercase name `false`
    (NameError) in two failure branches — one of them without `return` —
    and unpacked a possibly-None decode result.
    """
    ap = point_decompress(pub)
    if ap is None:
        return False
    decoded = ECVRF_decode_proof(pi)
    if decoded is None:
        return False
    (gamma, c, s) = decoded
    # u = c*Y + s*G
    yc = point_mul(c, ap)
    gs = point_mul(s, g_ed25519)
    u = point_add(yc, gs)
    h = ECVRF_hash_to_curve(0, pub, input)
    if h is None:
        return False
    # v = c*Gamma + s*H
    gammac = point_mul(c, gamma)
    hs = point_mul(s, h)
    v = point_add(gammac, hs)
    c_prime = ECVRF_hash_points(g_ed25519, h, ap, gamma, u, v)
    # Compare the truncated recomputed challenge against the proof's c.
    halfC = OS2IP((I2OSP(c_prime))[16:32])
    return halfC == OS2IP(c)
<file_sep>/build/setup.py
from distutils.core import setup

# Packaging metadata for the `hacspec` PyPI distribution.
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools.setup.
setup(name='hacspec',
      version='0.0.1.dev3',
      description='hacspec is a new specification language for crypto primitives that is succinct, that is easy to read and implement, and that lends itself to formal verification.',
      url='https://github.com/HACS-workshop/hacspec',
      author='<NAME> et al.',
      author_email='<EMAIL>',
      license='MIT',
      packages=['hacspec'],
      # typeguard supplies the @typechecked runtime checks used by speclib.
      install_requires = ['setuptools', 'typeguard'],
      entry_points={
          'console_scripts': [
              # `hacspec-check <spec>` validates a file for hacspec compliance.
              'hacspec-check=hacspec.check:main',
          ],
      })
<file_sep>/tests/p256_test.py
from specs.p256 import *
import json
from sys import exit
def main (x: int) -> None :
    """Run the hard-coded P-256 scalar-multiplication tests plus the JSON
    vector suite; exit(1) on any failure.

    Fixes: the per-vector loop reused the Test-2 values (sk2,
    point_computed2, point_expected2) instead of the current vector's, and
    called the non-existent int.from_hex (use int(s, 16)). The vector file
    is now closed via a context manager.
    """
    with open('tests/test_vectors/p256_test_vectors.json') as file:
        p256_test_vectors = json.load(file)
    sk0 = to_scalar(0x14)
    point_computed0 = point_mul(sk0)
    point_expected0 = (to_felem(0x83A01A9378395BAB9BCD6A0AD03CC56D56E6B19250465A94A234DC4C6B28DA9A), to_felem(0x76E49B6DE2F73234AE6A5EB9D612B75C9F2202BB6923F54FF8240AAA86F640B8))
    if point_computed0 == point_expected0:
        print("P256 Test 0 succeded")
    else:
        print("P256 Test 0 failed")
        (x_e, y_e) = point_expected0
        print("expected x: " + str(x_e) + " y: " + str(y_e))
        (x_c, y_c) = point_computed0
        print("computed x: " + str(x_c) + " y: " + str(y_c))
        exit(1)
    sk1 = to_scalar(0x018ebbb95eed0e13)
    point_computed1 = point_mul(sk1)
    point_expected1 = (to_felem(0x339150844EC15234807FE862A86BE77977DBFB3AE3D96F4C22795513AEAAB82F), to_felem(0xB1C14DDFDC8EC1B2583F51E85A5EB3A155840F2034730E9B5ADA38B674336A21))
    if point_computed1 == point_expected1:
        print("P256 Test 1 succeded")
    else:
        print("P256 Test 1 failed")
        (x_e, y_e) = point_expected1
        print("expected x: " + str(x_e) + " y: " + str(y_e))
        (x_c, y_c) = point_computed1
        print("computed x: " + str(x_c) + " y: " + str(y_c))
        exit(1)
    sk2 = to_scalar(0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632550)
    point_computed2 = point_mul(sk2)
    point_expected2 = (to_felem(0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296), to_felem(0xB01CBD1C01E58065711814B583F061E9D431CCA994CEA1313449BF97C840AE0A))
    if point_computed2 == point_expected2:
        print("P256 Test 2 succeded")
    else:
        print("P256 Test 2 failed")
        (x_e, y_e) = point_expected2
        print("expected x: " + str (x_e) + " y: " + str(y_e))
        (x_c, y_c) = point_computed2
        print("computed x: " + str (x_c) + " y: " + str(y_c))
        exit(1)
    for i in range(len(p256_test_vectors)):
        sk = to_scalar(int(p256_test_vectors[i]['private'], 16))
        point_computed = point_mul(sk)
        # NOTE(review): 'public' is parsed as a single field element while
        # point_mul returns an (x, y) pair — confirm the vector format
        # before relying on this comparison.
        point_expected = to_felem(int(p256_test_vectors[i]['public'], 16))
        if point_computed == point_expected:
            print("P256 Test ",i," succeded")
        else:
            print("P256 Test ",i," failed")
            print("expected: " + str(point_expected))
            print("computed: " + str(point_computed))
            exit(1)

main(0)
<file_sep>/tests/elligator2_test.py
from elligator2 import *
import json
#from test_vectors.elligator2_test_vectors import elligator2_test_vectors
from sys import exit
def main (x: int) -> None :
    """Smoke test for the Elligator2 hash-to-curve map (work in progress).

    Fix: the tuple returned by hash2curve25519 bound its second element
    as `xy0` while the prints below referenced the undefined name `cy0`.
    """
    # NOTE(review): speclib's from_nat_be usually expects a nat() — confirm.
    alpha0 = bytes.from_nat_be(1)
    x0 = bytes.from_nat_be(38597363079105398474523661669562635951089994888546854679819194669304376384412)
    y0 = bytes.from_nat_be(26903495929791911980624662688598047587608282445491562122320889756226532494818)
    (cx0, cy0) = hash2curve25519(alpha0)
    print(x0)
    print(y0)
    print(cx0)
    print(cy0)
    # if (cx0 == expected == computed):
    #     print("Curve25519 Test 0 passed.")
    # else:
    #     print("Curve25519 0 failed:")
    #     print("expected:",expected)
    #     print("computed:",computed)
    # for i in range(len(curve25519_test_vectors)):
    #     s = bytes.from_hex(curve25519_test_vectors[i]['private'])
    #     p = bytes.from_hex(curve25519_test_vectors[i]['public'])
    #     expected = bytes.from_hex(curve25519_test_vectors[i]['result'])
    #     valid = curve25519_test_vectors[i]['valid']
    #     computed = scalarmult(s,p)
    #     if (computed == expected and valid):
    #         print("Curve25519 Test ",i+1," passed.")
    #     elif (not(computed == expected) and not valid):
    #         print("Curve25519 Test ",i+1," passed.")
    #     else:
    #         print("Curve25519 Test ",i+1," failed:")
    #         print("expected mac:",expected)
    #         print("computed mac:",computed)
    #         exit(1)

main(0)
<file_sep>/specs/poly1305.py
#!/usr/bin/python3
from lib.speclib import *
# Poly1305 processes the message in 16-byte blocks.
blocksize:int = 16
block_t = bytes_t(16)
key_t = bytes_t(32)
tag_t = bytes_t(16)
# The trailing block may be shorter than 16 bytes.
subblock_t = refine_t(vlbytes_t, lambda x: bytes.length(x) <= 16)

# Define prime field
p130m5 : nat_t = (2 ** 130) - 5
felem_t = natmod_t(p130m5)
@typechecked
def felem(n:nat_t) -> felem_t:
    """Inject a natural number into the field GF(2^130 - 5)."""
    return natmod(n,p130m5)
@typechecked
def encode(block: subblock_t) -> felem_t:
    """Encode a (possibly short) block as a field element.

    The block is read little-endian with a high bit 2^(8*len) added,
    making the encoding injective across block lengths.
    """
    b : block_t = array.create(16, uint8(0))
    b[0:bytes.length(block)] = block
    welem : felem_t = felem(bytes.to_nat_le(b))
    lelem : felem_t = felem(2 ** (8 * array.length(block)))
    return lelem + welem
@typechecked
def encode_r(r: block_t) -> felem_t:
    """Clamp r as required by Poly1305 and lift it into the field."""
    ruint : uint128_t = bytes.to_uint128_le(r)
    # Clear the bits RFC 7539 requires to be zero ("clamping").
    ruint = ruint & uint128(0x0ffffffc0ffffffc0ffffffc0fffffff)
    r_nat : nat_t = uintn.to_nat(ruint)
    return felem(r_nat)
# There are many ways of writing the polynomial evaluation function
# This version: use a loop to accumulate the result
@typechecked
def poly(text: vlbytes_t, r: felem_t) -> felem_t:
    """Evaluate the Poly1305 polynomial over `text` at point r (Horner)."""
    blocks : vlarray_t(block_t)
    last : subblock_t
    blocks, last = array.split_blocks(text, blocksize)
    acc : felem_t = felem(0)
    for i in range(array.length(blocks)):
        acc = (acc + encode(blocks[i])) * r
    # Fold in the trailing partial block, if any.
    if (array.length(last) > 0):
        acc = (acc + encode(last)) * r
    return acc
@typechecked
def poly1305_mac(text: vlbytes_t, k: key_t) -> tag_t:
    """One-time Poly1305 MAC with key k = r || s (16 bytes each)."""
    r : block_t = k[0:blocksize]
    s : block_t = k[blocksize:2*blocksize]
    relem : felem_t = encode_r(r)
    selem : uint128_t = bytes.to_uint128_le(s)
    a : felem_t = poly(text, relem)
    # The final addition of s wraps modulo 2^128 (uint128 arithmetic).
    n : uint128_t = uint128(natmod.to_nat(a)) + selem
    return bytes.from_uint128_le(n)
<file_sep>/lib/check.py
#!/usr/bin/env python3
from typed_ast.ast3 import *
from sys import argv
from os import environ
import os
# Directory containing the spec under check; presumably assigned from the
# CLI before import checking runs — TODO confirm (see import_is_hacspec).
file_dir = None
# Names of local variables that have been given a type so far.
variables = []
def fail(s):
    """Report a spec-compliance error and terminate with exit code 1."""
    message = "\n *** " + s + "\n"
    print(message)
    exit(1)
def read_function_signature(f):
    """Validate a FunctionDef's signature for hacspec compliance.

    Requires: a @typechecked decorator, a type annotation on every
    argument (except `self`), valid hacspec argument types, and a valid
    return type. Calls fail() (which exits) on any violation.
    """
    fun_name = f.name
    # Sentinel: -1 means "no return annotation seen".
    rt = -1
    arg_names = []
    arg_types = []
    if f.args.args is not None:
        for x in f.args.args:
            # NOTE(review): `x is Name` compares an instance to the class
            # object and is always False — this first branch is dead; the
            # isinstance branch below handles Name annotations.
            if x.annotation and x is Name:
                arg_types.append(x.annotation.id)
            elif x.annotation and isinstance(x.annotation, Name):
                arg_types.append(x.annotation.id)
            elif x.annotation and isinstance(x.annotation, Str):
                arg_types.append(x.annotation.s)
            elif x.annotation and isinstance(x.annotation, Call):
                arg_types.append(x.annotation.func.id)
            arg_names.append(x.arg)
    if f.returns is not None:
        # Flatten the return annotation into a printable/checkable string.
        if isinstance(f.returns, Name):
            rt = f.returns.id
        elif isinstance(f.returns, Subscript):
            tmp = f.returns.slice.value
            rt = f.returns.value.id
            if isinstance(tmp, Name):
                rt += "["+tmp.id+"]"
            elif isinstance(tmp, list):
                tmp2 = []
                for x in tmp:
                    tmp2.append(x.elts)
                rt +=str(tmp2)
            elif isinstance(tmp, Tuple):
                tmp2 = []
                for x in tmp.elts:
                    if isinstance(x, Name):
                        tmp2.append(x.id)
                    elif isinstance(x, NameConstant):
                        tmp2.append(x.value)
                    else:
                        fail("Couldn't parse function return values: \"" + fun_name+"\"")
                rt += str(tmp2)
            else:
                rt += str(tmp)
        elif isinstance(f.returns, NameConstant):
            rt = f.returns.value
        elif isinstance(f.returns, Call):
            rt = f.returns.func.id
    decorators = []
    for d in f.decorator_list:
        if isinstance(d, Name):
            decorators.append(d.id)
        elif isinstance(d, Call):
            decorators.append(d.func.id)
        else:
            fail("Function decorators must be names or calls not " + str(d))
    # Every function must have a typechecked decorator.
    typechecked = False
    for decorator in decorators:
        if decorator == "typechecked":
            typechecked = True
            break
    if not typechecked:
        fail("Every hacpsec function must have a @typechecked decorator: \"" + fun_name+"\"")
    try:
        arg_names.remove("self")
    except:
        pass
    # Every argument must be typed.
    if len(arg_types) != len(arg_names):
        fail("Every hacpsec function argument must be typed: \"" + fun_name+"\"")
    # Check arg_types.
    for arg_type in arg_types:
        if not is_valid_type(arg_type):
            fail("Invalid argument type in function signature " + fun_name + " - " + str(arg_type))
    # Every function must have a return type.
    # NOTE(review): `rt is -1` relies on CPython small-int caching;
    # it should be `rt == -1`.
    if rt is -1 or not is_valid_type(rt):
        fail("Every hacpsec function must have a return type: \"" + fun_name+"\"")
    return
def check_variable_is_typed(line):
    """Ensure every local assignment target has a declared type.

    A variable is considered typed if it was annotated (AnnAssign) or
    previously recorded in the module-level `variables` list. Speclib
    constructor calls and `_t`-suffixed type aliases are exempt.
    Calls fail() (which exits) on violations.
    """
    # Make sure that all local variables are typed by either:
    # i) annotated assign or
    # ii) typed variable declaration
    global variables
    # TODO: add all array and byte speclib functions
    speclibFunctions = ["array.copy", "refine", "bytes", "bytes.copy", "matrix.createi", "vector.createi"]
    if isinstance(line, Assign):
        if len(line.targets) > 0 and isinstance(line.targets[0], Tuple):
            # This is a tuple assignment. The variables have to be declared
            # before use.
            for target in line.targets[0].elts:
                if not isinstance(target, Name):
                    fail("Tuple values must be names.")
                # NOTE(review): `target.id is "_"` and the `is "refine"`
                # below compare strings by identity; they only "work" for
                # interned literals and should use `==`.
                if not target.id in variables and not target.id is "_":
                    if isinstance(line.value, Call) and \
                       isinstance(line.value.func, Name) and \
                       line.value.func.id is "refine":
                        return None
                    fail("Untyped variable used in tuple assignment \"" + str(target.id) + "\"")
        elif len(line.targets) > 1:
            fail("Only tuple assignment or single variable assignments are allowed")
        else:
            target = line.targets[0]
            if isinstance(target, Subscript):
                # Subscripts assign to arrays that are typed in speclib.
                return None
            if not isinstance(target, Name):
                fail("Variable assignment targets have to be variable names.")
            # Direct call to a speclib constructor (e.g. array(...)).
            if isinstance(line.value, Call) and \
               isinstance(line.value.func, Name) and \
               (line.value.func.id in ["array"] or line.value.func.id in speclibFunctions):
                # No type for arrays needed.
                variables.append(target.id)
                return None
            # Attribute-style speclib constructor (e.g. bytes.copy(...)).
            if isinstance(line.value, Call) and \
               isinstance(line.value.func, Attribute) and \
               isinstance(line.value.func.value, Name) and \
               line.value.func.value.id +"."+ line.value.func.attr in speclibFunctions:
                # No type for arrays needed.
                variables.append(target.id)
                return None
            if target.id.endswith("_t"):
                # This is a type not a variable. Ignore it.
                return None
            if not target.id in variables:
                fail("Variable assignment doesn't have a type \""+target.id+"\"")
    if isinstance(line, AnnAssign):
        if line.value is None:
            # This is a variable declaration.
            variables.append(line.target.id)
        else:
            if isinstance(line.annotation, Name):
                # We could check the type here of line.annotation.id.
                # But seeing n annotation here is enough for us atm.
                pass
            if not isinstance(line.target, Name):
                fail("Variable ann-assignment target must be a Name \""+str(line.target)+"\"")
            variables.append(line.target.id)
# def read_function_body(body):
# variables = []
# for line in body:
# read_line(line, variables)
def import_is_hacspec(filename):
    """Return True when `filename` may be imported from a hacspec.

    NOTE(review): the unconditional `return True` below short-circuits
    the local-file check; everything after it is dead code until the
    PYTHONPATH TODO is resolved.
    """
    if filename.endswith("speclib"):
        # speclib can always be used.
        return True
    # TODO: This currently doesn't work with PYTHONPATH set.
    return True
    if not file_dir:
        fail("No file_dir set :/ Something is wrong.")
    print(filename.split('.'))
    filename = os.path.join(*filename.split('.'))
    filename = os.path.join(file_dir, filename + ".py")
    try:
        with open(filename, 'r', encoding='utf-8') as py_file:
            return True
    except:
        fail("File is not a valid hacspec. Import \"" + filename + "\" is not a local spec.")
    return True
def is_valid_binop(op):
    """Every Python binary operator is legal in hacspec."""
    return True
def is_valid_compop(op):
    """All comparison operators except `in` are legal in hacspec.

    `in` is rejected because it has too many meanings in Python.
    """
    return not isinstance(op, In)
def is_expression(node):
    """Return True when the AST node is an expression hacspec permits.

    None counts as a valid (absent) expression. Recurses through binary,
    boolean, unary and comparison operators; leaves must be calls,
    subscripts, names, numbers, tuples, or True/False/None.
    """
    if node is None:
        return True
    if isinstance(node, Lambda):
        # TODO: do we want to check the lambda?
        return True
    if isinstance(node, BinOp) and is_valid_binop(node.op) and \
       is_expression(node.left) and is_expression(node.right):
        return True
    if isinstance(node, Compare) and is_expression(node.left):
        for op in node.ops:
            if not is_valid_compop(op):
                return False
        for comp in node.comparators:
            if not is_expression(comp):
                return False
        return True
    if isinstance(node, BoolOp):
        for value in node.values:
            if not is_expression(value):
                return False
        return True
    if isinstance(node, Attribute) and is_expression(node.value):
        return True
    if isinstance(node, UnaryOp) and is_expression(node.operand):
        return True
    if isinstance(node, NameConstant):
        # Only the literals True/False/None are allowed.
        if node.value is True or node.value is False or node.value is None:
            return True
        return False
    if not (isinstance(node, Call) or isinstance(node, Subscript) \
            or isinstance(node, Name) or isinstance(node, Num) or \
            isinstance(node, Tuple)):
        # Python lists are only allowed when passing them to hacspec arrays.
        return False
    return True
def is_statement(node):
    """Return True when the AST node (or list of nodes) is a statement
    hacspec permits: assignments with name/tuple/subscript targets,
    return, expression-calls, if, for, and break."""
    if isinstance(node, list):
        # Read all the lines.
        if len(node) < 1:
            return False
        for n in node:
            if not is_statement(n):
                return False
        return True
    if isinstance(node, Assign):
        if len(node.targets) < 1:
            return False
        for target in node.targets:
            if not isinstance(target, Name) \
               and not isinstance(target, Tuple) \
               and not isinstance(target, Subscript):
                return False
        return True
    if isinstance(node, AugAssign):
        if not isinstance(node.target, Name) \
           and not isinstance(node.target, Tuple) \
           and not isinstance(node.target, Subscript):
            return False
        return True
    if isinstance(node, AnnAssign):
        if not isinstance(node.target, Name) \
           and not isinstance(node.target, Tuple):
            return False
        return True
    if isinstance(node, Return):
        return True
    # Only call expressions are allowed as bare expression statements.
    if isinstance(node, Expr) and isinstance(node.value, Call):
        return True
    if isinstance(node, If):
        return True
    if isinstance(node, For):
        return True
    if isinstance(node, Break):
        return True
    return False
# Check annotation. Must be a type with _t at the end. Can we do better?
def is_valid_type(node):
    """Return True when `node` denotes a type hacspec accepts.

    Accepts: None (no annotation), names/strings ending in `_t`, the
    names FunctionType/int/bool, and Call nodes invoking the speclib
    type constructors vlarray_t/result_t/array_t/array.
    """
    if isinstance(node, Name):
        node = node.id
    if node is None:
        return True
    if isinstance(node, Call):
        fun_name = node.func.id
        if fun_name in ("vlarray_t", "result_t", "array_t", "array"):
            return True
    if not isinstance(node, str):
        return False
    # Bug fix: the original compared strings with `is`, which tests object
    # identity and only "works" for interned literals; use membership.
    return node.endswith("_t") or node in ("tuple_t", "FunctionType",
                                           "int", "bool", "refine_t",
                                           "vlarray_t")
def read(node) -> None:
    """Recursively walk an AST node and `fail()` on anything outside the
    hacspec subset of Python.

    Side effects: resets the module-global `variables` list on every
    FunctionDef, and calls the sibling helpers `fail`, `is_expression`,
    `is_statement`, `is_valid_type`, `is_valid_binop`, `import_is_hacspec`,
    `check_variable_is_typed` and `read_function_signature`.
    """
    if node is None:
        # This can happen when reading a None NameConstant.
        return
    if isinstance(node, Module):
        return read(node.body)
    if isinstance(node, ImportFrom):
        # Check that the imported file is a local hacspec or speclib.
        if not import_is_hacspec(node.module):
            # Fix: the original referenced an undefined name `f` here,
            # raising NameError instead of reporting the bad import.
            fail("Import " + str(node.module) +
                 " is not a local hacspec file or speclib.")
        return
    if isinstance(node, Tuple):
        for e in node.elts:
            read(e)
        return
    # Normal assignments.
    if isinstance(node, Assign):
        # Allowed targets are variables, tuples, array element update,
        # array slice update.
        read(node.targets)
        valid_left = False
        for t in node.targets:
            if isinstance(t, Name) or isinstance(t, Subscript) or isinstance(t, Tuple):
                valid_left = True
                break
            else:
                fail("Invalid assignment " + str(t))
        if not valid_left:
            fail("Invalid assign.")
        # The right side of the assignment can be any expression.
        read(node.value)
        if not is_expression(node.value):
            fail("Invalid assignment. Right side must be expression not " + str(node.value))
        if node.type_comment:
            fail("Type comments are not supported by hacspec.")
        # Check that types are named _t.
        # Types come from _t functions or refine.
        if isinstance(node.value, Call):
            if isinstance(node.value.func, Name):
                fun_name = node.value.func.id
                # Check speclib functions that make types.
                # Fix: replaced `is "literal"` identity comparisons with
                # a proper membership test.
                if fun_name in ("range_t", "array_t", "bytes_t", "refine_t"):
                    if len(node.targets) > 1:
                        fail("Custom type assignment must have single assignment target " + str(fun_name))
                    type_name = node.targets[0]
                    if not isinstance(type_name, Name) and not isinstance(type_name, Tuple):
                        fail("Custom type assignment has wrong assignment target " + str(type_name))
                    type_name_string = ""
                    if isinstance(type_name, Name):
                        type_name_string = type_name.id
                    else:
                        type_name_string = type_name.elts[0].id
                    if not type_name_string.endswith("_t"):
                        # Fix: `type_name.id` raised AttributeError when the
                        # target was a Tuple; use the string computed above.
                        fail("Custom type names must end on _t " + str(type_name_string))
        check_variable_is_typed(node)
        return
    if isinstance(node, AugAssign):
        # Allowed targets are variables and array element update.
        read(node.target)
        if isinstance(node.target, Name) or \
           (isinstance(node.target, Subscript) and isinstance(node.target.slice, Index)):
            pass
        else:
            fail("Invalid aug assignment " + str(node.target))
        read(node.op)
        if not is_valid_binop(node.op):
            fail("Invalid aug assignment " + str(node.target))
        read(node.value)
        if not is_expression(node.value):
            fail("Invalid aug assignment. Right side must be expression " + str(node.value))
        return
    if isinstance(node, AnnAssign):
        # Allowed targets are variables.
        read(node.target)
        if not isinstance(node.target, Name):
            fail("Invalid ann assignment (Name) " + str(node.target))
        read(node.annotation)
        if not is_valid_type(node.annotation):
            fail("Invalid ann assignment (annotation) " + str(node.target.id))
        read(node.value)
        if not is_expression(node.value):
            fail("Invalid ann assignment. Right side must be expression " + str(node.value))
        check_variable_is_typed(node)
        return
    # Lists are ok as long as they aren't assigned directly.
    if isinstance(node, List):
        for elt in node.elts:
            read(elt)
        return
    if isinstance(node, Attribute):
        # TODO: can we check something here?
        # This is for example array.length(); node.attr is a str.
        read(node.value)
        return
    if isinstance(node, BinOp):
        read(node.left)
        read(node.op)
        if not is_valid_binop(node.op):
            fail("Not a valid hacspec with " + str(node.op))
        read(node.right)
        return
    # Primitive leaves: nothing to validate.
    if isinstance(node, Num):
        return
    if isinstance(node, Name):
        return
    if isinstance(node, NameConstant):
        return read(node.value)
    # Expression contexts carry no information we care about.
    if isinstance(node, (Load, Store, AugStore, AugLoad)):
        return
    if isinstance(node, For):
        read(node.target)
        if node.body:
            read(node.body)
            if not is_statement(node.body):
                fail("For loop body is not a statement "+str(node.body))
        if node.orelse:
            read(node.orelse)
        # Only `range(max)` and `array.zip(...)` may drive a for loop.
        if node.iter and isinstance(node.iter, Call):
            if not (isinstance(node.iter.func, Attribute) and \
                    node.iter.func.value.id+"."+node.iter.func.attr == "array.zip"):
                if not (isinstance(node.iter.func, Name) and node.iter.func.id == "range"):
                    fail("For loops must use range(max) as iterator "+str(node.iter))
                if len(node.iter.args) != 1:
                    fail("For loops must use range(max) as iterator "+str(node.iter))
            read(node.iter)
        else:
            fail("For loops must use range(max) as iterator "+str(node.iter))
        return
    if isinstance(node, Break):
        return
    # Operator tokens: all allowed operators validate trivially.
    if isinstance(node, (Pow, Sub, Mult, Add, Mod, FloorDiv, Div, BitXor,
                         RShift, BitAnd, BitOr, UnaryOp, Or, And, LShift)):
        return
    if isinstance(node, Compare):
        read(node.left)
        for c in node.comparators:
            read(c)
        return
    if isinstance(node, BoolOp):
        read(node.op)
        for ex in node.values:
            read(ex)
        return
    if isinstance(node, Subscript):
        # TODO: is there anything we can check for subscript?
        read(node.value)
        return
    # Functions.
    if isinstance(node, FunctionDef):
        read_function_signature(node)
        # Reset the per-function variable-typing state.
        global variables
        variables = []
        read(node.body)
        return
    if isinstance(node, Return):
        read(node.value)
        if not is_expression(node.value):
            fail("Invalid return statement. " + str(node.value))
        return
    if isinstance(node, Call):
        read(node.func)
        if node.args:
            read(node.args)
        if len(node.keywords) > 0:
            fail("Keywords aren't allowed in hacspec function calls.")
        return
    if isinstance(node, bool):
        return
    if isinstance(node, Expr):
        # A simple function call used as a statement.
        read(node.value)
        if not is_expression(node.value):
            fail("Invalid expression " + str(node))
        return
    if isinstance(node, If):
        read(node.test)
        if not is_expression(node.test):
            fail("Invalid if statement (test). " + str(node.test))
        read(node.body)
        if not is_statement(node.body):
            fail("Invalid if statement (body). " + str(node.body))
        read(node.orelse)
        if node.orelse and not is_statement(node.orelse):
            fail("Invalid if statement (orelse). " + str(node.orelse))
        return
    if isinstance(node, While):
        # hacspec forbids while loops entirely (use bounded for/range).
        fail("While statements are not allowed in hacspec.")
    if isinstance(node, Str):
        return
    if isinstance(node, arguments):
        for a in node.args:
            read(a)
        if len(node.defaults) != 0:
            fail("Default arguments are not supported in hacspec.")
        if len(node.kwonlyargs) != 0:
            fail("keyword only args are not allowed in hacspec.")
        if node.vararg is not None:
            fail("varargs are not allowed in hacspec")
        if len(node.kw_defaults) != 0:
            fail("keyword defaults are not allowed in hacspec")
        if node.kwarg is not None:
            fail("keyword args are not allowed in hacspec")
        return
    if isinstance(node, arg):
        return
    if isinstance(node, Lambda):
        # TODO: check lambda for contract and refine.
        read(node.args)
        read(node.body)
        return
    # List of nodes, read all of them.
    if isinstance(node, list):
        for x in node:
            read(x)
        return
    fail(str(type(node)) + " is not allowed in hacspecs.")
def main():
    """Entry point: parse the hacspec file named on the command line and
    validate it with `read`, printing a success message if it passes."""
    if len(argv) != 2:
        fail("Usage: hacspec-check <hacspec>")
    path = argv[1]
    with open(path, 'r', encoding='utf-8') as py_file:
        global file_dir
        # Remember the spec's directory so import checks can resolve
        # sibling hacspec files.
        file_dir = os.path.dirname(os.path.abspath(path))
        code = py_file.read()
    # Parse outside the `with` block; renamed from `ast` to avoid shadowing
    # the ast module, and dropped the unused `parsed` binding (read returns
    # None and reports errors via fail()).
    tree = parse(source=code, filename=path)
    read(tree)
    print(path + " is a valid hacspec.")

if __name__ == "__main__":
    main()
<file_sep>/specs/curve25519.py
#!/usr/bin/python3
from lib.speclib import *
# Define prime field
p25519 : nat_t = (2 ** 255) - 19
felem_t = natmod_t(p25519)
@typechecked
def to_felem(x: nat_t) -> felem_t:
    # Embed a natural number into the field GF(2^255 - 19).
    return natmod(x,p25519)
zero : felem_t = to_felem(0)
one : felem_t = to_felem(1)
@typechecked
def finv(x: felem_t) -> felem_t:
    # Field inverse via Fermat's little theorem: x^(p-2) mod p.
    return x ** (p25519 - 2)
point_t = tuple_t(felem_t, felem_t) # projective coordinates
scalar_t = uintn_t(256)
@typechecked
def to_scalar(i:nat_t) -> scalar_t:
    # Truncate/embed a natural number into a 256-bit scalar.
    return uintn(i,256)
serialized_point_t = bytes_t(32)
serialized_scalar_t = bytes_t(32)
g25519: point_t = (to_felem(9), to_felem(1))
@typechecked
def point(a: nat_t, b: nat_t) -> point_t:
    # Build a projective point from two naturals, reducing both mod p.
    return to_felem(a), to_felem(b)
@typechecked
def decodeScalar(s: serialized_scalar_t) -> scalar_t:
    # RFC 7748 clamping: clear the three low bits (cofactor), clear the top
    # bit, and set bit 254, then read the bytes as a little-endian scalar.
    k : serialized_scalar_t = bytes.copy(s)
    k[0] &= uint8(248)
    k[31] &= uint8(127)
    k[31] |= uint8(64)
    return to_scalar(bytes.to_nat_le(k))
@typechecked
def decodePoint(u: serialized_point_t) -> point_t:
    # Read a little-endian integer, mask to 255 bits, reduce mod p, and
    # lift to projective coordinates with z = 1.
    b : nat_t = bytes.to_nat_le(u)
    return point((b % (2 ** 255)) % p25519, 1)
@typechecked
def encodePoint(p: point_t) -> serialized_point_t:
    # Convert the projective pair to the affine u-coordinate x/z (the
    # second component acts as the z-coordinate here) and serialize it as
    # 32 little-endian bytes.
    x:felem_t
    y:felem_t
    (x,y) = p
    b : int = natmod.to_int(x * finv(y))
    return bytes.from_nat_le(b, 32)
@typechecked
def point_add_and_double(q: point_t, nq: point_t, nqp1: point_t) -> tuple_t(point_t, point_t):
    # One Montgomery-ladder step (RFC 7748, Section 5): given the base
    # point q and the pair (nq, nqp1) = ([k]P, [k+1]P), return
    # ([2k]P, [2k+1]P) using combined differential addition and doubling.
    x_1 : felem_t
    z_1 : felem_t
    x_2 : felem_t
    z_2 : felem_t
    x_3 : felem_t
    z_3 : felem_t
    (x_1, z_1) = q
    (x_2, z_2) = nq
    (x_3, z_3) = nqp1
    a : felem_t = x_2 + z_2
    aa : felem_t = a ** 2
    b : felem_t = x_2 - z_2
    bb : felem_t = b * b
    e : felem_t = aa - bb
    c : felem_t = x_3 + z_3
    d : felem_t = x_3 - z_3
    da : felem_t = d * a
    cb : felem_t = c * b
    # Differential addition of nq and nqp1 (difference is the base point).
    x_3 = (da + cb) ** 2
    z_3 = x_1 * ((da - cb) ** 2)
    # Doubling of nq; 121665 = (A - 2) / 4 for curve25519's A = 486662.
    x_2 = aa * bb
    z_2 = e * (aa + (to_felem(121665) * e))
    return ((x_2, z_2), (x_3, z_3))
@typechecked
def montgomery_ladder(k: scalar_t, init: point_t) -> point_t:
    # Scan the scalar from bit 255 down to bit 0, maintaining the ladder
    # invariant that p1 and p0 differ by the base point `init`.
    p0: point_t = point(1, 0)
    p1: point_t = init
    for i in range(256):
        if k[255-i] == bit(1):
            (p1, p0) = point_add_and_double(init, p1, p0)
        else:
            (p0, p1) = point_add_and_double(init, p0, p1)
    return p0
@typechecked
def scalarmult(s: serialized_scalar_t, p: serialized_point_t) -> serialized_point_t:
    # X25519: decode (and clamp) the scalar, decode the point, run the
    # Montgomery ladder, and serialize the result.
    s_:scalar_t = decodeScalar(s)
    p_:point_t = decodePoint(p)
    r :point_t = montgomery_ladder(s_, p_)
    return encodePoint(r)
# ECDH API: we assume a key generation function that generates 32 random bytes for serialized_scalar_t
@typechecked
def is_on_curve(s: serialized_point_t) -> bool:
    # Reject a fixed blacklist of problematic encodings (zero, small-order
    # points and their non-canonical representatives up to 2p).
    # NOTE(review): despite the name, this is a blacklist check, not a
    # curve-membership test - consider renaming.
    n : nat_t = bytes.to_nat_le(s)
    disallowed : array_t(nat_t,12) = array([0, 1, 325606250916557431795983626356110631294008115727848805560023387167927233504, 39382357235489614581723060781553021112529911719440698176882885853963445705823, 2**255 - 19 - 1, 2**255 - 19, 2**255 - 19 + 1, 2**255 - 19 + 325606250916557431795983626356110631294008115727848805560023387167927233504, 2**255 - 19 + 39382357235489614581723060781553021112529911719440698176882885853963445705823, 2*(2**255 - 19) - 1, 2*(2**255 - 19), 2*(2**255 - 19) + 1])
    res : bool = True
    for i in range(array.length(disallowed)):
        if n == disallowed[i]:
            res = False
    return res
@typechecked
def private_to_public(s: serialized_scalar_t) -> serialized_point_t:
    # Public key = [s] * base point (u = 9).
    return scalarmult(s, encodePoint(g25519))
@typechecked
def ecdh_shared_secret(priv: serialized_scalar_t, pub: serialized_point_t) -> result_t(serialized_point_t):
    # Reject blacklisted public keys before computing the DH secret; on
    # success return the shared point, otherwise an error result.
    res : result_t(serialized_point_t)
    if is_on_curve(pub):
        res = result.retval(scalarmult(priv, pub))
    else:
        res = result.error("public key is not on curve")
    return res
<file_sep>/build/hacspec/tests/check_test_run.py
#!/usr/bin/python3
from hacspec.speclib import *
from check_test import *
from check_test_fail import *
from sys import exit
# Run spec checker tests
def main():
    """Run the spec-checker self-tests: the passing cases first, then the
    cases that are expected to be rejected."""
    for check in (test_lists, fail_lists):
        check()

if __name__ == "__main__":
    main()
<file_sep>/specs/p256.py
from lib.speclib import *
prime : int = 2**256 - 2**224 + 2**192 + 2**96 - 1
felem_t = natmod_t(prime)
@typechecked
def to_felem(x: nat_t) -> felem_t:
    # Embed a natural number into the P-256 base field.
    return natmod(x, prime)
@typechecked
def finv(x: felem_t) -> felem_t:
    # Field inverse via Fermat's little theorem: x^(p-2) mod p.
    return x ** (prime - 2)
jacobian_t = tuple_t(felem_t, felem_t, felem_t)
affine_t = tuple_t(felem_t, felem_t)
@typechecked
def jacobian(a: int, b: int, c: int) -> jacobian_t:
    # Build a Jacobian-coordinate point (X, Y, Z) from plain integers.
    return to_felem(nat(a)), to_felem(nat(b)), to_felem(nat(c))
scalar_t = uintn_t(256)
@typechecked
def to_scalar(n:int) -> scalar_t:
    # Truncate/embed an integer into a 256-bit scalar.
    return uintn(n, 256)
@typechecked
def toAffine(p: jacobian_t) -> affine_t:
    # Convert Jacobian (X, Y, Z) to affine (X/Z^2, Y/Z^3).
    x : felem_t
    y : felem_t
    z : felem_t
    (x, y, z) = p
    z2 : felem_t = z ** 2
    z2i : felem_t = finv(z2)
    z3 : felem_t = z * z2
    z3i : felem_t = finv(z3)
    x = x * z2i
    y = y * z3i
    return x, y
@typechecked
def pointDouble(p: jacobian_t) -> jacobian_t:
    # Jacobian point doubling using delta/gamma/beta/alpha intermediates
    # (standard a = -3 doubling formulas for short Weierstrass curves).
    x1 : felem_t
    y1 : felem_t
    z1 : felem_t
    (x1, y1, z1) = p
    delta : felem_t = z1 ** 2
    gamma : felem_t = y1 ** 2
    beta : felem_t = x1 * gamma
    alpha_1 : felem_t = x1 - delta
    alpha_2 : felem_t = x1 + delta
    alpha : felem_t = to_felem(3) * (alpha_1 * alpha_2)
    x3 : felem_t = (alpha ** 2) - (to_felem(8) * beta)
    z3_ : felem_t = (y1 + z1) ** 2
    z3 : felem_t = z3_ - (gamma + delta)
    y3_1 : felem_t = (to_felem(4) * beta) - x3
    y3_2 : felem_t = to_felem(8) * (gamma ** 2)
    y3 : felem_t = (alpha * y3_1) - y3_2
    return x3, y3, z3
@typechecked
def isPointAtInfinity(p: jacobian_t) -> bool:
    # In Jacobian coordinates the point at infinity has Z = 0.
    z : felem_t
    (_, _, z) = p
    return (z == to_felem(0))
@typechecked
def pointAdd(p: jacobian_t, q: jacobian_t) -> jacobian_t:
    # Jacobian point addition. Infinity is the identity; equal inputs are
    # dispatched to pointDouble; adding a point to its negation yields the
    # point at infinity (0, 1, 0).
    if isPointAtInfinity(p):
        return q
    if isPointAtInfinity(q):
        return p
    x1 : felem_t
    y1 : felem_t
    z1 : felem_t
    x2 : felem_t
    y2 : felem_t
    z2 : felem_t
    (x1, y1, z1) = p
    (x2, y2, z2) = q
    z1z1 : felem_t = z1 ** 2
    z2z2 : felem_t = z2 ** 2
    u1 : felem_t = x1 * z2z2
    u2 : felem_t = x2 * z1z1
    s1 : felem_t = (y1 * z2) * z2z2
    s2 : felem_t = (y2 * z1) * z1z1
    # u1 == u2 means equal x-coordinates: either the same point (double)
    # or opposite points (infinity).
    if u1 == u2:
        if s1 == s2:
            return pointDouble(p)
        else:
            return jacobian(0, 1, 0)
    h : felem_t = u2 - u1
    i : felem_t = (to_felem(2) * h) ** 2
    j : felem_t = h * i
    r : felem_t = to_felem(2) * (s2 - s1)
    v : felem_t = u1 * i
    x3_1 : felem_t = to_felem(2) * v
    x3_2 : felem_t = (r ** 2) - j
    x3 : felem_t = x3_2 - x3_1
    y3_1 : felem_t = (to_felem(2) * s1) * j
    y3_2 : felem_t = r * (v - x3)
    y3 : felem_t = y3_2 - y3_1
    z3_ : felem_t = (z1 + z2) ** 2
    z3 : felem_t = (z3_ - (z1z1 + z2z2)) * h
    return x3, y3, z3
@typechecked
def montgomery_ladder(k: scalar_t, init: jacobian_t) -> jacobian_t:
    # Ladder over bits 255..0: swap so the doubled point is always p0,
    # then swap the results back according to the key bit.
    p0 : jacobian_t = jacobian(0, 1, 0)
    p1 : jacobian_t = init
    for i in range(256):
        if k[255-i] == bit(1):
            (p0, p1) = (p1, p0)
        xx : jacobian_t = pointDouble(p0)
        xp1 : jacobian_t = pointAdd(p0, p1)
        if k[255-i] == bit(1):
            (p0, p1) = (xp1, xx)
        else:
            (p0, p1) = (xx, xp1)
    return p0
basePoint : jacobian_t = jacobian(0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296,
0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5,
1)
@typechecked
def point_mul(k: scalar_t) -> affine_t:
    # Fixed-base scalar multiplication: [k] * G, returned in affine form.
    jac : jacobian_t = montgomery_ladder(k, basePoint)
    return toAffine(jac)
<file_sep>/doc/website/config.toml
baseURL = "https://hacs-workshop.github.io/hacspec"
languageCode = "en-us"
title = "hacspec"
theme = "ananke"
enableRobotsTXT = true
SectionPagesMenu = "main"
[params]
description = "the new specification language for crypto primitives"
github = "https://github.com/HACS-workshop/hacspec/"
# choose a background color from any on this page: http://tachyons.io/docs/themes/skins/ and preface it with "bg-"
background_color_class = "bg-black"
featured_image = "/images/gohugo-default-sample-hero-image.jpg"
<file_sep>/tests/chacha20_test.py
from specs.chacha20 import *
from sys import exit
import json
# mypy only checks functions that have types. So add an argument :)
def main(x: int) -> None:
file = open('tests/test_vectors/chacha20_test_vectors.json')
chacha20_test_vectors = json.load(file)
# Quarter round test vectors from RFC 7539
a = uint32(0x11111111)
b = uint32(0x01020304)
c = uint32(0x9b8d6f43)
d = uint32(0x01234567)
const_state_t = array_t(uint32_t, 4)
my_state = const_state_t([a, b, c, d])
my_state = quarter_round(0, 1, 2, 3, my_state)
exp_state = const_state_t([uint32(0xea2a92f4), uint32(0xcb1cf8ce), uint32(0x4581472e), uint32(0x5881c4bb)])
if (my_state == exp_state):
print("Quarter round test vector passed.")
else:
print("Quarter round test vector failed!")
print("computed qround = ",str(my_state[0:4]))
print("expected qround = ",str(exp_state))
# Test vector from RFX 7539 section 2.3.2
key = bytes.from_ints([<KEY> 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f])
nonce = bytes.from_ints([0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a, 0x00, 0x00, 0x00, 0x00])
counter = uint32(1)
result = chacha20(key, counter, nonce)
expected_state = state_t([
uint32(0xe4e7f110), uint32(0x15593bd1), uint32(0x1fdd0f50), uint32(0xc47120a3),
uint32(0xc7f4d1c7), uint32(0x0368c033), uint32(0x9aaa2204), uint32(0x4e6cd4c3),
uint32(0x466482d2), uint32(0x09aa9f07), uint32(0x05d7c214), uint32(0xa2028bd9),
uint32(0xd19c12b5), uint32(0xb94e16de), uint32(0xe883d0cb), uint32(0x4e3c50a2)
])
if (result == expected_state):
print("Chacha20 core test vector passed.")
else:
print("Chacha20 core test vector failed!")
print("expected state:",expected_state)
print("computed state:",result)
plaintext = bytes.from_ints([0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61,
0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c,
0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20,
0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73,
0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39,
0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63,
0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66,
0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f,
0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20,
0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20,
0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75,
0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73,
0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f,
0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69,
0x74, 0x2e])
key = bytes.from_ints([0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11,
0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a,
0x1b, 0x1c, 0x1d, 0x1e, 0x1f])
nonce = bytes.from_ints([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4a,
0x00, 0x00, 0x00, 0x00])
expected = bytes.from_ints([0x6e, 0x2e, 0x35, 0x9a, 0x25, 0x68, 0xf9, 0x80,
0x41, 0xba, 0x07, 0x28, 0xdd, 0x0d, 0x69, 0x81,
0xe9, 0x7e, 0x7a, 0xec, 0x1d, 0x43, 0x60, 0xc2,
0x0a, 0x27, 0xaf, 0xcc, 0xfd, 0x9f, 0xae, 0x0b,
0xf9, 0x1b, 0x65, 0xc5, 0x52, 0x47, 0x33, 0xab,
0x8f, 0x59, 0x3d, 0xab, 0xcd, 0x62, 0xb3, 0x57,
0x16, 0x39, 0xd6, 0x24, 0xe6, 0x51, 0x52, 0xab,
0x8f, 0x53, 0x0c, 0x35, 0x9f, 0x08, 0x61, 0xd8,
0x07, 0xca, 0x0d, 0xbf, 0x50, 0x0d, 0x6a, 0x61,
0x56, 0xa3, 0x8e, 0x08, 0x8a, 0x22, 0xb6, 0x5e,
0x52, 0xbc, 0x51, 0x4d, 0x16, 0xcc, 0xf8, 0x06,
0x81, 0x8c, 0xe9, 0x1a, 0xb7, 0x79, 0x37, 0x36,
0x5a, 0xf9, 0x0b, 0xbf, 0x74, 0xa3, 0x5b, 0xe6,
0xb4, 0x0b, 0x8e, 0xed, 0xf2, 0x78, 0x5e, 0x42,
0x87, 0x4d])
computed = chacha20_encrypt(key,uint32(1),nonce,plaintext)
if (computed == expected):
print("Chacha20 Test 0 passed.")
else:
print("Chacha20 Test 0 failed:")
print("expected ciphertext:",expected)
print("computed ciphertext:",computed)
exit(1)
for i in range(len(chacha20_test_vectors)):
msg = bytes.from_hex(chacha20_test_vectors[i]['input'])
k = bytes.from_hex(chacha20_test_vectors[i]['key'])
n = bytes.from_hex(chacha20_test_vectors[i]['nonce'])
ctr = chacha20_test_vectors[i]['counter']
expected = bytes.from_hex(chacha20_test_vectors[i]['output'])
computed = chacha20_encrypt(key,uint32(ctr),n,msg)
if (computed == expected):
print("Chacha20 Test ",i+1," passed.")
else:
print("Chacha20 Test ",i+1," failed:")
print("expected ciphertext:",expected)
print("computed ciphertext:",computed)
exit(1)
if __name__ == "__main__":
main(0)
<file_sep>/specs/xmss.py
from lib.speclib import *
from specs.wots import *
from specs.sha2 import sha256
h : int = 2 # height -> number of signatures
n_keys : int = 2**h
key2_t = bytes_t(2*n)
key3_t = bytes_t(3*n)
wots_keys_t = array_t(sk_t, n_keys)
# Private key:
# 2^h WOTS+ private keys,
# idx (next WOTS+ sk),
# SK_PRF (n-bytes),
# root (n-bytes),
# public seed (n-bytes)
SK_t = tuple_t(wots_keys_t, nat_t, key_t, key_t, seed_t)
# Public key:
# algorithm oid (uint32_t),
# root node (n-bytes),
# seed (n-bytes)
PK_t = tuple_t(uint32_t, key_t, seed_t)
@typechecked
def get_seed(sk: SK_t) -> seed_t:
    # Destructure the XMSS private key and return its public seed component.
    sks : wots_keys_t
    idx : nat_t
    prf_sk : key_t
    root : key_t
    public_seed : seed_t
    sks, idx, prf_sk, root, public_seed = sk
    return public_seed
# H: SHA2-256(toByte(1, 32) || KEY || M),
# H_msg: SHA2-256(toByte(2, 32) || KEY || M),
@typechecked
def H_msg(key: key_t, m: vlbytes_t) -> digest_t:
    # Keyed message hash: SHA2-256(toByte(2, n) || KEY || M), see the
    # comment above. NOTE(review): `hash` must be a 3-argument spec helper
    # (wots/speclib), not Python's builtin `hash` - confirm its origin.
    return hash(bytes.from_nat_be(nat(2), nat(n)), key, m)
@typechecked
def H(key: key_t, m: key2_t) -> digest_t:
    # Keyed hash: SHA2-256(toByte(1, n) || KEY || M), see the comment above.
    # NOTE(review): same `hash` helper assumption as H_msg - confirm.
    return hash(bytes.from_nat_be(nat(1), nat(n)), key, m)
@typechecked
def get_wots_sk(k: SK_t, i: nat_t) -> sk_t:
    # Return the i-th WOTS+ private key stored in the XMSS private key.
    sks : wots_keys_t
    idx : nat_t
    prf_sk : key_t
    root : key_t
    public_seed : key_t
    sks, idx, prf_sk, root, public_seed = k
    return sks[i]
# Algorithm 7: RAND_HASH
# Input: n-byte value LEFT, n-byte value RIGHT, seed SEED,
# address ADRS
# Output: n-byte randomized hash
#
# ADRS.setKeyAndMask(0);
# KEY = PRF(SEED, ADRS);
# ADRS.setKeyAndMask(1);
# BM_0 = PRF(SEED, ADRS);
# ADRS.setKeyAndMask(2);
# BM_1 = PRF(SEED, ADRS);
# return H(KEY, (LEFT XOR BM_0) || (RIGHT XOR BM_1));
@typechecked
def rand_hash(left: key_t, right: key_t, seed: seed_t, adr: address_t) -> digest_t:
    # Randomized tree hash (RFC 8391, Algorithm 7): derive a key and two
    # bitmasks via PRF, XOR them onto the children, then hash with H.
    adr : address_t = set_key_and_mask(adr, uint32(0))
    key : digest_t = PRF(seed, adr)
    adr : address_t = set_key_and_mask(adr, uint32(1))
    bm_0 : digest_t = PRF(seed, adr)
    adr : address_t = set_key_and_mask(adr, uint32(2))
    bm_1 : digest_t = PRF(seed, adr)
    left_bm_o : digest_t = array.create(n, uint8(0))
    right_bm_1 : digest_t = array.create(n, uint8(0))
    for i in range(0,n):
        left_bm_o[i] = left[i] ^ bm_0[i]
        right_bm_1[i] = right[i] ^ bm_1[i]
    # 2n-byte message (LEFT ^ BM_0) || (RIGHT ^ BM_1).
    m : key2_t = bytes.concat(left_bm_o, right_bm_1)
    r : digest_t = H(key, m)
    return r
# Algorithm 8: ltree
# Input: WOTS+ public key pk, address ADRS, seed SEED
# Output: n-byte compressed public key value pk[0]
#
# unsigned int len' = len;
# ADRS.setTreeHeight(0);
# while ( len' > 1 ) {
# for ( i = 0; i < floor(len' / 2); i++ ) {
# ADRS.setTreeIndex(i);
# pk[i] = RAND_HASH(pk[2i], pk[2i + 1], SEED, ADRS);
# }
# if ( len' % 2 == 1 ) {
# pk[floor(len' / 2)] = pk[len' - 1];
# }
# len' = ceil(len' / 2);
# ADRS.setTreeHeight(ADRS.getTreeHeight() + 1);
# }
# return pk[0];
@typechecked
def ltree(pk: pk_t, adr: address_t, seed: seed_t) -> key_t:
    """Compress a WOTS+ public key into one n-byte value (RFC 8391, Alg. 8)."""
    l : int = uintn.to_int(length)
    l_half : int = l//2
    adr : address_t = set_tree_height(adr, uint32(0))
    pk_i : vlbytes_t
    # Bounded emulation of the spec's `while (len' > 1)`; hacspec forbids
    # while loops, and the tree halves each round so l iterations suffice.
    for _ in range(0,l):
        # Fix: floor(len'/2) must track the shrinking len'; the original
        # computed it once from the initial length.
        l_half = l//2
        for i in range(0,l_half):
            adr = set_tree_index(adr, uint32(i))
            pk[i] = rand_hash(pk[2*i], pk[2*i+1], seed, adr)
        if l % 2 == 1:
            pk[l_half] = pk[l-1]
        l = speclib.ceil(l/2)  # NOTE(review): `speclib` may not be bound under `from lib.speclib import *` - verify.
        # Fix: setTreeHeight takes the address as first argument
        # (spec: ADRS.setTreeHeight(ADRS.getTreeHeight() + 1)).
        adr = set_tree_height(adr, get_tree_height(adr) + 1)
        if l <= 1:
            break
    return pk[0]
# Algorithm 9: treeHash
# Input: XMSS private key SK, start index s, target node height t,
# address ADRS
# Output: n-byte root node - top node on Stack
#
# if( s % (1 << t) != 0 ) return -1;
# for ( i = 0; i < 2^t; i++ ) {
# SEED = getSEED(SK);
# ADRS.setType(0); // Type = OTS hash address
# ADRS.setOTSAddress(s + i);
# pk = WOTS_genPK (getWOTS_SK(SK, s + i), SEED, ADRS);
# ADRS.setType(1); // Type = L-tree address
# ADRS.setLTreeAddress(s + i);
# node = ltree(pk, SEED, ADRS);
# ADRS.setType(2); // Type = hash tree address
# ADRS.setTreeHeight(0);
# ADRS.setTreeIndex(i + s);
# while ( Top node on Stack has same height t' as node ) {
# ADRS.setTreeIndex((ADRS.getTreeIndex() - 1) / 2);
# node = RAND_HASH(Stack.pop(), node, SEED, ADRS);
# ADRS.setTreeHeight(ADRS.getTreeHeight() + 1);
# }
# Stack.push(node);
# }
# return Stack.pop();
@typechecked
def tree_hash(sk: SK_t, s: uint32_t, t: uint32_t, adr: address_t) -> key_t:
    """Root of the height-t subtree whose leftmost leaf is s (RFC 8391, Alg. 9)."""
    x = uint32.to_int(s) % (1 << uintn.to_int(t))
    if x != 0:
        # TODO: handle this in the caller (spec returns -1 here).
        return key_t.create(n, uint8(0))
    offset: int = 0
    stack: array_t = array.create(2**uint32.to_int(t), array.create(n, uint8(0)))
    for i in range(0, 2**uint32.to_int(t)):
        seed: seed_t = get_seed(sk)  # FIXME
        adr: address_t = set_type(adr, uint32(0))
        a: uint32_t = s + uint32(i)
        adr = set_ots_address(adr, a)
        pk: pk_t
        pk, _ = key_gen_pk(adr, seed, get_wots_sk(sk, uint32.to_int(a)))
        adr = set_type(adr, uint32(1))
        adr = set_ltree_address(adr, a)
        # Fix: argument order must match ltree(pk, adr, seed); the original
        # passed (pk, seed, adr), swapping the address and the seed.
        node: key_t = ltree(pk, adr, seed)
        adr = set_type(adr, uint32(2))
        adr = set_tree_height(adr, uint32(0))
        adr = set_tree_index(adr, a)
        if offset > 1:
            for _ in range(0,t):  # The stack has at most t-1 elements.
                # Fix: the spec computes (getTreeIndex() - 1) / 2; without
                # parentheses `1 // 2` is 0 and the index never moved.
                adr = set_tree_index(adr, uint32((get_tree_index(adr) - 1) // 2))
                node = rand_hash(stack[offset-1], node, seed, adr)
                # NOTE(review): the spec pops the consumed node (offset -= 1)
                # and only merges while the top node has equal height; this
                # bounded loop does neither - confirm against a reference.
                adr = set_tree_height(adr, get_tree_height(adr) + 1)
        stack[offset] = node
        offset += 1
    # Fix: Algorithm 9 ends with `return Stack.pop()`; the original fell off
    # the end and returned None despite the declared key_t return type.
    return stack[offset - 1]
# Algorithm 10: XMSS_keyGen - Generate an XMSS key pair
# Input: No input
# Output: XMSS private key SK, XMSS public key PK
#
# // Example initialization for SK-specific contents
# idx = 0;
# for ( i = 0; i < 2^h; i++ ) {
# wots_sk[i] = WOTS_genSK();
# }
# initialize SK_PRF with a uniformly random n-byte string;
# setSK_PRF(SK, SK_PRF);
#
# // Initialization for common contents
# initialize SEED with a uniformly random n-byte string;
# setSEED(SK, SEED);
# setWOTS_SK(SK, wots_sk));
# ADRS = toByte(0, 32);
# root = treeHash(SK, 0, h, ADRS);
#
# SK = idx || wots_sk || SK_PRF || root || SEED;
# PK = OID || root || SEED;
# return (SK || PK);
@typechecked
def key_gen_xmss() -> tuple_t(SK_t, PK_t):
    # RFC 8391, Algorithm 10: generate 2^h WOTS+ keys, a PRF key and a
    # public seed, compute the tree root, and assemble (SK, PK).
    zero_key: sk_t = sk_t.create(uintn.to_int(length), key_t.create(n, uint8(0)))
    wots_keys: wots_keys_t = wots_keys_t.create(n_keys, zero_key)
    for i in range(0, n_keys):
        wots_sk : sk_t = key_gen_sk()
        wots_keys[i] = wots_sk
    idx: nat_t = 0
    SK_PRF: key_t = bytes.create_random_bytes(n)
    seed: seed_t = bytes.create_random_bytes(n)
    adr: address_t = array.create(8, uint32(0))
    # Build a temporary SK with a dummy root so tree_hash can read the seed,
    # then store the real root in the final private key.
    dummy_root: key_t = array.create(n, uint8(0))
    xmss_sk_tmp: SK_t = (wots_keys, idx, SK_PRF, dummy_root, seed)
    root : key_t = tree_hash(xmss_sk_tmp, uint32(0), uint32(h), adr)
    xmss_sk: SK_t = (wots_keys, idx, SK_PRF, root, seed)
    xmss_pk: PK_t = (uint32(0), root, seed)
    return xmss_sk, xmss_pk
<file_sep>/docker/travis-compiler/Dockerfile
FROM ubuntu:18.04
MAINTAINER <NAME> <<EMAIL>>
# Pinned version of HACL* (F* and KreMLin are pinned as submodules)
ENV haclrepo https://github.com/franziskuskiefer/hacl-star.git
# Define versions of dependencies
ENV opamv 4.05.0
ENV haclversion aa1d94cf2b2fb852cefaadc1b0a5b07f8fe80360
# Create user.
RUN useradd -ms /bin/bash worker
WORKDIR /home/worker
# Install required packages and set versions
# Build F*, HACL*. Install a few more dependencies.
ENV OPAMYES true
ENV PATH "/home/worker/hacl-star/dependencies/z3/bin:/home/worker/hacl-star/dependencies/FStar/bin/:$PATH"
ADD setup.sh /tmp/setup.sh
ADD setup-root.sh /tmp/setup-root.sh
RUN bash /tmp/setup-root.sh
USER worker
RUN bash /tmp/setup.sh
ENV HACL_HOME "/home/worker/hacl-star/"
ENV FSTAR_HOME "/home/worker/hacl-star/dependencies/FStar"
<file_sep>/archive/spec-checker/haskell/examples/salsa20.py
def rotate(x, n):
    """Left-rotate the 32-bit word ``x`` by ``n`` bits."""
    word = x & 0xffffffff
    high = (word << n) & 0xffffffff
    low = word >> (32 - n)
    return high | low
def step(s, i, j, k, r):
    """One ARX step, in place: s[i] ^= (s[j] + s[k]) <<< r."""
    s[i] = s[i] ^ rotate(s[j] + s[k], r)
def quarterround(s, i0, i1, i2, i3):
    """Salsa20 quarter-round on state words i0..i3, mutating ``s``."""
    for dst, a, b, r in ((i1, i0, i3, 7),
                         (i2, i1, i0, 9),
                         (i3, i2, i1, 13),
                         (i0, i3, i2, 18)):
        step(s, dst, a, b, r)
def rowround(s):
    """Apply the quarter-round to each row of the 4x4 state, in place."""
    for a, b, c, d in ((0, 1, 2, 3),
                       (5, 6, 7, 4),
                       (10, 11, 8, 9),
                       (15, 12, 13, 14)):
        quarterround(s, a, b, c, d)
def columnround(s):
    """Apply the quarter-round to each column of the 4x4 state, in place."""
    for a, b, c, d in ((0, 4, 8, 12),
                       (5, 9, 13, 1),
                       (10, 14, 2, 6),
                       (15, 3, 7, 11)):
        quarterround(s, a, b, c, d)
def doubleround(s):
    """One Salsa20 double-round: a column round followed by a row round."""
    for rnd in (columnround, rowround):
        rnd(s)
def hsalsa20(n,k):
    # HSalsa20 core: derive a 32-byte subkey from a 16-byte nonce `n` and a
    # 32-byte key `k`, both given as sequences of integer byte values.
    # NOTE(review): Python 2-only code - struct.unpack on str and the string
    # literals passed to struct would need bytes objects in Python 3, and
    # `struct` is not imported anywhere visible in this file - confirm.
    n=''.join([chr(n[i]) for i in range(16)])
    n = struct.unpack('<4I', n)
    k=''.join([chr(k[i]) for i in range(32)])
    k = struct.unpack('<8I', k)
    s = [0] * 16
    # Diagonal words 0,5,10,15 hold the constant "expand 32-byte k".
    s[::5] = struct.unpack('<4I', 'expand 32-byte k')
    s[1:5] = k[:4]
    s[6:10] = n
    s[11:15] = k[4:]
    for i in range(10): doubleround(s)
    # Emit the eight output words HSalsa20 specifies.
    s = [s[i] for i in [0,5,10,15,6,7,8,9]]
    return struct.pack('<8I',*s)
---
title: "poly1305"
description: "how to write and use hacspec"
outputs: ["Reveal"]
---
[RFC 7539](https://tools.ietf.org/html/rfc7539) Section 2.5.1: Poly1305
```
clamp(r): r &= 0x0ffffffc0ffffffc0ffffffc0fffffff
poly1305_mac(msg, key):
r = (le_bytes_to_num(key[0..15])
clamp(r)
s = le_num(key[16..31])
accumulator = 0
p = (1<<130)-5
for i=1 upto ceil(msg length in bytes / 16)
n = le_bytes_to_num(msg[((i-1)*16)..(i*16)] | [0x01])
a += n
a = (r * a) % p
end
a += s
return num_to_16_le_bytes(a)
end
```
---
## Translating the RFC specification to hacspec
The algorithm can be directly translated into hacspec.
---
### setup
* A 256-bit one-time key
* The output is a 128-bit tag.
* set the constant prime "P" to be `2^130-5: 3fffffffffffffffffffffffffffffffb`.
* divide the message into 16-byte blocks
This is translated into the following hacspec definition.
---
### setup hacspec & F\*
```python
blocksize:int = 16
block_t = bytes_t(16)
key_t = bytes_t(32)
tag_t = bytes_t(16)
subblock_t = refine_t(vlbytes_t, lambda x: bytes.length(x) <= 16)
p130m5 : nat_t = (2 ** 130) - 5
felem_t = natmod_t(p130m5)
def felem(n:nat_t) -> felem_t:
return natmod(n,p130m5)
```
```ocaml
let blocksize: int = 16
let block_t: Type0 = array_t uint8_t 16
let key_t: Type0 = array_t uint8_t 32
let tag_t: Type0 = array_t uint8_t 16
let subblock_t: Type0 = (x: vlbytes_t{(bytes_length x) <=. 16})
let p130m5: nat_t = (2 **. 130) -. 5
let felem_t: Type0 = natmod_t p130m5
let felem (n: nat_t) : felem_t = natmod n p130m5
```
---
## bytes to int rfc
```
le_bytes_to_num(msg[((i-1)*16)..(i*16)] | [0x01])
```
---
### bytes to int hacspec & F\*
```python
def encode(block: subblock_t) -> felem_t:
b : block_t = array.create(16, uint8(0))
b[0:bytes.length(block)] = block
welem : felem_t = felem(bytes.to_nat_le(b))
lelem : felem_t = felem(2 ** (8 * array.length(block)))
return lelem + welem
```
```ocaml
let encode (block: subblock_t) : felem_t =
let b = array_create 16 (uint8 0) in
let b = array_update_slice b 0 (bytes_length block) (block) in
let welem = felem (bytes_to_nat_le b) in
let lelem = felem (2 **. (8 *. (array_length block))) in
lelem +. welem
```
---
## clamp rfc
```
clamp(r): r &= 0x0ffffffc0ffffffc0ffffffc0fffffff
```
---
### clamp hacspec & F\*
```python
def encode_r(r: block_t) -> felem_t:
ruint : uint128_t = bytes.to_uint128_le(r)
ruint = ruint & uint128(0x0ffffffc0ffffffc0ffffffc0fffffff)
r_nat : nat_t = uintn.to_nat(ruint)
return felem(r_nat)
```
```ocaml
let encode_r (r: block_t) : felem_t =
let ruint = bytes_to_uint128_le r in
let ruint = ruint &. (uint128 21267647620597763993911028882763415551) in
let r_nat = uintn_to_nat ruint in
felem r_nat
```
---
## poly rfc
```
a = 0
for i=1 upto ceil(msg length in bytes / 16)
n = le_bytes_to_num(msg[((i-1)*16)..(i*16)] | [0x01])
a += n
a = (r * a) % p
end
```
---
### poly hacspec & F\*
```python
def poly(text: vlbytes_t, r: felem_t) -> felem_t:
blocks : vlarray_t(block_t)
last : subblock_t
blocks, last = array.split_blocks(text, blocksize)
acc : felem_t = felem(0)
for i in range(array.length(blocks)):
acc = (acc + encode(blocks[i])) * r
if (array.length(last) > 0):
acc = (acc + encode(last)) * r
return acc
```
```ocaml
let poly (text: vlbytes_t) (r: felem_t) : felem_t =
let blocks, last = array_split_blocks text blocksize in
let acc = felem 0 in
let acc = repeati (array_length blocks)
(fun i acc -> (acc +. (encode blocks.[ i ])) *. r) acc in
let acc = if (array_length last) >. 0 then
(acc +. (encode last)) *. r else acc in
acc
```
---
## poly mac rfc
```
poly1305_mac(msg, key):
r = (le_bytes_to_num(key[0..15])
clamp(r)
s = le_num(key[16..31])
p = (1<<130)-5
a = poly(msg, r)
a += s
return num_to_16_le_bytes(a)
end
```
---
## poly mac hacspec & F\*
```python
def poly1305_mac(text: vlbytes_t, k: key_t) -> tag_t:
r : block_t = k[0:blocksize]
s : block_t = k[blocksize:2*blocksize]
relem : felem_t = encode_r(r) # clamp
selem : uint128_t = bytes.to_uint128_le(s)
a : felem_t = poly(text, relem)
n : uint128_t = uint128(natmod.to_nat(a)) + selem
return bytes.from_uint128_le(n)
```
```ocaml
let poly1305_mac (text: vlbytes_t) (k: key_t) : tag_t =
let r = array_slice k 0 blocksize in
let s = array_slice k blocksize (2 *. blocksize) in
let relem = encode_r r in
let selem = bytes_to_uint128_le s in
let a = poly text relem in
let n = (uint128 ((natmod_to_nat a) % (2 **. 128))) +. selem in
bytes_from_uint128_le n
```
---
## Test and check hacspec
```
λ make poly1305-test
PYTHONPATH=. python tests/poly1305_test.py
Poly1305 Test 0 passed.
Poly1305 Test 1 passed.
...
```
```
λ make poly1305-check
PYTHONPATH=. python lib/check.py specs/poly1305.py
specs/poly1305.py is a valid hacspec.
```
---
## Compile to F\*
Assume `HACL_HOME` and `FSTAR_HOME` are set.
---
## Compile to F\*
```
make -C fstar-compiler/specs/ poly1305.fst
```
```
../../to_fstar.native ../../../specs/poly1305.py > poly1305_pre.fst
/home/franziskus/Code/hacl-star/dependencies/FStar//bin/fstar.exe --include /home/franziskus/Code/hacl-star//lib --include /home/franziskus/Code/hacl-star//lib/fst --expose_interfaces --indent poly1305_pre.fst > poly1305.fst
rm poly1305_pre.fst
```
---
## Run tests in ocaml
```
make -C fstar-compiler/specs/ poly1305.exe
```
```
mkdir -p poly1305-ml
/home/franziskus/Code/hacl-star/dependencies/FStar//bin/fstar.exe --include /home/franziskus/Code/hacl-star//lib --include /home/franziskus/Code/hacl-star//lib/fst --expose_interfaces --lax --codegen OCaml --extract_module Speclib --extract_module Lib.IntTypes --extract_module Lib.RawIntTypes --extract_module Lib.Sequence --extract_module Lib.ByteSequence --extract_module poly1305 --odir poly1305-ml /home/franziskus/Code/hacl-star//lib/fst/Lib.IntTypes.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.RawIntTypes.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.Sequence.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.ByteSequence.fst speclib.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.IntTypes.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.RawIntTypes.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.Sequence.fst /home/franziskus/Code/hacl-star//lib/fst/Lib.ByteSequence.fst speclib.fst poly1305.fst
Extracted module IntTypes
Extracted module RawIntTypes
Extracted module Sequence
Extracted module ByteSequence
Extracted module Speclib
Extracted module Poly1305
All verification conditions discharged successfully
touch poly1305-ml
cp tests/testutil.ml poly1305-ml/
cp tests/poly1305_test.ml poly1305-ml/
OCAMLPATH="/home/franziskus/Code/hacl-star/dependencies/FStar//bin" ocamlfind opt -package fstarlib -linkpkg -g
-w -8 -w -20 -g -I poly1305-ml poly1305-ml/Lib_IntTypes.ml poly1305-ml/Lib_RawIntTypes.ml poly1305-ml/Lib_Sequence.ml poly1305-ml/Lib_ByteSequence.ml poly1305-ml/Speclib.ml poly1305-ml/Poly1305.ml poly1305-ml/testutil.ml poly1305-ml/poly1305_test.ml -o poly1305.exe
File "poly1305-ml/Lib_Sequence.ml", line 356, characters 16-19:
Warning 26: unused variable len.
./poly1305.exe
Poly1305 Test 0 passed.
Poly1305 Test 1 passed.
```
---
## Typecheck
```
make -C fstar-compiler/specs/ poly1305.fst.checked
```
```
make: Entering directory '/mnt/c/Users/Franziskus/Code/hacspec/compiler/fstar-compiler/specs'
/home/franziskus/Code/hacl-star/dependencies/FStar//bin/fstar.exe --include /home/franziskus/Code/hacl-star//lib --include /home/franziskus/Code/hacl-star//lib/fst --expose_interfaces poly1305.fst
Verified module: Poly1305 (10932 milliseconds)
All verification conditions discharged successfully
make: Leaving directory '/mnt/c/Users/Franziskus/Code/hacspec/compiler/fstar-compiler/specs'
```
<file_sep>/specs/sha2.py
from lib.speclib import *
# Four variants of SHA-2
# A variant is identified by its output size in bits: 224, 256, 384 or 512.
variant_t = refine_t(nat_t, lambda x: x == 224 or x == 256 or x == 384 or x == 512)
# Index into the four (rot, rot, rot/shift) triples of the sigma table.
i_range_t = range_t(0, 4)
# Selects shift (0, small sigma) vs. rotate (1, big sigma) for the third op.
op_range_t = range_t(0, 1)
# Generic SHA-2 spec parameterized by variant_t
@typechecked
def sha2(v:variant_t) -> FunctionType:
    # Generic SHA-2 (FIPS 180-4): builds and returns a `hash(msg)` function
    # specialised to variant `v`. Variants 224/256 use 32-bit words and
    # 64-byte blocks; 384/512 use 64-bit words and 128-byte blocks.
    # Initializing types and constants for different variants
    if v == 224 or v == 256:
        blockSize : int = 64
        block_t = bytes_t(blockSize)
        lenSize : int = 8
        len_t = uint64_t
        to_len : FunctionType = uint64
        len_to_bytes : FunctionType = bytes.from_uint64_be
        word_t = uint32_t
        to_word : FunctionType = uint32
        bytes_to_words : FunctionType = bytes.to_uint32s_be
        words_to_bytes : FunctionType = bytes.from_uint32s_be
        kSize : int = 64
        k_t = array_t(word_t,kSize)
        opTableType_t = array_t(int,12)
        # Rotation/shift amounts consumed in triples by `sigma` below:
        # rows are Sigma0, Sigma1, sigma0, sigma1 (FIPS 180-4 notation).
        opTable : opTableType_t = opTableType_t([
            2, 13, 22,
            6, 11, 25,
            7, 18, 3,
            17, 19, 10])
        # SHA-256 round constants.
        kTable : k_t = k_t([
            uint32(0x428a2f98), uint32(0x71374491), uint32(0xb5c0fbcf), uint32(0xe9b5dba5),
            uint32(0x3956c25b), uint32(0x59f111f1), uint32(0x923f82a4), uint32(0xab1c5ed5),
            uint32(0xd807aa98), uint32(0x12835b01), uint32(0x243185be), uint32(0x550c7dc3),
            uint32(0x72be5d74), uint32(0x80deb1fe), uint32(0x9bdc06a7), uint32(0xc19bf174),
            uint32(0xe49b69c1), uint32(0xefbe4786), uint32(0x0fc19dc6), uint32(0x240ca1cc),
            uint32(0x2de92c6f), uint32(0x4a7484aa), uint32(0x5cb0a9dc), uint32(0x76f988da),
            uint32(0x983e5152), uint32(0xa831c66d), uint32(0xb00327c8), uint32(0xbf597fc7),
            uint32(0xc6e00bf3), uint32(0xd5a79147), uint32(0x06ca6351), uint32(0x14292967),
            uint32(0x27b70a85), uint32(0x2e1b2138), uint32(0x4d2c6dfc), uint32(0x53380d13),
            uint32(0x650a7354), uint32(0x766a0abb), uint32(0x81c2c92e), uint32(0x92722c85),
            uint32(0xa2bfe8a1), uint32(0xa81a664b), uint32(0xc24b8b70), uint32(0xc76c51a3),
            uint32(0xd192e819), uint32(0xd6990624), uint32(0xf40e3585), uint32(0x106aa070),
            uint32(0x19a4c116), uint32(0x1e376c08), uint32(0x2748774c), uint32(0x34b0bcb5),
            uint32(0x391c0cb3), uint32(0x4ed8aa4a), uint32(0x5b9cca4f), uint32(0x682e6ff3),
            uint32(0x748f82ee), uint32(0x78a5636f), uint32(0x84c87814), uint32(0x8cc70208),
            uint32(0x90befffa), uint32(0xa4506ceb), uint32(0xbef9a3f7), uint32(0xc67178f2)])
    else:
        blockSize = 128
        block_t = bytes_t(blockSize)
        lenSize = 16
        len_t = uint128_t
        to_len = uint128
        len_to_bytes = bytes.from_uint128_be
        word_t = uint64_t
        to_word = uint64
        bytes_to_words : FunctionType = bytes.to_uint64s_be
        words_to_bytes : FunctionType = bytes.from_uint64s_be
        kSize : int = 80
        k_t = array_t(word_t,kSize)
        opTableType_t = array_t(int,12)
        # 64-bit sigma constants (SHA-384/512).
        opTable = opTableType_t([
            28, 34, 39,
            14, 18, 41,
            1, 8, 7,
            19, 61, 6])
        # SHA-512 round constants.
        kTable = k_t([
            uint64(0x428a2f98d728ae22), uint64(0x7137449123ef65cd), uint64(0xb5c0fbcfec4d3b2f), uint64(0xe9b5dba58189dbbc),
            uint64(0x3956c25bf348b538), uint64(0x59f111f1b605d019), uint64(0x923f82a4af194f9b), uint64(0xab1c5ed5da6d8118),
            uint64(0xd807aa98a3030242), uint64(0x12835b0145706fbe), uint64(0x243185be4ee4b28c), uint64(0x550c7dc3d5ffb4e2),
            uint64(0x72be5d74f27b896f), uint64(0x80deb1fe3b1696b1), uint64(0x9bdc06a725c71235), uint64(0xc19bf174cf692694),
            uint64(0xe49b69c19ef14ad2), uint64(0xefbe4786384f25e3), uint64(0x0fc19dc68b8cd5b5), uint64(0x240ca1cc77ac9c65),
            uint64(0x2de92c6f592b0275), uint64(0x4a7484aa6ea6e483), uint64(0x5cb0a9dcbd41fbd4), uint64(0x76f988da831153b5),
            uint64(0x983e5152ee66dfab), uint64(0xa831c66d2db43210), uint64(0xb00327c898fb213f), uint64(0xbf597fc7beef0ee4),
            uint64(0xc6e00bf33da88fc2), uint64(0xd5a79147930aa725), uint64(0x06ca6351e003826f), uint64(0x142929670a0e6e70),
            uint64(0x27b70a8546d22ffc), uint64(0x2e1b21385c26c926), uint64(0x4d2c6dfc5ac42aed), uint64(0x53380d139d95b3df),
            uint64(0x650a73548baf63de), uint64(0x766a0abb3c77b2a8), uint64(0x81c2c92e47edaee6), uint64(0x92722c851482353b),
            uint64(0xa2bfe8a14cf10364), uint64(0xa81a664bbc423001), uint64(0xc24b8b70d0f89791), uint64(0xc76c51a30654be30),
            uint64(0xd192e819d6ef5218), uint64(0xd69906245565a910), uint64(0xf40e35855771202a), uint64(0x106aa07032bbd1b8),
            uint64(0x19a4c116b8d2d0c8), uint64(0x1e376c085141ab53), uint64(0x2748774cdf8eeb99), uint64(0x34b0bcb5e19b48a8),
            uint64(0x391c0cb3c5c95a63), uint64(0x4ed8aa4ae3418acb), uint64(0x5b9cca4f7763e373), uint64(0x682e6ff3d6b2b8a3),
            uint64(0x748f82ee5defb2fc), uint64(0x78a5636f43172f60), uint64(0x84c87814a1f0ab72), uint64(0x8cc702081a6439ec),
            uint64(0x90befffa23631e28), uint64(0xa4506cebde82bde9), uint64(0xbef9a3f7b2c67915), uint64(0xc67178f2e372532b),
            uint64(0xca273eceea26619c), uint64(0xd186b8c721c0c207), uint64(0xeada7dd6cde0eb1e), uint64(0xf57d4f7fee6ed178),
            uint64(0x06f067aa72176fba), uint64(0x0a637dc5a2c898a6), uint64(0x113f9804bef90dae), uint64(0x1b710b35131c471b),
            uint64(0x28db77f523047d84), uint64(0x32caab7b40c72493), uint64(0x3c9ebe0a15c9bebc), uint64(0x431d67c49c100d4c),
            uint64(0x4cc5d4becb3e42b6), uint64(0x597f299cfc657e2a), uint64(0x5fcb6fab3ad6faec), uint64(0x6c44198c4a475817)])
    # Digest length in bytes; the internal state is always 8 words.
    hashSize : int = v // 8
    hash_t = array_t(word_t,8)
    digest_t = bytes_t(hashSize)
    # NOTE(review): h0 holds 8 *words* (see the hash_t values assigned
    # below), so the h0_t = bytes_t(8) alias looks inconsistent -- confirm
    # against the hacspec typechecker.
    h0_t = bytes_t(8)
    h0: h0_t = array.create(8,to_word(0))
    # Per-variant initial hash values (FIPS 180-4 section 5.3).
    if v == 224:
        h0 = hash_t([
            uint32(0xc1059ed8), uint32(0x367cd507), uint32(0x3070dd17), uint32(0xf70e5939),
            uint32(0xffc00b31), uint32(0x68581511), uint32(0x64f98fa7), uint32(0xbefa4fa4)])
    elif v == 256:
        h0 = hash_t([
            uint32(0x6a09e667), uint32(0xbb67ae85), uint32(0x3c6ef372), uint32(0xa54ff53a),
            uint32(0x510e527f), uint32(0x9b05688c), uint32(0x1f83d9ab), uint32(0x5be0cd19)])
    elif v == 384:
        h0 = hash_t([
            uint64(0xcbbb9d5dc1059ed8), uint64(0x629a292a367cd507), uint64(0x9159015a3070dd17), uint64(0x152fecd8f70e5939),
            uint64(0x67332667ffc00b31), uint64(0x8eb44a8768581511), uint64(0xdb0c2e0d64f98fa7), uint64(0x47b5481dbefa4fa4)])
    else:
        h0 = hash_t([
            uint64(0x6a09e667f3bcc908), uint64(0xbb67ae8584caa73b), uint64(0x3c6ef372fe94f82b), uint64(0xa54ff53a5f1d36f1),
            uint64(0x510e527fade682d1), uint64(0x9b05688c2b3e6c1f), uint64(0x1f83d9abfb41bd6b), uint64(0x5be0cd19137e2179)])
    # Initialization complete: SHA-2 spec begins
    @typechecked
    def ch(x:word_t,y:word_t,z:word_t) -> word_t:
        # "Choose": each bit of x selects the corresponding bit of y or z.
        return (x & y) ^ ((~ x) & z)
    @typechecked
    def maj(x:word_t,y:word_t,z:word_t) -> word_t:
        # "Majority" of the three inputs at each bit position.
        return (x & y) ^ ((x & z) ^ (y & z))
    @typechecked
    def sigma(x:word_t,i:i_range_t,op:op_range_t) -> word_t:
        # op == 0: small sigma (third operation is a shift);
        # op == 1: big sigma (all three operations are rotations).
        # Amounts come from opTable[3*i .. 3*i+2].
        tmp : uintn_t
        if op == 0:
            tmp = x >> opTable[3*i+2]
        else:
            tmp = uintn.rotate_right(x,opTable[3*i+2])
        return (uintn.rotate_right(x,opTable[3*i]) ^
                uintn.rotate_right(x,opTable[3*i+1]) ^
                tmp)
    @typechecked
    def schedule(block:block_t) -> k_t:
        # Message schedule: the first 16 words are the block itself, the
        # rest are derived with the small-sigma recurrences.
        b : bytes_t = bytes_to_words(block)
        # NOTE(review): `s` holds words, not bytes; the vlbytes_t annotation
        # looks inconsistent -- confirm.
        s : vlbytes_t = array.create(kSize,to_word(0))
        for i in range(kSize):
            if i < 16:
                s[i] = b[i]
            else:
                t16 : word_t = s[i-16]
                t15 : word_t = s[i-15]
                t7 : word_t = s[i-7]
                t2 : word_t = s[i-2]
                s1 : word_t = sigma(t2,3,0)
                s0 : word_t = sigma(t15,2,0)
                s[i] = s1 + t7 + s0 + t16
        return s
    @typechecked
    def shuffle(ws:k_t,hashi:hash_t) -> hash_t:
        # Core compression rounds: kSize rounds over the 8-word state.
        h = array.copy(hashi)
        for i in range(kSize):
            a0 : word_t = h[0]
            b0 : word_t = h[1]
            c0 : word_t = h[2]
            d0 : word_t = h[3]
            e0 : word_t = h[4]
            f0 : word_t = h[5]
            g0 : word_t = h[6]
            # Note: this local h0 (the 8th state word) shadows the outer
            # initial-hash constant h0 inside the loop body only.
            h0 : word_t = h[7]
            t1 : word_t = h0 + sigma(e0,1,1) + ch(e0,f0,g0) + kTable[i] + ws[i]
            t2 : word_t = sigma(a0,0,1) + maj(a0,b0,c0)
            h[0] = t1 + t2
            h[1] = a0
            h[2] = b0
            h[3] = c0
            h[4] = d0 + t1
            h[5] = e0
            h[6] = f0
            h[7] = g0
        return h
    @typechecked
    def compress(block:block_t,hIn:hash_t) -> hash_t:
        # One block: schedule, shuffle, then feed-forward add of hIn.
        s : k_t = schedule(block)
        h : hash_t = shuffle(s,hIn)
        for i in range(8):
            h[i] += hIn[i]
        return h
    @typechecked
    def truncate(b:bytes_t(v)) -> digest_t:
        # Keep only the first hashSize bytes of the serialized state
        # (SHA-224/384 are truncations of the wider state).
        # NOTE(review): the annotation bytes_t(v) uses the bit-count v as a
        # byte length -- confirm intended.
        result: vlbytes_t = array.create(hashSize, uint8(0))
        for i in range(hashSize):
            result[i] = b[i]
        return digest_t((result))
    @typechecked
    def hash(msg:vlbytes_t) -> digest_t:
        # Full hash: compress all complete blocks, then pad the remainder
        # with 0x80, zeros, and the big-endian bit length (one or two final
        # blocks depending on how much room the last block leaves).
        blocks : array(block_t)
        last : block_t
        blocks,last = array.split_blocks(msg, blockSize)
        nblocks : int = array.length(blocks)
        h:hash_t = h0
        for i in range(nblocks):
            h = compress(blocks[i],h)
        last_len : int = array.length(last)
        len_bits : int = array.length(msg) * 8
        pad: vlbytes_t = array.create(2*blockSize,uint8(0))
        pad[0:last_len] = last
        pad[last_len] = uint8(0x80)
        if last_len < blockSize - lenSize:
            # Length still fits: a single padded block.
            pad[blockSize-lenSize:blockSize] = len_to_bytes(to_len(len_bits))
            h = compress(pad[0:blockSize],h)
        else:
            # No room for the length field: two final blocks.
            pad[(2*blockSize)-lenSize:2*blockSize] = len_to_bytes(to_len(len_bits))
            h = compress(pad[0:blockSize],h)
            h = compress(pad[blockSize:2*blockSize],h)
        result : bytes_t = words_to_bytes(h)
        return truncate(result)
    return hash
# Specific instances of SHA-2
# Partial applications of the generic spec; each value is a hash(msg) function.
sha224 : FunctionType = sha2(224)
sha256 : FunctionType = sha2(256)
sha384 : FunctionType = sha2(384)
sha512 : FunctionType = sha2(512)
<file_sep>/specs/ed25519.py
#!/usr/bin/python3
from lib.speclib import *
from specs.curve25519 import felem_t, to_felem, finv, serialized_scalar_t, serialized_point_t, scalar_t, p25519,to_scalar, zero, one
from specs.sha2 import sha512
# Define prime field
# Edwards curve constant d of edwards25519 (RFC 8032 section 5.1).
d25519: felem_t = (to_felem(
    37095705934669439343138083508754565189542113879843219016388785533085940283555))
# TODO: << isn't supported by the compiler yet.
# Order of the prime-order subgroup generated by the base point.
q25519 : nat_t = nat((1 << 252) + 27742317777372353535851937790883648493)
# Scalars modulo the group order.
qelem_t = natmod_t(q25519)
@typechecked
def to_qelem(n:nat_t) -> qelem_t:
    # Reduce a natural number modulo the group order q25519.
    return natmod(n,q25519)
# A point in extended twisted Edwards coordinates (X, Y, Z, T); the neutral
# element is (0, 1, 1, 0) and affine points embed as (x, y, 1, x*y).
extended_point_t = tuple_t(felem_t, felem_t, felem_t, felem_t)
@typechecked
def extended_point(a: felem_t, b: felem_t, c: felem_t, d: felem_t) -> extended_point_t:
    # Constructor for an extended-coordinate point tuple.
    return (a,b,c,d)
@typechecked
def sha512_modq(s: vlbytes_t) -> qelem_t:
    # SHA-512 of s, decoded little-endian and reduced modulo q25519.
    h : digest_t = sha512(s)
    return to_qelem(bytes.to_nat_le(h))
@typechecked
def point_add(p: extended_point_t, q: extended_point_t) -> extended_point_t:
    # Point addition on extended twisted Edwards coordinates, following the
    # RFC 8032 formulas; the neutral element (0,1,1,0) is special-cased.
    if p == extended_point(zero, one, one, zero):
        return q
    if q == extended_point(zero, one, one, zero):
        return p
    x1 : felem_t
    x2 : felem_t
    y1 : felem_t
    y2 : felem_t
    z1 : felem_t
    z2 : felem_t
    t1 : felem_t
    t2 : felem_t
    (x1, y1, z1, t1) = p
    (x2, y2, z2, t2) = q
    a : felem_t = (y1 - x1) * (y2 - x2)
    b : felem_t = (y1 + x1) * (y2 + x2)
    c : felem_t = to_felem(2) * d25519 * t1 * t2
    d : felem_t = to_felem(2) * z1 * z2
    e : felem_t = b - a
    f : felem_t = d - c
    g : felem_t = d + c
    h : felem_t = b + a
    x3 : felem_t = e * f
    y3 : felem_t = g * h
    t3 : felem_t = e * h
    z3 : felem_t = f * g
    return extended_point(x3, y3, z3, t3)
@typechecked
def point_double(p: extended_point_t) -> extended_point_t:
    # Point doubling on extended coordinates (RFC 8032 formulas); the
    # neutral element is returned unchanged.
    if p == extended_point(zero, one, one, zero):
        return p
    x1 : felem_t
    y1 : felem_t
    z1 : felem_t
    t1 : felem_t
    (x1, y1, z1, t1) = p
    a : felem_t = x1 ** 2
    b : felem_t = y1 ** 2
    c : felem_t = to_felem(2) * (z1 ** 2)
    h : felem_t = a + b
    e : felem_t = h - ((x1 + y1) ** 2)
    g : felem_t = a - b
    f : felem_t = c + g
    x3 : felem_t = e * f
    y3 : felem_t = g * h
    t3 : felem_t = e * h
    z3 : felem_t = f * g
    return extended_point(x3, y3, z3, t3)
@typechecked
def montgomery_ladder(k: scalar_t, init: extended_point_t) -> extended_point_t:
    # Ladder-style scalar multiplication: scan the 256 scalar bits MSB
    # first; conditionally swap, double, add, then swap back. Returns
    # [k]init.
    p0: extended_point_t = extended_point(zero, one, one, zero)
    p1: extended_point_t = init
    for i in range(256):
        if k[255-i] == bit(1):
            (p0, p1) = (p1, p0)
        xx : extended_point_t = point_double(p0)
        xp1 : extended_point_t = point_add(p0, p1)
        if k[255-i] == bit(1):
            (p0, p1) = (xp1, xx)
        else:
            (p0, p1) = (xx, xp1)
    return p0
@typechecked
def point_mul(s: serialized_scalar_t, p: extended_point_t) -> extended_point_t:
    # Scalar multiplication: decode s as a little-endian 256-bit scalar and
    # compute [s]p via the ladder. (Removed an unused neutral-element local
    # that was never read.)
    s_ : scalar_t = to_scalar(bytes.to_nat_le(s))
    Q1 : extended_point_t = montgomery_ladder(s_, p)
    return Q1
@typechecked
def point_compress(p: extended_point_t) -> serialized_point_t:
    # Normalise to affine coordinates (multiply by Z^-1) and encode as the
    # 255-bit little-endian y with the parity of x in the top bit (RFC 8032).
    px : felem_t
    py : felem_t
    pz : felem_t
    pt : felem_t
    (px, py, pz, pt) = p
    zinv : felem_t = finv(pz)
    x : felem_t = px * zinv
    y : felem_t = py * zinv
    r : int = (2**255 * (natmod.to_int(x) % 2)) + natmod.to_int(y)
    # TODO: default arguments aren't supported by the compiler yet.
    return bytes.from_nat_le(r)
fsqrt_m1: felem_t = (to_felem(pow(2, ((p25519 - 1) // 4), p25519)))  # 2^((p-1)/4), a square root of -1 mod p
@typechecked
def recover_x_coordinate(y:nat_t,sign:bool) -> result_t(felem_t):
    # Solve x^2 = (y^2 - 1) / (d*y^2 + 1) for x and pick the root whose
    # parity matches `sign` (RFC 8032 decoding, section 5.1.3).
    if y >= p25519:
        return result.error("y >= p25519")
    else:
        y : felem_t = to_felem(y)
        p1 : felem_t = d25519 * (y ** 2)
        p1_1 : felem_t = p1 + one
        x2 : felem_t = ((y ** 2) - one) * finv(p1_1)
        if x2 == zero and sign:
            return result.error("x == -0")
        elif x2 == zero and not sign:
            return result.retval(to_felem(0))
        else:
            # Candidate square root via the (p+3)/8 exponent; if it misses,
            # multiply by sqrt(-1) and re-check.
            x : felem_t = x2 ** ((p25519 + 3)//8)
            if (x ** 2) - x2 != zero:
                x = x * fsqrt_m1
            if ((x ** 2) - x2) != zero:
                return result.error("x**2 - x2 is not 0")
            else:
                # Negate if the parity of the root disagrees with `sign`.
                if (natmod.to_int(x) % 2 == 1) != sign:
                    return result.retval(to_felem(p25519 - natmod.to_int(x)))
                else:
                    return result.retval(x)
@typechecked
def point_decompress(s:serialized_point_t) -> result_t(extended_point_t) :
    # Inverse of point_compress: split the top (sign) bit from the 255-bit
    # y, recover x, and rebuild the extended-coordinate point.
    y : nat_t = bytes.to_nat_le(s)
    sign : bool = (y // (1 << 255)) % 2 == 1
    y : nat_t = y % (1 << 255)
    x : result_t(felem_t) = recover_x_coordinate(y, sign)
    if result.is_valid(x):
        x : felem_t = result.get_value(x)
        y_felemt : felem_t = to_felem(y)
        r : extended_point_t = extended_point(x, y_felemt, one, x * y_felemt)
        return result.retval(r)
    return result.error("Couldn't recover X coordinate.")
@typechecked
def expand_secret(s: serialized_scalar_t) -> tuple_t(serialized_scalar_t, serialized_scalar_t):
    # SHA-512 the seed; clamp the low half into the signing scalar (clear
    # the 3 low bits, clear the top bit, set bit 254 -- RFC 8032) and return
    # the high half as the deterministic-nonce prefix used by `sign`.
    h : digest_t = sha512(s)
    h_low : bytes_t = h[0:32]
    h_high : bytes_t = h[32:64]
    h_low[0] &= uint8(0xf8)
    h_low[31] &= uint8(127)
    h_low[31] |= uint8(64)
    return serialized_scalar_t(h_low), serialized_scalar_t(h_high)
# Affine coordinates of the edwards25519 base point B (RFC 8032).
_g_x: felem_t = (to_felem(
    15112221349535400772501151409588531511454012693041857206046113283949847762202))
_g_y: felem_t = (to_felem(
    46316835694926478169428394003475163141307993866256225615783033603165251855960))
g_ed25519: extended_point_t = extended_point(_g_x, _g_y, one, _g_x * _g_y)
# An Ed25519 signature is R (32 bytes) || S (32 bytes).
sigval_t = bytes_t(64)
@typechecked
def private_to_public(s: serialized_scalar_t) -> serialized_point_t:
    # Derive the public key A = compress([a]B) from the 32-byte seed.
    # BUG FIX: the fixed-base multiplication must use the expanded/clamped
    # scalar `a` (RFC 8032 section 5.1.5), not the raw seed `s`; `sign`
    # already uses `a` for the same computation.
    a : serialized_scalar_t
    (a, _) = expand_secret(s)
    return point_compress(point_mul(a, g_ed25519))
@typechecked
def sign(priv: serialized_scalar_t, msg: vlbytes_t) -> sigval_t:
    # Deterministic Ed25519 signing (RFC 8032 section 5.1.6):
    #   r = H(prefix || msg) mod q,  R = [r]B,
    #   h = H(R || A || msg) mod q,  S = r + h*a mod q.
    a : serialized_scalar_t
    prefix : serialized_scalar_t
    a, prefix = expand_secret(priv)
    ap : serialized_point_t = point_compress(point_mul(a, g_ed25519))
    # Scratch buffer laid out as 32 || 32 || msg; the two 32-byte slots hold
    # first (prefix), then (R, A) for the challenge hash, then (R, S).
    tmp = bytes(array.create(array.length(msg)+64, uint8(0)))
    tmp[32:64] = prefix
    tmp[64:array.length(msg)+64] = msg
    pmsg : bytes_t = tmp[32:array.length(msg)+64]
    r : qelem_t = sha512_modq(pmsg)
    rp : serialized_point_t = point_compress(point_mul(bytes.from_nat_le(natmod.to_int(r)), g_ed25519))
    tmp[0:32] = rp
    tmp[32:64] = ap
    h : qelem_t = sha512_modq(tmp)
    s : qelem_t = r + (h * to_qelem(bytes.to_nat_le(a)))
    tmp[32:64] = bytes.from_nat_le(natmod.to_int(s))
    return tmp[0:64]
@typechecked
def point_equal(p: extended_point_t, q: extended_point_t) -> bool:
    # Projective equality: compare cross-multiplied coordinates so points
    # with different Z representatives still compare equal.
    px : felem_t
    py : felem_t
    pz : felem_t
    pt : felem_t
    qx : felem_t
    qy : felem_t
    qz : felem_t
    qt : felem_t
    (px, py, pz, pt) = p
    (qx, qy, qz, qt) = q
    return (px * qz == qx * pz) and (py * qz == qy * pz)
@typechecked
def verify(pub: serialized_point_t, msg: vlbytes_t, sigval: sigval_t) -> bool:
    # Ed25519 verification (RFC 8032 section 5.1.7): decompress A and R,
    # reject S >= q, then check [S]B == R + [h]A with h = H(R||A||msg) mod q.
    ap : result_t(felem_t) = point_decompress(pub)
    if not result.is_valid(ap):
        return False
    ap : felem_t = result.get_value(ap)
    rs : bytes_t = sigval[0:32]
    rp : result_t(felem_t) = point_decompress(rs)
    if not result.is_valid(rp):
        return False
    rp : felem_t = result.get_value(rp)
    s : nat_t = bytes.to_nat_le(sigval[32:64])
    if s >= q25519:
        return False
    else:
        # Rebuild the challenge-hash input R || A || msg.
        tmp = bytes(array.create(array.length(msg)+64, uint8(0)))
        tmp[0:32] = rs
        tmp[32:64] = pub
        tmp[64:array.length(msg)+64] = msg
        h : qelem_t = sha512_modq(tmp)
        sB : extended_point_t = point_mul(bytes.from_nat_le(s), g_ed25519)
        hA : extended_point_t = point_mul(bytes.from_nat_le(natmod.to_int(h)), ap)
        return point_equal(sB, point_add(rp, hA))
<file_sep>/tests/frodo_test.py
from lib.speclib import *
from specs.frodo import Frodo
import json
from tests.testlib import print_dot, exit
def main ():
    # Load the JSON Frodo KEM test vectors, run keypair/enc/dec round trips,
    # and compare against the expected pk/ct/shared-secret values; exits
    # with status 1 on the first failure, 0 otherwise.
    # NOTE(review): the file handle is never closed; a `with` block would be
    # cleaner -- confirm before changing test tooling.
    file = open('tests/test_vectors/frodo_test_vectors.json')
    frodo_test_vectors = json.load(file)
    print_dot()
    for i in range(len(frodo_test_vectors)):
        frodo_kem = frodo_test_vectors[i]['frodo_kem']
        gen_a = frodo_test_vectors[i]['gen_a']
        keypaircoins = bytes.from_hex(frodo_test_vectors[i]['keypaircoins'])
        enccoins = bytes.from_hex(frodo_test_vectors[i]['enccoins'])
        pk_expected = bytes.from_hex(frodo_test_vectors[i]['pk_expected'])
        ct_expected = bytes.from_hex(frodo_test_vectors[i]['ct_expected'])
        ss_expected = bytes.from_hex(frodo_test_vectors[i]['ss_expected'])
        # Instantiate the KEM for this vector's parameter set / matrix gen.
        (crypto_kem_keypair, crypto_kem_enc, crypto_kem_dec) = Frodo(frodo_kem, gen_a)
        pk, sk = crypto_kem_keypair(keypaircoins)
        ct, ss1 = crypto_kem_enc(enccoins, pk)
        ss2 = crypto_kem_dec(ct, sk)
        if (ss1 == ss2 and ss1 == ss_expected and pk == pk_expected and ct == ct_expected):
            print("Frodo Test "+str(i)+" successful!")
        else:
            print("Frodo Test failed!")
            if (ss1 != ss_expected or ss1 != ss2):
                print("Computed shared secret 1: " + str(ss1))
                print("Computed shared secret 2: " + str(ss2))
                print("Expected shared secret: " + str(ss_expected))
            if (pk != pk_expected):
                print("Computed public key: " + str(pk))
                print("Expected public key: " + str(pk_expected))
            if (ct != ct_expected):
                print("Computed cipher text: " + str(ct))
                print("Expected cipher text: " + str(ct_expected))
            exit(1)
    exit(0)
main()
<file_sep>/specs/chacha20.py
#!/usr/bin/python3
from lib.speclib import *
blocksize:int = 64  # ChaCha20 keystream block size in bytes
index_t = range_t(0,16)  # index into the 16-word state
rotval_t = range_t(1,32)  # valid left-rotation amounts
state_t = array_t(uint32_t,16)
key_t = bytes_t(32)
nonce_t = bytes_t(12)  # 96-bit IETF nonce (RFC 8439)
block_t = bytes_t(64)
subblock_t = refine_t(vlbytes_t, lambda x: array.length(x) <= blocksize)
constants_t = array_t(uint32_t,4)
@typechecked
def line(a: index_t, b: index_t, d: index_t, s: rotval_t, m: state_t) -> state_t:
    # One ARX step of the quarter round:
    #   m[a] += m[b];  m[d] = (m[d] ^ m[a]) <<< s.
    # Works on a copy, so the input state is left unchanged.
    m = array.copy(m)
    m[a] = m[a] + m[b]
    m[d] = m[d] ^ m[a]
    m[d] = uintn.rotate_left(m[d],s)
    return m
@typechecked
def quarter_round(a: index_t, b: index_t, c:index_t, d: index_t, m: state_t) -> state_t :
    # The ChaCha quarter round on state words (a, b, c, d), expressed as
    # four `line` steps with rotations 16, 12, 8, 7 (RFC 8439).
    m: state_t = line(a, b, d, 16, m)
    m = line(c, d, b, 12, m)
    m = line(a, b, d, 8, m)
    m = line(c, d, b, 7, m)
    return m
@typechecked
def double_round(m: state_t) -> state_t :
    # One double round: quarter rounds over the four columns, then over the
    # four diagonals of the 4x4 state.
    m: state_t = quarter_round(0, 4, 8, 12, m)
    m = quarter_round(1, 5, 9, 13, m)
    m = quarter_round(2, 6, 10, 14, m)
    m = quarter_round(3, 7, 11, 15, m)
    m = quarter_round(0, 5, 10, 15, m)
    m = quarter_round(1, 6, 11, 12, m)
    m = quarter_round(2, 7, 8, 13, m)
    m = quarter_round(3, 4, 9, 14, m)
    return m
# "expand 32-byte k" as four little-endian 32-bit words (RFC 8439).
constants : constants_t = array(
    [uint32(0x61707865), uint32(0x3320646e),
     uint32(0x79622d32), uint32(0x6b206574)])
@typechecked
def chacha20_init(k: key_t, counter: uint32_t, nonce: nonce_t) -> state_t:
    # State layout (RFC 8439): 4 constants | 8 key words | counter | 3 nonce
    # words, all little-endian.
    st : state_t
    st : state_t = array.create(16,uint32(0))
    st[0:4] = constants
    st[4:12] = bytes.to_uint32s_le(k)
    st[12] = counter
    st[13:16] = bytes.to_uint32s_le(nonce)
    return st
@typechecked
def chacha20_core(st:state_t) -> state_t:
    # 10 double rounds (= 20 rounds), then the feed-forward addition of the
    # original input state.
    # working_state : state_t
    working_state : state_t = array.copy(st)
    for x in range(10):
        working_state = double_round(working_state)
    for i in range(16):
        working_state[i] += st[i]
    return working_state
@typechecked
def chacha20(k: key_t, counter: uint32_t, nonce: nonce_t) -> state_t:
    # One keystream state: initialise then run the core permutation.
    return chacha20_core(chacha20_init(k,counter,nonce))
@typechecked
def chacha20_block(k: key_t, counter:uint32_t, nonce: nonce_t) -> block_t:
    # Serialise one keystream state to a 64-byte little-endian block.
    st : state_t
    block : block_t
    st = chacha20(k,counter,nonce)
    block = bytes.from_uint32s_le(st)
    return block
# Many ways of extending this to CTR
# This version: use first-order CTR function specific to Chacha20 with a loop
@typechecked
def xor_block(block:block_t, keyblock:block_t) -> block_t:
    # Byte-wise XOR of a message block with a keystream block.
    out : block_t = bytes.copy(block)
    for i in range(blocksize):
        out[i] ^= keyblock[i]
    return out
@typechecked
def chacha20_counter_mode(key: key_t, counter: uint32_t, nonce: nonce_t, msg:vlbytes_t) -> vlbytes_t:
    # CTR mode: XOR each 64-byte block of msg with successive keystream
    # blocks (counter incremented per block); a trailing partial block is
    # handled via a zero-padded 64-byte scratch buffer.
    blocks : vlarray_t(block_t)
    last : subblock_t
    blocks, last = array.split_blocks(msg, blocksize)
    keyblock : block_t = array.create(blocksize, uint8(0))
    last_block : block_t = array.create(blocksize, uint8(0))
    ctr : uint32_t = counter
    for i in range(array.length(blocks)):
        keyblock = chacha20_block(key, ctr, nonce)
        blocks[i] = xor_block(blocks[i], keyblock)
        ctr += uint32(1)
    keyblock = chacha20_block(key, ctr, nonce)
    last_block[0:array.length(last)] = last
    last_block = xor_block(last_block, keyblock)
    last = last_block[0:array.length(last)]
    return array.concat_blocks(blocks, last)
@typechecked
def chacha20_encrypt(key: key_t, counter: uint32_t, nonce: nonce_t, msg:vlbytes_t) -> vlbytes_t:
    # Encryption is plain CTR keystream XOR.
    return chacha20_counter_mode(key,counter,nonce,msg)
@typechecked
def chacha20_decrypt(key: key_t, counter: uint32_t, nonce: nonce_t, msg:vlbytes_t) -> vlbytes_t:
    # Decryption is the same XOR operation as encryption.
    return chacha20_counter_mode(key,counter,nonce,msg)
<file_sep>/specs/argon2i.py
from lib.speclib import *
from specs.blake2 import blake2b
version_number = uint8(0x13)  # Argon2 version 1.3
argon_type = nat(1)  # type 1 = Argon2i (data-independent addressing)
block_size = nat(1024)  # memory block size in bytes
line_size = nat(128)  # one row/column of a block, in bytes
max_size_t = 2**64 - 1
size_nat_t,size_nat = refine(nat_t, lambda x: x <= max_size_t)
output_size_t,output_size = refine(nat_t, lambda x: x <= 64)
j_range_t = range_t(0, 8)
lanes_t = range_t(1, 2**24)
segment_t = range_t(0, 4)
t_len_t = range_t(1, max_size_t - 65)
idx_t,idx = refine(size_nat_t, lambda x: x <= 15)
working_vector_t = array_t(uint64_t, 16)  # 16-word state for the G mixing rounds
@typechecked
def h(a: refine(vlbytes_t, lambda x: array.length(x) < max_size_t - 2 * line_size), nn: output_size_t) \
        -> contract(vlbytes_t, lambda a, nn: True, lambda a, nn, res: array.length(res) == nn):
    # Argon2's H primitive: BLAKE2b with an empty key and nn-byte digest.
    res = blake2b(a, bytes([]), nn)
    return res
@typechecked
def ceil32(x: size_nat_t) -> size_nat_t:
    # Ceiling division of x by 32.
    if x % 32 == 0:
        return size_nat(nat(x // 32))
    else:
        return size_nat(nat(x // 32 + 1))
@typechecked
def compute_variable_length_output_size(t_len: refine(size_nat_t, lambda x: x + 64 <= max_size_t)) -> size_nat_t:
    # Size of the buffer that H' (h_prime) fills: t_len itself when <= 64,
    # otherwise 32 bytes per intermediate hash plus a final 64-byte hash.
    if t_len <= 64:
        return t_len
    else:
        r = ceil32(t_len) - 2
        return size_nat(nat(32 * r + 64))
@typechecked
def h_prime(t_len: refine(size_nat_t, lambda x: 1 <= t_len and t_len + 64 <= max_size_t),
            x: refine(vlbytes_t, lambda x: array.length(x) + 4 <= max_size_t - 2 * line_size)) \
        -> contract(vlbytes_t,
                    lambda t_len, x: True,
                    lambda t_len, x, res: array.length(x) == compute_variable_length_output_size(t_len)):
    # Variable-length hash H' (RFC 9106): prefix x with LE32(t_len); for
    # t_len <= 64 a single H call suffices, otherwise chain 64-byte hashes
    # and keep 32 bytes of each except the last.
    # NOTE(review): the refinement lambda on t_len refers to `t_len` rather
    # than its own parameter `x`, and the postcondition checks
    # array.length(x) where array.length(res) seems intended -- confirm.
    t_with_x = bytes(array.create(array.length(x) + 4, uint8(0)))
    t_with_x[0:4] = bytes.from_uint32_le(uint32(t_len))
    t_with_x[4:] = x
    if t_len <= 64:
        return h(t_with_x, t_len)
    else:
        r = ceil32(t_len) - 2
        output = bytes(array.create(
            compute_variable_length_output_size(t_len), uint8(0)))
        previous = h(t_with_x, nat(64))
        output[0:32] = previous[0:32]
        for i in range(r):
            i = i + 1
            v = h(previous, nat(64))
            output[i * 32:(i + 1) * 32] = v[0:32]
            previous = v
        output[r * 32:r * 32 + 64] = previous
        return output
@typechecked
def low_bits(x: uint64_t) -> uint64_t:
    # Truncate to the low 32 bits, zero-extended back to 64 bits.
    return uint64(uint32(x))
@typechecked
def g(v: working_vector_t, a: idx_t, b: idx_t, c: idx_t, d: idx_t) -> working_vector_t:
    # BlaMka mixing function: BLAKE2b's G with the multiplicative term
    # 2 * low32(x) * low32(y) added at each addition step (RFC 9106).
    v_res = array.copy(v)
    v_res[a] = v_res[a] + v_res[b] + \
        uint64(2) * low_bits(v_res[a]) * low_bits(v_res[b])
    v_res[d] = uintn.rotate_right(v_res[d] ^ v_res[a], 32)
    v_res[c] = v_res[c] + v_res[d] + \
        uint64(2) * low_bits(v_res[c]) * low_bits(v_res[d])
    v_res[b] = uintn.rotate_right(v_res[b] ^ v_res[c], 24)
    v_res[a] = v_res[a] + v_res[b] + \
        uint64(2) * low_bits(v_res[a]) * low_bits(v_res[b])
    v_res[d] = uintn.rotate_right(v_res[d] ^ v_res[a], 16)
    v_res[c] = v_res[c] + v_res[d] + \
        uint64(2) * low_bits(v_res[c]) * low_bits(v_res[d])
    v_res[b] = uintn.rotate_right(v_res[b] ^ v_res[c], 63)
    return v_res
@typechecked
def P(input: bytes_t(line_size)) -> bytes_t(line_size):
    # Argon2 permutation P over a 128-byte row/column viewed as 16 u64
    # words: column mixes followed by diagonal mixes, as in BLAKE2b.
    v = array.create(16, uint64(0))
    for i in range(8):
        v[2 * i] = bytes.to_uint64_le(input[i * 16:i * 16 + 8])
        v[2 * i + 1] = bytes.to_uint64_le(input[i * 16 + 8:(i + 1) * 16])
    v = g(v, size_nat(nat(0)), size_nat(nat(4)), size_nat(nat(8)), size_nat(nat(12)))
    v = g(v, size_nat(nat(1)), size_nat(nat(5)), size_nat(nat(9)), size_nat(nat(13)))
    v = g(v, size_nat(nat(2)), size_nat(nat(6)), size_nat(nat(10)), size_nat(nat(14)))
    v = g(v, size_nat(nat(3)), size_nat(nat(7)), size_nat(nat(11)), size_nat(nat(15)))
    v = g(v, size_nat(nat(0)), size_nat(nat(5)), size_nat(nat(10)), size_nat(nat(15)))
    v = g(v, size_nat(nat(1)), size_nat(nat(6)), size_nat(nat(11)), size_nat(nat(12)))
    v = g(v, size_nat(nat(2)), size_nat(nat(7)), size_nat(nat(8)), size_nat(nat(13)))
    v = g(v, size_nat(nat(3)), size_nat(nat(4)), size_nat(nat(9)), size_nat(nat(14)))
    return bytes.from_uint64s_le(v)
@typechecked
def xor_blocks(X: bytes_t(block_size), Y: bytes_t(block_size)) -> bytes_t(block_size):
    # XOR two 1024-byte blocks, processed 8 bytes (one u64) at a time.
    output = bytes(array.create(block_size, uint8(0)))
    for i in range(block_size // 8):
        output[i * 8:(i + 1) * 8] = bytes.from_uint64_be(bytes.to_uint64_be(
            X[8 * i:8 * (i + 1)]) ^ bytes.to_uint64_be(Y[8 * i:8 * (i + 1)]))
    return output
@typechecked
def extract_block_column(j: j_range_t, block: bytes_t(block_size)) -> bytes_t(line_size):
    # Gather column j of a block: one 16-byte cell from each of the 8 rows.
    col = bytes(array.create(line_size, uint8(0)))
    for i in range(8):
        offset = i * line_size + j * 16
        col[i * 16:(i + 1) * 16] = block[offset:offset + 16]
    return col
@typechecked
def update_block_column(j: j_range_t, col: bytes_t(line_size), block: bytes_t(block_size)) -> bytes_t(block_size):
    # Scatter `col` back into column j of a copy of `block`.
    output = bytes(array.copy(block))
    for i in range(8):
        offset = i * line_size + j * 16
        output[offset:offset + 16] = col[i * 16:(i + 1) * 16]
    return output
@typechecked
def G(X: bytes_t(block_size), Y: bytes_t(block_size)) -> bytes_t(block_size):
    # Argon2 compression function G: R = X ^ Y; apply P to each of the 8
    # rows, then to each of the 8 columns of the copy Q; return Q ^ R.
    R = bytes(array.create(block_size, uint8(0)))
    R[:] = xor_blocks(X, Y)
    Q = bytes(array.copy(R))
    for i in range(8):
        row = bytes(array.copy(Q[i * line_size:(i + 1) * line_size]))
        row = P(row)
        Q[line_size * i:line_size * (i + 1)] = row
    for j in range(8):
        col = extract_block_column(j, Q)
        col = P(col)
        Q = update_block_column(j, col, Q)
    return xor_blocks(Q, R)
@typechecked
def extend_to_block(input: refine(vlbytes_t, lambda x: array.length(x) <= block_size)) -> bytes_t(block_size):
    # Zero-pad `input` on the right to a full 1024-byte block.
    # BUG FIX: the slice length referenced the undefined name `intput`
    # (guaranteed NameError at runtime); it must be the parameter `input`.
    output = array.create(block_size, uint8(0))
    output[:array.length(input)] = input
    return output
@typechecked
def block_offset(lanes: lanes_t, columns: size_nat_t, i: size_nat_t, j: size_nat_t) \
        -> contract(size_nat_t,
                    lambda lanes, columns, i, j: columns <= 4 and lanes *
                    columns * block_size <= max_size_t and
                    i < lanes and j < columns,
                    lambda lanes, columns, i, j, res: res <= (columns * lanes - 1) * block_size):
    # Byte offset of block (lane i, column j) in the flat row-major memory.
    return nat(block_size * (columns * i + j))
@typechecked
def xor_last_column(lanes: lanes_t, columns: size_nat_t, memory: vlbytes_t) \
        -> contract(vlbytes_t,
                    lambda lanes, columns, memory: columns <= 4 and columns *
                    columns * block_size <= max_size_t and
                    array.length(memory) == lanes * columns * block_size,
                    lambda lanes, columns, memory, res: array.length(res) ==
                    lanes * columns * block_size):
    # XOR together the final-column block of every lane (the input to the
    # final Argon2 tag hash).
    # NOTE(review): the precondition says columns * columns where sibling
    # contracts use lanes * columns, and the postcondition claims the full
    # memory size although a single block_size block is returned -- confirm.
    output = array.create(block_size, uint8(0))
    offset = block_offset(lanes, columns, nat(0), nat(columns - 1))
    output = memory[offset:offset + block_size]
    for i in range(lanes - 1):
        offset = block_offset(lanes, columns, nat(i + 1), nat(columns - 1))
        output = xor_blocks(
            output, memory[offset:offset + block_size])
    return vlbytes_t(output)
@typechecked
def pseudo_random_generation(j1: size_nat_t, r_size: size_nat_t) \
        -> contract(size_nat_t,
                    lambda j1, r_size: r_size != 0,
                    lambda j1, r_size, res: res < r_size):
    # Map j1 onto [0, r_size) with Argon2's quadratic distribution:
    # x = j1^2 / 2^32; y = r_size * x / 2^32; result = r_size - 1 - y.
    tmp = (j1 * j1) // (2**32)
    tmp = (tmp * r_size) // (2**32)
    return nat(r_size - 1 - tmp)
@typechecked
def seeds_length(lanes: lanes_t, columns: size_nat_t) \
        -> contract(size_nat_t,
                    lambda lanes, columns: columns <= 4 and lanes *
                    columns * block_size <= max_size_t,
                    lambda lanes, columns, res: True):
    # Number of u32 seeds generated per segment: whole 128-address rounds
    # (rounded up), two u32s (J1, J2) per address.
    segment_length = columns // 4
    tmp = segment_length // line_size + 1
    return nat(tmp * line_size * 2)
@typechecked
def generate_seeds(lanes: lanes_t, columns: size_nat_t, i: size_nat_t, iterations: size_nat_t, t: size_nat_t, segment: segment_t) \
        -> contract(vlarray_t(uint32),
                    lambda lanes, columns, i, iterations, t, segment: columns <= 4 and lanes *
                    columns * block_size <= max_size_t and i < lanes and
                    t < iterations,
                    lambda lanes, columns, i, iterations, t, segment, res:
                    array.length(res) == seeds_length(lanes, columns)):
    # Argon2i data-independent addressing: derive pseudo-random (J1, J2)
    # pairs by hashing (pass, lane, segment, total blocks, passes, type,
    # counter) through two applications of G on zero blocks (RFC 9106).
    segment_length = columns // 4
    pseudo_rands_rounds = segment_length // line_size + 1
    pseudo_rands_size = pseudo_rands_rounds * line_size * 2
    pseudo_rands = array.create(pseudo_rands_size, uint32(0))
    for ctr in range(pseudo_rands_rounds):
        zero_block = array.create(block_size, uint8(0))
        concat_block = array.create(block_size, uint8(0))
        concat_block[0:8] = bytes.from_uint64_le(uint64(t))
        concat_block[8:16] = bytes.from_uint64_le(uint64(i))
        concat_block[16:24] = bytes.from_uint64_le(uint64(segment))
        concat_block[24:32] = bytes.from_uint64_le(uint64(lanes * columns))
        concat_block[32:40] = bytes.from_uint64_le(uint64(iterations))
        concat_block[40:48] = bytes.from_uint64_le(uint64(argon_type))
        concat_block[48:56] = bytes.from_uint64_le(uint64(ctr + 1))
        arg_block = G(bytes(zero_block), bytes(concat_block))
        address_block = G(bytes(zero_block), bytes(arg_block))
        addresses_list = bytes.to_uint32s_le(address_block)
        pseudo_rands[ctr * line_size *
                     2:(ctr + 1) * line_size * 2] = addresses_list
    return pseudo_rands
@typechecked
def map_indexes(t: size_nat_t, segment: segment_t, lanes: lanes_t, columns: size_nat_t,
                idx: size_nat_t, i: size_nat_t, j: size_nat_t, j1: uint32_t, j2: uint32_t) \
    -> contract(Tuple[size_nat_t, size_nat_t],
                lambda t, segment, lanes, columns, idx, i, j, j1, j2:
                columns <= 4 and lanes * columns * block_size <= max_size_t and
                idx < columns // 4 and i < lanes and j < columns and
                j > 2 if t == 0 else True and j == segment *
                (columns // 4) + idx,
                lambda t, segment, lanes, columns, idx, i, j, j1, j2, res:
                res[0] < lanes and res[1] < columns):
    """Map the pseudo-random words (j1, j2) to the (lane, column) of the
    reference block for position (i, j) in pass t.

    j2 selects the reference lane; j1 selects a column inside the window
    of referenceable blocks via pseudo_random_generation.

    NOTE(review): in the precondition lambda, Python precedence parses
    `j > 2 if t == 0 else True and j == ...` as one conditional
    expression; the intent was presumably
    `(j > 2 if t == 0 else True) and j == ...`.  Harmless today because
    contract() never evaluates its lambdas -- confirm before relying on
    the contract.
    """
    segment_length = columns // 4
    # First segment of the first pass: only the current lane is available.
    if t == 0 and segment == 0:
        i_prime = nat(i)
    else:
        i_prime = nat(uintn.to_int(j2) % lanes)
    # r_size = size of the set of candidate reference blocks; it depends
    # on the pass number, whether we stay in the same lane, and idx.
    if t == 0:
        if segment == 0 or i == i_prime:
            r_size = j - 1
        elif idx == 0:
            r_size = segment * segment_length - 1
        else:
            r_size = segment * segment_length
    elif i == i_prime: # same_lane
        r_size = columns - segment_length + idx - 1
    elif idx == 0:
        r_size = columns - segment_length - 1
    else:
        r_size = columns - segment_length
    # On later passes the window starts just after the current segment
    # (wrapping around), except for the last segment.
    if t != 0 and segment != 3:
        r_start = (segment + 1) * segment_length
    else:
        r_start = 0
    j_prime_tmp = pseudo_random_generation(uintn.to_nat(j1), nat(r_size))
    j_prime = nat((r_start + j_prime_tmp) % columns)
    return (i_prime, j_prime)
@typechecked
def fill_segment(h0: bytes_t(64), iterations: size_nat_t, segment: segment_t, t_len: t_len_t,
                 lanes: lanes_t, columns: size_nat_t, t: size_nat_t, i: size_nat_t, memory: vlbytes_t) \
    -> contract(vlbytes_t,
                lambda h0, iterations, segment, t_len, lanes, columns, t, i, memory:
                columns <= 4 and lanes * columns * block_size <= max_size_t and
                i < lanes and j < columns and
                array.length(memory) == lanes * columns * block_size,
                lambda h0, iterations, segment, t_len, lanes, columns, t, i, memory, res:
                array.length(res) == lanes * columns * block_size):
    """Fill one segment of lane i during pass t, returning updated memory.

    The first two blocks of each lane on pass 0 are derived from h0; every
    other block is G(previous block, reference block), XORed into the old
    block contents on later passes.

    NOTE(review): the precondition lambda references an undefined name
    `j` (there is no such parameter).  This never raises because
    contract() does not evaluate its lambdas, but the condition looks
    like a copy-paste leftover -- confirm what was intended.
    """
    output = bytes(array.copy(memory))
    segment_length = columns // 4
    counter = 0
    pseudo_rands_size = seeds_length(lanes, columns)
    pseudo_rands = generate_seeds(lanes, columns, i, iterations, t, segment)
    for idx in range(segment_length):
        # j is the absolute column of this block within the lane.
        j = size_nat(nat(segment * segment_length + idx))
        if t == 0 and j < 2:
            # Pass 0, columns 0 and 1: block = H'(h0 || LE32(j) || LE32(i)).
            h0_i_j = bytes(array.create(72, uint8(0)))
            h0_i_j[0:64] = h0
            h0_i_j[64:68] = bytes.from_uint32_le(uint32(j))
            h0_i_j[68:72] = bytes.from_uint32_le(uint32(i))
            new_block = h_prime(size_nat(block_size), h0_i_j)
            offset = block_offset(lanes, columns, i, j)
            output[offset:offset + block_size] = new_block
        else:
            # J_1/J_2 words for this block, as produced by generate_seeds.
            j1 = pseudo_rands[2 * idx]
            j2 = pseudo_rands[2 * idx + 1]
            (i_prime, j_prime) = map_indexes(
                t, segment, lanes, columns, nat(idx), i, j, j1, j2)
            # arg1 = previous block in this lane (wrapping), arg2 = reference block.
            offset = block_offset(lanes, columns, i, nat((j - 1) % columns))
            arg1 = output[offset:offset + block_size]
            offset = block_offset(lanes, columns, i_prime, j_prime)
            arg2 = output[offset:offset + block_size]
            new_block = G(arg1, arg2)
            if t != 0:
                # Later passes XOR the fresh block into the existing one.
                offset = block_offset(lanes, columns, i, j)
                old_block = output[offset:offset + block_size]
                output[offset:offset +
                       block_size] = xor_blocks(new_block, old_block)
            else:
                offset = block_offset(lanes, columns, i, j)
                output[offset:offset + block_size] = new_block
    return output
@typechecked
def argon2i(p: vlbytes_t, s: vlbytes_t, lanes: lanes_t, t_len: t_len_t, m: size_nat_t,
            iterations: size_nat_t, x: vlbytes_t, k: vlbytes_t) \
    -> contract(vlbytes_t,
                lambda p, s, lanes, t_len, m, iterations, x, k: array.length(s >= 8) and
                m >= 8 * lanes and (m + 4 * lanes) * block_size <= max_size_t and
                iterations >= 1 and array.length(x) + 4 <= max_size_t - 2 * line_size and
                array.length(p) + array.length(s) + array.length(x) +
                array.length(k) + 11 * 4 <= max_size_t - 2 * line_size,
                lambda p, s, lanes, t_len, m, iterations, x, k, res:
                array.length(res) == compute_variable_length_output_size(t_len)):
    """Argon2i top level: password p, salt s, secret k, associated data x.

    Builds the H_0 seed, fills the memory matrix segment by segment over
    `iterations` passes, XORs the last column and hashes it down to
    t_len output bytes.

    NOTE(review): the precondition lambda contains `array.length(s >= 8)`
    which presumably should be `array.length(s) >= 8`.  It never raises
    because contract() does not evaluate its lambdas -- confirm intent.
    """
    # H_0 input: six LE32 parameters, then length-prefixed P, S, K and X.
    h0_arg:vlbytes_t = vlbytes_t(array.create(10 * 4 + array.length(p) +
                                 array.length(k) + array.length(s) + array.length(x), uint8(0)))
    h0_arg[0:4] = bytes.from_uint32_le(uint32(lanes))
    h0_arg[4:8] = bytes.from_uint32_le(uint32(t_len))
    h0_arg[8:12] = bytes.from_uint32_le(uint32(m))
    h0_arg[12:16] = bytes.from_uint32_le(uint32(iterations))
    h0_arg[16:20] = bytes.from_uint32_le(uint32(version_number))
    h0_arg[20:24] = bytes.from_uint32_le(uint32(argon_type))
    h0_arg[24:28] = bytes.from_uint32_le(uint32(array.length(p)))
    offset = 28 + array.length(p)
    h0_arg[28:offset] = p
    h0_arg[offset:offset +
           4] = bytes.from_uint32_le(uint32(array.length(s)))
    h0_arg[offset + 4:offset + 4 + array.length(s)] = s
    offset = offset + 4 + array.length(s)
    h0_arg[offset:offset +
           4] = bytes.from_uint32_le(uint32(array.length(k)))
    h0_arg[offset + 4:offset + 4 + array.length(k)] = k
    offset = offset + 4 + array.length(k)
    h0_arg[offset:offset +
           4] = bytes.from_uint32_le(uint32(array.length(x)))
    h0_arg[offset + 4:offset + 4 + array.length(x)] = x
    offset = offset + 4 + array.length(x)
    h0 = h(h0_arg, nat(64))
    # Memory is organised as `lanes` rows of `columns` blocks; the column
    # count is rounded down to a multiple of 4 (one segment per quarter).
    columns = size_nat(nat(4 * (m // (4 * lanes))))
    number_of_blocks = lanes * columns
    memory_size = block_size * number_of_blocks
    memory = array.create(memory_size, uint8(0))
    # iterations x 4 segments x lanes segment fills.
    for t in range(iterations):
        for segment in range(4):
            for i in range(lanes):
                memory = fill_segment(h0, iterations, segment_t(nat(segment)),
                                      t_len, lanes, columns, size_nat(nat(t)), size_nat(nat(i)), memory)
    final_block = xor_last_column(lanes, columns, bytes(memory))
    return h_prime(size_nat(nat(t_len)), final_block)
<file_sep>/doc/poly-slides/config.toml
baseURL = "https://hacs-workshop.github.io/hacspec/poly-slides"
languageCode = "en-us"
title = "hacspec"
theme = "reveal-hugo"
SectionPagesMenu = "main"
enableRobotsTXT = true
[outputFormats.Reveal]
baseName = "index"
mediaType = "text/html"
isHTML = true
[params.reveal_hugo]
theme = "moon"
highlight_theme = "solarized-light"
slide_number = false
#transition = "zoom"
reveal_cdn = "reveal-js"
<file_sep>/tests/aes_test.py
from specs.aes import *
from sys import exit
import json
# mypy only checks functions that have types. So add an argument :)
def main(x: int) -> None:
    """Run the AES-128 test vectors from the JSON fixture file.

    Prints pass/fail per vector and exits with status 1 on the first
    mismatch between computed and expected ciphertext.
    """
    # Use a context manager so the fixture file is closed deterministically
    # (the previous version opened it without ever closing it).
    with open('tests/test_vectors/aes128_test_vectors.json') as file:
        aes128_test_vectors = json.load(file)
    for i, vector in enumerate(aes128_test_vectors):
        msg = bytes.from_hex(vector['input'])
        k = bytes.from_hex(vector['key'])
        n = bytes.from_hex(vector['nonce'])
        ctr = int(vector['counter'], 16)
        expected = bytes.from_hex(vector['output'])
        computed = aes128_encrypt(k, n, uint32(ctr), msg)
        if (computed == expected):
            print("Aes128 Test ",i," passed.")
        else:
            print("Aes128 Test ",i," failed:")
            print("expected ciphertext:",expected)
            print("computed ciphertext:",computed)
            exit(1)
    # rng = open("/dev/urandom","rb")
    # msgs = bytes.from_hex(rng.read(16 * 1024).hex())
    # key = bytes.from_hex(rng.read(16).hex())
    # nonce = bytes.from_hex(rng.read(12).hex())
    # ctr = uint32(1)
    # res = uint8(0)
    # for i in range(16):
    #     msg = msgs[i*1024:i*1024+1024]
    #     computed = aes128_encrypt(key,nonce,ctr,msg)
    #     res ^= computed[0]
    # print(res)
if __name__ == "__main__":
    main(0)
<file_sep>/lib/speclib.py
from typing import Any, NewType, List, TypeVar, Generic, Iterator, Iterable, Union, Generator, Sequence, Tuple, Callable, Type, cast
from types import FunctionType
from random import SystemRandom as rand
from random import choices as random_string
from string import ascii_uppercase, ascii_lowercase
from math import ceil, log, floor
from importlib import import_module
import builtins
from typeguard import typechecked
from inspect import getfullargspec
from os import environ
from inspect import getsource
from copy import copy
# Debug flag; set the HACSPEC_DEBUG environment variable to enable it.
DEBUG = environ.get('HACSPEC_DEBUG')
# Library-wide error type; raised by fail() on every contract violation.
class Error(Exception):
    pass
@typechecked
def fail(s: str) -> None:
    # Abort with a speclib Error carrying the given message.
    raise Error(s)
# Generic type variables used by the polymorphic containers below.
T = TypeVar('T')
U = TypeVar('U')
V = TypeVar('V')
W = TypeVar('W')
X = TypeVar('X')
@typechecked
def tuple_t(*args) -> type:
    # Degenerate tuple-type constructor: the element types are currently
    # ignored and the plain builtin `tuple` is returned (see TODO below).
    return tuple
# TODO: Python 3.7 changes the way generics work, i.e. they are no classes
# anymore and therefore no type. We have to find a way around that.
# https://www.python.org/dev/peps/pep-0560/
# if len(args) == 1:
#     return Tuple[args[0]]
# elif len(args) == 2:
#     return Tuple[args[0],args[1]]
# elif len(args) == 3:
#     return Tuple[args[0],args[1],args[2]]
# elif len(args) == 4:
#     return Tuple[args[0],args[1],args[2],args[3]]
# elif len(args) == 5:
#     return Tuple[args[0],args[1],args[2],args[3],args[4]]
# elif len(args) == 6:
#     return Tuple[args[0],args[1],args[2],args[3],args[4],args[5]]
# else:
#     fail("only implemented tuples up to size 6")
class _result(Generic[T]):
    """Result type: either a valid value of type T or an error string.

    Construct via retval()/error(); query via is_valid()/get_value()/
    get_error().
    """
    @typechecked
    def __init__(self, is_valid:bool, value:Union[T,str]) -> None:
        # Bug fix: the flag was previously hard-coded to True, so results
        # built with error() still claimed to be valid.
        self.is_valid = is_valid
        self.value = value
    @staticmethod
    @typechecked
    def retval(v:T) -> '_result[T]':
        # Wrap a successful value.
        return _result(True,v)
    @staticmethod
    @typechecked
    def error(v:str) -> '_result[T]':
        # Wrap an error message.
        return _result(False,v)
    @staticmethod
    @typechecked
    def is_valid(a:'_result[T]') -> bool:
        # Reads the instance attribute set in __init__ (which shadows this
        # static method on instances).
        return a.is_valid
    @staticmethod
    @typechecked
    def get_value(a:'_result[T]') -> T:
        # Extract the value; aborts on an error result.
        if a.is_valid:
            return a.value
        else:
            fail ("cannot call get_value on error result")
    @staticmethod
    @typechecked
    def get_error(a:'_result[T]') -> str:
        # Extract the error message; aborts on a valid result.
        if a.is_valid:
            fail ("cannot call get_error on valid result")
        else:
            return a.value
# Public aliases for the result/option type constructors.
result = _result
@typechecked
def result_t(T: type):
    # Type constructor: result over T (the parameter is currently unused).
    return _result
@typechecked
def option_t(T: type) -> Union[T, None]:
    # Option type: either a T or None.
    return Union[T, None]
@typechecked
def refine(t: type, f: Callable[[T], bool]) -> Tuple[type,Callable[[T],T]]:
    # Returns (t, check) where check(x) aborts unless x is a t satisfying
    # the predicate f, and returns x unchanged otherwise.
    def refine_check(x):
        if not isinstance(x,t) or not f(x):
            print("got :"+str(x))
            print("expected : x:"+str(t)+"{"+str(f)+"}")
            fail("refinement check failed")
        return x
    return (t,refine_check)
def refine_t(t:type, f:Callable[[T], bool]) -> type:
    # Like refine(), but only returns the base type (the runtime check is
    # discarded) -- refinement types are erased at runtime.
    (t,f) = refine(t,f)
    return t
# nat/pos are the checked constructors, nat_t/pos_t the (erased) types.
nat_t,nat = refine(int, lambda x: x >= 0)
pos_t,pos = refine(nat_t, lambda x: x > 0)
@typechecked
def range_t(min: int, max: int) -> type:
    # Half-open integer range type [min, max).
    return refine_t(int, lambda x: x >= min and x < max)
# TODO: make this actually do something.
@typechecked
def contract(t: type, pre: Callable[..., bool], post: Callable[..., bool]) -> type:
    # Contract annotation: pre/post are documentation only -- they are
    # never evaluated; the base type t is returned unchanged.
    return t
@typechecked
def contract3(pre: Callable[..., bool], post: Callable[..., bool]) -> FunctionType:
    """Decorator enforcing pre(args) before and post(args, result) after
    each call of the wrapped function; aborts via fail() on violation."""
    @typechecked
    def decorator(func: Callable[..., Any]) -> Any:
        # **kwargs are not allowed in hacspec.
        def wrapper(*args):
            pr = pre(*args)
            if not pr:
                fail("Precondition for " + func.__name__ + " failed.")
            res = func(*args)
            # The postcondition receives the arguments plus the result.
            unpacked_args = list(args)
            unpacked_args.append(res)
            po = post(*unpacked_args)
            if not po:
                fail("Postcondition for " + func.__name__ + " failed.")
            return res
        return wrapper
    return decorator
class _natmod:
    """Natural numbers modulo a fixed positive modulus.

    Base numeric type of the spec library: the stored value is always
    reduced into [0, modulus), and arithmetic is only defined between
    operands of the same concrete class and modulus.
    """
    __slots__ = ['v', 'modulus']
    @typechecked
    def __init__(self, x: Union[int,'_natmod'], modulus: int) -> None:
        if modulus < 1:
            fail("cannot create _natmod with modulus <= 0")
        else:
            # Accept either a plain int or another _natmod as seed value.
            xv = 0
            if isinstance(x,_natmod):
                xv = x.v
            else:
                xv = x
            self.modulus = modulus
            self.v = xv % modulus
    @typechecked
    def __str__(self) -> str:
        return hex(self.v)
    @typechecked
    def __repr__(self) -> str:
        return hex(self.v)
    @typechecked
    def __int__(self) -> int:
        # NOTE: this method was previously defined twice with identical
        # bodies; the redundant duplicate has been removed.
        return self.v
    @typechecked
    def __eq__(self, other) -> bool:
        if not isinstance(other, _natmod):
            print(type(other))
            fail("You can only compare two natmods.")
        return (self.modulus == other.modulus and
                self.v == other.v)
    @typechecked
    def __add__(self, other: '_natmod') -> '_natmod':
        if not isinstance(other, _natmod) or \
           other.__class__ != self.__class__ or \
           other.modulus != self.modulus:
            fail("+ is only valid for two _natmod of same modulus.")
        return _natmod.set_val(self, (self.v+other.v) % self.modulus)
    @typechecked
    def __sub__(self, other: '_natmod') -> '_natmod':
        if not isinstance(other, _natmod) or \
           other.__class__ != self.__class__ or \
           other.modulus != self.modulus:
            fail("- is only valid for two _natmod of same modulus.")
        # Add the modulus first so the difference stays non-negative.
        return _natmod.set_val(self, (self.modulus + self.v - other.v) % self.modulus)
    @typechecked
    def __mul__(self, other: '_natmod') -> '_natmod':
        if not isinstance(other, _natmod) or \
           other.__class__ != self.__class__ or \
           other.modulus != self.modulus:
            fail("* is only valid for two _natmod of same modulus.")
        return _natmod.set_val(self, (self.v*other.v) % self.modulus)
    @typechecked
    def __pow__(self, other: nat_t) -> '_natmod':
        if not isinstance(other, nat_t) or other < 0:
            # (message fixed: it previously talked about "*")
            fail("** is only valid for non-negative exponents")
        # Fast paths for the common small exponents.
        if other == 0:
            return _natmod.set_val(self, 1)
        elif other == 1:
            return copy(self)
        if other == 2:
            return self * self
        # General case: Python's built-in modular exponentiation.
        return _natmod.set_val(self, pow(self.v,other,self.modulus))
    @staticmethod
    @typechecked
    def to_int(x: '_natmod') -> nat_t:
        if not isinstance(x, _natmod):
            fail("to_int is only valid for _natmod.")
        return x.v
    @staticmethod
    @typechecked
    def set_val(x: '_natmod',v:int) -> '_natmod':
        # Build a fresh instance of x's concrete class holding value v.
        # The caller is responsible for v being in range.
        _t = x.__class__
        result = _t.__new__(_t)
        # Manual initialisation; faster than iterating __slots__.
        try:
            result.bits = x.bits
        except:
            pass
        result.modulus = x.modulus
        result.v = v
        return result
    def __copy__(self):
        result = self.__class__.__new__(self.__class__)
        for s in self.__slots__:
            setattr(result, s, copy(getattr(self, s)))
        return result
    @staticmethod
    @typechecked
    def to_nat(x: '_natmod') -> nat_t:
        if not isinstance(x, _natmod):
            fail("to_nat is only valid for _natmod.")
        return x.v
class _uintn(_natmod):
    """Fixed-width unsigned integer: width `bits`, modulus 2**bits.

    Adds bitwise operations, shifts, rotations and bit accessors on top
    of the modular arithmetic inherited from _natmod.
    """
    __slots__ = ['v', 'modulus', 'bits']
    @typechecked
    def __init__(self, x: Union[int,'_uintn'], bits: int) -> None:
        modulus = 1 << bits
        _natmod.__init__(self,x,modulus)
        self.bits = bits
    def __eq__(self, other) -> bool:
        if not isinstance(other, _uintn):
            fail("You can only compare two uints.")
        return (self.bits == other.bits and
                self.v == other.v)
    @staticmethod
    @typechecked
    def num_bits(x: '_uintn') -> int:
        if not isinstance(x, _uintn):
            fail("num_bits is only valid for _uintn.")
        return x.bits
    @typechecked
    def __inv__(self) -> '_uintn':
        # Bitwise complement; masked so the stored value stays in range
        # (previously ~v stored a negative Python int).
        return _uintn.set_val(self, ~self.v & (self.modulus - 1))
    @typechecked
    def __invert__(self) -> '_uintn':
        # Bitwise complement; masked so the stored value stays in range
        # (previously ~v stored a negative Python int).
        return _uintn.set_val(self, ~self.v & (self.modulus - 1))
    @typechecked
    def __or__(self, other: '_uintn') -> '_uintn':
        if not isinstance(other, _uintn) or \
           other.__class__ != self.__class__ or \
           other.bits != self.bits:
            fail("| is only valid for two _uintn of same bits.")
        return _uintn.set_val(self,self.v | other.v)
    @typechecked
    def __and__(self, other: '_uintn') -> '_uintn':
        if not isinstance(other, _uintn) or \
           other.__class__ != self.__class__ or \
           other.bits != self.bits:
            fail("& is only valid for two _uintn of same bits.")
        return _uintn.set_val(self,self.v & other.v)
    @typechecked
    def __xor__(self, other: '_uintn') -> '_uintn':
        if not isinstance(other, _uintn) or \
           other.__class__ != self.__class__ or \
           other.bits != self.bits:
            fail("^ is only valid for two _uintn of same bits.")
        return _uintn.set_val(self,self.v ^ other.v)
    @typechecked
    def __lshift__(self, other: int) -> '_uintn':
        if not isinstance(other, int) or other < 0 or other > self.bits:
            fail("lshift value has to be an int between 0 and bits")
        # Mask to the word width so shifted-out bits are discarded.
        return _uintn.set_val(self,self.v << other & (self.modulus - 1))
    @typechecked
    def __rshift__(self, other: int) -> '_uintn':
        if not isinstance(other, int) or other < 0 or other > self.bits:
            # (message fixed: it previously said "lshift")
            fail("rshift value has to be an int between 0 and bits")
        return _uintn.set_val(self,self.v >> other & (self.modulus - 1))
    @typechecked
    def __getitem__(self, key: Union[int, slice]) -> '_uintn':
        # Single-bit access x[i], or bit-slice access x[start:stop].
        try:
            if isinstance(key, slice):
                return _uintn(self.v >> key.start,
                              key.stop - key.start)
            else:
                return _uintn(self.v >> key,1)
        except:
            print('_uintn content:', self.v)
            print('desired index:', key)
            fail('_uintn bit access error')
    @typechecked
    def __getslice__(self, i: int, j: int) -> '_uintn':
        # Python-2 era slice protocol; kept for compatibility.
        return _uintn(self.v >> i, j - i)
    @staticmethod
    @typechecked
    def rotate_left(x: '_uintn', other: int) -> '_uintn':
        if not isinstance(x, _uintn) or \
           not isinstance(other, int) or \
           other <= 0 or other >= x.bits:
            fail("rotate_left value has to be an int strictly between 0 and bits")
        return (x << other) | (x >> (x.bits - other))
    @staticmethod
    @typechecked
    def rotate_right(x: '_uintn', other: int) -> '_uintn':
        if not isinstance(x, _uintn) or \
           not isinstance(other, int) or \
           other <= 0 or other >= x.bits:
            # (message fixed: it previously said "rotate_left")
            fail("rotate_right value has to be an int strictly between 0 and bits")
        return (x >> other) | (x << (x.bits - other))
    @staticmethod
    @typechecked
    def reverse(x: '_uintn') -> '_uintn':
        # Reverse the bit order of x (zero-padded to the full width).
        if not isinstance(x, _uintn):
            fail("reverse only works for _uintn")
        b = '{:0{width}b}'.format(x.v, width=x.bits)
        return _uintn.set_val(x,int(b[::-1],2))
    @staticmethod
    @typechecked
    def bit_count(x:'_uintn') -> int:
        # Population count: number of set bits in x.
        if isinstance(x,_uintn):
            cnt = 0
            for i in range(x.bits):
                cnt += uintn.to_int(x[i])
            return cnt
        else:
            fail("bit_count arg must be a uintn")
    @staticmethod
    @typechecked
    def get_bit(x:'_uintn', index:int) -> '_uintn':
        # Return bit `index` of x as a 1-bit _uintn.
        if isinstance(x,_uintn) and isinstance(index,int) \
           and index >= 0 and index < x.bits:
            return x[index]
        else:
            fail("get_bit index has to be an int between 0 and bits - 1")
    @staticmethod
    @typechecked
    def set_bit(x:'_uintn', index:int, value:int) -> '_uintn':
        # Return a copy of x with bit `index` forced to `value` (0 or 1).
        if isinstance(x,_uintn) and isinstance(index,int) \
           and index >= 0 and index < x.bits \
           and value >= 0 and value < 2:
            tmp1 = ~ (_uintn(1,x.bits) << index)
            tmp2 = _uintn(value,x.bits) << index
            return (x & tmp1) | tmp2
        else:
            fail("set_bit index has to be an int between 0 and bits - 1")
    @staticmethod
    @typechecked
    def set_bits(x:'_uintn', start:int, end:int, value:'_uintn') -> '_uintn':
        # Return a copy of x with bits [start, end) replaced by `value`
        # (which must be exactly end - start bits wide).
        if isinstance(x,_uintn) and isinstance(start,int) \
           and isinstance(end,int) and isinstance(value,_uintn) \
           and start >= 0 and start <= end \
           and end <= x.bits and start < x.bits \
           and value.bits == end - start:
            tmp1 = ~ (_uintn((1 << (end - start)) - 1,x.bits) << start)
            tmp2 = _uintn(value,x.bits) << start
            return (x & tmp1) | tmp2
        else:
            fail("set_bits has to be an interval between 0 and bits - 1")
# Refinement-type constructor for n-bit unsigned integers (erased at runtime).
def uintn_t(bits:int) -> type:
    return refine_t(_uintn,lambda x: x.bits == bits)
uintn = _uintn
# Refinement-type constructor for naturals modulo a given modulus.
def natmod_t(modulus:int) -> type:
    return refine_t(_natmod,lambda x: x.modulus == modulus)
natmod = _natmod
# Bitvectors are just unsigned integers of a fixed width.
bitvector_t = uintn_t
bitvector = uintn
# Concrete fixed-width integer classes; each pairs a constructor class
# (e.g. uint8) with its erased refinement type (e.g. uint8_t).
class bit(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,1)
bit_t = uintn_t(1)
class uint8(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,8)
uint8_t = uintn_t(8)
class uint16(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,16)
uint16_t = uintn_t(16)
class uint32(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,32)
uint32_t = uintn_t(32)
class uint64(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,64)
uint64_t = uintn_t(64)
class uint128(_uintn):
    # __slots__ = []
    def __init__(self, x: Union[int,'_uintn']) -> None:
        _uintn.__init__(self,x,128)
uint128_t = uintn_t(128)
class _array(Generic[T]):
    """Growable, bounds-checked sequence used for all hacspec arrays.

    Wraps a Python list and caches its length; indexing is restricted to
    non-negative, in-range indices.
    """
    __slots__ = ['l','len']
    @typechecked
    def __init__(self, x: Union[Sequence[T], List[T], '_array[T]']) -> None:
        if (not isinstance(x, Sequence)) and (not isinstance(x, _array)) and (not isinstance(x,List)):
            fail("_array() takes a list or sequence or _array as first argument.")
        # Note: constructing from another _array or a list aliases the
        # underlying list rather than copying it.
        if isinstance(x,_array):
            self.l = x.l
        elif isinstance(x,list):
            self.l = x
        else:
            self.l = list(x)
        self.len = len(self.l)
    @typechecked
    def __len__(self) -> int:
        return self.len
    @typechecked
    def __str__(self) -> str:
        return str(self.l)
    @typechecked
    def __repr__(self) -> str:
        return repr(self.l)
    @typechecked
    def __iter__(self) -> Iterator[T]:
        return iter(self.l)
    @typechecked
    def __eq__(self, other: '_array[T]') -> bool:
        if isinstance(other, _array):
            return self.l == other.l
        fail("_array.__eq__ only works on two _arrays.")
    @typechecked
    def __ne__(self, other: '_array[T]') -> bool:
        if isinstance(other, _array):
            return self.l != other.l
        fail("_array.__ne__ only works on two _arrays.")
    @typechecked
    def __getitem__(self, key: Union[int, slice]) -> Union['_array[T]', T]:
        # a[i] for 0 <= i < len, or a[start:stop] returning a new _array.
        try:
            if isinstance(key, slice):
                return _array(self.l[key.start:key.stop])
            elif isinstance(key,int) and key >= 0 and key < self.len:
                return self.l[key]
            # Bug fix: negative or out-of-range integer keys previously
            # fell through here and returned None silently.
            raise IndexError(key)
        except:
            print('array access error:')
            print('array content:', self.l)
            print('array index:', key)
            fail('array index error')
    @typechecked
    def __getslice__(self, i: int, j: int) -> '_array[T]':
        # Python-2 era slice protocol; not invoked on Python 3.
        if i >= 0 and i < self.len and \
           i <= j and j <= self.len:
            return _array(self.l[i:j])
    @typechecked
    def __setitem__(self, key: Union[int, slice], v) -> None:
        if isinstance(key, slice):
            self.l[key.start:key.stop] = v
        else:
            self.l[key] = v
    @typechecked
    def __copy__(self) -> '_array[T]':
        result = self.__class__.__new__(self.__class__)
        for s in self.__slots__:
            setattr(result, s, copy(getattr(self, s)))
        return result
    @staticmethod
    @typechecked
    def create(l: int, default:T) -> '_array[T]':
        # Array of length l, every cell holding `default` (same object).
        res = _array([default] * l)
        return res
    @staticmethod
    @typechecked
    def empty() -> '_array[T]':
        return _array([])
    @staticmethod
    @typechecked
    def singleton(x:T) -> '_array[T]':
        return _array([x])
    @staticmethod
    @typechecked
    def createi(l: int, f:Callable[[int],T]) -> '_array[T]':
        # Array of length l with cell i initialised to f(i).
        return _array([f(i) for i in range(l)])
    @staticmethod
    @typechecked
    def length(a: '_array[T]') -> int:
        if not isinstance(a, _array):
            fail("array.length takes a _array.")
        return len(a)
    @staticmethod
    @typechecked
    def copy(x: '_array[T]') -> '_array[T]':
        return copy(x)
    @staticmethod
    @typechecked
    def concat(x: '_array[T]', y: '_array[T]') -> '_array[T]':
        # New array: shallow copy of x followed by element copies of y.
        res1 = copy(x)
        res1.l = res1.l + list([copy(yi) for yi in y.l])
        res1.len += y.len
        return res1
    @staticmethod
    @typechecked
    def split(x: '_array[T]', len:int) -> Tuple['_array[T]','_array[T]']:
        # Split x into its first `len` elements and the remainder.
        res1 = copy(x)
        res2 = copy(x)
        res1.len = len
        res2.len = x.len - len
        res1.l = x.l[0:len]
        res2.l = x.l[len:x.len]
        return res1,res2
    @staticmethod
    @typechecked
    def zip(x: '_array[T]', y: '_array[U]') -> '_array[Tuple[T,U]]':
        return _array(list(zip(x.l, y.l)))
    @staticmethod
    @typechecked
    def enumerate(x: '_array[T]') -> '_array[Tuple[int,T]]':
        return _array(list(enumerate(x.l)))
    @staticmethod
    @typechecked
    def split_blocks(a: '_array[T]', blocksize: int) -> 'Tuple[_array[_array[T]],_array[T]]':
        # Split a into full blocks of `blocksize` plus the trailing partial
        # block (possibly empty).
        if not isinstance(a, _array):
            fail("split_blocks takes a _array as first argument.")
        if not isinstance(blocksize, int):
            fail("split_blocks takes an int as second argument.")
        nblocks = array.length(a) // blocksize
        rem = array.length(a) % blocksize
        blocks = _array([a[x*blocksize:(x+1)*blocksize]
                         for x in range(nblocks)])
        last = _array(a[array.length(a)-rem:array.length(a)])
        return blocks, last
    @staticmethod
    @typechecked
    def concat_blocks(blocks: '_array[_array[T]]', last: '_array[T]') -> '_array[T]':
        # Inverse of split_blocks: flatten the blocks and append `last`.
        res = _array.concat(_array([b for block in blocks for b in block]),last)
        return res
    @staticmethod
    @typechecked
    def map(f: Callable[[T], U], a: '_array[T]') -> '_array[U]':
        res = copy(a)
        res.l = list(map(f,res.l))
        return res
    @staticmethod
    @typechecked
    def create_random(l: nat_t, t: Type[_uintn]) -> '_array[_uintn]':
        # Array of l random t-values.  NOTE(review): the randint upper
        # bound (2 << bits) exceeds the word range and relies on the
        # constructor's modular reduction -- confirm uniformity is not
        # required here.
        if not isinstance(l, nat_t):
            fail("array.create_random's first argument has to be of type nat_t.")
        r = rand()
        return _array(list([t(r.randint(0, 2 << _uintn.num_bits(t(0)))) for _ in range(0, l)]))
# Variable-length array of elements of type t (refinement erased at runtime).
def vlarray_t(t:type) -> type:
    return refine_t(_array,lambda x: all(isinstance(z,t) for z in x))
vlarray = _array
# Fixed-length array type: vlarray of t with length exactly l.
def array_t(t: type, l:int) -> type:
    return refine_t(vlarray_t(t),lambda x: x.len == l)
array = _array
# Byte sequences: variable-length and fixed-length views over uint8 arrays.
vlbytes_t = vlarray_t(uint8_t)
def bytes_t(l:int) -> type:
    return array_t(uint8_t,l)
class bytes(_array):
    """Byte-array helpers: conversions between uint8 arrays, hex strings,
    big naturals and fixed-width integers (LE and BE variants).

    NOTE: this class shadows the builtin `bytes`; the builtin is reached
    below via `builtins.bytes`.
    """
    # __slots__ = []
    @staticmethod
    @typechecked
    def from_ints(x: List[int]) -> 'vlbytes_t':
        res = vlbytes_t([uint8(i) for i in x])
        return res
    @staticmethod
    @typechecked
    def concat_bytes(blocks: '_array[vlbytes_t]') -> 'vlbytes_t':
        # Flatten an array of byte arrays into one byte array.
        concat = [b for block in blocks for b in block]
        return vlbytes_t(concat)
    @staticmethod
    @typechecked
    def from_hex(x: str) -> 'vlbytes_t':
        return vlbytes_t([uint8(int(x[i:i+2], 16)) for i in range(0, len(x), 2)])
    @staticmethod
    @typechecked
    def to_hex(a: 'vlbytes_t') -> str:
        return "".join(['{:02x}'.format(uintn.to_int(x)) for x in a])
    # --- conversions between arbitrary-size naturals and byte arrays,
    #     zero-padded on the left to at least l bytes ---
    @staticmethod
    @typechecked
    def from_nat_le(x: int, l: int=0) -> 'vlbytes_t':
        if not isinstance(x, int):
            fail("bytes.from_nat_le's argument has to be of type nat, not "+str(type(x)))
        b = x.to_bytes((x.bit_length() + 7) // 8, 'little') or b'\0'
        pad = _array([uint8(0) for i in range(0, max(0, l-len(b)))])
        result = vlbytes_t([uint8(i) for i in b])
        return vlbytes_t(array.concat(pad, result))
    @staticmethod
    @typechecked
    def to_nat_le(x: 'vlbytes_t') -> nat_t:
        b = builtins.bytes([uintn.to_int(u) for u in x])
        return int.from_bytes(b, 'little')
    @staticmethod
    @typechecked
    def from_nat_be(x: int, l: int=0) -> 'vlbytes_t':
        if not isinstance(x, int):
            fail("bytes.from_nat_be's first argument has to be of type nat_t.")
        if not isinstance(l, int):
            fail("bytes.from_nat_be's second argument has to be of type nat_t.")
        b = x.to_bytes((x.bit_length() + 7) // 8, 'big') or b'\0'
        pad = _array([uint8(0) for i in range(0, max(0, l-len(b)))])
        result = _array([uint8(i) for i in b])
        return vlbytes_t(array.concat(pad, result))
    @staticmethod
    @typechecked
    def to_nat_be(x: 'vlbytes_t') -> nat_t:
        b = builtins.bytes([uintn.to_int(u) for u in x])
        return int.from_bytes(b, 'big')
    # --- conversions between single fixed-width integers and bytes ---
    @staticmethod
    @typechecked
    def from_uintn_le(x: uintn_t) -> 'vlbytes_t':
        # Emit ceil(bits/8) bytes, least-significant first.
        nbytes = (x.bits - 1) // 8 + 1
        by = bytes.create(nbytes,uint8(0))
        xv = uintn.to_nat(x)
        for i in range(nbytes):
            by[i] = uint8(xv & 255)
            xv = xv >> 8
        return by
    @staticmethod
    @typechecked
    def to_uintn_le(x: 'vlbytes_t') -> uintn_t:
        nbits = 8 * bytes.length(x)
        xv = uintn(0,nbits)
        for i in range(bytes.length(x)):
            xv += uintn(x[i],nbits) << (i * 8)
        return xv
    @staticmethod
    @typechecked
    def from_uintn_be(x: uintn_t) -> 'vlbytes_t':
        # Emit ceil(bits/8) bytes, most-significant first.
        nbytes = (x.bits - 1) // 8 + 1
        by = bytes.create(nbytes,uint8(0))
        xv = uintn.to_nat(x)
        for i in range(nbytes):
            by[nbytes-i-1] = uint8(xv)
            xv = xv // 256
        return by
    @staticmethod
    @typechecked
    def to_uintn_be(x: 'vlbytes_t') -> uintn_t:
        nbits = 8 * bytes.length(x)
        xv = uintn(0,nbits)
        nbytes = bytes.length(x)
        for i in range(nbytes):
            xv += uintn(x[nbytes - i - 1],nbits) << (i * 8)
        return xv
    # --- conversions between arrays of fixed-width integers and bytes ---
    @staticmethod
    @typechecked
    def from_uintns_le(x: 'vlarray_t(uintn_t)') -> 'vlbytes_t':
        by = _array([bytes.from_uintn_le(i) for i in x])
        return bytes.concat_bytes(by)
    @staticmethod
    @typechecked
    def from_uintns_be(x: 'vlarray_t(uintn_t)') -> 'vlbytes_t':
        by = _array([bytes.from_uintn_be(i) for i in x])
        return bytes.concat_bytes(by)
    @staticmethod
    @typechecked
    def to_uintns_le(x: 'vlbytes_t',bits:int) -> vlarray_t(uintn_t):
        if bits % 8 != 0 or len(x) * 8 % bits != 0:
            fail("bytearray length not a multiple of bits//8")
        nums, x = array.split_blocks(x, bits//8)
        return(_array([bytes.to_uintn_le(i) for i in nums]))
    @staticmethod
    @typechecked
    def to_uintns_be(x: 'vlbytes_t',bits:int) -> vlarray_t(uintn_t):
        if bits % 8 != 0 or len(x) * 8 % bits != 0:
            fail("bytearray length not a multiple of bits/8")
        nums, x = array.split_blocks(x, bits//8)
        return(_array([bytes.to_uintn_be(i) for i in nums]))
    # --- width-specific wrappers around the generic conversions above ---
    @staticmethod
    @typechecked
    def to_uint16_le(x: 'vlbytes_t') -> uint16_t:
        return uint16(bytes.to_uintn_le(x))
    @staticmethod
    @typechecked
    def to_uint32_le(x: 'vlbytes_t') -> uint32_t:
        return uint32(bytes.to_uintn_le(x))
    @staticmethod
    @typechecked
    def to_uint64_le(x: 'vlbytes_t') -> uint64_t:
        return uint64(bytes.to_uintn_le(x))
    @staticmethod
    @typechecked
    def to_uint128_le(x: 'vlbytes_t') -> uint128_t:
        return uint128(bytes.to_uintn_le(x))
    @staticmethod
    @typechecked
    def to_uint16_be(x: 'vlbytes_t') -> uint16_t:
        return uint16(bytes.to_uintn_be(x))
    @staticmethod
    @typechecked
    def to_uint32_be(x: 'vlbytes_t') -> uint32_t:
        return uint32(bytes.to_uintn_be(x))
    @staticmethod
    @typechecked
    def to_uint64_be(x: 'vlbytes_t') -> uint64_t:
        return uint64(bytes.to_uintn_be(x))
    @staticmethod
    @typechecked
    def to_uint128_be(x: 'vlbytes_t') -> uint128_t:
        return uint128(bytes.to_uintn_be(x))
    @staticmethod
    @typechecked
    def from_uint16_le(x: 'uint16_t') -> vlbytes_t:
        return bytes.from_uintn_le(x)
    @staticmethod
    @typechecked
    def from_uint32_le(x: 'uint32_t') -> vlbytes_t:
        return bytes.from_uintn_le(x)
    @staticmethod
    @typechecked
    def from_uint64_le(x: 'uint64_t') -> vlbytes_t:
        return bytes.from_uintn_le(x)
    @staticmethod
    @typechecked
    def from_uint128_le(x: 'uint128_t') -> vlbytes_t:
        return bytes.from_uintn_le(x)
    @staticmethod
    @typechecked
    def from_uint16_be(x: 'uint16_t') -> vlbytes_t:
        return bytes.from_uintn_be(x)
    @staticmethod
    @typechecked
    def from_uint32_be(x: 'uint32_t') -> vlbytes_t:
        return bytes.from_uintn_be(x)
    @staticmethod
    @typechecked
    def from_uint64_be(x: 'uint64_t') -> vlbytes_t:
        return bytes.from_uintn_be(x)
    @staticmethod
    @typechecked
    def from_uint128_be(x: 'uint128_t') -> vlbytes_t:
        return bytes.from_uintn_be(x)
    @staticmethod
    @typechecked
    def to_uint16s_le(x: 'vlbytes_t') -> _array[uint16_t]:
        return array.map(uint16,bytes.to_uintns_le(x,16))
    @staticmethod
    @typechecked
    def to_uint32s_le(x: 'vlbytes_t') -> _array[uint32_t]:
        return array.map(uint32,bytes.to_uintns_le(x,32))
    @staticmethod
    @typechecked
    def to_uint64s_le(x: 'vlbytes_t') -> _array[uint64_t]:
        return array.map(uint64,bytes.to_uintns_le(x,64))
    @staticmethod
    @typechecked
    def to_uint128s_le(x: 'vlbytes_t') -> _array[uint128_t]:
        return array.map(uint128,bytes.to_uintns_le(x,128))
    @staticmethod
    @typechecked
    def to_uint16s_be(x: 'vlbytes_t') -> _array[uint16_t]:
        return array.map(uint16,bytes.to_uintns_be(x,16))
    @staticmethod
    @typechecked
    def to_uint32s_be(x: 'vlbytes_t') -> _array[uint32_t]:
        return array.map(uint32,bytes.to_uintns_be(x,32))
    @staticmethod
    @typechecked
    def to_uint64s_be(x: 'vlbytes_t') -> _array[uint64_t]:
        return array.map(uint64,bytes.to_uintns_be(x,64))
    @staticmethod
    @typechecked
    def to_uint128s_be(x: 'vlbytes_t') -> _array[uint128_t]:
        return array.map(uint128,bytes.to_uintns_be(x,128))
    @staticmethod
    @typechecked
    def from_uint16s_le(x: '_array[uint16_t]') -> vlbytes_t:
        return bytes.from_uintns_le(x)
    @staticmethod
    @typechecked
    def from_uint32s_le(x: '_array[uint32_t]') -> vlbytes_t:
        return bytes.from_uintns_le(x)
    @staticmethod
    @typechecked
    def from_uint64s_le(x: '_array[uint64_t]') -> vlbytes_t:
        return bytes.from_uintns_le(x)
    @staticmethod
    @typechecked
    def from_uint128s_le(x: '_array[uint128_t]') -> vlbytes_t:
        return bytes.from_uintns_le(x)
    @staticmethod
    @typechecked
    def from_uint16s_be(x: '_array[uint16_t]') -> vlbytes_t:
        return bytes.from_uintns_be(x)
    @staticmethod
    @typechecked
    def from_uint32s_be(x: '_array[uint32_t]') -> vlbytes_t:
        return bytes.from_uintns_be(x)
    @staticmethod
    @typechecked
    def from_uint64s_be(x: '_array[uint64_t]') -> vlbytes_t:
        return bytes.from_uintns_be(x)
    @staticmethod
    @typechecked
    def from_uint128s_be(x: '_array[uint128_t]') -> vlbytes_t:
        return bytes.from_uintns_be(x)
    @staticmethod
    @typechecked
    def create_random_bytes(len: nat) -> 'vlbytes_t':
        # Cryptographically secure randomness via SystemRandom.
        r = rand()
        return vlbytes_t(list([uint8(r.randint(0, 0xFF)) for _ in range(0, len)]))
class _vector(_array[T]):
    """Numeric vector: an _array whose elements all share the type of the
    given `zero` element, with pointwise +, -, * and polynomial multiply.
    """
    # __slots__ = []
    def __init__(self, x: _array[T], zero:T) -> None:
        self.l = x.l
        self.len = x.len
        # `zero` doubles as the type witness for all elements.
        self.zero = zero
        if not (isinstance(zero,int) or
                isinstance(zero,_natmod) or
                isinstance(zero,_vector)):
            fail("vector must have values of numeric type")
        if not (all(v.__class__ == zero.__class__ for v in self.l)):
            for v in self.l:
                print(str(v.__class__) + " - " + str(zero.__class__))
            # (message fixed: "valus" typo)
            fail("vector must have all values of same type as zero")
    @staticmethod
    @typechecked
    def create(l: int, zero:T) -> '_vector[T]':
        # Vector of length l filled with the zero element.
        a = _array([zero] * l)
        res = _vector(a,zero)
        return res
    @staticmethod
    @typechecked
    def createi(l: int, zero:T, f:Callable[[int],T]) -> '_vector[T]':
        # Vector of length l with element i initialised to f(i).
        return _vector(_array.createi(l,f),zero)
    @typechecked
    def __add__(self, other: '_vector[T]') -> '_vector[T]':
        if not isinstance(other, _vector) or \
           other.__class__ != self.__class__ or \
           other.len != self.len:
            fail("+ is only valid for two _vectors of same length")
        res = copy(self)
        res.l = [x + y for (x,y) in zip(self.l,other.l)]
        return res
    @typechecked
    def __sub__(self, other: '_vector[T]') -> '_vector[T]':
        if not isinstance(other, _vector) or \
           other.__class__ != self.__class__ or \
           other.len != self.len:
            # (message fixed: it previously said "/")
            fail("- is only valid for two _vectors of same length")
        res = copy(self)
        res.l = [x - y for (x,y) in zip(self.l,other.l)]
        return res
    @typechecked
    def __mul__(self, other: '_vector[T]') -> '_vector[T]':
        # Pointwise (Hadamard) product, not a dot product.
        if not isinstance(other, _vector) or \
           other.__class__ != self.__class__ or \
           other.len != self.len:
            fail("* is only valid for two _vectors of same length")
        res = copy(self)
        res.l = [x * y for (x,y) in zip(self.l,other.l)]
        return res
    @staticmethod
    @typechecked
    def poly_mul(x:'_vector[T]', other: '_vector[T]', zero:T) -> '_vector[T]':
        # Schoolbook polynomial multiplication (coefficients in x, other).
        if not isinstance(other, _vector) or \
           other.__class__ != x.__class__ or \
           other.len != x.len:
            fail("poly_mul is only valid for two _vectors of same length")
        res = _vector.create(x.len + other.len, zero)
        for i in range(x.len):
            for j in range(other.len):
                res[i+j] += x[i] * other[j]
        return res
    @staticmethod
    @typechecked
    def mapz(f: Callable[[T], U], a: '_vector[T]', z:U) -> '_vector[U]':
        # Map f over a, re-tagging the result with the new zero element z.
        return _vector(array.map(f,a),z)
@typechecked
def vlvector_t(t:type) -> type:
    # Variable-length vector type: currently identical to vlarray_t(t).
    return vlarray_t(t)
@typechecked
def vector_t(t:type,len:nat) -> type:
    # Fixed-length vector type: an array_t of the given length.
    return array_t(t,len)
# Public alias: specs refer to the internal _vector class as ``vector``.
vector = _vector
class _matrix(_vector[_vector[T]]):
    """Non-empty ``rows x cols`` matrix stored as a vector of row vectors.

    All rows must agree on length and on the zero (type) witness; ``@`` is
    matrix multiplication.
    """
    # __slots__ = ['rows','cols']
    @typechecked
    def __init__(self, x: _vector[_vector[T]]) -> None:
        self.l = x.l
        self.len = x.len
        if x.len == 0 or x.l[0].len == 0:
            fail("matrix must be non-empty and have non-empty vectors")
        self.rows = x.len
        self.cols = x[0].len
        self.zero = x[0].zero
        if any(not isinstance(v,_vector) or v.len != self.cols or v.zero != self.zero for v in self.l):
            fail("matrix must have columns that are vectors of same lengths and type")
    def __matmul__(self,other:'_matrix[T]') -> '_matrix[T]':
        """Matrix product: (M x N) @ (N x K) -> (M x K)."""
        if not isinstance(other, _matrix) or \
           other.__class__ != self.__class__ or \
           other.rows != self.cols or \
           other.zero != self.zero :
            fail("@ is only valid for matrices of size M*N and N*K")
        res = _matrix.create(self.rows,other.cols,self.zero)
        for i in range(res.rows):
            for k in range(res.cols):
                tmp = res.zero
                for j in range(self.cols):
                    tmp += self[i][j] * other[j][k]
                res[i][k] = tmp
        return res
    @staticmethod
    @typechecked
    def create(r: int, c:int, default:T) -> '_matrix[T]':
        """Return an ``r x c`` matrix with every entry set to ``default``."""
        col = _vector.create(c,default)
        mat = _vector.create(r,col)
        for i in range(r):
            # Allocate a fresh row per index so rows never alias each other.
            mat[i] = _vector.create(c,default)
        return _matrix(mat)
    @staticmethod
    @typechecked
    def createi(r: int, c:int, f:Callable[[int,int],T]) -> '_matrix[T]':
        """Return an ``r x c`` matrix whose (i, j) entry is ``f(i, j)``."""
        v = f(0,0)
        res = _matrix.create(r,c,v)
        for i in range(r):
            for j in range(c):
                res[i][j] = f(i,j)
        return res
    @staticmethod
    @typechecked
    def copy(x: '_matrix[T]') -> '_matrix[T]':
        """Deep-copy ``x`` entry by entry.

        BUG FIX: the generator previously took a single tuple argument
        (``lambda ij: x[ij[0]][ij[1]]``), but ``createi`` invokes it as
        ``f(i, j)`` with two positional arguments, which raised TypeError.
        """
        return _matrix.createi(x.rows,x.cols,lambda i, j: x[i][j])
def matrix_t(t:type,rows:nat,columns:nat) -> type:
    # Matrix type: a rows-length vector of columns-length vectors.
    return vector_t(vector_t(t,columns),rows)
# Public alias for the internal _matrix class.
matrix = _matrix
# Typed versions of all python functions that can be used in specs.
class speclib:
    """Typed wrappers around math builtins, namespaced for use in specs."""
    @typechecked
    def ceil(x: int) -> nat_t:
        # NOTE(review): these methods take no self/cls — apparently meant to
        # be called as ``speclib.ceil(x)``; ``ceil``/``log``/``floor`` here
        # resolve to module-level imports (presumably from math — confirm
        # the imports at the top of this file).
        return nat(ceil(x))
    @typechecked
    def log(x: int, b: int) -> float:
        # Logarithm of x to base b.
        return log(x, b)
    @typechecked
    def floor(x: float) -> int:
        return floor(x)
<file_sep>/tools/list.py
from ast import *
from sys import argv
def print_function(functionNode, indent=" "):
    """Print a one-line ``name(arg:type ...) -> ret`` summary of an
    ast.FunctionDef node, prefixed by ``indent``."""
    print(indent + functionNode.name + "(", end="")
    for arg in functionNode.args.args:
        annotation = ":"
        # Annotations may be plain names, string literals or subscripted
        # generics; anything else is printed without an annotation.
        # NOTE(review): ast.Str and ast.NameConstant are deprecated since
        # Python 3.8 and removed in 3.12 — fine for the project's 3.6
        # target, but confirm before moving to a newer interpreter.
        if isinstance(arg.annotation, Name):
            annotation += arg.annotation.id
        elif isinstance(arg.annotation, Str):
            annotation += arg.annotation.s
        elif not arg.annotation:
            annotation = ""
        elif isinstance(arg.annotation, Subscript):
            annotation += arg.annotation.value.id
        print(arg.arg + annotation + " ", end="")
    return_type = ") -> "
    if isinstance(functionNode.returns, NameConstant):
        return_type += str(functionNode.returns.value)
    elif isinstance(functionNode.returns, Name):
        return_type += str(functionNode.returns.id)
    elif isinstance(functionNode.returns, Str):
        return_type += functionNode.returns.s
    print(return_type, end="")
    print("")
def print_constant(node):
    """Print a module-level assignment as ``target := value`` (best-effort:
    only Name/Tuple targets and Name/Call values are resolved; everything
    else falls back to the node's repr)."""
    target = str(node.targets)
    if isinstance(node.targets[0], Name):
        target = node.targets[0].id
    if isinstance(node.targets[0], Tuple):
        # e.g. ``a_t, a = refine(...)`` — join the tuple element names.
        elts = node.targets[0].elts
        target = ", ".join([e.id for e in elts])
    value = str(node.value)
    if isinstance(node.value, Name):
        value = node.value.id
    if isinstance(node.value, Call):
        # For calls, show only the callee (attribute base or function name).
        if isinstance(node.value.func, Attribute):
            value = node.value.func.value.id
        if isinstance(node.value.func, Name):
            value = node.value.func.id
    print(target + " := " + value)
# --- command-line entry point: summarize a Python file's top level ---
if len(argv) != 2:
    print("Usage: list.py <file>")
    exit(1)
filename = argv[1]
with open(filename) as file:
    node = parse(file.read())
# Partition the module's top-level statements by AST node kind.
functions = [n for n in node.body if isinstance(n, FunctionDef)]
classes = [n for n in node.body if isinstance(n, ClassDef)]
constants = [n for n in node.body if isinstance(n, Assign)]
print(" === Functions ===")
for function in functions:
    print_function(function, "")
print(" === Classes ===")
for class_ in classes:
    print(class_.name + ":")
    methods = [n for n in class_.body if isinstance(n, FunctionDef)]
    for method in methods:
        print_function(method)
print(" === Constants & Aliases ===")
for const in constants:
    print_constant(const)
<file_sep>/build/hacspec/tests/check_test.py
# Testing spec checker
from hacspec.speclib import *
@typechecked
def test_lists() -> None:
    """Smoke-test the speclib array type constructors (vlarray_t, array_t,
    vlbytes_t, bytes_t) by instantiating each with literal elements."""
    my_vlarray_t = vlarray_t(uint32_t)
    x : my_vlarray_t = my_vlarray_t([uint32(0), uint32(1)])
    my_array_t = array_t(uint32_t, 2)
    x = my_array_t([uint32(0), uint32(1)])
    x = vlbytes_t([uint8(0), uint8(1)])
    my_bytes_t = bytes_t(3)
    x = my_bytes_t([uint8(0), uint8(1), uint8(255)])
    print("hacspec arrays are working.")
<file_sep>/specs/sha3.py
from lib.speclib import *
# Keccak-f[1600] state: 5x5 lanes of 64 bits, stored in a flat 25-element
# array (index = x + 5*y, see readLane/writeLane).
state_t = array_t(uint64_t, 25)
index_t = range_t(0, 5)  # a lane coordinate in 0..4
max_size_t = 2**64 - 1
# Refinement types bounding the sizes accepted by the sponge functions.
size_nat_t, size_nat = refine(int, lambda x: x <= max_size_t and x >= 0)
size_nat_200_t, size_nat_200 = refine(int, lambda x: x <= 200 and x >= 0)
# Bit rates: multiples of 8 in (0, 1600].
size_nat_1600_t, size_nat_1600 = refine(int, lambda x: x <= 1600 and x %
                                        8 == 0 and x // 8 > 0 and x >= 0)
@typechecked
def lfsr86540(lfsr: uint8_t) -> tuple_t(uint8_t, bool):
    """One step of the 8-bit LFSR used to derive the Keccak round
    constants; returns (next LFSR state, output bit as a bool)."""
    lfsr1 : uint8_t = lfsr & uint8(1)
    result : bool = not (lfsr1 == uint8(0))
    lfsr2 : uint8_t = lfsr << 1
    # Reduce by the feedback polynomial when the top bit shifts out.
    if (lfsr & uint8(0x80)) != uint8(0):
        return (lfsr2 ^ uint8(0x71), result)
    else:
        return (lfsr2, result)
@typechecked
def readLane(s: state_t, x: index_t, y: index_t) -> uint64_t:
    """Return the state lane at column ``x``, row ``y`` (flat index x + 5y)."""
    lane_index = x + 5 * y
    return s[lane_index]
@typechecked
def writeLane(s: state_t, x: index_t, y: index_t, v: uint64_t) -> state_t:
    """Store ``v`` into the lane at column ``x``, row ``y`` and return the
    (mutated) state."""
    lane_index = x + 5 * y
    s[lane_index] = v
    return s
@typechecked
def state_permute1(s: state_t, lfsr: uint8_t) -> tuple_t(state_t, uint8_t):
    """One Keccak-f[1600] round: theta, rho+pi, chi, then iota with the
    round-constant bits drawn from the supplied LFSR state.
    Returns the permuted state and the advanced LFSR."""
    # --- theta: column parities ---
    _C: array_t = array.create(5, uint64(0))
    for x in range(5):
        _C[x] = readLane(s, x, 0) ^ readLane(s, x, 1) ^ readLane(
            s, x, 2) ^ readLane(s, x, 3) ^ readLane(s, x, 4)
    s_theta = array.copy(s)
    _D : uint64_t
    for x in range(5):
        _D = _C[(x + 4) % 5] ^ uintn.rotate_left(_C[(x + 1) % 5], 1)
        for y in range(5):
            s_theta = writeLane(s_theta, x, y, readLane(s_theta, x, y) ^ _D)
    # --- rho and pi combined: rotate lanes while walking the pi
    # permutation cycle starting from lane (1, 0) ---
    x : int = 1
    y : int = 0
    current : uint64_t = readLane(s_theta, x, y)
    s_pi_rho = array.copy(s_theta)
    for t in range(24):
        r : int = ((t + 1) * (t + 2)//2) % 64  # rho rotation offset
        _Y : index_t = (2 * x + 3 * y) % 5
        x : index_t = y
        y : index_t = _Y
        temp : uint64_t = readLane(s_pi_rho, x, y)
        s_pi_rho : state_t = writeLane(s_pi_rho, x, y, uintn.rotate_left(current, r))
        current : uint64_t = temp
    # --- chi: non-linear row mixing; ``temp`` now holds the pre-chi state ---
    temp = array.copy(s_pi_rho)
    s_chi = array.copy(s_pi_rho)
    for y in range(5):
        for x in range(5):
            s_chi = writeLane(s_chi, x, y, readLane(temp, x, y) ^ (
                (~(readLane(temp, (x + 1) % 5, y)) & readLane(temp, (x + 2) % 5, y))))
    # --- iota: XOR LFSR-derived round-constant bits into lane (0, 0) ---
    s_iota = array.copy(s_chi)
    for j in range(7):
        bitPosition : int = 2 ** j - 1
        lfsr : uint8_t
        result : bool
        lfsr, result = lfsr86540(lfsr)
        if result == True:
            s_iota = writeLane(s_iota, 0, 0, readLane(
                s_iota, 0, 0) ^ (uint64(1) << bitPosition))
    return (s_iota, lfsr)
@typechecked
def state_permute(s: state_t) -> state_t:
    """Full Keccak-f[1600] permutation: 24 rounds, threading the
    round-constant LFSR (initial value 0x01) through each round."""
    lfsr : uint8_t = uint8(0x01)
    for i in range(24):
        s : state_t
        lfsr : uint8_t
        s, lfsr = state_permute1(s, lfsr)
    return s
@contract3(lambda rateInBytes, input_b, s:
           array.length(input_b) == rateInBytes,
           lambda rateInBytes, input_b, s, res:
           True)
@typechecked
def loadState(rateInBytes: size_nat_200_t, input_b: vlbytes_t, s: state_t) -> state_t:
    """XOR a rate-sized input block into the state: zero-pad to 200 bytes,
    then XOR into the 25 lanes little-endian."""
    block: bytes_t = array.create(200, uint8(0))
    block[0:rateInBytes] = input_b
    for j in range(25):
        nj : uint64_t = bytes.to_uint64_le(block[(j * 8):(j * 8 + 8)])
        s[j] = s[j] ^ nj
    return s
@contract3(lambda rateInBytes, s:
           True,
           lambda rateInBytes, s, res:
           array.length(res) == rateInBytes)
@typechecked
def storeState(rateInBytes: size_nat_200_t, s: state_t) -> vlbytes_t:
    """Serialize the 25 lanes little-endian into 200 bytes and return the
    first ``rateInBytes`` of them."""
    block: bytes_t = bytes(array.create(200, uint8(0)))
    for j in range(25):
        block[(j * 8):(j * 8 + 8)] = bytes.from_uint64_le(s[j])
    return block[0:rateInBytes]
@contract3(lambda s, rateInBytes, inputByteLen, input_b, delimitedSuffix:
           0 < rateInBytes and rateInBytes <= 200 and array.length(input_b) == inputByteLen,
           lambda s, rateInBytes, inputByteLen, input_b, delimitedSuffix, res:
           True)
@typechecked
def absorb(s: state_t,
           rateInBytes: nat_t,
           inputByteLen: size_nat_t,
           input_b: vlbytes_t,
           delimitedSuffix: uint8_t) -> state_t:
    """Sponge absorb phase: XOR full rate-sized blocks into the state
    (permuting after each), then pad the trailing partial block with the
    domain-separation suffix and the closing 0x80 pad byte."""
    n : int = inputByteLen // rateInBytes
    s : state_t
    for i in range(n):
        s = loadState(rateInBytes, input_b[(
            i*rateInBytes):(i*rateInBytes + rateInBytes)], s)
        s = state_permute(s)
    rem : int = inputByteLen % rateInBytes
    # Annotation fix: this is a byte slice (possibly empty), not one uint8.
    last : vlbytes_t = input_b[(inputByteLen - rem):inputByteLen]
    lastBlock: bytes_t = array.create(rateInBytes, uint8(0))
    lastBlock[0:rem] = last
    lastBlock[rem] = delimitedSuffix
    s = loadState(rateInBytes, lastBlock, s)
    # Extra permutation when the suffix's top pad bit already occupies the
    # final byte of the rate, so the closing 0x80 goes into a fresh block.
    if not(delimitedSuffix & uint8(0x80) == uint8(0)) and (rem == rateInBytes - 1):
        s = state_permute(s)
    nextBlock: bytes_t = array.create(rateInBytes, uint8(0))
    nextBlock[rateInBytes - 1] = uint8(0x80)
    s = loadState(rateInBytes, nextBlock, s)
    s = state_permute(s)
    return s
@contract3(lambda s, rateInBytes, outputByteLen:
           0 < rateInBytes and rateInBytes <= 200,
           lambda s, rateInBytes, outputByteLen, res:
           array.length(res) == outputByteLen)
@typechecked
def squeeze(s: state_t,
            rateInBytes: nat_t,
            outputByteLen: size_nat_t) -> vlbytes_t:
    """Sponge squeeze phase: emit rate-sized output blocks, permuting the
    state between blocks, then a final partial block."""
    output: bytes_t = array.create(outputByteLen, uint8(0))
    outBlocks : int = outputByteLen // rateInBytes
    for i in range(outBlocks):
        block : vlbytes_t = storeState(rateInBytes, s)
        output[(i*rateInBytes):(i*rateInBytes + rateInBytes)] = block
        s : state_t = state_permute(s)
    remOut : int = outputByteLen % rateInBytes
    # Annotation fix: storeState returns bytes, not a state.
    outBlock : vlbytes_t = storeState(remOut, s)
    output[(outputByteLen - remOut):outputByteLen] = outBlock
    return output
@typechecked
def keccak(rate: size_nat_1600_t, capacity: size_nat_t, inputByteLen: size_nat_t,
           input_b: vlbytes_t, delimitedSuffix: uint8_t, outputByteLen: size_nat_t) -> vlbytes_t:
    """Generic Keccak sponge: absorb the input at the given bit rate, then
    squeeze outputByteLen bytes.
    NOTE(review): ``capacity`` is accepted but never used — the rate alone
    determines the split (callers pass rate + capacity = 1600)."""
    rateInBytes : nat_t = nat(rate // 8)
    s: array_t = array.create(25, uint64(0))
    s = absorb(s, rateInBytes, inputByteLen, input_b, delimitedSuffix)
    output : vlbytes_t = squeeze(s, rateInBytes, outputByteLen)
    return output
@contract3(lambda inputByteLen, input_b, outputByteLen:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, outputByteLen, res:
           array.length(res) == outputByteLen)
@typechecked
def shake128(inputByteLen: size_nat_t,
             input_b: vlbytes_t,
             outputByteLen: size_nat_t) -> vlbytes_t:
    """SHAKE128 XOF: rate 1344, capacity 256, domain suffix 0x1F."""
    return keccak(1344, 256, inputByteLen, input_b, uint8(0x1F), outputByteLen)
@contract3(lambda inputByteLen, input_b, outputByteLen:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, outputByteLen, res:
           array.length(res) == outputByteLen)
@typechecked
def shake256(inputByteLen: size_nat_t,
             input_b: vlbytes_t,
             outputByteLen: size_nat_t) -> vlbytes_t:
    """SHAKE256 XOF: rate 1088, capacity 512, domain suffix 0x1F."""
    return keccak(1088, 512, inputByteLen, input_b, uint8(0x1F), outputByteLen)
@contract3(lambda inputByteLen, input_b:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, res:
           array.length(res) == 28)
@typechecked
def sha3_224(inputByteLen: size_nat_t,
             input_b: vlbytes_t) -> vlbytes_t:
    """SHA3-224: rate 1152, capacity 448, suffix 0x06, 28-byte digest."""
    return keccak(1152, 448, inputByteLen, input_b, uint8(0x06), 28)
@contract3(lambda inputByteLen, input_b:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, res:
           array.length(res) == 32)
@typechecked
def sha3_256(inputByteLen: size_nat_t,
             input_b: vlbytes_t) -> vlbytes_t:
    """SHA3-256: rate 1088, capacity 512, suffix 0x06, 32-byte digest."""
    return keccak(1088, 512, inputByteLen, input_b, uint8(0x06), 32)
@contract3(lambda inputByteLen, input_b:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, res:
           array.length(res) == 48)
@typechecked
def sha3_384(inputByteLen: size_nat_t,
             input_b: vlbytes_t) -> vlbytes_t:
    """SHA3-384: rate 832, capacity 768, suffix 0x06, 48-byte digest."""
    return keccak(832, 768, inputByteLen, input_b, uint8(0x06), 48)
@contract3(lambda inputByteLen, input_b:
           array.length(input_b) == inputByteLen,
           lambda inputByteLen, input_b, res:
           array.length(res) == 64)
@typechecked
def sha3_512(inputByteLen: size_nat_t,
             input_b: vlbytes_t) -> vlbytes_t:
    """SHA3-512: rate 576, capacity 1024, suffix 0x06, 64-byte digest."""
    return keccak(576, 1024, inputByteLen, input_b, uint8(0x06), 64)
<file_sep>/specs/aead_chacha20poly1305.py
#!/usr/bin/python3
from lib.speclib import *
from specs.chacha20 import chacha20_block, chacha20_encrypt, chacha20_decrypt
from specs.poly1305 import poly1305_mac
key_t = bytes_t(32)     # ChaCha20 key
nonce_t = bytes_t(12)   # IETF 96-bit nonce
tag_t = bytes_t(16)     # Poly1305 authentication tag
block_t = bytes_t(64)   # one ChaCha20 keystream block
@typechecked
def padded_aad_msg(aad:vlbytes_t,msg:vlbytes_t) -> tuple_t(int,vlbytes_t):
    """Lay out the Poly1305 input per RFC 7539: AAD and message each
    zero-padded to a 16-byte boundary, followed by their 64-bit
    little-endian lengths. Returns (total length, buffer to MAC)."""
    aad_len: int = array.length(aad)
    msg_len: int = array.length(msg)
    # Round each length up to the next multiple of 16 (no-op if aligned).
    padded_aad_len: int = 16 * (aad_len // 16 + 1)
    if aad_len % 16 == 0:
        padded_aad_len = aad_len
    padded_msg_len: int = 16 * (msg_len // 16 + 1)
    if msg_len % 16 == 0:
        padded_msg_len = msg_len
    total: int = padded_aad_len + padded_msg_len + 16
    to_mac: vlbytes_t = array.create(total,uint8(0))
    to_mac[0:aad_len] = aad
    to_mac[padded_aad_len:padded_aad_len+msg_len] = msg
    # Trailing 16 bytes: len(aad) || len(msg), both 64-bit little-endian.
    to_mac[padded_aad_len+padded_msg_len:padded_aad_len+padded_msg_len+8] = bytes.from_uint64_le(uint64(aad_len))
    to_mac[padded_aad_len+padded_msg_len+8:total] = bytes.from_uint64_le(uint64(msg_len))
    return total, to_mac
@typechecked
def aead_chacha20poly1305_encrypt(key:key_t,nonce:nonce_t,aad:vlbytes_t,msg:vlbytes_t) -> tuple_t(vlbytes_t,tag_t):
    """RFC 7539 AEAD: encrypt with ChaCha20 starting at counter 1; MAC the
    padded AAD || ciphertext with Poly1305 keyed by the first 32 bytes of
    keystream block 0. Returns (ciphertext, tag)."""
    keyblock0: block_t = chacha20_block(key,uint32(0),nonce)
    mac_key: bytes_t = keyblock0[0:32]
    ciphertext: vlbytes_t = chacha20_encrypt(key,uint32(1),nonce,msg)
    len: int
    to_mac: vlbytes_t
    len, to_mac = padded_aad_msg(aad, ciphertext)
    mac: tag_t = poly1305_mac(to_mac,mac_key)
    return ciphertext, mac
@typechecked
def aead_chacha20poly1305_decrypt(key:key_t,nonce:nonce_t,
                                  aad:vlbytes_t,
                                  ciphertext:vlbytes_t,
                                  tag:tag_t) -> vlbytes_t:
    """Recompute the Poly1305 tag over the padded AAD || ciphertext and
    decrypt only on a match; fails (no return) on tag mismatch."""
    keyblock0: block_t = chacha20_block(key, uint32(0), nonce)
    mac_key: bytes_t = keyblock0[0:32]
    to_mac: vlbytes_t
    l: int # TODO: unused variable but _ is not supported by the compiler yet.
    l, to_mac = padded_aad_msg(aad,ciphertext)
    mac: tag_t = poly1305_mac(to_mac,mac_key)
    # NOTE(review): '==' is not a constant-time comparison — acceptable in
    # a specification, not in production code.
    if mac == tag:
        msg: vlbytes_t = chacha20_decrypt(key, uint32(1), nonce, ciphertext)
        return msg
    else:
        fail("mac failed")
<file_sep>/archive/spec-checker/haskell/README.md
The most convenient way to build is with `stack build --fast`.
Note: using happy versions newer than 1.19.5 can result in impractical
compile times. The stack configuration provided uses an old enough
snapshot to avoid that problem.
<file_sep>/build/docs/hacspec.rst
hacspec package
===============
Submodules
----------
hacspec.speclib module
----------------------
.. automodule:: hacspec.speclib
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: hacspec
:members:
:undoc-members:
:show-inheritance:
<file_sep>/docker/travis-compiler/run.sh
#!/usr/bin/env bash
# CI entry point: build the hacspec compiler inside the Travis Docker
# image, then run the F* extraction, check and test targets.
set -v -x
# Travis is weird.
cp -r /home/worker/_hacspec /home/worker/hacspec
# Docker doesn't give us a login shell.
. /home/worker/.opam/opam-init/init.sh > /dev/null 2> /dev/null || true
# Run compilers and type check.
cd /home/worker/hacspec
make compiler
# Run F* targets
cd compiler
make -C fstar-compiler/specs
make -C fstar-compiler/specs check
make -C fstar-compiler/specs tests
<file_sep>/tests/aead_chacha20poly1305_test.py
from specs.aead_chacha20poly1305 import *
from sys import exit
import json
def main(x: int) -> None :
    """RFC 7539 known-answer test plus JSON test vectors for the
    ChaCha20-Poly1305 AEAD (encrypt, then round-trip decrypt)."""
    # Fixed vector from RFC 7539 ("Ladies and Gentlemen..." sunscreen text).
    k = bytes.from_ints([0x80,0x81,0x82,0x83,0x84,0x85,0x86,0x87,
                         0x88,0x89,0x8a,0x8b,0x8c,0x8d,0x8e,0x8f,
                         0x90,0x91,0x92,0x93,0x94,0x95,0x96,0x97,
                         0x98,0x99,0x9a,0x9b,0x9c,0x9d,0x9e,0x9f])
    n = bytes.from_ints([0x07,0x00,0x00,0x00,0x40,0x41,0x42,0x43,
                         0x44,0x45,0x46,0x47])
    p = bytes.from_ints([0x4c, 0x61, 0x64, 0x69, 0x65, 0x73, 0x20, 0x61,
                         0x6e, 0x64, 0x20, 0x47, 0x65, 0x6e, 0x74, 0x6c,
                         0x65, 0x6d, 0x65, 0x6e, 0x20, 0x6f, 0x66, 0x20,
                         0x74, 0x68, 0x65, 0x20, 0x63, 0x6c, 0x61, 0x73,
                         0x73, 0x20, 0x6f, 0x66, 0x20, 0x27, 0x39, 0x39,
                         0x3a, 0x20, 0x49, 0x66, 0x20, 0x49, 0x20, 0x63,
                         0x6f, 0x75, 0x6c, 0x64, 0x20, 0x6f, 0x66, 0x66,
                         0x65, 0x72, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x6f,
                         0x6e, 0x6c, 0x79, 0x20, 0x6f, 0x6e, 0x65, 0x20,
                         0x74, 0x69, 0x70, 0x20, 0x66, 0x6f, 0x72, 0x20,
                         0x74, 0x68, 0x65, 0x20, 0x66, 0x75, 0x74, 0x75,
                         0x72, 0x65, 0x2c, 0x20, 0x73, 0x75, 0x6e, 0x73,
                         0x63, 0x72, 0x65, 0x65, 0x6e, 0x20, 0x77, 0x6f,
                         0x75, 0x6c, 0x64, 0x20, 0x62, 0x65, 0x20, 0x69,
                         0x74, 0x2e])
    aad = bytes.from_ints([0x50,0x51,0x52,0x53,0xc0,0xc1,0xc2,0xc3,
                           0xc4,0xc5,0xc6,0xc7])
    exp_cipher = bytes.from_ints([0xd3,0x1a,0x8d,0x34,0x64,0x8e,0x60,0xdb,
                                  0x7b,0x86,0xaf,0xbc,0x53,0xef,0x7e,0xc2,
                                  0xa4,0xad,0xed,0x51,0x29,0x6e,0x08,0xfe,
                                  0xa9,0xe2,0xb5,0xa7,0x36,0xee,0x62,0xd6,
                                  0x3d,0xbe,0xa4,0x5e,0x8c,0xa9,0x67,0x12,
                                  0x82,0xfa,0xfb,0x69,0xda,0x92,0x72,0x8b,
                                  0x1a,0x71,0xde,0x0a,0x9e,0x06,0x0b,0x29,
                                  0x05,0xd6,0xa5,0xb6,0x7e,0xcd,0x3b,0x36,
                                  0x92,0xdd,0xbd,0x7f,0x2d,0x77,0x8b,0x8c,
                                  0x98,0x03,0xae,0xe3,0x28,0x09,0x1b,0x58,
                                  0xfa,0xb3,0x24,0xe4,0xfa,0xd6,0x75,0x94,
                                  0x55,0x85,0x80,0x8b,0x48,0x31,0xd7,0xbc,
                                  0x3f,0xf4,0xde,0xf0,0x8e,0x4b,0x7a,0x9d,
                                  0xe5,0x76,0xd2,0x65,0x86,0xce,0xc6,0x4b,
                                  0x61,0x16])
    exp_mac = bytes.from_ints([0x1a,0xe1,0x0b,0x59,0x4f,0x09,0xe2,0x6a,
                               0x7e,0x90,0x2e,0xcb,0xd0,0x60,0x06,0x91])
    cipher, mac = aead_chacha20poly1305_encrypt(k,n,aad,p)
    decrypted_msg = aead_chacha20poly1305_decrypt(k,n,aad,cipher,mac)
    if (exp_cipher == cipher and exp_mac == mac and decrypted_msg == p):
        print("AEAD-ChachaPoly Test 0 passed.")
    else:
        print("AEAD-ChachaPoly Test 0 failed.")
        print("expected cipher: ", exp_cipher)
        print("computed cipher: ", cipher)
        print("expected mac: ", exp_mac)
        print("computed mac: ", mac)
        exit(1)
    # JSON vectors: 'output' is ciphertext || 16-byte tag.
    file = open('tests/test_vectors/aead_chacha20poly1305_test_vectors.json')
    aead_chacha20poly1305_test_vectors = json.load(file)
    for i in range(len(aead_chacha20poly1305_test_vectors)):
        msg = bytes.from_hex(aead_chacha20poly1305_test_vectors[i]['input'])
        k = bytes.from_hex(aead_chacha20poly1305_test_vectors[i]['key'])
        n = bytes.from_hex(aead_chacha20poly1305_test_vectors[i]['nonce'])
        aad = bytes.from_hex(aead_chacha20poly1305_test_vectors[i]['aad'])
        expected = bytes.from_hex(aead_chacha20poly1305_test_vectors[i]['output'])
        exp_mac = expected[len(msg):len(expected)]
        exp_cipher = expected[0:len(msg)]
        cipher, mac = aead_chacha20poly1305_encrypt(k,n,aad,msg)
        decrypted_msg = aead_chacha20poly1305_decrypt(k,n,aad,cipher,mac)
        if (exp_cipher == cipher and exp_mac == mac and decrypted_msg == msg):
            print("AEAD-ChachaPoly Test ",i+1," passed.")
        else:
            print("AEAD-ChachaPoly Test ",i+1," failed:")
            print("expected cipher: ", exp_cipher)
            print("computed cipher: ", cipher)
            print("expected mac: ", exp_mac)
            print("computed mac: ", mac)
            exit(1)
if __name__ == "__main__":
    main(0)
<file_sep>/tests/argon2i_test.py
from specs.argon2i import *
from sys import exit
from tests.testlib import print_dot, exit
import json
def main():
    """Check the Argon2i spec against the project's JSON test vectors."""
    file = open('tests/test_vectors/argon2i_test_vectors.json')
    argon2i_test_vectors = json.load(file)
    print_dot()
    for i, vec in enumerate(argon2i_test_vectors):
        # p/s/x/k: password, salt, associated data, secret key (per Argon2
        # naming conventions — confirm against the vector schema).
        p = bytes.from_hex(vec['p'])
        s = bytes.from_hex(vec['s'])
        x = bytes.from_hex(vec['x'])
        k = bytes.from_hex(vec['k'])
        lanes = lanes_t(vec['lanes'])
        t_len = t_len_t(vec['t_len'])
        m = size_nat_t(nat(vec['m']))
        iterations = size_nat_t(nat(vec['iterations']))
        expected = bytes.from_hex(vec['output'])
        computed = argon2i(p,s,lanes,t_len,m,iterations,x,k)
        # computed = array([])
        if computed == expected:
            print("Argon2i Test {} passed.".format(i+1))
        else:
            print("Argon2i Test {} failed.".format(i+1))
            print("expected hash:",bytes.to_hex(expected))
            print("computed hash:",bytes.to_hex(computed))
            exit(1)
    exit(0)
if __name__ == "__main__":
    main()
<file_sep>/docker/travis-compiler/setup.sh
#!/usr/bin/env bash
# CI image setup: initialize OPAM with the pinned compiler version,
# install the OCaml packages the hacspec/F* toolchain needs, and fetch a
# pinned HACL* checkout. Expects ${opamv}, ${haclrepo}, ${haclversion}.
set -v -e -x
export PATH="$PATH:/usr/local/bin"
# Prepare build (OCaml packages)
opam init --comp ${opamv}
echo ". /home/worker/.opam/opam-init/init.sh > /dev/null 2> /dev/null || true" >> .bashrc
# opam switch -v ${opamv}
opam install ocamlfind batteries sqlite3 fileutils yojson ppx_deriving_yojson zarith pprint menhir ulex process fix wasm stdint ocaml-migrate-parsetree
# Get the HACL* code
git clone ${haclrepo} hacl-star
git -C hacl-star checkout ${haclversion}
# Prepare submodules
opam config exec -- make -C hacl-star prepare -j10
# Cleanup.
rm -rf ~/.ccache ~/.cache
<file_sep>/build/Makefile
#To run these specs in Python you need to install Python >= 3.6
PYTHON?=python3.6

# CHECK_TEST_FILES: check_lists.py

# Every target below is a command, not a file it produces; declare them all
# phony so a same-named file can never shadow a target (the original list
# was missing check_test, check_fail_test, copy, install, docs and clean).
.PHONY: test all speclib_test check_test check_fail_test copy build install publish docs clean

all:

test: install speclib_test check_test check_fail_test

speclib_test:
	$(PYTHON) hacspec/tests/speclib_test.py
	$(PYTHON) hacspec/tests/check_test_run.py

check_test:
	hacspec-check hacspec/tests/check_test.py

# Negative test: hacspec-check must reject this file.
check_fail_test:
	if hacspec-check hacspec/tests/check_test_fail.py; then \
	  echo check_fail should have failed!; \
	  exit 1; \
	else \
	  echo check_fail failed as expected; \
	fi

# Vendor the library sources into the package directory before building.
copy: clean
	cp ../lib/check.py hacspec
	cp ../lib/speclib.py hacspec

build: copy
	$(PYTHON) setup.py sdist

install: build
	pip install .

publish: build
	twine upload dist/*

docs: copy
	sphinx-apidoc -o docs hacspec hacspec/check.py -f
	$(MAKE) -C docs html

clean:
	# pip uninstall hacspec
	rm -rf dist hacspec/check.py hacspec/speclib.py
	rm -rf hacspec.egg-info
	rm -rf MANIFEST
	rm -rf hacspec/__pycache__
	rm -rf hacspec/tests/__pycache__
	rm -rf docs/_build
	rm -rf __pycache__
<file_sep>/specs/gf128.py
#!/usr/bin/python3
from lib.speclib import *
blocksize : int = 16
# TODO: can't use blocksize as argument here due to compiler bug.
block_t = bytes_t(16)   # one full 128-bit GHASH block
key_t = bytes_t(16)     # GHASH key
tag_t = bytes_t(16)     # authentication tag
subblock_t,subblock = refine(vlbytes_t, lambda x: array.length(x) <= 16)  # possibly-partial block
elem_t = uint128_t      # element of GF(2^128), as a 128-bit string
# Define galois field
@typechecked
def elem(x:nat_t) -> elem_t:
    # Inject a natural number into GF(2^128) (bit-string representation).
    return uint128(x)
# GCM reduction constant: reflected representation of the field polynomial
# x^128 + x^7 + x^2 + x + 1.
irred : elem_t = elem(0xE1000000000000000000000000000000)
@typechecked
def elem_from_bytes(b:bytes_t(16)) -> elem_t:
    # Big-endian 16 bytes -> field element.
    return bytes.to_uint128_be(b)
@typechecked
def elem_to_bytes(e:elem_t) -> bytes_t(16):
    # Field element -> big-endian 16 bytes (inverse of elem_from_bytes).
    return bytes.from_uint128_be(e)
@typechecked
def fadd(x:elem_t,y:elem_t) -> elem_t:
    # Addition in GF(2^128) is bitwise XOR.
    return x ^ y
@typechecked
def fmul(x:elem_t,y:elem_t) -> elem_t:
    # Bit-serial multiplication in GF(2^128): scan the bits of y,
    # conditionally accumulating shifted copies of x and reducing by
    # ``irred`` whenever a bit falls off. (Assumes uintn indexing where
    # bit 0 is the least significant bit — confirm in speclib.)
    res : elem_t = elem(0)
    sh : elem_t = x
    for i in range(128):
        if y[127-i] != bit(0):
            res ^= sh
        if sh[0] != bit(0):
            sh = (sh >> 1) ^ irred
        else:
            sh = sh >> 1
    return res
# Define GMAC
@typechecked
def encode(block:subblock_t) -> elem_t:
    # Zero-pad a (possibly partial) block to 16 bytes, then lift it into
    # the field.
    b : block_t = bytes(array.create(16,uint8(0)))
    b[0:array.length(block)] = block
    return elem_from_bytes(b)
@typechecked
def decode(e:elem_t) -> block_t:
    # Serialize a field element back to a full 16-byte block.
    return elem_to_bytes(e)
@typechecked
def update(r:elem_t,block:subblock_t,acc:elem_t) -> elem_t:
    # One Horner step of the GHASH polynomial: (acc + block) * r.
    return fmul(fadd(encode(block),acc),r)
@typechecked
def poly(text:vlbytes_t,r:elem_t) -> elem_t:
    """Evaluate the GHASH polynomial over all 16-byte blocks of ``text``
    via Horner's rule; a trailing partial block is zero-padded by encode."""
    blocks : vlarray_t(vlbytes_t)
    last : vlbytes_t
    blocks,last = array.split_blocks(text,blocksize)
    acc : elem_t = elem(0)
    for i in range(array.length(blocks)):
        acc = update(r,blocks[i],acc)
    if (array.length(last) > 0):
        acc = update(r,bytes(last),acc)
    return acc
@typechecked
def gmac(text:vlbytes_t,k:key_t) -> tag_t :
    """Standalone GMAC of ``text`` under key ``k``.

    NOTE(review): the final mask ``s`` is all zeros here (no nonce input),
    so the tag is just the GHASH value — confirm this is intentional for
    this standalone spec (full GCM derives the mask from the nonce).
    """
    s : block_t = bytes(array.create(blocksize,uint8(0)))
    r : elem_t = encode(k)
    a : elem_t = poly(text,r)
    m : block_t = decode(fadd(a,encode(s)))
    return m
<file_sep>/lib/speclib.pyi
# We assume the following builtin functions and types
#
# fail: str -> None
# tuple_t: t1,t2,...,tn -> type
# option_t: t -> type
# vlarray_t: t -> type
# refine_t: t,f -> type treated as t
# contract_t: t,f,g -> type treated as t
# numeric types:
# int
# natmod_t: m -> type
# uintn_t: bits -> type
# vector_t: t,l -> type
# matrix_t: t,r,c -> type
# range_t: x,y -> type
# Concrete aliases for the abstract builtin types described above.
nat_t = int
pos_t = int
bitvector_t = uintn_t
bit_t = uintn_t(1)       # a single bit
uint8_t = uintn_t(8)     # fixed-width unsigned integer types
uint16_t = uintn_t(16)
uint32_t = uintn_t(32)
uint64_t = uintn_t(64)
uint128_t = uintn_t(128)
array_t = vlarray_t
vlbytes_t = vlarray_t(uint8_t)  # variable-length byte string
bytes_t = vlbytes_t
#operators
# +, - , *: natmod_t, natmod_t -> natmod_t
# : uintn_t(i), uintn_t(i) -> uintn_t(i)
# : int,int -> int
# : vector_t(t),vector_t(t) -> vector_t(t)
# : matrix_t(t),matrix_t(t) -> matrix_t(t)
# ** : natmod_t, int -> natmod_t
# : uintn_t(i), int -> uintn_t(i)
# : int,int -> int
# <<,>> : natmod_t, int -> natmod_t
# : uintn_t(i), int -> uintn_t(i)
# : int,int -> int
# ~ : uintn_t(i), int -> uintn_t(i)
# |,&,^ : uintn_t(i), int -> uintn_t(i)
# [] : uintn_t(i), int -> bit
# : vlarray_t(t), int -> t
# [ : ] : uintn_t(i), i:int, j:int -> uintn_t(j-i)
#decl natmod(x:int,m:int) -> natmod_t(m)
#decl natmod.to_int(x:natmod_t) -> int
#decl uintn.rotate_left(x:uintn_t(i),r:int) -> uintn_t(i)
#decl uintn.rotate_right(x:uintn_t(i),r:int) -> uintn_t(i)
#decl uintn.set_bits(x:uintn_t(i),start:int,end:int,value:uintn_t(end-start)) -> uintn_t(i)
#
#decl array.create(l:int,d:t) -> array_t(t,l)
#decl array.empty() -> array_t(t,0)
#decl array.singleton(d:t) -> array_t(t,1)
#decl array.createi(l:int,f:int->t) -> array_t(t,l)
#decl array.length(a) -> int
#decl array.copy(x:array_t(t,l)) -> array_t(t,l)
#decl array.concat(x:array_t(t,l),y:array_t(t,m)) -> array_t(t,l+m)
#decl array.split(x:array_t(t,l),i:int) -> tuple_t(array_t(t,i),array_t(t,l-i))
#decl array.zip(x:array_t(t,l),y:array_t(u,l)) -> array_t(tuple_t(t,u),l)
#decl array.enumerate(x:array_t(t,l)) -> array_t(tuple_t(int,t),l)
#decl array.split_blocks(x:array_t(t,l),b:int) -> array_t(array_t(t,b),l/b)
#decl array.concat_blocks(x:array_t(array_t(t,b),n)) -> array_t(t,b*n)
#decl array.map(f:t -> u,x:array_t(t,l)) -> array_t(u,l)
#
#decl bytes.from_nat_le(x:int) -> vlbytes_t
#decl bytes.to_nat_le(x:vlbytes_t) -> int
#decl bytes.from_nat_be(x:int) -> vlbytes_t
#decl bytes.to_nat_be(x:vlbytes_t) -> int
#
#decl bytes.from_uintn_le(x:uintn_t(i)) -> vlbytes_t
#decl bytes.to_uintn_le(x:vlbytes_t) -> uintn_t(i)
#decl bytes.from_uintn_be(x:uintn_t(i)) -> vlbytes_t
#decl bytes.to_uintn_be(x:vlbytes_t) -> uintn_t(i)
#
#decl bytes.from_uintns_le(x:vlarray_t(uintn_t(i))) -> vlbytes_t
#decl bytes.to_uintns_le(x:vlbytes_t) -> vlarray_t(uintn_t(i))
#decl bytes.from_uintns_be(x:vlarray_t(uintn_t(i))) -> vlbytes_t
#decl bytes.to_uintns_be(x:vlbytes_t) -> vlarray_t(uintn_t(i))
<file_sep>/LANGUAGE.md
The hacspec language is a typed subset of python. This file provides a rough description of the current language design.
There's a library `speclib.py` that provides common functionality that can be used in any specification written in hacspec.
hacspec files MUST NOT include any non-hacspec modules other than `speclib`.
We describe some of the functions provided by this library below (but their documentation needs to be improved.)
```
Types t ::= int | bool | str
| bit_t | uint8_t | uint16_t | uint32_t | uint64_t | uint128_t
| tuple2_t(t1,t2) | tuple3_t(t1,t2,t3) | tuple4_t(t1,t2,t3,t4) | tuple5_t(t1,t2,t3,t4,t5)
| vlarray_t(t)
| refine(t,pred)
| bitvector_t(len)
| range_t(min,max)
Derived Types:
nat := refine(int, lambda x: x >= 0)
array_t(t,len) := refine(vlarray_t(t), lambda x: length(x) == len)
vlbytes_t := vlarray_t(uint8_t)
bytes_t(len) := vlarray_t(uint8_t, len)
pfelem_t(prime) := refine(nat,lambda x: x < prime)
gfelem_t(len,irred) := bitvector_t(len)
```
```
Expressions e ::=
| x (variables)
| n (integer constants in hex or decimal)
| f(e1,...en) (call builtin or user-defined function)
| e1 binop e2 (operators on int and uintN, overloaded)
(binop includes arithmetic: +, -, *, //, %, **
bit manipulations: <<, >>, &, |
comparison: ==, !=, <, >, <=, >= )
| uintN(e) (convert int to uintN)
| (e1,...,en) (make tuple)
| array([e1,...,en]) (make array)
| e[e0] (array access)
| e[e0:e1] (array slice)
| fail("...") (stop execution with error)
```
```
Statements s ::=
| x : t = e (variable declaration)
| def f(x1:t1,...,xn:tn) -> t :
s (function declaration)
| x = e (assignment)
| (x1,..,xn) = e (tuple assignment)
| x[i] = e (array update)
| x[i:j] = e (array slice update)
| return e (return)
| if e:
s
else:
s (conditional)
| for i in range(e):
s (for loop)
| s
s (sequential composition)
| from x import x1,x2,...,xn (import from other module, only speclib or hacspec)
```
## Conventions
Hacspecs must adhere to the following coding conventions:
* All functions arguments must be typed. *(checked)*
* All functions must have a return type. *(checked)*
* Custom type names end with `_t`, e.g. `new_type_t`. If there exists a function casting to this type, the function must use the type name, e.g. `new_type()`.
* Every function must be annotated with `@typechecked`. *(checked)*
* Functions can be nested.
* Classes are not allowed. *(checked)*
* Only `import from` import statements of local spec files and `speclib` are allowed. *(checked)*
* Type annotations can be used but are not checked by `hacspec-check` or when running the code. *(maybe not allow)*
* Type comments are not supported. *(checked)*
## Library functions
Builtin functions (hacspec library in speclib.py):
```
bit, uint8, uint16, uint32, uint64, uint128:
constructor:
uintN(x:int) -> uintN
uintN(x:uintN) -> uintN
member functions:
to_int(u:uintN) -> int (convert uintN to int)
operators:
+ - * ~ | & ^ << >>
static methods:
uintN.num_bits(u:uintN) -> int (get bit size of u)
uintN.rotate_left(u:uintN,o:int) -> uintN (rotate u by o bits to the left)
uintN.rotate_right(u:uintN,o:int) -> uintN (rotate u by o bits to the right)
```
```
array(T,len):
copy(e:array[T]) -> array[T] (copy array)
create(len:int,d:T) -> array[T] (make array with len elements, each equal to d)
create_type(x:Iterable[U],t:type) -> array[t] (create a new array type)
len(a:array) (get length of array)
concat(x:array[T],y:array[U]) -> array[T] (concatenate two arrays)
concat_blocks(array[array[T]]) -> array[T]
(flatten array of arrays by concatenation)
split_blocks(a:array[T],blocksize:int) -> Tuple[array[array[T]], array[T]]
(split array into blocks of size blocksize;
last element may have size < blocksize)
zip(a:array[T],b:array[U]) -> array[Tuple[T,U]]
(zip two arrays into an array of tuples;
if the two arrays have different lengths,
truncate to shorter length)
enumerate(a:array[T]) -> array[Tuple[int,U]]
(convert each element x at index i into a pair (i,x))
create_random(len:nat, t:type) -> array[t] (create array with len random elements of type t)
bytes(len):
to_uintNs_le(b:bytes_t(4*len)) -> array_t(uintN,len)
(create array of uintNs from bytes)
from_uintNs_le(us:array_t(uintN,len)) -> bytes_t(4 * len)
(create bytes from array of uintNs)
```
# Test Vectors
Test vectors are defined in JSON following a schema.
Every schema can be checked either with `mypy` or with [`jsonschema`](http://json-schema.org/specification.html).
Note that `jsonschema` is stricter as it checks the JSON key names while `mypy` only checks types.
Test vector schemata MUST NOT use types other than "string" and "int"/"number".
Integers MUST be base 10.
## MAC
### Mypy TypedDict
Usage example:
`mypy specs/test_vectors/poly1305_test_vectors.py`
```
mac_test = TypedDict('mac_test', {
'input_len': str,
'input': str,
'key' : str,
'tag' : str}
)
```
### JSON Schema
Usage example:
`python spec-checker/check_schema.py mac specs/test_vectors/poly1305_test_vectors.json`
```
{
"type": "array",
"items": {
"type": "object",
"properties": {
"input_len": {
"type": "string"
},
"input": {
"type": "string"
},
"key": {
"type": "string"
},
"tag": {
"type": "string"
}
},
"required": [
"input_len",
"input",
"key",
"tag"
]
},
"maxProperties": 4
}
```
## Symmetric Encryption
### Mypy TypedDict
Usage example:
`mypy specs/test_vectors/chacha20_test_vectors.py`
```
enc_test = TypedDict('enc_test', {
'input_len': int,
'input': str,
'key' : str,
'nonce' : str,
'counter' : int,
'output' : str})
```
### JSON Schema
Usage example:
`python spec-checker/check_schema.py enc specs/test_vectors/chacha20_test_vectors.json`
```
enc_schema = {
"type": "array",
"items": {
"type": "object",
"properties": {
"input_len": {
"type": "number"
},
"input": {
"type": "string"
},
"key": {
"type": "string"
},
"nonce": {
"type": "string"
},
"counter": {
"type": "number"
},
"output": {
"type": "string"
}
},
"required": [
"input_len",
"input",
"key",
"nonce",
"counter",
"output"
]
},
"maxProperties": 6
}
```
<file_sep>/tests/xmss_test.py
#!/usr/bin/env python3
from specs.xmss import *
from lib.speclib import *
from tests.testlib import print_dot, exit
def test_xmss_self():
    # Smoke test: exercise XMSS key generation with random inputs.
    adr = array.create_random(nat(8), uint32)
    seed = bytes.create_random_bytes(n)
    msg = bytes.from_ints([0xFF, 0xFF, 0xFF, 0xFF, 0xFF])
    msg_h = sha256(msg)  # NOTE(review): msg_h is currently unused — presumably to be signed once sign/verify are wired up
    xmss_sk : SK_t = key_gen_xmss()  # only checks that key generation completes without raising
def main():
    # Entry point: run the XMSS self-test and exit 0 on success.
    print_dot()
    test_xmss_self()
    exit(0)
if __name__ == "__main__":
    main()
<file_sep>/tests/curve25519_test.py
from specs.curve25519 import *
import json
from sys import exit
def main (x: int) -> None :
    # X25519 test vector from RFC 7748, Section 5.2 (the original comment
    # said RFC 7539, but that RFC is ChaCha20-Poly1305; this scalar /
    # u-coordinate pair comes from RFC 7748).
    k0 = bytes.from_hex('a546e36bf0527c9d3b16154b82465edd62144c0ac1fc5a18506a2244ba449ac4')
    u0 = bytes.from_hex('e6db6867583030db3594c1a424b15f7c726624ec26b3353b10a903a6d0ab1c4c')
    expected = bytes.from_hex('c3da55379de9c6908e94ea4df28d084f32eccf03491c71f754b4075577a28552')
    computed = scalarmult(k0, u0)
    if not is_on_curve(u0):
        print("u0 is not on curve")
    if expected == computed:
        print("Curve25519 Test 0 passed.")
    else:
        print("Curve25519 0 failed:")
        print("expected:",expected)
        print("computed:",computed)
    # JSON vectors: each entry holds a scalar ('private'), a public
    # u-coordinate, the expected shared secret and a validity flag.
    file = open('tests/test_vectors/curve25519_test_vectors.json')
    curve25519_test_vectors = json.load(file)
    for i in range(len(curve25519_test_vectors)):
        s = bytes.from_hex(curve25519_test_vectors[i]['private'])
        p = bytes.from_hex(curve25519_test_vectors[i]['public'])
        expected = bytes.from_hex(curve25519_test_vectors[i]['result'])
        valid = curve25519_test_vectors[i]['valid']
        computed = scalarmult(s,p)
        # A point off the curve only fails the test when the vector is marked valid.
        if not is_on_curve(p):
            if valid:
                print("Curve25519 Test ",i+1," failed (public key is not on curve).")
            else:
                print("Curve25519 Test ",i+1," passed (public key is not on curve).")
        elif (computed == expected and valid):
            print("Curve25519 Test ",i+1," passed.")
        elif (not(computed == expected) and not valid):
            print("Curve25519 Test ",i+1," passed.")
        else:
            print("Curve25519 Test ",i+1," failed:")
            print("expected mac:",expected)
            print("computed mac:",computed)
            exit(1)
main(0)
<file_sep>/compiler/Makefile
# -*- Makefile -*-
# --------------------------------------------------------------------
# Build configuration for the hacspec OCaml tooling (checker + F* export).
OCAMLBUILD_JOBS ?= 2
OCAMLBUILD_BIN ?= ocamlbuild
OCAMLBUILD_EXTRA ?=
OCAMLBUILD_OPTS := -use-ocamlfind -j $(OCAMLBUILD_JOBS)
# In Emacs, use classic display to enable error jumping.
ifeq ($(shell echo $$TERM), dumb)
OCAMLBUILD_OPTS += -classic-display
endif
OCAMLBUILD_OPTS += $(OCAMLBUILD_EXTRA)
OCAMLBUILD := $(OCAMLBUILD_BIN) $(OCAMLBUILD_OPTS)
# --------------------------------------------------------------------
# BINS: the two executables produced here (syntax checker, F* translator).
BINS := checker to_fstar
NAME := hacspec
INSTALL := scripts/install-sh
DESTDIR ?= /usr/local
# --------------------------------------------------------------------
.PHONY: all build install uninstall check test clean __force__
.PHONY: %.byte %.native %.ml %.mli %.inferred.mli
all: build
	@true
build: $(BINS:%=%.native)
checker.%: __force__
	$(OCAMLBUILD) src/$@
to_fstar.%: __force__
	$(OCAMLBUILD) fstar-compiler/$@
# install renames checker.native to `hacs` in DESTDIR/bin.
install: $(BINS:%=%.native)
	$(INSTALL) checker.native $(DESTDIR)/bin/hacs
	$(INSTALL) to_fstar.native $(DESTDIR)/bin/to_fstar
uninstall:
	rm -f $(DESTDIR)/$(NAME)
check:
	make -C fstar-compiler/specs
test: check
	@true
clean:
	$(OCAMLBUILD) -clean
	rm -f $(BINS:%=%.native) $(BINS:%=%.byte) *.fst
	make -C fstar-compiler/specs clean
<file_sep>/specs/wots.py
#!/usr/bin/env python3
from lib.speclib import *
from specs.sha2 import sha256
# Influences signature length, not security
w_four : uint32_t = uint32(4)      # Winternitz parameter w = 4 (unused below)
w_sixteen : uint32_t = uint32(16)  # Winternitz parameter w = 16
# Parameters
# n := length (message, signature, key), SHA2 output length
n : int = 32
w : uint32_t = w_sixteen
log_w : int = 4
# length1: number of w-ary digits covering an n-byte message (RFC 8391 len_1)
length1 : uint32_t = uint32(int(speclib.ceil(8*n // log_w)))
tmp : int = uint32.to_int(length1) * (uint32.to_int(w) - 1)
tmp = speclib.log(tmp, 2)
# length2: number of w-ary digits for the checksum (RFC 8391 len_2)
length2 : uint32_t = uint32(int(tmp // log_w + 1))
# length: total number of hash chains (len = len_1 + len_2)
length : uint32_t = length1 + length2
# Types
key_t = bytes_t(n)                              # one n-byte hash value / key
sk_t = array_t(key_t, uint32.to_int(length))    # secret key: one seed per chain
pk_t = array_t(key_t, uint32.to_int(length))    # public key: chain end values
sig_t = array_t(key_t, uint32.to_int(length))   # signature: intermediate chain values
address_t = array_t(uint32_t, 8)                # XMSS hash-function address (8 words)
key_pair_t = tuple_t(sk_t, pk_t, address_t)
digest_t = bytes_t(32)                          # SHA-256 output
seed_t = bytes_t(n)
chain_t = tuple_t(address_t, vlbytes_t)         # (updated address, chain output)
# F: SHA2-256(toByte(0, 32) || KEY || M),
# PRF: SHA2-256(toByte(3, 32) || KEY || M).
@typechecked
def hash(prefix: key_t, key: key_t, m: vlbytes_t) -> digest_t:
    # Keyed hash with a 32-byte domain-separation prefix (see comment above).
    # NOTE: this shadows Python's builtin hash() within this module.
    h_in : bytes_t = bytes.concat(prefix, key)
    h_in = bytes.concat(h_in, m)
    return sha256(bytes(h_in))
@typechecked
def F(key: key_t, m: vlbytes_t) -> digest_t:
    # Chaining function F: keyed hash with domain-separation prefix 0.
    return hash(bytes.from_nat_be(nat(0), nat(32)), key, m)
@typechecked
def PRF(key: key_t, m: address_t) -> digest_t:
    # PRF: keyed hash with domain-separation prefix 3, applied to the
    # 32-byte big-endian serialization of the 8-word hash address.
    # Fix: the original repeated eight copy-pasted concat statements and
    # annotated the byte accumulator as address_t; a loop over the 8 words
    # produces the identical byte string with the correct vlbytes_t type.
    m_ : vlbytes_t = bytes([])
    for i in range(8):
        m_ = bytes.concat(m_, bytes.from_uint32_be(m[i]))
    return hash(bytes.from_nat_be(nat(3), nat(32)), key, m_)
# Address is a 32-byte array with the following definition
# 4-byte: layer address
# 8-byte: tree address
# 4-byte: type: 0 for OTS, 1 for L-tree, 2 for hash tree
# 4-byte: OTS address, L-tree address, padding (0)
# 4-byte: chain address, tree height
# 4-byte: hash address, tree index
# 4-byte: key and mask
@typechecked
def set_type(adr: address_t, t: uint32_t) -> address_t:
    # Return a copy of adr with the type word (index 3) set to t.
    result : address_t = array.copy(adr)
    result[3] = t
    return result
@typechecked
def set_ots_address(adr: address_t, ots_adr: uint32_t) -> address_t:
    # Return a copy of adr with the OTS-address word (index 4, i.e. -4) set.
    result : address_t = array.copy(adr)
    result[-4] = ots_adr
    return result
@typechecked
def set_ltree_address(adr: address_t, ltree_adr: uint32_t) -> address_t:
    # The L-tree address occupies the same word as the OTS address.
    return set_ots_address(adr, ltree_adr)
@typechecked
def set_chain_address(adr: address_t, h_adr: uint32_t) -> address_t:
    # Return a copy of adr with the chain-address word (index 5, i.e. -3) set.
    result : address_t = array.copy(adr)
    result[-3] = h_adr
    return result
@typechecked
def set_tree_height(adr: address_t, h: uint32_t) -> address_t:
    # The tree height occupies the same word as the chain address.
    return set_chain_address(adr, h)
@typechecked
def get_tree_height(adr: address_t) -> uint32_t:
    # Read the tree-height word (index 5, i.e. -3).
    return adr[-3]
@typechecked
def set_hash_address(adr: address_t, h_adr: uint32_t) -> address_t:
    # Return a copy of adr with the hash-address word (index 6, i.e. -2) set.
    result : address_t = array.copy(adr)
    result[-2] = h_adr
    return result
@typechecked
def set_tree_index(adr: address_t, i: uint32_t) -> address_t:
    # The tree index occupies the same word as the hash address.
    return set_hash_address(adr, i)
@typechecked
def get_tree_index(adr: address_t) -> uint32_t:
    # Read the tree-index word (index 6, i.e. -2).
    return adr[-2]
@typechecked
def set_key_and_mask(adr: address_t, kam: uint32_t) -> address_t:
    # Return a copy of adr with the key-and-mask word (index 7, i.e. -1) set
    # (0 selects key generation, 1 selects bitmask generation in the PRF).
    result : address_t = array.copy(adr)
    result[-1] = kam
    return result
# Input: Input string X, start index i, number of steps s, seed SEED, address ADRS
# Output: value of F iterated s times on X
@typechecked
def wots_chain(x: bytes_t, start: int, steps: int, seed: seed_t, adr: address_t) -> chain_t:
    # Iterate F on x for `steps` rounds, with the hash-address counter
    # starting at `start`; returns the updated address and the chain value.
    hmo = bytes.copy(x)
    adr : address_t
    for i in range(start + steps):
        if i >= start:
            # TODO: This is a hack because hacspec currently doesn't allow range(min, max)
            adr = set_hash_address(adr, uint32(i))
            adr = set_key_and_mask(adr, uint32(0))
            key : digest_t = PRF(seed, adr)   # per-round key
            adr = set_key_and_mask(adr, uint32(1))
            bm : digest_t = PRF(seed, adr)    # per-round bitmask
            # XOR the current chain value with the bitmask, then apply F.
            fin = bytes([])
            for (a, b) in array.zip(hmo, bm):
                fin = bytes.concat(fin, bytes([a ^ b]))
            hmo = F(key, fin)
    return adr, hmo
@typechecked
def key_gen_sk() -> sk_t:
    # Generate a fresh WOTS+ secret key: `length` random n-byte chain seeds.
    sk : sk_t = sk_t.create(uintn.to_int(length), key_t.create(n, uint8(0)))
    for i in range(uint32.to_int(length)):
        sk_i: bytes_t = bytes.create_random_bytes(n)
        sk[i] = sk_i
    return sk
@typechecked
def key_gen_pk(adr: address_t, seed: seed_t, sk: sk_t) -> tuple_t(pk_t, address_t):
    # Derive the public key by running each secret chain to its end (w-1 steps).
    pk : pk_t = pk_t.create(uintn.to_int(length), key_t.create(n, uint8(0)))
    pk_i : vlbytes_t
    for i in range(uint32.to_int(length)):
        adr : address_t = set_chain_address(adr, uint32(i))
        adr, pk_i = wots_chain(sk[i], 0, uint32.to_int(w)-1, seed, adr)
        pk[i] = pk_i
    return (pk, adr)
@typechecked
def key_gen(adr: address_t, seed: seed_t) -> key_pair_t:
    # Full key generation: random secret key plus its derived public key.
    sk : sk_t = key_gen_sk()
    pk : pk_t
    adr_out : address_t
    pk, adr_out = key_gen_pk(adr, seed, sk)
    return (sk, pk, adr_out)
@typechecked
def base_w(msg: vlbytes_t, l: uint32_t) -> vlbytes_t:
    # Convert msg into `l` base-w digits (RFC 8391, Section 2.6), consuming
    # log_w bits per output digit, most significant bits first.
    # Fix: the original kept an `out` counter that was incremented but never
    # read; it has been removed (no behavioral change).
    i : int = 0
    total : int = 0
    bits : int = 0
    basew : vlbytes_t = bytes([])
    for consumed in range(uint32.to_int(l)):
        if bits == 0:
            # Refill the bit buffer with the next input byte.
            total = uint8.to_int(msg[i])
            i = i + 1
            bits = bits + 8
        bits = bits - int(log_w)
        bw : int = (total >> bits) & int(uint32.to_int(w) - 1)
        basew = array.concat(basew, bytes([uint8(bw)]))
    return basew
@typechecked
def wots_msg(msg: digest_t) -> vlbytes_t:
    # Expand a digest into length1 base-w digits plus a length2-digit checksum
    # (RFC 8391 signing pre-processing).
    csum : int = 0
    m : vlbytes_t = base_w(msg, length1)
    for i in range(uint32.to_int(length1)):
        csum = csum + uint32.to_int(w) - 1 - uint32.to_int(m[i])
    # Left-align the checksum before serializing it to bytes.
    csum = nat(csum << int(8 - ((uint32.to_int(length2) * log_w) % 8)))
    length2_bytes : nat_t = speclib.ceil((uint32.to_int(length2) * log_w) // 8)
    csum_bytes : bytes_t = bytes.from_nat_be(csum, length2_bytes)
    m : bytes_t = array.concat(m, base_w(csum_bytes, length2))
    return m
@typechecked
def wots_sign(msg: digest_t, sk: sk_t, adr: address_t, seed: seed_t) -> sig_t:
    # Sign: advance chain i by m[i] steps from its secret start value.
    m : vlbytes_t = wots_msg(msg)
    sig : sig_t = sig_t.create(uintn.to_int(length), key_t.create(n, uint8(0)))
    for i in range(uint32.to_int(length)):
        adr : address_t = set_chain_address(adr, uint32(i))
        sig_i : vlbytes_t
        adr, sig_i = wots_chain(sk[i], 0, uint32.to_int(m[i]), seed, adr)
        sig[i] = sig_i
    return sig
@typechecked
def wots_verify(pk: pk_t, msg: digest_t, sig: sig_t, adr: address_t, seed: seed_t) -> tuple_t(pk_t, address_t):
    # Complete each chain from the signature value (w-1-m[i] remaining steps).
    # The result equals pk iff the signature is valid; the caller performs the
    # comparison (the pk parameter is not compared here).
    m : vlbytes_t = wots_msg(msg)
    pk2 : pk_t = pk_t.create(uintn.to_int(length), key_t.create(n, uint8(0)))
    for i in range(uint32.to_int(length)):
        adr : address_t = set_chain_address(adr, uint32(i))
        m_i : int = uint32.to_int(m[i])
        pk_i : vlbytes_t
        adr, pk_i = wots_chain(sig[i], m_i, uint32.to_int(w) - 1 - m_i, seed, adr)
        pk2[i] = pk_i
    return (pk2, adr)
<file_sep>/tests/poly1305_test.py
from specs.poly1305 import *
import json
from sys import exit
def main (x: int) -> None :
    """Run the Poly1305 spec against the RFC 7539 vector and JSON vectors.

    Exits with status 1 on the first mismatching tag.
    """
    file = open('tests/test_vectors/poly1305_test_vectors.json')
    poly1305_test_vectors = json.load(file)
    # RFC 7539 Test Vectors ("Cryptographic Forum Research Group" message)
    msg = bytes.from_ints([
        0x43, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x67, 0x72,
        0x61, 0x70, 0x68, 0x69, 0x63, 0x20, 0x46, 0x6f,
        0x72, 0x75, 0x6d, 0x20, 0x52, 0x65, 0x73, 0x65,
        0x61, 0x72, 0x63, 0x68, 0x20, 0x47, 0x72, 0x6f,
        0x75, 0x70])
    k = bytes.from_ints([
        0x85, 0xd6, 0xbe, 0x78, 0x57, 0x55, 0x6d, 0x33,
        0x7f, 0x44, 0x52, 0xfe, 0x42, 0xd5, 0x06, 0xa8,
        0x01, 0x03, 0x80, 0x8a, 0xfb, 0x0d, 0xb2, 0xfd,
        0x4a, 0xbf, 0xf6, 0xaf, 0x41, 0x49, 0xf5, 0x1b])
    expected = bytes.from_ints([
        0xa8, 0x06, 0x1d, 0xc1, 0x30, 0x51, 0x36, 0xc6,
        0xc2, 0x2b, 0x8b, 0xaf, 0x0c, 0x01, 0x27, 0xa9 ])
    computed = poly1305_mac(msg,k)
    if (computed == expected):
        print("Poly1305 Test 0 passed.")
    else:
        print("Poly1305 Test 0 failed:")
        print("expected mac:",expected)
        print("computed mac:",computed)
        exit(1);
    # JSON vectors: each entry is {input, key, tag} in hex.
    for i in range(len(poly1305_test_vectors)):
        msg = bytes.from_hex(poly1305_test_vectors[i]['input'])
        k = bytes.from_hex(poly1305_test_vectors[i]['key'])
        expected = bytes.from_hex(poly1305_test_vectors[i]['tag'])
        computed = poly1305_mac(msg,k)
        if (computed == expected):
            print("Poly1305 Test ",i+1," passed.")
        else:
            print("Poly1305 Test ",i+1," failed:")
            print("expected mac:",expected)
            print("computed mac:",computed)
            exit(1)
main(0)
<file_sep>/Makefile
# To run these specs in Python you need to install Python >= 3.6.4
PYTHON?=python3.6
# hacspecs passing compiler checks
FINISHED_SPECS=poly1305 chacha20 aead_chacha20poly1305 curve25519 curve448 \
	aes gf128 aead_aes128gcm
# hacspecs that pass all python checks and can be used in any make target.
SPECS=$(FINISHED_SPECS) sha2 sha3 ed25519 p256 rsapss blake2
# Don't use these in test. They take too long.
SLOW_SPECS=wots kyber
# Like SLOW_SPECS and they fail the spec checker.
SLOW_SPECS_FAILING_SPECHECK=frodo argon2i
# These specs run just fine but don't pass the spec checker.
SPECS_FAILING_SPECHECK=
# These specs are broken or work in progress.
BROKEN_SPECS=vrf xmss
.PHONY: test $(SPECS) all compiler
# Python targets. These only require Python.
all: run check test
# run: execute each spec's test driver directly (spec name == target name).
run: $(SPECS) $(SPECS_FAILING_SPECHECK) $(SLOW_SPECS)
test: $(addsuffix -test, $(SPECS) $(SPECS_FAILING_SPECHECK))
check: $(addsuffix -check, $(SPECS) $(SLOW_SPECS))
$(SPECS) $(SLOW_SPECS) $(SPECS_FAILING_SPECHECK):
	PYTHONPATH=. $(PYTHON) -O tests/$@_test.py
%-check: specs/%.py
	PYTHONPATH=. $(PYTHON) lib/check.py $<
%-test: tests/%_test.py
	PYTHONPATH=. $(PYTHON) $<
# Compiler targets
# NOTE that this requires OCAML.
compiler: checker fstar parse
checker:
	make -C compiler
fstar:
	make -C compiler
parse: $(addsuffix -parse, $(FINISHED_SPECS))
%-parse: specs/%.py
	compiler/checker.native $<
# Documentation targets
# NOTE that this requires hugo (https://gohugo.io)
website:
	cd doc/website/ && hugo
	cd doc/poly-slides && hugo
website-dev:
	cd doc/website/ && hugo serve -D
website-slides-dev:
	cd doc/poly-slides && hugo serve -D
python-docs:
	make -C build/ docs
<file_sep>/doc/website/content/_index.md
---
title: "hacspec"
description: "the new specification language for crypto primitives"
type: page
---
## github
Check the repository on [Github](https://github.com/HACS-workshop/hacspec).
## docs
* You can find an early version of the documentation for the hacspec `speclib` [here](https://hacs-workshop.github.io/hacspec/docs/).
* Translating RFC pseudo-code to hacspec, [an example (poly1305)](./poly-slides).
## contact
Discussions are happening on the [mailing list](https://moderncrypto.org/mailman/listinfo/hacspec).
Chat with us on [gitter](https://gitter.im/hacspec/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link).
<file_sep>/tests/rsapss_test.py
from lib.speclib import *
from specs.rsapss import rsapss_sign, rsapss_verify, os2ip
from sys import exit
from tests.testlib import print_dot, exit
import json
def main():
    # Run RSA-PSS sign/verify against the JSON test vectors; exits 1 on the
    # first mismatch, 0 when every vector passes.
    file = open('tests/test_vectors/rsapss_test_vectors.json')
    rsapss_test_vectors = json.load(file)
    print_dot()
    for i in range(len(rsapss_test_vectors)):
        modBits = nat(rsapss_test_vectors[i]['modBits'])
        nBytes = bytes.from_hex(rsapss_test_vectors[i]['n'])
        eBytes = bytes.from_hex(rsapss_test_vectors[i]['e'])
        dBytes = bytes.from_hex(rsapss_test_vectors[i]['d'])
        salt = bytes.from_hex(rsapss_test_vectors[i]['salt'])
        msg = bytes.from_hex(rsapss_test_vectors[i]['msg'])
        sgnt_expected = bytes.from_hex(rsapss_test_vectors[i]['sgnt'])
        valid = rsapss_test_vectors[i]['valid']
        # Build the (n, e) public key and ((n, e), d) secret key as integers.
        pkey = (os2ip(nBytes), os2ip(eBytes))
        skey = (pkey, os2ip(dBytes))
        sLen = nat(array.length(salt))
        sgnt_computed = rsapss_sign(modBits, skey, salt, msg)
        vrfy = rsapss_verify(modBits, pkey, sLen, msg, sgnt_computed)
        # A vector passes when signature, validity flag and verification agree.
        if (sgnt_computed == sgnt_expected and valid and vrfy):
            print("RSA-PSS Test ", i, " passed.")
        elif (not(sgnt_computed == sgnt_expected) and not valid and not vrfy):
            print("RSA-PSS Test ", i, " passed.")
        else:
            print("RSA-PSS Test ", i, " failed:")
            print("expected: ", sgnt_expected)
            print("computed: ", sgnt_computed)
            exit(1)
    exit(0)
if __name__ == '__main__':
    main()
<file_sep>/tests/aead_aes128gcm_test.py
from specs.aead_aes128gcm import *
from sys import exit
import json
def main(x: int) -> None :
    # AEAD AES-128-GCM round trip against the JSON vectors: encrypt, compare
    # ciphertext and tag, then decrypt and compare the recovered plaintext.
    file = open('tests/test_vectors/aead_aes128gcm_test_vectors.json')
    aead_aes128gcm_test_vectors = json.load(file)
    for i in range(len(aead_aes128gcm_test_vectors)):
        msg = bytes.from_hex(aead_aes128gcm_test_vectors[i]['input'])
        k = bytes.from_hex(aead_aes128gcm_test_vectors[i]['key'])
        n = bytes.from_hex(aead_aes128gcm_test_vectors[i]['nonce'])
        aad = bytes.from_hex(aead_aes128gcm_test_vectors[i]['aad'])
        exp_cipher = bytes.from_hex(aead_aes128gcm_test_vectors[i]['output'])
        exp_mac = bytes.from_hex(aead_aes128gcm_test_vectors[i]['tag'])
        cipher, mac = aead_aes128gcm_encrypt(k,n,aad,msg)
        decrypted_msg = aead_aes128gcm_decrypt(k,n,aad,cipher,mac)
        if (exp_cipher == cipher and exp_mac == mac and decrypted_msg == msg):
            print("AEAD-AES128-GCM Test ",i+1," passed.")
        else:
            print("AEAD-AES128-GCM Test ",i+1," failed:")
            print("expected cipher: ", exp_cipher)
            print("computed cipher: ", cipher)
            print("expected mac: ", exp_mac)
            print("computed mac: ", mac)
            exit(1)
if __name__ == "__main__":
    main(0)
| 6754b923e699d46e42d404c70eabedeca96a25f3 | [
"reStructuredText",
"Markdown",
"TOML",
"Makefile",
"Python",
"Dockerfile",
"Shell"
] | 70 | Python | HACS-workshop/hacspec | cf537b90934541ab2b31da8c573bfb2f4123833c | 6b85ae0e3dfe43bc1a4c5fb4637989d2caf3783f |
refs/heads/main | <repo_name>jjamtiger3/vue3-components<file_sep>/src/store/index.js
import { createStore } from 'vuex'
export const store = createStore({
    state() {
        return {
            // Per-target checkbox UI state, keyed by a target id:
            // { [target]: { checked: Boolean, text: String } }
            checkBoxState: {}
        }
    },
    mutations: {
        // Set the checked flag for a target, creating its entry on first use.
        setChecked (state, payload) {
            const { target, checked } = payload
            if (!state.checkBoxState[target]) {
                state.checkBoxState[target] = {}
            }
            state.checkBoxState[target].checked = checked;
        },
        // Set the label text for a target, creating its entry on first use.
        setText (state, payload) {
            const { target, text } = payload
            if (!state.checkBoxState[target]) {
                state.checkBoxState[target] = {}
            }
            state.checkBoxState[target].text = text;
        }
    },
    getters: {
        // Expose the whole checkbox state map to components.
        checkboxState: (state) => {
            return state.checkBoxState
        }
    }
})
/**
 * Grid column model: wraps a column's id, header config and style.
 *
 * Fix: the original declared public fields `header`/`style` AND same-named
 * accessors whose bodies read/wrote `this.header` / `this.style`. Invoking
 * those accessors would recurse infinitely; they were only masked by the
 * instance fields shadowing them. The accessors now use private-by-convention
 * backing fields (`_header`, `_style`) instead.
 */
class Column {
    // TODO (from original, translated): decide how to assign an initial column id.
    /**
     * @param {{id: string, header: Object, style: Object}} columnOptions
     */
    constructor(columnOptions) {
        this.id = columnOptions.id;          // column identifier
        this._header = columnOptions.header; // header configuration
        this._style = columnOptions.style;   // column/cell style
    }
    get header () {
        return this._header;
    }
    set header (header) {
        this._header = header;
    }
    get style () {
        return this._style;
    }
    set style (style) {
        this._style = style;
    }
}
export default Column;<file_sep>/src/common/maskLib.js
import util from './util';
/**
 * String-masking helpers (e.g. phone-number formatting).
 *
 * Mask syntax: '0' placeholders separated by a literal splitter character,
 * optionally two patterns separated by ';' (e.g. '000-000-0000;000-0000-0000').
 * '*' marks redacted positions.
 *
 * NOTE(review): `_getExpFromMask` reads `maskLib.type` ('number' selects
 * \d-based patterns), but `type` is never defined in this file — presumably
 * assigned by the consuming component. TODO confirm.
 */
const maskLib = {
    // Restore redacted ('*') positions of `value` from the original
    // `diffValue`, then strip every non-alphanumeric character.
    _applyReal: (value, diffValue) => {
        const arrNewValue = value.split('');
        const origValue = diffValue.split('');
        // fix(idiom): the original used Array.prototype.filter purely for
        // iteration and discarded its result; forEach states the intent.
        arrNewValue.forEach((ch, index) => {
            if (origValue[index] && ch === '*') {
                arrNewValue[index] = origValue[index];
            }
        });
        return arrNewValue.join('').replace(/[^\d\w]/g, '');
    },
    // Format `value` according to `mask`; with two ';'-separated patterns,
    // pick the one whose placeholder count fits the input length.
    _applyMask: (mask, value) => {
        const masks = mask.split(';');
        const maskLen = masks.length; // number of mask patterns
        let firstMaskLen = 0;
        let secondMaskLen = 0;
        let objMask;
        let _maskedValue = value;
        // The splitter is the first non-'0' character of the mask (e.g. '-').
        const splitter = mask.replace(/[0]/g, '')[0];
        if (splitter) {
            // Keep the shorter pattern first so it is considered first.
            if(maskLen > 1) {
                if(masks[0].replace(/(\W)/g, '').length > masks[1].replace(/(\W)/g, '').length) {
                    let tmp = masks[0];
                    masks[0] = masks[1];
                    masks[1] = tmp;
                }
            }
            firstMaskLen = masks[0].replace(/(\W)/g, '').trim().length; // placeholder count of the first pattern
            objMask = maskLib._getExpFromMask(value, masks[0], splitter); // regex + replacement for the first pattern
            if(maskLen > 1) {
                // Only two patterns are supported for now; 3+ would need a loop.
                secondMaskLen = masks[1].replace(/(\W)/g, '').trim().length;
                // Choose the pattern whose placeholder count fits the value length.
                if(firstMaskLen > secondMaskLen && value.length > secondMaskLen || firstMaskLen === secondMaskLen) {
                    objMask = maskLib._getExpFromMask(value, masks[0], splitter);
                } else if (firstMaskLen < secondMaskLen && value.length > firstMaskLen) {
                    objMask = maskLib._getExpFromMask(value, masks[1], splitter);
                }
            }
            // Apply the regex transformation to the raw value.
            const { regExp, repExp } = objMask;
            _maskedValue = _maskedValue.replace(regExp, repExp);
        }
        return _maskedValue;
    },
    // Copy '*' redaction markers from `mask` onto `maskValue`, position by position.
    _applyAsterik: (mask, maskValue) => {
        let applyValue = maskValue;
        const editLen = applyValue.length;
        for (let pos = 0; pos < editLen; pos += 1) {
            if (mask[pos] === '*') {
                applyValue = util.replaceAt(applyValue, pos, mask[pos]);
            }
        }
        return applyValue;
    },
    // Build { regExp, repExp } from one mask pattern: a capture group per
    // splitter-delimited segment, replaced as `$1<splitter>$2...`.
    _getExpFromMask: (_maskedValue, mask, splitter) => {
        const _maskPiece = mask.split(splitter);
        const _pattLen = _maskPiece.length;
        const strExp = [];
        const repExp = [];
        const arrCurrMask = [];
        const charPatt = maskLib.type === 'number' ? 'Z' : 'A';
        for (let i = 0; i < _pattLen; i += 1) {
            let _strReg;
            switch (charPatt) {
                case 'Z':
                    if (i === 0) { // the first segment always captures its full length
                        _strReg = `(\\d{${_maskPiece[i].length}})`;
                        strExp.push(_strReg);
                        repExp.push(`$${i + 1}`);
                    } else { // later segments capture between 1 and their length
                        _strReg = `(\\d{1,${_maskPiece[i].length}})`;
                        strExp.push(_strReg);
                        repExp.push(`$${i + 1}`);
                    }
                    arrCurrMask.push(_maskPiece[i]);
                    break;
                case 'A':
                    if (i === 0) { // the first segment always captures its full length
                        _strReg = `(\\w{${_maskPiece[i].length}})`;
                        strExp.push(_strReg);
                        repExp.push(`$${i + 1}`);
                    } else { // later segments capture between 1 and their length
                        _strReg = `(\\w{1,${_maskPiece[i].length}})`;
                        strExp.push(_strReg);
                        repExp.push(`$${i + 1}`);
                    }
                    arrCurrMask.push(_maskPiece[i]);
                    break;
                default:
                    break;
            }
            // Stop once the input is shorter than the accumulated mask length.
            if (_maskedValue.length <= arrCurrMask.join('').length) {
                break;
            }
        }
        return {
            // fix(security/idiom): build the RegExp directly instead of
            // eval()'ing a regex literal — identical pattern and flags.
            regExp: new RegExp(strExp.join(''), 'g'),
            repExp: repExp.join(splitter),
        };
    },
    // True when the mask contains an asterisk redaction marker.
    // fix(bug): was `mask.indexOf('*' > -1)`, which evaluates '*' > -1 first
    // and always returns -1 (truthy) — every mask was reported as redacted.
    _isIncludeAsterik(mask) {
        return mask.indexOf('*') > -1;
    },
}
export default maskLib;<file_sep>/src/main.js
// Application bootstrap: create the Vue 3 app, register the Vuex store,
// load Font Awesome assets, and mount onto the #app element.
import { createApp } from 'vue'
import App from './App.vue'
import { store } from './store'
import '@fortawesome/fontawesome-free/css/all.css'
import '@fortawesome/fontawesome-free/js/all.js'
const app = createApp(App)
app.use(store)
app.mount('#app')
| ab831dd4396a3b349dee7a6ded330b1c91f10e42 | [
"JavaScript"
] | 4 | JavaScript | jjamtiger3/vue3-components | 0d88e87ec3ea5fbb0eb5d88c3534fe16e6a5df52 | d7c29b6cbb6954c5783f9bdf5d59b672df80c7c7 |
refs/heads/master | <repo_name>emersondevto/100DaysOfCPP<file_sep>/BasicCPP/do-while-loop.h
//
// do-while-loop.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef do_while_loop_h
#define do_while_loop_h
// The only difference between while and do-while: the latter always runs
// its body at least once, even when the condition is false from the start.
// Here the condition (counter < 0) is immediately false, so exactly one
// line — "Number: 0" — is printed.
void do_while_loop () {
    int counter = 0;
    do {
        std::cout << "Number: " << counter << std::endl;
        ++counter;
    } while (counter < 0);
}
#endif /* do_while_loop_h */
<file_sep>/BasicCPP/array-1D.h
//
// array-1D.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef array_1D_h
#define array_1D_h
// Demonstrates 1-D array indexing by printing the first element (1).
void array_numbers (){
    const int values[] = { 1, 2, 3, 4, 5, 6 };
    std::cout << values[0] << std::endl;
}
#endif /* array_1D_h */
<file_sep>/BasicCPP/const.h
//
// const.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef const_h
#define const_h
// Demonstrates a read-only local variable: reassigning a const (see the
// trailing commented-out statement) would be a compile-time error.
void constant () {
    const double pi = 3.65; // pi = 4.5; would not compile: pi is const
}
#endif /* const_h */
<file_sep>/BasicCPP/array_2D.h
//
// array_2d.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef array_2d_h
#define array_2d_h
// Demonstrates a 3x2 matrix literal and element access: row 0, column 1
// holds 2, which is what gets printed.
void array_2D (){
    const int grid[3][2] = {
        { 1, 2 },
        { 3, 4 },
        { 5, 6 }
    };
    std::cout <<"Matrix Fila 0 Columna 1: " << grid[0][1] <<std::endl;
}
#endif /* array_2d_h */
<file_sep>/BasicCPP/for-loop.h
//
// for-loop.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef for_loop_h
#define for_loop_h
// Prints "for loop number: 0" through "for loop number: 4".
void for_loop () {
    for (int i = 0; i < 5; ++i) {
        std::cout << "for loop number: " << i << std::endl;
    }
}
#endif /* for_loop_h */
<file_sep>/demo.cpp
#include <iostream>
using namespace std;
// https://www.youtube.com/watch?v=-erXR6k9TeE&t=4893s
int main(int argc, char const *argv[])
{
    // Echo demo: print a greeting, read one integer from stdin, print it back.
    cout << "Hola Mundo" << endl;
    int x ;
    // NOTE(review): x stays uninitialized if extraction fails — check cin's
    // state before relying on x in real code.
    cin >> x;
    cout << x << endl;
    /* code */
    return 0;
}
//
// getting-started.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef getting_started_h
#define getting_started_h
// NOTE(review): `using namespace std;` at file scope in a header leaks into
// every translation unit that includes it — consider removing.
using namespace std;
void getting_started () {
    // Prompt for a name, read a whole line (getline keeps spaces, unlike
    // `cin >> name` which stops at the first whitespace), and echo it back.
    string name;
    // insert code here...
    cout << "Escriba su nombre" << endl;
    // cin >> name;
    getline(cin, name);
    cout << name << endl;
    // __cplusplus reports the language-standard version in use.
    cout << "versión de compilación: " << __cplusplus << endl;
}
#endif /* getting_started_h */
<file_sep>/BasicCPP/classes.h
//
// classes.h
// BasicCPP
//
// Created by Emerson on 4/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef classes_h
#define classes_h
void classes () {
    // Minimal class demo: a function-local class with public data members.
    class Persona {
    public:
        int age;
        string name;
    };
    Persona emerson;  // stack instance; members are uninitialized until set
    emerson.age = 20;
    emerson.name = "Emerson";
    std::cout << emerson.age << std::endl;
    std::cout << emerson.name << std::endl;
}
#endif /* classes_h */
<file_sep>/BasicCPP/pointers.h
//
// pointers.h
// BasicCPP
//
// Created by Emerson on 3/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef pointers_h
#define pointers_h
void pointers() {
    string name = "Emerson";
    int age = 31;
    bool isMale = true;
    // Print the values of the variables
    std::cout << name << std::endl;
    std::cout << age << std::endl;
    std::cout << isMale << std::endl;
    // Print the RAM addresses of the variables (hexadecimal)
    std::cout << &name << std::endl;
    std::cout << &age << std::endl;
    std::cout << &isMale << std::endl;
    // Pointer variables (declared with *) store the address of a variable
    string *name1 = &name;
    int *age1 = &age;
    bool *isMale1 = &isMale;
    // Printing a pointer without * shows the stored address in hexadecimal
    std::cout << name1 << std::endl;
    std::cout << age1 << std::endl;
    std::cout << isMale1 << std::endl;
    // Dereferencing with * prints the value stored at that address
    std::cout << *name1 << std::endl;
    std::cout << *age1 << std::endl;
    std::cout << *isMale1 << std::endl;
    string holaMundo = "Hola Mundo";
    std::cout << holaMundo << std::endl;
    std::cout << &holaMundo << std::endl;
    string *holaMundoHexa = &holaMundo;
    std::cout << holaMundoHexa << std::endl;
    std::cout << " *holaMundoHexa " << *holaMundoHexa << std::endl;
    // Updating the variable is also visible through the pointer
    holaMundo = "Hola Mundo 2";
    std::cout << holaMundo << std::endl;
    std::cout << *holaMundoHexa << std::endl;
    // Compile error if uncommented: a string literal (const char*) cannot be
    // assigned to a std::string* pointer.
    // holaMundoHexa = "<NAME>";
    //
    // std::cout << holaMundo << std::endl;
    // std::cout << *holaMundoHexa << std::endl;
}
#endif /* pointers_h */
<file_sep>/BasicCPP/if.h
//
// if.h
// BasicCPP
//
// Created by Emerson on 31/12/19.
// Copyright © 2019 Emerson. All rights reserved.
//
#ifndef if_h
#define if_h
#include <iostream>
// Prints which of the two numbers is larger.
// Fix: the output string was missing a leading space, so it printed e.g.
// "10es mayor"; it now prints "10 es mayor".
// Note: when num1 == num2 the else branch runs and reports num2; ties are
// not distinguished by this demo.
void if_statement (int num1, int num2) {
    if(num1 > num2){
        std::cout << num1 << " es mayor" << std::endl;
    } else {
        std::cout << num2 << " es mayor" << std::endl;
    }
}
#endif /* if_h */
<file_sep>/BasicCPP/main.cpp
//
// main.cpp
// #100DaysOfCPP
//
// Created by Emerson on 28/12/19.
// Copyright © 2019 Emerson. All rights reserved.
//
#include <iostream>
//using std::cout;
//using std::endl;
#include "if.h"
#include "array-1D.h"
#include "const.h"
#include "getting-started.h"
#include "while-loop.h"
#include "do-while-loop.h"
#include "for-loop.h"
#include "array_2D.h"
#include "nested_loops.h"
#include "pointers.h"
#include "classes.h"
#include "classes2.h"
using namespace std;
int main(int argc, const char * argv[]) {
    // Each commented-out call below is an independent demo; only the
    // pointers and classes demos are currently enabled.
    // getting_started();
    // if_statement(10, 3);
    // array_numbers();
    // constant();
    // while_loop();
    // do_while_loop();
    // for_loop();
    // array_2D();
    // nested_loops();
    pointers();
    classes();
    classes2();
    return 0;
}
<file_sep>/BasicCPP/classes2.h
//
// classes2.h
// BasicCPP
//
// Created by Emerson on 4/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef classes2_h
#define classes2_h
// Demonstrates stack vs. heap instances of a function-local class.
// Fix: the heap-allocated `person2` was never deleted, leaking memory on
// every call; it is now released before returning.
void classes2 () {
    class Persona {
    public:
        int age;
        string name;
        // Greets using the stored name.
        void saludo(){
            std::cout << "Hola " << name << endl;
        }
    };
    // Persona emerson;
    Persona emerson;                  // stack-allocated instance
    Persona* person2 = new Persona(); // heap-allocated instance (declare a person)
    emerson.age = 20;
    emerson.name = "Emerson";
    std::cout << emerson.age << std::endl;
    std::cout << emerson.name << std::endl;
    emerson.saludo();
    person2->name = "Moore";
    std::cout << "person2 name: " << person2->name << std::endl;
    delete person2; // release the heap allocation to avoid a memory leak
}
#endif /* classes2_h */
<file_sep>/BasicCPP/nested_loops.h
//
// nested_loops.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef nested_loops_h
#define nested_loops_h
void nested_loops (){
    // Walk a 3x2 matrix row by row, printing each row on its own line
    // ("12", "34", "56").
    int matrix[3][2] = {
        { 1,2 },
        { 3,4 },
        { 5,6 }
    };
    for (int row = 0; row < 3; row++) {
        for (int column = 0; column < 2; column++) {
            std::cout << matrix[row][column];
        }
        std::cout << std::endl;  // newline after each completed row
    }
}
#endif /* nested_loops_h */
<file_sep>/BasicCPP/while-loop.h
//
// while-loop.h
// BasicCPP
//
// Created by Emerson on 2/01/20.
// Copyright © 2020 Emerson. All rights reserved.
//
#ifndef while_loop_h
#define while_loop_h
// Prints "Number: 0" through "Number: 4" with a pre-checked while loop.
void while_loop () {
    int counter = 0;
    while (counter < 5) {
        std::cout << "Number: " << counter << std::endl;
        ++counter;
    }
}
#endif /* while_loop_h */
| 45ef5e85bd02f7d29be73ee98ebadb776f948afb | [
"C",
"C++"
] | 14 | C++ | emersondevto/100DaysOfCPP | 9e57bbcf16c463ed40cfbb14ac6a6686fd6d2a7d | e7e864352a56039e8a77e478605ca897796615a0 |
refs/heads/master | <file_sep>#ifndef __STEPPER_APP_H
#define __STEPPER_APP_H
#include "stepper_drv.h"
#include "modbus_rtu.h"
/* One-time stepper subsystem setup (called from MX_FREERTOS_Init). */
void StepperInit(void);
/* FreeRTOS task entry point for stepper control. */
void StepperTask(void const * argument);
/* Initialize the arm-machine message structure — presumably resets all
 * fields; confirm against the implementation in stepper_app.c. */
void msg_init(ArmMachine_TypeDef *ArmMachineMsg);
#endif
<file_sep>#include "stepper_msg.h"
ArmMachine_TypeDef ArmMachineMsg; /* storage for the shared arm-machine message instance */
<file_sep>/* USER CODE BEGIN Header */
/**
******************************************************************************
* File Name : freertos.c
* Description : Code for freertos applications
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2019 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under Ultimate Liberty license
* SLA0044, the "License"; You may not use this file except in compliance with
* the License. You may obtain a copy of the License at:
* www.st.com/SLA0044
*
******************************************************************************
*/
/* USER CODE END Header */
/* Includes ------------------------------------------------------------------*/
#include "FreeRTOS.h"
#include "task.h"
#include "main.h"
#include "cmsis_os.h"
#include "stepper_app.h"
#include "iwdg.h"
#include "serial_queue_app.h"
/* Private includes ----------------------------------------------------------*/
/* USER CODE BEGIN Includes */
/* USER CODE END Includes */
/* Private typedef -----------------------------------------------------------*/
/* USER CODE BEGIN PTD */
/* USER CODE END PTD */
/* Private define ------------------------------------------------------------*/
/* USER CODE BEGIN PD */
/* USER CODE END PD */
/* Private macro -------------------------------------------------------------*/
/* USER CODE BEGIN PM */
/* USER CODE END PM */
/* Private variables ---------------------------------------------------------*/
/* USER CODE BEGIN Variables */
/* USER CODE END Variables */
osThreadId defaultTaskHandle;
osThreadId ledTaskHandle;
//osMessageQId serialRecQueueHandle;
//osMessageQId serialSendQueueHandle;
/* Private function prototypes -----------------------------------------------*/
/* USER CODE BEGIN FunctionPrototypes */
/* USER CODE END FunctionPrototypes */
void StartDefaultTask(void const * argument);
void LedTask(void const * argument);
void MX_FREERTOS_Init(void); /* (MISRA C 2004 rule 8.1) */
/* Hook prototypes */
void configureTimerForRunTimeStats(void);
unsigned long getRunTimeCounterValue(void);
/* USER CODE BEGIN 1 */
/* Functions needed when configGENERATE_RUN_TIME_STATS is on */
__weak void configureTimerForRunTimeStats(void)
{
  /* Weak stub: override to start a high-resolution timer when
     configGENERATE_RUN_TIME_STATS is enabled. */
}
__weak unsigned long getRunTimeCounterValue(void)
{
  /* Weak stub: override to return the run-time stats counter value;
     returning 0 effectively disables run-time statistics. */
  return 0;
}
/* USER CODE END 1 */
/**
* @brief FreeRTOS initialization
* @param None
* @retval None
*/
void MX_FREERTOS_Init(void) {
  /* USER CODE BEGIN Init */
  /* USER CODE END Init */
  /* USER CODE BEGIN RTOS_MUTEX */
  /* add mutexes, ... */
  /* USER CODE END RTOS_MUTEX */
  /* USER CODE BEGIN RTOS_SEMAPHORES */
  /* add semaphores, ... */
  /* USER CODE END RTOS_SEMAPHORES */
  /* USER CODE BEGIN RTOS_TIMERS */
  /* start timers, add new ones, ... */
  /* USER CODE END RTOS_TIMERS */
  /* Create the queue(s) */
  /* definition and creation of serialRecQueue (currently disabled) */
  // osMessageQDef(serialRecQueue, 16, uint16_t);
  // serialRecQueueHandle = osMessageCreate(osMessageQ(serialRecQueue), NULL);
  /* definition and creation of serialSendQueue (currently disabled) */
  // osMessageQDef(serialSendQueue, 16, uint16_t);
  // serialSendQueueHandle = osMessageCreate(osMessageQ(serialSendQueue), NULL);
  /* USER CODE BEGIN RTOS_QUEUES */
  /* add queues, ... */
  /* USER CODE END RTOS_QUEUES */
  /* Create the thread(s) */
  /* definition and creation of defaultTask (watchdog refresh loop) */
  osThreadDef(defaultTask, StartDefaultTask, osPriorityNormal, 0, 128);
  defaultTaskHandle = osThreadCreate(osThread(defaultTask), NULL);
  /* definition and creation of ledTask (heartbeat LED) */
  osThreadDef(ledTask, LedTask, osPriorityLow, 0, 128);
  ledTaskHandle = osThreadCreate(osThread(ledTask), NULL);
  /* USER CODE BEGIN RTOS_THREADS */
  /* add threads, ... */
  // printf("---------------------------------------\r\n");
  // printf("Init --> MX_FREERTOS_Init()\r\n");
  /* Application-level task/queue setup for the stepper and serial modules. */
  StepperInit();
  data_queue_task_init();
  /* USER CODE END RTOS_THREADS */
}
/* USER CODE BEGIN Header_StartDefaultTask */
/**
* @brief Function implementing the defaultTask thread.
* @param argument: Not used
* @retval None
*/
/* USER CODE END Header_StartDefaultTask */
/* defaultTask body: refreshes the independent watchdog every ~10 ms so
   the MCU resets if the scheduler stalls. Never returns. */
void StartDefaultTask(void const * argument)
{
/* USER CODE BEGIN StartDefaultTask */
/* Infinite loop */
for(;;)
{
HAL_IWDG_Refresh(&hiwdg);
osDelay(10);
}
/* USER CODE END StartDefaultTask */
}
/* USER CODE BEGIN Header_LedTask */
/**
* @brief Function implementing the ledTask thread.
* @param argument: Not used
* @retval None
*/
/* USER CODE END Header_LedTask */
/* ledTask body: 1 Hz heartbeat — toggles the LED on PA5 every second.
   The PB4..PB7 toggles are disabled. Never returns. */
void LedTask(void const * argument)
{
/* USER CODE BEGIN LedTask */
/* Infinite loop */
for(;;)
{
// printf("Task --> LedTask()\r\n");
HAL_GPIO_TogglePin(GPIOA, GPIO_PIN_5);
// HAL_GPIO_TogglePin(GPIOB, GPIO_PIN_4);
// HAL_GPIO_TogglePin(GPIOB, GPIO_PIN_5);
// HAL_GPIO_TogglePin(GPIOB, GPIO_PIN_6);
// HAL_GPIO_TogglePin(GPIOB, GPIO_PIN_7);
osDelay(1000);
}
/* USER CODE END LedTask */
}
/* Private application code --------------------------------------------------*/
/* USER CODE BEGIN Application */
/* USER CODE END Application */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
<file_sep>#ifndef __STEPPER_DRV_H
#define __STEPPER_DRV_H
/* Includes ------------------------------------------------------------------*/
#include "FreeRTOS.h"
#include "task.h"
#include "main.h"
#include "cmsis_os.h"
#include "stepper_msg.h"
#include "usart.h"
void stepper_test(void);
#endif
<file_sep>/* Includes ------------------------------------------------------------------*/
#include "stepper_drv.h"
/*
* 本文件包含了:
* 1. 步进电机所使用的RS485串口的端口初始化
* This file contains:
* 1. Init of the serial port used by Stepper
*/
/* Placeholder: intended to transmit frames to the stepper driver over
   RS-485; not implemented yet (empty body, safe to call). */
void stepper_send_data(void)
{
}
/* Sends one hard-coded Modbus-RTU "direct data operation" frame to the
   stepper driver on USART2 (blocking transmit).
   Frame layout (big-endian register values):
     slave address:        01h
     function code:        10h  (write multiple registers)
     start register:       0058h
     register count:       0010h (16 registers = 32 data bytes)
     1. operation data No: 00 00 00 00h -> 0
     2. mode:              00 00 00 02h -> relative positioning
     3. position:          00 00 21 34h -> 8500 steps
        (the old comment said "21 24h"; the bytes below are 21 34h = 8500)
     4. speed:             00 00 07 D0h -> 2000 Hz
     5. start/accel slope: 00 00 05 DCh -> 1.5 kHz/s
     6. stop slope:        00 00 05 DCh -> 1.5 kHz/s
     7. running current:   00 00 03 E8h -> 100.0%
     reflect trigger:      00 00 00 01h -> apply all data
     error check:          1C 08h (presumably Modbus CRC-16 of this frame
                           — verify against the drive manual) */
void stepper_test(void)
{
uint8_t test_data[] = {
0x01,
0x10,
0x00, 0x58,
0x00, 0x10,
0x20,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x21, 0x34,
0x00, 0x00, 0x07, 0xd0,
0x00, 0x00, 0x05, 0xdc,
0x00, 0x00, 0x05, 0xdc,
0x00, 0x00, 0x03, 0xe8,
0x00, 0x00, 0x00, 0x01,
0x1c, 0x08
};
/* sizeof keeps the transmit length in sync if the frame is ever edited
   (the old code hard-coded 41 in two places). */
HAL_UART_Transmit(&huart2, test_data, sizeof(test_data), 0xFFFF);
}
<file_sep>#ifndef __SERIAL_QUEUE_APP_H
#define __SERIAL_QUEUE_APP_H
#include "FreeRTOS.h"
#include "task.h"
#include "main.h"
#include "cmsis_os.h"
#include "stepper_msg.h"
#include <string.h>
#include "usart.h"
void data_queue_task_init(void);
//void HAL_UART_RxCpltCallback(UART_HandleTypeDef *huart);
//void RTUSend_Data(ArmMachine_TypeDef ArmMachineMsg);
void ArmMachineSend_Data(ArmMachine_TypeDef ArmMachineMsg);
#endif
<file_sep>/**
******************************************************************************
* File Name : USART.c
* Description : This file provides code for the configuration
* of the USART instances.
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2019 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under Ultimate Liberty license
* SLA0044, the "License"; You may not use this file except in compliance with
* the License. You may obtain a copy of the License at:
* www.st.com/SLA0044
*
******************************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "usart.h"
/* USER CODE BEGIN 0 */
/* USER CODE END 0 */
UART_HandleTypeDef huart1;
UART_HandleTypeDef huart2;
UART_HandleTypeDef huart3;
/* USART1 init function */
/* Configures USART1 as 115200 baud, 8 data bits, no parity, 1 stop bit,
   TX+RX, no hardware flow control. Calls Error_Handler() on failure. */
void MX_USART1_UART_Init(void)
{
huart1.Instance = USART1;
huart1.Init.BaudRate = 115200;
huart1.Init.WordLength = UART_WORDLENGTH_8B;
huart1.Init.StopBits = UART_STOPBITS_1;
huart1.Init.Parity = UART_PARITY_NONE;
huart1.Init.Mode = UART_MODE_TX_RX;
huart1.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart1.Init.OverSampling = UART_OVERSAMPLING_16;
if (HAL_UART_Init(&huart1) != HAL_OK)
{
Error_Handler();
}
}
/* USART2 init function */
/* Configures USART2 (stepper RS-485 / printf retarget port) as
   115200 8N1, TX+RX, no flow control. Calls Error_Handler() on failure. */
void MX_USART2_UART_Init(void)
{
huart2.Instance = USART2;
huart2.Init.BaudRate = 115200;
huart2.Init.WordLength = UART_WORDLENGTH_8B;
huart2.Init.StopBits = UART_STOPBITS_1;
huart2.Init.Parity = UART_PARITY_NONE;
huart2.Init.Mode = UART_MODE_TX_RX;
huart2.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart2.Init.OverSampling = UART_OVERSAMPLING_16;
if (HAL_UART_Init(&huart2) != HAL_OK)
{
Error_Handler();
}
}
/* USART3 init function */
/* Configures USART3 as 115200 8N1, TX+RX, no flow control.
   Calls Error_Handler() on failure. */
void MX_USART3_UART_Init(void)
{
huart3.Instance = USART3;
huart3.Init.BaudRate = 115200;
huart3.Init.WordLength = UART_WORDLENGTH_8B;
huart3.Init.StopBits = UART_STOPBITS_1;
huart3.Init.Parity = UART_PARITY_NONE;
huart3.Init.Mode = UART_MODE_TX_RX;
huart3.Init.HwFlowCtl = UART_HWCONTROL_NONE;
huart3.Init.OverSampling = UART_OVERSAMPLING_16;
if (HAL_UART_Init(&huart3) != HAL_OK)
{
Error_Handler();
}
}
/* HAL MSP hook: enables clocks, configures GPIO alternate functions and
   NVIC lines for whichever USART is being initialised.
   Pin map: USART1=PA9/PA10, USART2=PA2/PA3, USART3=PB10/PB11.
   All three IRQs use preemption priority 12 (FreeRTOS-safe: below
   configMAX_SYSCALL_INTERRUPT_PRIORITY on typical Cube projects —
   confirm against FreeRTOSConfig.h). CubeMX-generated. */
void HAL_UART_MspInit(UART_HandleTypeDef* uartHandle)
{
GPIO_InitTypeDef GPIO_InitStruct = {0};
if(uartHandle->Instance==USART1)
{
/* USER CODE BEGIN USART1_MspInit 0 */
/* USER CODE END USART1_MspInit 0 */
/* USART1 clock enable */
__HAL_RCC_USART1_CLK_ENABLE();
__HAL_RCC_GPIOA_CLK_ENABLE();
/**USART1 GPIO Configuration
PA9 ------> USART1_TX
PA10 ------> USART1_RX
*/
GPIO_InitStruct.Pin = GPIO_PIN_9;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
GPIO_InitStruct.Pin = GPIO_PIN_10;
GPIO_InitStruct.Mode = GPIO_MODE_INPUT;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
/* USART1 interrupt Init */
HAL_NVIC_SetPriority(USART1_IRQn, 12, 0);
HAL_NVIC_EnableIRQ(USART1_IRQn);
/* USER CODE BEGIN USART1_MspInit 1 */
/* USER CODE END USART1_MspInit 1 */
}
else if(uartHandle->Instance==USART2)
{
/* USER CODE BEGIN USART2_MspInit 0 */
/* USER CODE END USART2_MspInit 0 */
/* USART2 clock enable */
__HAL_RCC_USART2_CLK_ENABLE();
__HAL_RCC_GPIOA_CLK_ENABLE();
/**USART2 GPIO Configuration
PA2 ------> USART2_TX
PA3 ------> USART2_RX
*/
GPIO_InitStruct.Pin = GPIO_PIN_2;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
GPIO_InitStruct.Pin = GPIO_PIN_3;
GPIO_InitStruct.Mode = GPIO_MODE_INPUT;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
/* USART2 interrupt Init */
HAL_NVIC_SetPriority(USART2_IRQn, 12, 0);
HAL_NVIC_EnableIRQ(USART2_IRQn);
/* USER CODE BEGIN USART2_MspInit 1 */
/* USER CODE END USART2_MspInit 1 */
}
else if(uartHandle->Instance==USART3)
{
/* USER CODE BEGIN USART3_MspInit 0 */
/* USER CODE END USART3_MspInit 0 */
/* USART3 clock enable */
__HAL_RCC_USART3_CLK_ENABLE();
__HAL_RCC_GPIOB_CLK_ENABLE();
/**USART3 GPIO Configuration
PB10 ------> USART3_TX
PB11 ------> USART3_RX
*/
GPIO_InitStruct.Pin = GPIO_PIN_10;
GPIO_InitStruct.Mode = GPIO_MODE_AF_PP;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_HIGH;
HAL_GPIO_Init(GPIOB, &GPIO_InitStruct);
GPIO_InitStruct.Pin = GPIO_PIN_11;
GPIO_InitStruct.Mode = GPIO_MODE_INPUT;
GPIO_InitStruct.Pull = GPIO_NOPULL;
HAL_GPIO_Init(GPIOB, &GPIO_InitStruct);
/* USART3 interrupt Init */
HAL_NVIC_SetPriority(USART3_IRQn, 12, 0);
HAL_NVIC_EnableIRQ(USART3_IRQn);
/* USER CODE BEGIN USART3_MspInit 1 */
/* USER CODE END USART3_MspInit 1 */
}
}
/* HAL MSP hook: reverses HAL_UART_MspInit — disables the peripheral
   clock, releases the TX/RX pins and masks the NVIC line for the given
   USART. (GPIO port clocks are intentionally left running, as other
   peripherals share them.) CubeMX-generated. */
void HAL_UART_MspDeInit(UART_HandleTypeDef* uartHandle)
{
if(uartHandle->Instance==USART1)
{
/* USER CODE BEGIN USART1_MspDeInit 0 */
/* USER CODE END USART1_MspDeInit 0 */
/* Peripheral clock disable */
__HAL_RCC_USART1_CLK_DISABLE();
/**USART1 GPIO Configuration
PA9 ------> USART1_TX
PA10 ------> USART1_RX
*/
HAL_GPIO_DeInit(GPIOA, GPIO_PIN_9|GPIO_PIN_10);
/* USART1 interrupt Deinit */
HAL_NVIC_DisableIRQ(USART1_IRQn);
/* USER CODE BEGIN USART1_MspDeInit 1 */
/* USER CODE END USART1_MspDeInit 1 */
}
else if(uartHandle->Instance==USART2)
{
/* USER CODE BEGIN USART2_MspDeInit 0 */
/* USER CODE END USART2_MspDeInit 0 */
/* Peripheral clock disable */
__HAL_RCC_USART2_CLK_DISABLE();
/**USART2 GPIO Configuration
PA2 ------> USART2_TX
PA3 ------> USART2_RX
*/
HAL_GPIO_DeInit(GPIOA, GPIO_PIN_2|GPIO_PIN_3);
/* USART2 interrupt Deinit */
HAL_NVIC_DisableIRQ(USART2_IRQn);
/* USER CODE BEGIN USART2_MspDeInit 1 */
/* USER CODE END USART2_MspDeInit 1 */
}
else if(uartHandle->Instance==USART3)
{
/* USER CODE BEGIN USART3_MspDeInit 0 */
/* USER CODE END USART3_MspDeInit 0 */
/* Peripheral clock disable */
__HAL_RCC_USART3_CLK_DISABLE();
/**USART3 GPIO Configuration
PB10 ------> USART3_TX
PB11 ------> USART3_RX
*/
HAL_GPIO_DeInit(GPIOB, GPIO_PIN_10|GPIO_PIN_11);
/* USART3 interrupt Deinit */
HAL_NVIC_DisableIRQ(USART3_IRQn);
/* USER CODE BEGIN USART3_MspDeInit 1 */
/* USER CODE END USART3_MspDeInit 1 */
}
}
/* USER CODE BEGIN 1 */
/* Retargets printf() to a UART: GCC's newlib calls __io_putchar, other
   toolchains call fputc. Blocking, one byte at a time. */
#ifdef __GNUC__
#define PUTCHAR_PROTOTYPE int __io_putchar(int ch)
#else
#define PUTCHAR_PROTOTYPE int fputc(int ch, FILE *f)
#endif
PUTCHAR_PROTOTYPE
{
/* To route stdout to a different port, change huart2 below. */
HAL_UART_Transmit(&huart2 , (uint8_t *)&ch, 1 , 0xffff);
return ch;
}
//#ifdef __GNUC__
//#define PUTCHAR_PROTOTYPE int __io_putchar(int ch)
//#else
//#define PUTCHAR_PROTOTYPE int fputc(int ch, FILE *f)
//#endif
//PUTCHAR_PROTOTYPE
//{
// //具体哪个串口可以更改USART1为其它串口
// while ((USART2->SR & 0X40) == 0); //循环发送,直到发送完毕
// USART2->DR = (uint8_t) ch;
// return ch;
//}
/*
void UART_Nvidiacom_Configuration(void)
{
GPIO_InitTypeDef GPIO_InitStructure;
UART_InitTypeDef USART_InitStructure;
NVIC_InitTypeDef NVIC_InitStructure;
// 时钟配置
RCC_APB2PeriphClockCmd(NVIDIA_COM_TX_GPIO_CLK | NVIDIA_COM_RX_GPIO_CLK , ENABLE);
if(USART1 == NVIDIA_COM)
RCC_APB2PeriphClockCmd(NVIDIA_COM_CLK, ENABLE);
else
RCC_APB1PeriphClockCmd(NVIDIA_COM_CLK, ENABLE);
// 引脚配置
GPIO_InitStructure.GPIO_Pin = NVIDIA_COM_TX_PIN; //UART Tx
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP;
GPIO_Init(NVIDIA_COM_TX_GPIO_PORT, &GPIO_InitStructure);
GPIO_InitStructure.GPIO_Pin = NVIDIA_COM_RX_PIN; //UART Rx
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IN_FLOATING;
GPIO_Init(NVIDIA_COM_RX_GPIO_PORT, &GPIO_InitStructure);
// NVIC配置
NVIC_InitStructure.NVIC_IRQChannel = NVIDIA_COM_IRQn;
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = NVIDIA_COM_Priority;
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
// UART配置
USART_InitStructure.USART_BaudRate = NVIDIA_COM_BaudRate; //波特率
USART_InitStructure.USART_WordLength = USART_WordLength_8b; //传输位数
USART_InitStructure.USART_StopBits = USART_StopBits_1; //停止位
USART_InitStructure.USART_Parity = USART_Parity_No ; //校验位
USART_InitStructure.USART_HardwareFlowControl = USART_HardwareFlowControl_None;
USART_InitStructure.USART_Mode = USART_Mode_Rx | USART_Mode_Tx; //收发功能
USART_Init(NVIDIA_COM, &USART_InitStructure);
USART_ClearFlag(NVIDIA_COM, USART_FLAG_RXNE | USART_FLAG_TC);
USART_ITConfig(NVIDIA_COM, USART_IT_RXNE, ENABLE); //接收中断
USART_Cmd(NVIDIA_COM, ENABLE); //使能UART
}
*/
/* USER CODE END 1 */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
| a4024c0aaac1798ee15c074129e06c9de46476fc | [
"C"
] | 7 | C | felixwf/New-zealand-arm-machine | c7bb685d08b7974314666665c5109863d5181304 | 1182ad75ba0164d6aff78a0228ec1f7d530a9705 |
refs/heads/master | <file_sep>package musen.woocommerce;
import com.mashape.unirest.http.exceptions.UnirestException;
import musen.woocommerce.httpclient.HttpClient;
import musen.woocommerce.httpclient.Options;
import java.io.UnsupportedEncodingException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
/**
* Created by Muluneh on 8/15/2016.
*/
/**
 * Thin convenience wrapper around {@link HttpClient}, exposing the four
 * HTTP verbs used against the WooCommerce REST API. Each call signs the
 * request via the underlying client and returns the raw response body.
 */
public class Client {

    /** Library version string. */
    public static final String VERSION = "1.0.0";

    /** Underlying OAuth-signing HTTP client (public for existing callers). */
    public HttpClient http;

    public Client(String url, String consumerKey, String consumerSecret, Options options) {
        http = new HttpClient(url, consumerKey, consumerSecret, options);
    }

    /** Issues a POST request carrying {@code data} as the body. */
    public String post(String endpoint, String data) throws UnirestException, NoSuchAlgorithmException, InvalidKeyException, UnsupportedEncodingException {
        return http.request(endpoint, "POST", data, null);
    }

    /** Issues a GET request with no body. */
    public String get(String endpoint) throws UnirestException, NoSuchAlgorithmException, InvalidKeyException, UnsupportedEncodingException {
        return http.request(endpoint, "GET", null, null);
    }

    /** Issues a PUT request carrying {@code data} as the body. */
    public String put(String endpoint, String data) throws UnirestException, NoSuchAlgorithmException, InvalidKeyException, UnsupportedEncodingException {
        return http.request(endpoint, "PUT", data, null);
    }

    /** Issues a DELETE request with no body. */
    public String delete(String endpoint) throws UnirestException, NoSuchAlgorithmException, InvalidKeyException, UnsupportedEncodingException {
        return http.request(endpoint, "DELETE", null, null);
    }
}
| 77db6eaf34081ec09ca6efea318d2385ceac49fd | [
"Java"
] | 1 | Java | quickeee/wc-api-java | 9687d9e3f93fde8954cc2dac167362d96e5ac0c7 | 69dcdaf763e8e84f2ea0f68e9fd56d4960c8e767 |
refs/heads/master | <file_sep>package main
import "fmt"
// main: ad-hoc harness — runs one insertion pass over a sample array,
// printing each intermediate state.
func main() {
//values1 := []int32{2, 3, 4, 5, 1}
values1 := []int32{2, 3, 4, 5, 6, 7, 8, 9, 10, 1}
insertionSort1(10, values1)
}
// insertionSort1 inserts the last element of arr[:n] into the already
// sorted prefix arr[:n-1], printing the whole array after every shift and
// after the final placement (HackerRank "Insertion Sort - Part 1").
//
// Rewrite: the old version duplicated the "place the value and print"
// step across three nearly identical branches; a single guarded shift
// loop produces the same output sequence. It also prints the final state
// for n == 1 (the old code printed nothing then), matching the exercise's
// requirement that the final array is always printed.
func insertionSort1(n int32, arr []int32) {
	value := arr[n-1]
	j := n - 2
	// Shift every element greater than value one slot right, printing
	// the intermediate array after each shift.
	for ; j >= 0 && arr[j] > value; j-- {
		arr[j+1] = arr[j]
		printArray(arr)
	}
	arr[j+1] = value
	printArray(arr)
}

// printArray prints the elements of arr space-separated on one line.
func printArray(arr []int32) {
	for i, v := range arr {
		if i > 0 {
			fmt.Print(" ")
		}
		fmt.Print(v)
	}
	fmt.Println()
}
<file_sep>package main
import (
	"fmt"
	"math/big"
	"sort"
)
// main: ad-hoc harness — sorts a sample list of decimal strings that may
// exceed uint64 (hence math/big) and prints the result.
func main() {
//31415926535897932384626433832795
//18446744073709551615
values1 := []string{"31415926535897932384626433832795", "1", "3", "10", "3", "5"}
//values1 := []int32{-20, -3916237, -357920, -3620601, 7374819, -7330761, 30, 6246457, -6461594, 266854}
fmt.Print(bigSorting(values1))
}
// Complete the bigSorting function below.
// bigSorting sorts decimal integer strings of arbitrary length into
// ascending numeric order (HackerRank "Big Sorting").
//
// Fixes: the old implementation inserted every value into an unbalanced
// binary search tree, which degenerates to O(n^2) comparisons and deep
// recursion on (nearly) sorted input, and it dereferenced the nil pointer
// returned by big.Int.SetString on unparseable input, panicking. This
// version parses once, sorts with sort.Slice in O(n log n), and treats
// unparseable strings as 0. Each value is re-rendered with String(),
// preserving the old normalisation (e.g. leading zeros are dropped).
func bigSorting(unsorted []string) []string {
	nums := make([]*big.Int, len(unsorted))
	for i, s := range unsorted {
		n, ok := new(big.Int).SetString(s, 10)
		if !ok {
			n = new(big.Int) // fall back to 0 instead of panicking
		}
		nums[i] = n
	}
	sort.Slice(nums, func(i, j int) bool { return nums[i].Cmp(nums[j]) < 0 })
	result := make([]string, len(nums))
	for i, n := range nums {
		result[i] = n.String()
	}
	return result
}
// Nodo is an unbalanced binary-search-tree node keyed on a big.Int.
// NOTE(review): no rebalancing — inserting sorted data produces a
// degenerate O(n)-deep chain.
type Nodo struct {
value big.Int
left *Nodo
right *Nodo
}
// add inserts value below n: strictly smaller values go left, equal or
// larger values go right (duplicates are kept).
func (n *Nodo) add(value big.Int) {
if n.value.Cmp(&value) == 1 {
if n.left != nil {
n.left.add(value)
} else {
n.left = &Nodo{
value: value,
}
}
} else {
if n.right != nil {
n.right.add(value)
} else {
n.right = &Nodo{
value: value,
}
}
}
}
// printInOrder appends the decimal rendering of every value in the
// subtree rooted at n to result, in ascending (in-order) sequence.
func (n *Nodo) printInOrder(result *[]string) {
if n.left != nil {
n.left.printInOrder(result)
}
*result = append(*result, n.value.String())
if n.right != nil {
n.right.printInOrder(result)
}
}
<file_sep>package main
import (
"fmt"
"sort"
)
// main: ad-hoc harness — prints the minimum absolute pairwise difference
// of a sample array ("Minima diferencia" = Spanish for "minimum difference").
func main() {
array := []int32{-59, -36, -13, 1, -53, -92, -2, -96, -54, 75}
//array := []int32{3, -7, 0}
difference := minimumAbsoluteDifference(array)
fmt.Print("Minima diferencia ", difference)
}
// minimumAbsoluteDifference returns the smallest |a-b| over all pairs of
// elements of arr. arr must have at least two elements; it is sorted in
// place (the original also sorted the caller's slice).
//
// Fixes: the old version sliced with the HARD-CODED bound arr[i+1:10],
// which panics for inputs shorter than 10 elements and ignores elements
// past index 9 for longer ones, and it compared every pair in O(n^2).
// After sorting, the minimum absolute difference is always between some
// adjacent pair, so a single linear scan suffices.
func minimumAbsoluteDifference(arr []int32) int32 {
	sort.Slice(arr, func(i, j int) bool { return arr[i] < arr[j] })
	min := arr[1] - arr[0] // non-negative: arr is sorted
	for i := 1; i < len(arr)-1; i++ {
		if d := arr[i+1] - arr[i]; d < min {
			min = d
		}
	}
	return min
}
// binary lowers *min to the smallest |valorPosition - arr[j]| found in arr.
// NOTE(review): despite the name this is a plain linear scan, not a binary
// search; consider renaming.
func binary(arr []int32, valorPosition int32, min *int32) {
for j := 0; j < len(arr); j++ {
rest := valorPosition - arr[j]
if rest < 0 {
rest = rest * -1
}
if *min > rest {
*min = rest
}
}
}
func main() {
values1 := []int32{40,50,60}
values2 := []int32{5,8,12}
getMoneySpent := getMoneySpent(values1, values2, 60)
println(getMoneySpent)
}
func getMoneySpent(keyboards []int32, drives []int32, b int32) (result int32) {
for i := 0; i < len(keyboards); i++ {
for j := 0; j < len(drives); j++ {
sum := keyboards[i] + drives[j]
if sum <= b && sum > result {
result = sum
}
}
}
if result == 0 {
return -1
}
return result
}
<file_sep>package main
import (
"fmt"
"sort"
"strconv"
"strings"
)
func main() {
//values := []int32{99, 40, 53, 31, 92, 68, 17, 70, 100, 16, 26, 82, 72, 89, 19, 14, 56, 7, 26, 69, 8, 44, 51, 88, 24, 34, 40, 70, 90, 68, 95, 95, 28, 39, 71, 75, 31, 17, 96, 60, 98, 98, 33, 35, 68, 84, 17, 11, 76, 17, 45, 61, 72, 76, 18, 67, 55, 81, 57, 43, 45, 96, 58, 49, 4, 61, 38, 66, 82, 16, 44, 100, 50, 19, 82, 15, 72, 5, 81, 97, 94, 70, 7, 92, 75, 55, 1, 87, 4, 9, 92, 35, 83, 20, 53, 8, 90, 2, 92, 82}
//plusMinus(values)
//staircase(66)
//miniMaxSum(values)
//candles := birthdayCakeCandles(values)
//students := gradingStudents(values)
//values2 := []int32{1, 3, 2, 6, 1, 2}
//pairs := divisibleSumPairs(100, 32, values)
//pairs2 := divisibleSumPairs(6, 3, values2)
//fmt.Println(pairs)
//values := []int32{10, 5, 20, 20, 4 ,5 ,2 ,25, 1}
//values := []int32{3, 4, 21, 36, 10, 28, 35, 5, 24, 42}
//records := breakingRecords(values)
//fmt.Print(records)
//fmt.Print(catAndMouse(1, 2, 3))
fmt.Println(timeConversion("12:05:45AM"))
}
/*
Given an array of integers, calculate the ratios of its elements that are positive, negative, and zero.
Print the decimal value of each fraction on a new line with places after the decimal.
Note: This challenge introduces precision problems. The test cases are scaled to six decimal places,
though answers with absolute error of up to are acceptable.
*/
// Complete the plusMinus function below.
// plusMinus prints — one per line, six decimal places — the fractions of
// elements of arr that are positive, negative, and zero.
//
// Fix: the format verb was "%06f" (zero-padded to a minimum WIDTH of 6,
// default precision), not "%.6f" (six digits of PRECISION) as the
// exercise specifies; the two only coincide by accident for ratios in
// [0,1]. Ratios are now computed in float64 for full precision.
func plusMinus(arr []int32) {
	var positive, negative, zero int
	for _, v := range arr {
		switch {
		case v > 0:
			positive++
		case v < 0:
			negative++
		default:
			zero++
		}
	}
	total := float64(len(arr))
	fmt.Printf("%.6f\n", float64(positive)/total)
	fmt.Printf("%.6f\n", float64(negative)/total)
	fmt.Printf("%.6f\n", float64(zero)/total)
}
/*
Staircase detail
This is a staircase of size :
#
##
###
####
Its base and height are both equal to . It is drawn using # symbols and spaces. The last line is not preceded by any spaces.
Write a program that prints a staircase of size .
Function Description
Complete the staircase function in the editor below.
staircase has the following parameter(s):
int n: an integer
Print
Print a staircase as described above.
Input Format
A single integer,n , denoting the size of the staircase.
*/
// staircase prints a right-aligned staircase of height n built from '#'
// characters; the last line is n '#'s with no leading spaces.
//
// Simplification: replaces the dynamically built format string and the
// '#' string accumulated (and reset) across iterations with
// strings.Repeat, making every line's construction independent. Output
// is identical.
func staircase(n int32) {
	size := int(n)
	for i := 1; i <= size; i++ {
		fmt.Printf("%s%s\n", strings.Repeat(" ", size-i), strings.Repeat("#", i))
	}
}
// Complete the miniMaxSum function below.
// miniMaxSum prints, space-separated, the minimum and maximum sums
// obtainable by adding all but one element of arr.
//
// Fixes: the old version recomputed every leave-one-out sum in O(n^2)
// and seeded the minimum with the sentinel "min == 0", which breaks as
// soon as a genuine sum is zero or negative. Computing the total once
// and subtracting the largest/smallest element is O(n) and sentinel-free.
func miniMaxSum(arr []int32) {
	total := int64(arr[0])
	minEl, maxEl := int64(arr[0]), int64(arr[0])
	for _, v := range arr[1:] {
		x := int64(v)
		total += x
		if x < minEl {
			minEl = x
		}
		if x > maxEl {
			maxEl = x
		}
	}
	fmt.Printf("%d %d", total-maxEl, total-minEl)
}
/*
*
* The function is expected to return an INTEGER.
* The function accepts INTEGER_ARRAY candles as parameter.
*/
// birthdayCakeCandles returns how many candles share the tallest height.
// candles must be non-empty.
//
// Fixes: the old version sorted the CALLER'S slice in place (a surprising
// side effect) just to find the maximum, then allocated a filtered copy
// via the Filter helper; one O(n) pass needs neither.
func birthdayCakeCandles(candles []int32) int32 {
	tallest := candles[0]
	var count int32
	for _, c := range candles {
		switch {
		case c > tallest:
			tallest = c
			count = 1
		case c == tallest:
			count++
		}
	}
	return count
}
// Filter returns the elements of arr satisfying cond, widened to int.
// NOTE(review): takes []int32 but returns []int — callers must convert
// back if they need int32.
func Filter(arr []int32, cond func(int) bool) []int {
var result []int
for i := range arr {
if cond(int(arr[i])) {
result = append(result, int(arr[i]))
}
}
return result
}
/*
*
* The function is expected to return an INTEGER_ARRAY.
* The function accepts INTEGER_ARRAY grades as parameter.
*/
// gradingStudents applies the HackerRank rounding policy: grades below 38
// are failing and left alone; otherwise a grade is rounded up to the next
// multiple of five when it is fewer than 3 points away, and kept as-is
// otherwise. Returns the rounded grades in input order.
func gradingStudents(grades []int32) (result []int32) {
	rounded := make([]int32, 0, len(grades))
	for _, g := range grades {
		next := ((g + 4) / 5) * 5 // smallest multiple of 5 >= g
		if g >= 38 && next-g < 3 {
			g = next
		}
		rounded = append(rounded, g)
	}
	return rounded
}
// simpleArraySum returns the sum of all elements of ar.
func simpleArraySum(ar []int64) int64 {
	var total int64
	for _, v := range ar {
		total += v
	}
	return total
}
// Complete the compareTriplets function below.
// compareTriplets awards one point per category to whichever of Alice (a)
// or Bob (b) has the strictly higher rating; ties score nothing.
// Returns [aliceScore, bobScore].
func compareTriplets(a []int32, b []int32) []int32 {
	scores := []int32{0, 0}
	for i := range a {
		switch {
		case a[i] > b[i]:
			scores[0]++
		case b[i] > a[i]:
			scores[1]++
		}
	}
	return scores
}
// diagonalDifference returns the absolute difference between the sums of
// the primary and secondary diagonals of the square matrix arr.
func diagonalDifference(arr [][]int32) int32 {
	n := len(arr)
	var primary, secondary int32
	for i := 0; i < n; i++ {
		primary += arr[i][i]
		secondary += arr[i][n-1-i]
	}
	diff := secondary - primary
	if diff < 0 {
		diff = -diff
	}
	return diff
}
// Complete the divisibleSumPairs function below.
// divisibleSumPairs counts the unordered index pairs (i, j), i < j, whose
// element sum is divisible by k. n is the length of ar (parameter kept
// for compatibility with the HackerRank harness).
//
// Cleanup: starting the inner loop at i+1 visits exactly the same pairs
// as before while removing the redundant i == j guard and halving the
// iterations.
func divisibleSumPairs(n int32, k int32, ar []int32) (r int32) {
	for i := 0; i < int(n); i++ {
		for j := i + 1; j < int(n); j++ {
			if (ar[i]+ar[j])%k == 0 {
				r++
			}
		}
	}
	return r
}
// Complete the breakingRecords function below.
// breakingRecords returns [timesMaxRecordBroken, timesMinRecordBroken]
// for a season of game scores; the first game sets both records without
// counting as a break.
//
// Fix: the old version initialised the running MAXIMUM to the literal 0
// rather than scores[0], so for all-negative scores the maximum record
// could never register as broken (the minimum was seeded correctly).
func breakingRecords(scores []int32) []int32 {
	max, min := scores[0], scores[0]
	var maxBreaks, minBreaks int32
	for _, s := range scores[1:] {
		if s > max {
			max = s
			maxBreaks++
		}
		if s < min {
			min = s
			minBreaks++
		}
	}
	return []int32{maxBreaks, minBreaks}
}
// Complete the catAndMouse function below.
// catAndMouse reports which cat reaches the mouse first: Cat A starts at
// x, Cat B at y, the mouse sits at z, and both cats move at the same
// speed. Equal distances let the mouse escape ("Mouse C").
func catAndMouse(x int32, y int32, z int32) string {
	distA, distB := x-z, y-z
	if distA < 0 {
		distA = -distA
	}
	if distB < 0 {
		distB = -distB
	}
	switch {
	case distA < distB:
		return "Cat A"
	case distB < distA:
		return "Cat B"
	default:
		return "Mouse C"
	}
}
// timeConversion converts a 12-hour clock string "hh:mm:ssAM"/"hh:mm:ssPM"
// to its 24-hour form "HH:mm:ss". Midnight 12:xx:xxAM maps to 00, and
// noon 12:xx:xxPM stays 12.
func timeConversion(s string) string {
	parts := strings.Split(s, ":")
	hour, _ := strconv.Atoi(parts[0])
	isPM := strings.Contains(parts[2], "PM")
	switch {
	case isPM && hour < 12:
		parts[0] = strconv.Itoa(hour + 12)
	case !isPM && hour == 12:
		parts[0] = "00"
	}
	parts[2] = parts[2][:2] // strip the AM/PM suffix from the seconds field
	return strings.Join(parts, ":")
}
<file_sep>package main
import (
"fmt"
"math"
"sort"
)
// main: ad-hoc harness for "Marc's Cakewalk".
// NOTE(review): the "Minima diferencia" label is copy-pasted from another
// exercise; the value printed is actually the minimum miles.
func main() {
array := []int32{5, 10, 7}
//array := []int32{3, -7, 0}
difference := marcsCakewalk(array)
fmt.Print("Minima diferencia ", difference)
}
// marcsCakewalk returns the minimum total miles Marc must walk: eating
// cupcakes in descending calorie order pairs the largest calorie counts
// with the smallest 2^i multipliers. Sorts calorie in place.
func marcsCakewalk(calorie []int32) (result int64) {
	sort.Slice(calorie, func(a, b int) bool { return calorie[a] > calorie[b] })
	var miles int64
	for idx, c := range calorie {
		multiplier := int64(math.Pow(2, float64(idx)))
		miles += multiplier * int64(c)
	}
	return miles
}
<file_sep>package main
import (
	"fmt"
	"sort"
)
// main: ad-hoc harness — prints the median of a sample array.
func main() {
values1 := []int32{0, 1, 2, 4, 6, 5, 3}
fmt.Println(findMedian(values1))
}
// Complete the findMedian function below.
// findMedian returns the median element of arr (odd length, per the
// HackerRank constraints).
//
// Fixes: the old version pushed every element into an unbalanced binary
// search tree (a Nodo type this file does not even define), which is
// O(n^2) with unbounded recursion on sorted input. Sorting a copy is
// O(n log n), self-contained, and leaves the caller's slice untouched.
func findMedian(arr []int32) int32 {
	sorted := append([]int32(nil), arr...)
	sort.Slice(sorted, func(i, j int) bool { return sorted[i] < sorted[j] })
	return sorted[len(sorted)/2]
}
<file_sep>package main
import "fmt"
func main() {
values := []int32{1}
//values1 := []int32{1, 2, 3}
//values2 := []int32{1, 2, 3, 3}
//values1 := []int32{1, 1, 4, 1, 1}
//values2 := []int32{2 ,0 ,0 ,0}
//values3 := []int32{0 ,0 ,2 ,0}
//values := []int32{75,26,45,72,81,47,29,97,2,75,25,82,84,17,56,32,2,28,37,57,39,18,11,79,6,40,68,68,16,40,63,93,49,91,10,55,68,31,80,57,18,34,28,76,55,21,80,22,45,11,67,67,74,91,4,35,34,65,80,21,95,1,52,25,31,2,53,96,22,89,99,7,66,32,2,68,33,75,92,84,10,94,28,54,12,9,80,43,21,51,92,20,97,7,25,67,17,38,100,86}
//values1 := []int32{83,20,6,81,58,59,53,2,54,62,25,35,79,64,27,49,32,95,100,20,58,39,92,30,67,89,58,81,100,66,73,29,75,81,70,55,18,28,7,35,98,52,30,11,69,48,84,54,13,14,15,86,34,82,92,26,8,53,62,57,50,31,61,85,88,5,80,64,90,52,47,43,40,93,69,70,16,43 ,7,25,99,12,63,99,71,76,55,17,90,43,27,20,42,84,39,96,75,1,58,49}
//values2 := []int32{185, 170, 208, 216, 236, 155, 88, 206, 211, 209, 84, 99, 130, 245, 232, 125, 127, 232, 187, 140, 92, 213, 221, 231, 129, 197, 221, 168, 95, 186, 136, 180, 94, 125, 150, 244, 249, 248, 140, 207, 125, 84, 123, 85, 100, 175, 67, 116, 107, 143, 158, 75, 165, 172, 115, 134, 175, 123, 115, 123, 159, 181, 63, 176, 158, 109, 67, 154, 126, 141, 111, 95, 138, 161, 71, 118, 151, 189, 126, 109, 194, 176, 159, 151, 189, 71, 95, 133, 154, 157, 109, 78, 101, 174, 169, 152, 94, 193, 176, 137}
fmt.Println(balancedSums(values))
//fmt.Println(balancedSums(values1))
//fmt.Println(balancedSums(values2))
}
// Complete the balancedSums function below.
// balancedSums reports "YES" when some index i splits arr so that the sum
// of elements strictly left of i equals the sum strictly right of i, and
// "NO" otherwise (HackerRank "Sherlock and Array"). A single element (or
// an all-boundary split) trivially balances.
//
// Rewrite: the old version precomputed the right-hand sum with a
// recursive divide-and-conquer helper (divideMSS/sumArray) and patched it
// up inside a branch-heavy loop; a running prefix sum against the total
// is one pass and far easier to verify. Sums accumulate in int64 so large
// inputs cannot overflow the int32 arithmetic used before.
func balancedSums(arr []int32) string {
	var total int64
	for _, v := range arr {
		total += int64(v)
	}
	var left int64
	for _, v := range arr {
		// right-hand sum of the split at this element:
		if left == total-left-int64(v) {
			return "YES"
		}
		left += int64(v)
	}
	return "NO"
}
// divideMSS returns the sum of all elements of arreglo (0 for an empty
// slice) by delegating to the recursive sumArray helper.
// NOTE(review): a plain loop would do the same work without recursion.
func divideMSS(arreglo []int32) int32 {
if len(arreglo) > 0 {
var sum int32
return sumArray(arreglo, 0, len(arreglo)-1, &sum)
}
return 0
}
// sumArray accumulates arreglo[inicio..fin] into *sum by recursively
// splitting the range in half, returning the accumulated total.
// The len(arreglo)==2 branch only fires on the top-level call for a
// two-element slice (recursive calls pass the full slice with narrowed
// indices and bottom out at inicio == fin).
func sumArray(arreglo []int32, inicio, fin int, sum *int32) int32 {
if inicio == fin {
*sum += arreglo[inicio]
return *sum
} else if len(arreglo) == 2 {
*sum += arreglo[inicio] + arreglo[fin]
return *sum
}
var half = (inicio + fin) / 2
sumArray(arreglo, inicio, half, sum)
sumArray(arreglo, half+1, fin, sum)
return *sum
}
import "fmt"
func main() {
slice := []int32{63, 54, 17, 78, 43, 70, 32, 97, 16, 94, 74, 18, 60, 61, 35, 83, 13, 56, 75, 52, 70, 12, 24, 37, 17, 0, 16, 64, 34, 81, 82, 24, 69,
2, 30, 61, 83, 37, 97, 16, 70, 53, 0, 61, 12, 17, 97, 67, 33, 30, 49, 70, 11, 40, 67, 94, 84, 60, 35, 58, 19, 81, 16, 14, 68, 46, 42, 81, 75, 87, 13,
84, 33, 34, 14, 96, 7, 59, 17, 98, 79, 47, 71, 75, 8, 27, 73, 66, 64, 12, 29, 35, 80, 78, 80, 6, 5, 24, 49, 82}
/*slice := []int32{63,25,73,1,98,73,56,84,86,57,16,83,8,25,81,56,9,53,98,67,99,12,83,89,80,91,39,86,76,85,74,39,25,
90,59,10,94,32,44,3,89,30,27,79,46,96,27,32,18,21,92,69,81,40,40,34,68,78,24,87,42,69,23,41,78,22,6,90,99,89,
50,30,20,1,43,3,70,95,33,46,44,9,69,48,33,60,65,16,82,67,61,32,21,79,75,75,13,87,70,33}*/
//fmt.Println(countingSort(slice)) // [0 1 2 2 5 7]*/
//slice := []int32{1, 1, 3, 2, 1}
fmt.Println(countingSort(slice)) // [0 1 2 2 5 7]
//slice1 := []int{1, 2, 3, 6, 4, 5, 4, 6, 7, 8}
//fmt.Println(countingSort(slice1)) // [0 1 2 3 4 4 5 6 6 7 8]
//fmt.Println(slice1) // [1 2 3 6 4 5 4 6 7 8]
//slice2 := []int{20, 370, 45, 75, 410, 1802, 24, 2, 66}
//fmt.Println(countingSort(slice2))
// [0 2 20 24 45 66 75 370 410 1802]
//fmt.Println(slice)
// [20 370 45 75 410 1802 24 2 66]
}
// countingSort returns the frequency table of arr as a slice: index v
// holds the number of occurrences of value v, for v in [0, max(arr)].
// arr must be non-empty and contain only non-negative values.
//
// Rewrite: counting directly into the slice removes the intermediate
// hash map plus the extra O(max) copy loop the old version used, and
// makes the function self-contained (no helper dependencies).
func countingSort(arr []int32) []int32 {
	max := arr[0]
	for _, v := range arr {
		if v > max {
			max = v
		}
	}
	count := make([]int32, max+1)
	for _, v := range arr {
		count[v]++
	}
	return count
}
// CountIntArray counts the element frequencies.
// CountIntArray returns a map from each distinct value in arr to the
// number of times it occurs.
func CountIntArray(arr []int32) map[int32]int32 {
	counts := make(map[int32]int32)
	for _, v := range arr {
		counts[v]++ // missing keys start at the zero value
	}
	return counts
}
// Return the maximum value in an integer array.
// GetMaxIntArray returns the largest value in arr (arr must be non-empty).
func GetMaxIntArray(arr []int32) int32 {
	max := arr[0]
	for _, v := range arr[1:] {
		if v > max {
			max = v
		}
	}
	return max
}
<file_sep>package main
import (
"fmt"
"sort"
)
//clave valor tabla hash
// main: ad-hoc harness for "Missing Numbers" — prints the values whose
// frequencies differ between the two sample lists.
func main() {
//values1 := []int32{203, 204, 205, 206, 207, 208, 203, 204, 205, 206}
//values2 := []int32{203, 204, 204, 205, 206, 207, 205, 208, 203, 206, 205, 206, 204}
values1 := []int32{11, 4, 11, 7, 13, 4, 12, 11, 10, 14}
values2 := []int32{11, 4, 11, 7, 3, 7, 10, 13, 4, 8, 12, 11, 10, 14, 12}
numbers := missingNumbers(values1, values2)
fmt.Println(numbers)
}
// missingNumbers returns, in ascending order, every value whose frequency
// differs between arr and brr (HackerRank "Missing Numbers").
//
// Fix: the old version built two frequency tables and then, depending on
// which input slice was LONGER, iterated the keys of only one of them —
// so a value appearing solely in the other input was silently skipped.
// A single signed difference table covers the keys of both inputs.
func missingNumbers(arr []int32, brr []int32) (result []int32) {
	diff := make(map[int32]int)
	for _, v := range brr {
		diff[v]++
	}
	for _, v := range arr {
		diff[v]--
	}
	for v, c := range diff {
		if c != 0 {
			result = append(result, v)
		}
	}
	sort.Slice(result, func(i, j int) bool { return result[i] < result[j] })
	return result
}
<file_sep>package main
/*
Sunny and Johnny like to pool their money and go to the ice cream parlor.
Johnny never buys the same flavor that Sunny does. The only other rule they have is that they spend all of their money.
Given a list of prices for the flavors of ice cream, select the two that will cost all of the money they have
*/
// main: ad-hoc harness for "Ice Cream Parlor".
// NOTE(review): builtin println on a slice prints its header, not its
// elements — fmt.Println would be clearer.
func main() {
values1 := []int32{1, 3, 4, 5, 6}
parlor := icecreamParlor(6, values1)
println(parlor)
}
// icecreamParlor returns the 1-based indices of the first two distinct
// flavors whose prices sum exactly to m, or nil when no pair exists.
//
// Cleanup: because addition is commutative, the first pair the old full
// i x j scan returned always had i < j (a j < i hit would already have
// fired on the earlier outer iteration), so the inner loop can start at
// i+1 — same result, half the comparisons, no i == j guard.
func icecreamParlor(m int32, arr []int32) (result []int32) {
	for i := 0; i < len(arr); i++ {
		for j := i + 1; j < len(arr); j++ {
			if arr[i]+arr[j] == m {
				return []int32{int32(i + 1), int32(j + 1)}
			}
		}
	}
	return result
}
// binarySearch returns the index of the last non-positive element of
// matches, assuming non-positive values precede positive ones (-1 if the
// first element is already positive).
// FIXME(review): the sum and value parameters are never read, and nothing
// in this file calls this function — likely leftover from an abandoned
// optimisation of icecreamParlor.
func binarySearch(matches []int32, sum int32, value int32) int {
var indexA = 0
var indexB = len(matches) - 1
for indexA <= indexB {
var mid = (indexA + indexB) / 2
if matches[mid] <= 0 {
indexA = mid + 1
} else {
indexB = mid - 1
}
}
return indexB
}
<file_sep>package main
import "fmt"
// main: ad-hoc harness — prints the result of one quicksort partition.
func main() {
values1 := []int32{4, 5, 3, 7, 2}
fmt.Print(quickSort(values1))
}
// Complete the quickSort function below.
// quickSort performs one quicksort partition around the first element:
// values smaller than the pivot (original relative order preserved),
// then the pivot, then the remaining values.
//
// Fix: the old version kept only elements strictly less-than or strictly
// greater-than the pivot, silently DROPPING any later element equal to
// it, so the output shrank on duplicate input. Duplicates now go to the
// right-hand partition and the output always has the input's length.
func quickSort(arr []int32) (result []int32) {
	pivot := arr[0]
	for _, v := range arr[1:] {
		if v < pivot {
			result = append(result, v)
		}
	}
	result = append(result, pivot)
	for _, v := range arr[1:] {
		if v >= pivot {
			result = append(result, v)
		}
	}
	return result
}
<file_sep>package main
import "fmt"
// main: ad-hoc harness — sorts a sample array in place and prints it.
func main() {
values1 := []int32{2, 3, 4, 5, 6, 7, 8, 9, 10, 1}
fmt.Print(runningTime(values1))
}
// runningTime sorts arr in place with insertion sort and returns it.
// (The HackerRank exercise of this name counts shifts; this file's main
// only prints the sorted slice.)
//
// Fix: the inner loop kept scanning — and comparing — all the way down
// to index 0 even after the element reached its slot, making every pass
// Theta(i). Stopping at the first ordered neighbour is valid because the
// prefix is already sorted, so no further swap could occur; the resulting
// array is identical and the best case returns to O(n).
func runningTime(arr []int32) []int32 {
	for i := 1; i < len(arr); i++ {
		for j := i; j > 0 && arr[j-1] > arr[j]; j-- {
			arr[j-1], arr[j] = arr[j], arr[j-1]
		}
	}
	return arr
}
<file_sep>package main
// main: empty harness — the closestNumbers implementation and its sample
// calls are all commented out (see the block comment below this function).
func main() {
//values1 := []int32{5, 2, 3, 4, 1}
//values1 := []int32{-20, -3916237, -357920, -3620601, 7374819, -7330761, 30, 6246457, -6461594, 266854}
//fmt.Print(closestNumbers(values1))
}
/*
// Complete the closestNumbers function below.
func closestNumbers(arr []int32) (result []int32) {
root := Nodo{
value: int(arr[0]),
}
for i := 1; i < len(arr); i++ {
root.add(int(arr[i]))
}
var order []int
root.printInOrder(&order)
var minDiff int
for i := 0; i < len(order)-1; i++ {
diff := order[i+1] -order[i]
if minDiff == 0 || diff < minDiff {
minDiff = diff
result = []int32{}
result = append(result, int32(order[i]))
result = append(result, int32(order[i+1]))
} else if diff == minDiff {
result = append(result, int32(order[i]))
result = append(result, int32(order[i+1]))
}
}
return result
}
type Nodo struct {
value int
left *Nodo
right *Nodo
}
func (n *Nodo) add(value int) {
if value < n.value {
if n.left != nil {
n.left.add(value)
} else {
n.left = &Nodo{
value: value,
}
}
} else {
if n.right != nil {
n.right.add(value)
} else {
n.right = &Nodo{
value: value,
}
}
}
}
func (n *Nodo) printInOrder(result *[]int) {
if n.left != nil {
n.left.printInOrder(result)
}
*result = append(*result, n.value)
if n.right != nil {
n.right.printInOrder(result)
}
}
*/<file_sep>package main
import "fmt"
func main() {
slice2 := []int32{19, 10, 12, 10, 24, 25, 22}
fmt.Println(countingSort2(slice2))
// [0 2 20 24 45 66 75 370 410 1802]
//fmt.Println(slice)
// [20 370 45 75 410 1802 24 2 66]
}
func countingSort2(arr []int32) []int32 {
k := GetMaxIntArray2(arr)
count := make([]int32, k+1)
array := CountIntArray2(arr)
for i := 0; i < len(count); i++ {
count[i] = array[int32(i)]
}
var result []int32
for i := 0; i < len(count); i++ {
for array[int32(i)] > 0{
result = append(result,int32(i))
array[int32(i)] = array[int32(i)] -1
}
}
return result
}
// CountIntArray2 returns a map from each distinct value in arr to the
// number of times it occurs. An empty slice yields an empty map.
func CountIntArray2(arr []int32) map[int32]int32 {
	frequencies := make(map[int32]int32)
	for _, value := range arr {
		frequencies[value]++
	}
	return frequencies
}
// GetMaxIntArray2 returns the largest value in arr.
// arr must be non-empty; an empty slice panics on the first index.
func GetMaxIntArray2(arr []int32) int32 {
	best := arr[0]
	for _, candidate := range arr[1:] {
		if candidate > best {
			best = candidate
		}
	}
	return best
}
| 92b263bb000c5bac9f9d170cf6e63cdc4d479c0f | [
"Go"
] | 15 | Go | danielink28/hackerRunExercises | 339c17b60c9160e778cbc7d3b64b49e128f26974 | f23b5c85d8a97ae8892c768ae33a6e31bcc6a1cb |
refs/heads/main | <repo_name>shir15a/bank-app-client<file_sep>/src/components/CreateAccount.js
import React, { useState } from 'react'
import axios from 'axios';
function CreateAccount() {
const [israeliId, setId] = useState('');
const [name, setName] = useState('');
const [email, setEmail] = useState('');
const onButtonClick = async()=>{
// console.log(amount);
const result = await axios.post(`https://bank-app-server.herokuapp.com/api/bank/account/`,{israeliId, name, email})
console.log(result);
}
return (
<div>
<label>Israeli Id:</label>
<input type="text" placeholder="Enter ID" onChange={(e) => setId(e.target.value)}></input>
<label>Name:</label>
<input type="text" placeholder="Enter your name" onChange={(e) => setName(e.target.value)}></input>
<label>Email:</label>
<input type="email" placeholder="Enter your email"
onChange={(e) => setEmail(e.target.value)}></input>
<button onClick={onButtonClick}>Create!</button>
</div>
)
}
export default CreateAccount
<file_sep>/src/components/InputsForUpdateAndDeposit.js
// for deposit, Update and Withdraw
import axios from 'axios';
import React, { useState } from 'react'
function Form({type, buttonValue}) {
const [id, setId] = useState('');
const [amount, setAmount] = useState('');
const onButtonClick = async()=>{
// console.log(amount);
const result = await axios.put(`https://bank-app-server.herokuapp.com/api/bank/transactions/${type}/${id}`,{amount})
console.log(result);
}
return (
<div>
<label>id:</label>
<input type="text" placeholder="Enter ID" onChange={(e)=>setId(e.target.value)}></input>
<label>amount:</label>
<input type="text" placeholder="Enter amount"
onChange={(e) => setAmount(e.target.value)}></input>
<button onClick={onButtonClick}>{buttonValue}</button>
</div>
)
}
export default Form
<file_sep>/src/components/SpecificAccount.js
import axios from 'axios';
import React, { useState } from 'react'
function SpecificAccount() {
const [id, setId] = useState("");
const [data, setData] = useState("");
const onButtonClick = async () => {
const { data } = await axios.get(`https://bank-app-server.herokuapp.com/api/bank/account/${id}`)
console.log(data);
setData(data)
}
const Details = () => {
if (data) {
if (data.error) return <h2>{data.error}</h2>;
else
return (
<div className="card">
<h3>{`ID: ${data.israeliId}`}</h3>
<h4>{`Name: ${data.name}`}</h4>
<h4>{`Cash: ${data.account.cash}`}</h4>
<h4>{`Credit: ${data.account.credit}`}</h4>
<h4>{`Active: ${String(data.isActive)}`}</h4>
</div>
);
} else return <p></p>
};
return (
<div>
<input type='text'
placeholder='write an Id'
onChange={(e) => setId(e.target.value)}
/>
{id && <button onClick={onButtonClick}>Search!</button>}
<button onClick={() => window.location.reload()}>Clear!</button>
<Details />
</div>
)
}
export default SpecificAccount
<file_sep>/src/components/Navbar.js
import React from "react";
import { Link } from "react-router-dom";
import './style.css'
const Navbar = () => {
return (
<div className="navbar">
<Link to="/" className="link">Home</Link>
<Link to="/all" className="link"> Show All Users</Link>
<Link to="/SpecificAccount" className="link">Specific User</Link>
<Link to="/Deposit" className="link">Deposit</Link>
<Link to="/Withdraw" className="link">Withdraw</Link>
<Link to="/UpdateCredit" className="link">Update credit</Link>
<Link to="/TransferMoney" className="link">Transfer money</Link>
<Link to="/Create" className="link">Create new account</Link>
<Link to="/TrasnactionById" className="link">Show user's trasnactions</Link>
</div>
);
};
export default Navbar;<file_sep>/src/components/TransferMoney.js
import React, { useState } from 'react'
import axios from 'axios';
function TransferMoney() {
const [fromAccount, setFromAccount] = useState('');
const [toAccount, setToAccount] = useState('');
const [amount, setAmount] = useState('');
const onButtonClick = async()=>{
// console.log(amount);
const result = await axios.put(`https://bank-app-server.herokuapp.com/api/bank/transactions/transfer/${fromAccount}/${toAccount}`,{amount})
console.log(result);
}
return (
<div>
<label>from:</label>
<input type="text" placeholder="Enter your ID" onChange={(e) => setFromAccount(e.target.value)}></input>
<label>to:</label>
<input type="text" placeholder="Enter ID" onChange={(e) => setToAccount(e.target.value)}></input>
<label>amount:</label>
<input type="text" placeholder="Enter amount"
onChange={(e) => setAmount(e.target.value)}></input>
<button onClick={onButtonClick}>Transfer!</button>
</div>
)
}
export default TransferMoney
<file_sep>/src/components/Deposit.js
import React from 'react'
import Form from "./InputsForUpdateAndDeposit";
function Deposit() {
return (
<div>
<Form type="depositing" buttonValue="Deposit"/>
</div>
)
}
export default Deposit
<file_sep>/src/components/HomePage.js
import React from 'react'
function HomePage() {
return (
<div style={{display:'flex', justifyContent:'center'}}>
WELCOME TO MY BANK !
</div>
)
}
export default HomePage
<file_sep>/src/components/TrasnactionById.js
import React, { useState } from 'react'
import axios from 'axios';
function TrasnactionById() {
const [id, setId] = useState('');
const [data, setData] = useState([]);
const onButtonClick = async () => {
const { data } = await axios.get(`https://bank-app-server.herokuapp.com/api/bank/transactions/${id}`)
console.log(data);
setData(data)
}
return (
<div>
<label>ID:</label>
<input type="text" placeholder="Enter your Id" onChange={(e) => setId(e.target.value)}></input>
<button onClick={onButtonClick}>Show!</button>
{data && data.map((data) => {
return (
<div style={{ border: '1px solid black', marginTop: '3px', marginLeft: '3px', width: '200px' }} key={data._id}>
<h3>{data.from && `From: ${data.from}`}</h3>
<h4>{`To: ${data.to}`}</h4>
<h4>{`Type: ${data.operation_type}`}</h4>
<h4>{`Amount: ${data.amount}`}</h4>
</div>
)
})}
</div>
)
}
export default TrasnactionById
<file_sep>/src/components/Bank.js
import React, { Component } from 'react'
import { BrowserRouter, Route, Switch } from 'react-router-dom';
import AllAccounts from './AllAccounts'
import SpecificAccount from './SpecificAccount'
import Navbar from './Navbar'
import HomePage from './HomePage'
import Deposit from './Deposit'
import Withdraw from './Withdraw'
import UpdateCredit from './UpdateCredit'
import TransferMoney from './TransferMoney'
import Create from './CreateAccount'
import TrasnactionById from './TrasnactionById'
export default class Bank extends Component {
render() {
return (
<BrowserRouter>
<div>
<Navbar />
<Switch>
<Route path="/" exact component={HomePage} />
<Route path="/all" exact component={AllAccounts} />
<Route path="/SpecificAccount" exact component={SpecificAccount} />
<Route path="/Deposit" exact component={Deposit} />
<Route path="/Withdraw" exact component={Withdraw} />
<Route path="/UpdateCredit" exact component={UpdateCredit} />
<Route path="/TransferMoney" exact component={TransferMoney} />
<Route path="/create" exact component={Create} />
<Route path="/TrasnactionById" exact component={TrasnactionById} />
</Switch>
</div>
</BrowserRouter>
)
}
}
| 3c182336b6b0b513540466e1594ebf06a31895d7 | [
"JavaScript"
] | 9 | JavaScript | shir15a/bank-app-client | ad3355c2ed07ef11e94efd3468a941081f7b5851 | db298c5df410566318762fffbe24af2a710eccf9 |
refs/heads/master | <repo_name>trbarton/React-DRT-UI<file_sep>/src/detail/Detail.js
import React, { Component } from 'react';
import styled from 'styled-components'
import BoatDetail from './BoatDetail';
import Leaderboard from './Leaderboard';
import Timer from './Timer';
const BackgroundElement = styled.div`
position: fixed;
top: 0;
right: 0;
width: 25vw;
height: 100%;
background: #F2F5FA;
display: flex;
flex-direction: column;
box-shadow: 0 3px 6px rgba(0,0,0,0.16);
`;
class Detail extends Component {
render() {
return (
<BackgroundElement>
<Timer></Timer>
<Leaderboard></Leaderboard>
<BoatDetail></BoatDetail>
</BackgroundElement>
);
}
}
export default Detail;
<file_sep>/src/sidebar/AccountGroup.js
import React from 'react';
import account from './svg-icons/account.svg';
const IconSize = '30px';
const IconStyle = {
margin: '5px'
}
export default () => {
return (
<img src={account} width={IconSize} height={IconSize} style={IconStyle} />
)
}
<file_sep>/src/sidebar/ControlGroup.js
import React from 'react';
import styled from 'styled-components';
import play from './svg-icons/play.svg';
import stop from './svg-icons/stop.svg';
import rewind from './svg-icons/rewind.svg';
const IconContainer = styled.div`
display: flex;
flex-direction: column;
`;
const IconSize = '30px';
const IconStyle = {
margin: '5px'
}
export default () => {
return (
<IconContainer>
<img src={play} width={IconSize} height={IconSize} style={IconStyle} />
<img src={stop} width={IconSize} height={IconSize} style={IconStyle} />
<img src={rewind} width={IconSize} height={IconSize} style={IconStyle} />
</IconContainer>
)
}
<file_sep>/src/detail/BoatDetail.js
import React, { Component } from 'react';
import styled from 'styled-components'
const BackgroundElement = styled.div`
width: 100%;
height: 25%;
background: #E3EDF7;
border-left: 8px solid #4df28a;
`;
export default () => {
return (
<BackgroundElement></BackgroundElement>
)
}
<file_sep>/src/map/Map.js
import React, { Component } from 'react';
import styled from 'styled-components';
// import img from 'map-bg.png';
const MapElement = styled.div`
width: 100%;
height: 100%;
`;
class Map extends Component {
render() {
return (
<MapElement></MapElement>
);
}
}
export default Map;
<file_sep>/README.md
# Dinghy Race Trak
A work-in-progress project re-designing the user interface for a sailing dinghy tracking application.
UI Mockups created using: Adobe XD
Implemented using: React
## UI Mockup

## Progress
Progress will soon be visible on GitHub Pages.
<file_sep>/src/detail/Timer.js
import React, { Component } from 'react';
import styled from 'styled-components'
const BackgroundElement = styled.div`
width: 100%;
height: 200px;
background: #7FA6F7;
display: flex;
flex-direction: column;
align-content: center;
justify-content: center;
box-shadow: 0 3px 6px rgba(0,0,0,0.16);
`;
const TimeBold = styled.h2`
font-family: 'Montserrat', sans-serif;
font-weight: 700;
color: white;
font-size: 6em;
margin: 0;
font-variant-numeric: tabular-nums;
`
const PaleSmall = styled.h4`
font-family: 'Montserrat', sans-serif;
font-weight: 700;
color: rgba(255,255,255,0.44);
margin: 0;
font-size: 0.9em;
`
class Timer extends Component {
constructor(props) {
super(props);
this.state = {
seconds: '00',
minutes: '00'
}
this.secondsRemaining = 0;
this.tick = this.tick.bind(this);
}
componentDidMount () {
setInterval(this.tick, 1000);
}
tick() {
var min = Math.floor(this.secondsRemaining / 60);
var sec = this.secondsRemaining - (min * 60);
this.setState({
minutes: min,
seconds: sec
})
if (sec < 10) {
this.setState({
seconds: "0" + this.state.seconds,
})
}
if (min < 10) {
this.setState({
minutes: "0" + min,
})
}
this.secondsRemaining++
}
render() {
return (
<BackgroundElement>
<PaleSmall>RACE TIME ELAPSED</PaleSmall>
<TimeBold>{this.state.minutes}:{this.state.seconds}</TimeBold>
</BackgroundElement>
)
}
}
export default Timer;<file_sep>/src/detail/Leaderboard.js
import React, { Component } from 'react';
import styled from 'styled-components'
const BackgroundElement = styled.div`
width: 100%;
flex-grow: 1;
padding: 5% 0;
text-align: left;
`;
const Title = styled.h2`
color: rgba(101,101,101,0.22);
font-family: 'Montserrat', sans-serif;
font-weight: 600;
font-size: 1.1em;
margin: 0 5%;
`
const Competitor = styled.div`
width: 100%;
height: 35px;
font-family: 'Montserrat', sans-serif;
font-weight: 300;
font-size: 1.2em;
border-right: 8px solid #4df28a;
box-sizing: border-box;
margin: 0.5em 0;
padding-left: 5%;
color: rgba(101,101,101,0.53);
display: flex;
flex-direction: row;
align-items: center;
`
const Bold = styled.span`
font-weight: 500;
padding-left: 0.5em;
`
const Spacer = styled.div`
flex-grow: 1;
`
const competitors = [
{name: "Laser 13404", speed: "10.1"},
{name: "Laser 13404", speed: "10.1"},
{name: "Laser 13404", speed: "10.1"},
{name: "Laser 13404", speed: "10.1"},
]
const colours = [
"#4df28a",
"#FF9200",
"#FF3B00",
"#00FFFF",
"#0076FF",
"#3100FF",
"#9300FF",
"#EB00FF",
"#FF004E"
]
export default () => {
return (
<BackgroundElement>
<Title>Leaderboard</Title>
{competitors.map((c, index) => {
return (
<Competitor style={{borderColor: colours[index]}}>
1. <Bold>Laser 13404</Bold><Spacer />SOG: <Bold>10.1 Knots</Bold><Spacer />
</Competitor>
)
})}
</BackgroundElement>
)
}
<file_sep>/src/sidebar/Sidebar.js
import React, { Component } from 'react';
import styled from 'styled-components'
import HomeGroup from './HomeGroup';
import ControlGroup from './ControlGroup';
import AccountGroup from './AccountGroup';
const ElementBackground = styled.div`
position: fixed;
width: 5%;
min-width: 60px;
max-width: 80px;
top: 0;
left: 0;
bottom: 0;
background: white;
display: flex;
flex-direction: column;
align-items: center;
justify-content: space-between;
padding: 25px 5px;
box-shadow: 0px 5px 10px rgba(0,0,0,0.16)
`;
class Sidebar extends Component {
render() {
return (
<ElementBackground>
<HomeGroup></HomeGroup>
<ControlGroup></ControlGroup>
<AccountGroup></AccountGroup>
</ElementBackground>
);
}
}
export default Sidebar;
| a113890df86cf584b3dfc4c15b9ee9bbe92cc36d | [
"JavaScript",
"Markdown"
] | 9 | JavaScript | trbarton/React-DRT-UI | 8eb123b038097432db60268e9f4e318c9f9db9c3 | f93f6ed09d37b5e1a81a529cf31b17735c774058 |
refs/heads/master | <file_sep><?php
namespace Chitanka\LibBundle\Form\Type;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
/**
 * Symfony form type for posting a comment on a text.
 * Hidden fields carry the target text and the comment being replied to.
 */
class TextCommentType extends AbstractType
{
	/**
	 * Declares the form fields: hidden text/reply references, the comment
	 * body and the reader (author) field.
	 */
	public function buildForm(FormBuilderInterface $builder, array $options)
	{
		$builder
			->add('text_id', 'hidden')
			->add('replyto_id', 'hidden')
			->add('content', 'textarea')
			->add('reader');
	}

	/** Binds the form to the TextComment entity. */
	public function getDefaultOptions(array $options)
	{
		return array(
			'data_class' => 'Chitanka\LibBundle\Entity\TextComment',
		);
	}

	/** Unique name under which the form type is registered. */
	public function getName()
	{
		return 'text_comment';
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Listener;
use Doctrine\ORM\EntityManager;
use FOS\CommentBundle\Event\CommentEvent;
use FOS\CommentBundle\Event\ThreadEvent;
use Chitanka\LibBundle\Service\Notifier;
use Chitanka\LibBundle\Entity\Comment;
/**
 * Listens for FOSCommentBundle events and performs the library-specific
 * follow-up work: e-mail notifications for workroom comments and linking a
 * newly created thread back to its target entity.
 */
class CommentListener
{
	// Mailer used to send notification e-mails.
	private $mailer;
	// Doctrine entity manager for loading users and persisting thread targets.
	private $em;

	public function __construct(\Swift_Mailer $mailer, EntityManager $em)
	{
		$this->mailer = $mailer;
		$this->em = $em;
	}

	/**
	 * After a comment is saved: if it belongs to a work entry and is not
	 * deleted, send a notification mail including the extra CC recipients.
	 */
	public function onCommentPostPersist(CommentEvent $event)
	{
		/* @var $comment Comment */
		$comment = $event->getComment();
		if ($comment->isForWorkEntry() && !$comment->isDeleted()) {
			$notifier = new Notifier($this->mailer);
			$notifier->sendMailByNewWorkroomComment($comment, $comment->getWorkEntry(), $this->loadExtraRecipientsForWorkEntryComment($comment));
		}
	}

	/**
	 * Builds the extra recipient list (e-mail => display name) from the
	 * comma-separated usernames in the comment's CC field, always adding the
	 * workroom mailbox as a recipient.
	 */
	protected function loadExtraRecipientsForWorkEntryComment(Comment $comment)
	{
		$recipients = array();
		$usernames = array_map('trim', explode(',', $comment->getCc()));
		$users = $this->em->getRepository('LibBundle:User')->findByUsernames($usernames);
		foreach ($users as $user) {
			$recipients[$user->getEmail()] = $user->getName();
		}
		$recipients['<EMAIL>'] = 'Работно ателие';
		return $recipients;
	}

	/**
	 * After a thread is created: attach it to its target entity and persist
	 * the link.
	 */
	public function onThreadPostPersist(ThreadEvent $event)
	{
		/* @var $thread Thread */
		$thread = $event->getThread();
		$target = $thread->getTarget($this->em)->setCommentThread($thread);
		$this->em->persist($target);
		$this->em->flush();
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Chitanka\LibBundle\Entity\Feedback;
use Chitanka\LibBundle\Form\Type\FeedbackType;
/**
 * Handles the site feedback form: renders it and, on a valid POST, sends
 * the message to the site administrator.
 */
class FeedbackController extends Controller
{
	// Client-side cache lifetime for this page, in seconds.
	protected $responseAge = 86400; // 24 hours

	public function indexAction(Request $request)
	{
		$adminEmail = $this->container->getParameter('admin_email');
		$feedback = new Feedback($this->get('mailer'), $adminEmail);
		$form = $this->createForm(new FeedbackType, $feedback);
		if ($request->getMethod() == 'POST') {
			$form->bind($request);
			if ($form->isValid()) {
				// process() sends the feedback e-mail to the administrator.
				$form->getData()->process();
				$this->view['message'] = 'Съобщението ви беше изпратено.';
//				$this->mailFailureMessage = 'Изглежда е станал някакъв фал при изпращането на съобщението ви. Ако желаете, пробвайте още веднъж.';
//				if ( empty($this->referer) ) {
//					return '';
//				}
//				"<p>Обратно към предишната страница</p>";
//				return $this->redirect($this->generateUrl('task_success'));
			}
		}
		$this->view['admin_email'] = key($adminEmail);
		$this->view['form'] = $form->createView();
		return $this->display('index');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
/**
 * Renders the OpenSearch description document for one of the site's search
 * providers, so browsers can register the library as a search engine.
 */
class OpensearchdescPage extends Page {

	public function __construct() {
		parent::__construct();
		$this->action = 'opensearchdesc';
		$this->contentType = 'application/opensearchdescription+xml';
		// The document is fetched out of context, so links must be absolute.
		$this->skin->useAbsolutePath();
		// Which search provider to describe; defaults to the first option.
		$defObj = key($this->searchOptions);
		$this->searchKey = $this->request->value('key', $defObj, 1, $this->searchOptions);
		$this->searchKeyTitle = $this->searchOptions[$this->searchKey];
		$this->title = 'Описание за OpenSearch — ' . $this->searchKeyTitle;
	}

	/**
	 * Builds the OpenSearch XML (search URL template, suggestion endpoint
	 * and favicon) and expands the site-wide templates inside it.
	 */
	protected function buildContent() {
		$pi = '<?xml version="1.0"?>';
		$searchUrl = $this->request->server() . 'action' . $this->searchKey;
		$favicon = $this->getFavicon();
		$this->addTemplates();
		return $this->fullContent = Legacy::expandTemplates(<<<EOS
$pi
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>$this->sitename ($this->searchKeyTitle)</ShortName>
<Description>Търсене в $this->sitename по $this->searchKeyTitle</Description>
<InputEncoding>$this->inencoding</InputEncoding>
<Image width="16" height="16" type="image/png">$favicon</Image>
<Url type="text/html" method="get" template="$searchUrl?q={searchTerms}&prefix=%&sortby=first&mode=simple"/>
<Url type="application/x-suggestions+json" template="$searchUrl?q={searchTerms}&prefix=%&ajaxFunc=openSearch"/>
<moz:SearchForm>$searchUrl</moz:SearchForm>
</OpenSearchDescription>
EOS
		);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\Ary;
use Buzz\Browser;
class Legacy
{
static private
$months = array(
1 => 'Януари', 'Февруари', 'Март', 'Април', 'Май', 'Юни',
'Юли', 'Август', 'Септември', 'Октомври', 'Ноември', 'Декември'
);
static private $types = array(
// code => array(singular, plural, sing. article, pl. article)
'anecdote' => array('Анекдот', 'Анекдоти', 'анекдота', 'анекдотите'),
'fable' => array('Басня', 'Басни', 'баснята', 'басните'),
'biography' => array('Биография', 'Биографии', 'биографията', 'биографиите'),
'dialogue' => array('Диалог', 'Диалози', 'диалога', 'диалозите'),
'docu' => array('Документалистика', 'Документалистика', 'книгата', 'книгите'),
'essay' => array('Есе', 'Есета', 'есето', 'есетата'),
'interview' => array('Интервю', 'Интервюта', 'интервюто', 'интервютата'),
'gamebook' => array('Книга игра', 'Книги игри', 'книгата игра', 'книгите игри'),
'memo' => array('Мемоари/спомени', 'Мемоари/спомени', 'творбата', 'творбите'),
'science' => array('Научен текст', 'Научни текстове', 'научният текст', 'научните текстове'),
'popscience' => array('Научнопопулярен текст', 'Научнопопулярни текстове', 'творбата', 'творбите'),
'novelette' => array('Новела', 'Новели', 'новелата', 'новелите'),
'ocherk' => array('Очерк', 'Очерци', 'очерка', 'очерците'),
'shortstory' => array('Разказ', 'Разкази', 'разказа', 'разказите'),
'review' => array('Рецензия', 'Рецензии', 'рецензията', 'рецензиите'),
'novel' => array('Роман', 'Романи', 'романа', 'романите'),
#'parable' => array('Парабола', 'Параболи', 'параболата', 'параболите'),
'play' => array('Пиеса', 'Пиеси', 'пиесата', 'пиесите'),
'letter' => array('Писмо', 'Писма', 'писмото', 'писмата'),
'poetry' => array('Поезия', 'Поезия', 'поетичната творба', 'поетичните творби'),
'poem' => array('Поема', 'Поеми', 'поемата', 'поемите'),
'novella' => array('Повест', 'Повести', 'повестта', 'повестите'),
'outro' => array('Послеслов', 'Послеслови', 'послеслова', 'послесловите'),
'intro' => array('Предговор', 'Предговори', 'предговора', 'предговорите'),
'tale' => array('Приказка', 'Приказки', 'приказката', 'приказките'),
'pritcha' => array('Притча', 'Притчи', 'притчата', 'притчите'),
'travelnotes' => array('Пътепис', 'Пътеписи', 'пътеписа', 'пътеписите'),
'speech' => array('Реч', 'Речи', 'речта', 'речите'),
'article' => array('Статия', 'Статии', 'статията', 'статиите'),
'prosepoetry' => array('Лирика в проза', 'Лирика в проза', 'стихотворението', 'стихотворенията'),
'screenplay' => array('Сценарий', 'Сценарии', 'сценария', 'сценариите'),
'textbook' => array('Учебник', 'Учебници', 'учебника', 'учебниците'),
'feuilleton' => array('Фейлетон', 'Фейлетони', 'фейлетона', 'фейлетоните'),
'haiku' => array('Хайку', 'Хайку', 'поетичната творба', 'поетичните творби'),
'jure' => array('Юридически текст', 'Юридически текстове', 'юридическият текст', 'юридическите текстове'),
'critique' => array('Литературна критика', 'Литературна критика', 'творбата', 'творбите'),
'philosophy' => array('Философски текст', 'Философски текст', 'творбата', 'творбите'),
'religion' => array('Религиозен текст', 'Религиозен текст', 'творбата', 'творбите'),
'historiography' => array('Историография', 'Историография', 'творбата', 'творбите'),
'collection' => array('Сборник', 'Сборник', 'творбата', 'творбите'),
'other' => array('Разни', 'Разни', 'творбата', 'творбите'),
);
static public function workType($code, $singular = true) {
if ( !array_key_exists($code, self::$types) ) return '';
return $singular ? self::$types[$code][0] : self::$types[$code][1];
}
static public function workTypeArticle($code, $singular = true) {
if ( !array_key_exists($code, self::$types) ) return '';
return $singular ? self::$types[$code][2] : self::$types[$code][3];
}
static public function workTypes($singular = true) {
$ntypes = array();
foreach (self::$types as $code => $name) {
$ntypes[$code] = $singular ? self::$types[$code][0] : self::$types[$code][1];
}
return $ntypes;
}
static private $picTypes = array(
'magazine' => 'Списание'
);
static public function picType($code) {
if ( !array_key_exists($code, self::$picTypes) ) return '';
return self::$picTypes[$code];
}
static private $seriesTypes = array(
// code => array(singular, plural, sing. article, pl. article)
'newspaper' => array('вестник', 'вестници', 'вестника', 'вестниците'),
'series' => array('серия', 'серии', 'серията', 'сериите'),
'collection' => array('сборник', 'сборници', 'сборника', 'сборниците'),
'poetry' => array('стихосбирка', 'стихосбирки', 'стихосбирката', 'стихосбирките'),
);
static private $pseudoSeries = array('collection', 'poetry');
static public function seriesSuffix($code) {
return $code == 'series' || empty(self::$seriesTypes[$code][0])
? ''
: ' ('. self::$seriesTypes[$code][0] .')';
}
static public function seriesType($code, $singular = true) {
if ( !array_key_exists($code, self::$seriesTypes) ) return '';
return $singular ? self::$seriesTypes[$code][0] : self::$seriesTypes[$code][1];
}
static public function seriesTypeArticle($code, $singular = true) {
if ( !array_key_exists($code, self::$seriesTypes) ) return '';
return $singular ? self::$seriesTypes[$code][2] : self::$seriesTypes[$code][3];
}
static public function isPseudoSeries($type) {
return in_array($type, self::$pseudoSeries);
}
	/**
	 * Formats an ISO date ("Y-m-d" or "Y-m-d H:i:s", string or DateTime;
	 * empty means "now") as a human-readable Bulgarian date, e.g.
	 * "1 януари 2012 в 12:30". The time part is omitted when absent.
	 */
	static public function humanDate($isodate = '')
	{
		$format = 'Y-m-d H:i:s';
		if ( empty($isodate) ) {
			$isodate = date($format);
		} else if ($isodate instanceof \DateTime) {
			$isodate = $isodate->format($format);
		}
		if ( strpos($isodate, ' ') === false ) { // no hours
			$ymd = $isodate;
			$hours = '';
		} else {
			list($ymd, $his) = explode(' ', $isodate);
			list($h, $i, $s) = explode(':', $his);
			$hours = " в $h:$i";
		}
		list($y, $m, $d) = explode('-', $ymd);
		// Strip the day's leading zero; lower-case the month name.
		return ltrim($d, '0') .' '. Char::mystrtolower(self::monthName($m)) .' '. $y . $hours;
	}
static public function fillOnEmpty(&$var, $value) {
if ( empty($var) ) {
$var = $value;
}
}
static public function fillOnNull(&$var, $value) {
if ( is_null($var) ) {
$var = $value;
}
}
static public function monthName($m, $asUpper = true) {
$name = self::$months[(int)$m];
return $asUpper ? $name : Char::mystrtolower($name);
}
static public function header_encode($header)
{
return '=?utf-8?B?'.base64_encode($header).'?=';
}
/**
* @param $val Value
* @param $data Associative array
* @param $defVal Default value
* @return $val if it exists in $data, otherwise $defVal
*/
static public function normVal($val, $data, $defVal = null) {
self::fillOnNull($defVal, @$data[0]);
return in_array($val, $data) ? $val : $defVal;
}
static private $regPatterns = array(
'/\[\[(.+)\|(.+)\]\]/Us' => '<a href="$1" title="$1 — $2">$2</a>',
'#(?<=[\s>])(\w+://[^])\s"<]+)([^])\s"<,.;!?])#' => '<a href="$1$2" title="$1$2">$1$2</a>',
);
static public function wiki2html($s) {
$s = preg_replace(array_keys(self::$regPatterns), array_values(self::$regPatterns), $s);
return $s;
}
static private $templates = array(
'{SITENAME}' => '{SITENAME}',
);
static public function expandTemplates($s) {
return strtr($s, self::$templates);
}
static public function addTemplate($key, $val) {
self::$templates['{'.$key.'}'] = $val;
}
/**
* Never run this function on a string with cyrillic letters: they all get converted to "Y"
*/
static public function removeDiacritics($s) {
return strtr(utf8_decode($s),
utf8_decode(
'ŠŒŽšœžŸ¥µÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýÿ'),
'SOZsozYYuAAAAAAACEEEEIIIIDNOOOOOOUUUUYsaaaaaaaceeeeiiiionoooooouuuuyy');
}
/** bytes to kibibytes */
static public function int_b2k($bytes) {
$k = $bytes >> 10; // divide by 2^10 w/o rest
return $k > 0 ? $k : 1;
}
/** bytes to mebibytes */
static public function int_b2m($bytes) {
$m = $bytes >> 20; // divide by 2^20 w/o rest
return $m > 0 ? $m : 1;
}
/** bytes to gibibytes */
static public function int_b2g($bytes) {
$m = $bytes >> 30; // divide by 2^30 w/o rest
return $m > 0 ? $m : 1;
}
/** bytes to human readable */
static public function int_b2h($bytes) {
if ( $bytes < ( 1 << 10 ) ) return $bytes . ' B';
if ( $bytes < ( 1 << 20 ) ) return self::int_b2k( $bytes ) . ' KiB';
if ( $bytes < ( 1 << 30 ) ) return self::int_b2m( $bytes ) . ' MiB';
return self::int_b2g( $bytes ) . ' GiB';
}
/**
Convert a php.ini value to an integer
(copied from php.net)
*/
static public function ini_bytes($val) {
$val = trim($val);
$last = strtolower($val{strlen($val)-1});
switch ($last) {
// The 'G' modifier is available since PHP 5.1.0
case 'g':
$val *= 1024;
case 'm':
$val *= 1024;
case 'k':
$val *= 1024;
}
return $val;
}
/**
Removes trailing zeros after the decimal sign
*/
static public function rmTrailingZeros($number, $decPoint = ',') {
$number = rtrim($number, '0');
$number = rtrim($number, $decPoint); // remove the point too
return $number;
}
static public function getMaxUploadSizeInMiB() {
return self::int_b2m( self::ini_bytes( ini_get('upload_max_filesize') ) );
}
static public function chooseGrammNumber($num, $sing, $plur, $null = '') {
settype($num, 'int');
if ($num > 1) {
return $plur;
} else if ($num == 1) {
return $sing;
} else {
return empty($null) ? $plur : $null;
}
}
static public function isUrl($string)
{
return strpos($string, 'http://') === 0;
}
static public function getAcronym($words) {
$acronym = '';
$words = preg_replace('/[^a-zA-Z\d ]/', '', $words);
foreach ( explode(' ', $words) as $word ) {
$acronym .= empty($word) ? '' : $word[0];
}
return strtoupper($acronym);
}
static public function extract2object($assocArray, &$object) {
foreach ( (array) $assocArray as $key => $val ) {
if ( ctype_alnum($key[0]) ) {
$object->$key = $val;
}
}
}
static private $contentDirs = array(
'text' => 'content/text/',
'text-info' => 'content/text-info/',
'text-anno' => 'content/text-anno/',
'user' => 'content/user/',
'sandbox' => 'content/user/sand/',
'info' => 'content/info/',
'img' => 'content/img/',
'cover' => 'content/cover/',
'book' => 'content/book/',
'book-anno' => 'content/book-anno/',
'book-info' => 'content/book-info/',
'book-img' => 'content/book-img/',
'book-cover' => 'thumb/book-cover/',
'book-cover-content' => 'content/book-cover/',
'book-djvu' => 'content/book-djvu/',
'book-pdf' => 'content/book-pdf/',
'book-pic' => 'content/book-pic/',
);
static public function getContentFile($key, $num) {
$file = self::getInternalContentFilePath($key, $num);
if ( file_exists($file) ) {
return file_get_contents($file);
}
return null;
}
static public function getContentFilePath($key, $num, $full = true) {
$pref = Ary::arrVal(self::$contentDirs, $key, $key .'/');
return $pref . self::makeContentFilePath($num, $full);
}
static public function getInternalContentFilePath($key, $num, $full = true) {
return __DIR__ .'/../../../../web/'. self::getContentFilePath($key, $num, $full);
}
// use this for sfbzip too
static public function makeContentFilePath($num, $full = true) {
$realnum = $num;
$num = (int) $num;
$word = 4; // a word is four bytes long
$bin_in_hex = 4; // one hex character corresponds to four binary digits
$path = str_repeat('+/', $num >> ($word * $bin_in_hex));
$hex = str_pad(dechex($num), $word, '0', STR_PAD_LEFT);
$hex = substr($hex, -$word); // take last $word characters
$path .= substr($hex, 0, 2) . '/';
if ($full) {
$path .= $realnum;
}
return $path;
}
/** Handles only JPEG */
static public function genThumbnail($filename, $width = 250)
{
if ( ! preg_match('/\.jpe?g$/', $filename) ) {
return $filename;
}
list($width_orig, $height_orig) = getimagesize($filename);
if ($width_orig < $width) {
return $filename;
}
$height = $width * $height_orig / $width_orig;
$image_p = imagecreatetruecolor($width, $height);
$image = imagecreatefromjpeg($filename);
imagecopyresampled($image_p, $image, 0, 0, 0, 0, $width, $height, $width_orig, $height_orig);
$temp = Setup::setting('tmp_dir').'/thumb-'.uniqid().'-'.basename($filename);
imagejpeg($image_p, $temp, 80);
return $temp;
}
static public function getFromUrl($url, $postData = array())
{
$ch = curl_init();
$options = array(
CURLOPT_URL => $url,
CURLOPT_RETURNTRANSFER => true, // return content
CURLOPT_HEADER => false, // don't return headers
CURLOPT_CONNECTTIMEOUT => 30, // timeout on connect
CURLOPT_TIMEOUT => 60, // timeout on response
CURLOPT_USERAGENT => 'Mylib (http://chitanka.info)',
CURLOPT_FOLLOWLOCATION => true,
);
if ( ! empty($postData)) {
$options[CURLOPT_POST] = true;
$options[CURLOPT_POSTFIELDS] = $postData;
}
curl_setopt_array($ch, $options);
$contents = curl_exec($ch);
curl_close($ch);
return $contents;
}
static public function getFromUrlOrCache($url, $cacheTime = 0)
{
$id = md5($url);
$action = 'url';
if ( $cacheTime && CacheManager::cacheExists($action, $id, $cacheTime) ) {
return CacheManager::getCache($action, $id);
}
$content = self::getFromUrl($url);
if ( empty($content) ) {
return '';
}
return CacheManager::setCache($action, $id, $content);
}
static public function getMwContent($url, Browser $browser, $cacheDays = 7)
{
$id = md5($url);
$action = 'info';
if ( CacheManager::cacheExists($action, $id, $cacheDays) ) {
return CacheManager::getCache($action, $id);
}
try {
$response = $browser->get("$url?action=render", array('User-Agent: Mylib (http://chitanka.info)'));
if ($response->isOk()) {
$content = self::processMwContent($response->getContent(), $url);
return CacheManager::setCache($action, $id, $content);
}
} catch (\RuntimeException $e) {
}
return '';
}
static protected function processMwContent($content, $url)
{
$up = parse_url($url);
$server = "$up[scheme]://$up[host]";
$content = strtr($content, array(
' ' => ' ',
' href="/wiki/' => ' href="'.$server.'/wiki/',
));
$patterns = array(
'/rel="[^"]+"/' => '',
// images
'| src="(/\w)|' => " src=\"$server$1",
);
$content = preg_replace(array_keys($patterns), array_values($patterns), $content);
$content = sprintf('<div class="editsection">[<a href="%s?action=edit" title="Редактиране на статията">±</a>]</div>', $url) . $content;
return $content;
}
/**
Validates an e-mail address.
Regexps are taken from http://www.iki.fi/markus.sipila/pub/emailvalidator.php
(author: <NAME>, version: 1.0, 2006-08-02)
@param string $input E-mail address to be validated
@return int 1 if valid, 0 if not valid, -1 if valid but strange
*/
static public function validateEmailAddress($input, $allowEmpty = true) {
if ( empty($input) ) {
return $allowEmpty ? 1 : 0;
}
$ct = '[a-zA-Z0-9-]';
$cn = '[a-zA-Z0-9_+-]';
$cr = '[a-zA-Z0-9,!#$%&\'\*+\/=?^_`{|}~-]';
$normal = "/^$cn+(\.$cn+)*@$ct+(\.$ct+)*\.([a-z]{2,4})$/";
$rare = "/^$cr+(\.$cr+)*@$ct+(\.$ct+)*\.([a-z]{2,})$/";
if ( preg_match($normal, $input) ) { return 1; }
if ( preg_match($rare, $input) ) { return -1; }
return 0;
}
static public function sha1_loop($pass, $loops = 1) {
for ($i=0; $i < $loops; $i++) {
$pass = sha1($pass);
}
return $pass;
}
}
<file_sep><?php namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Chitanka\LibBundle\Legacy\CacheManager;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Service\WikiEngine;
/**
 * Controller for wiki pages: shows pages from the external MediaWiki site
 * and from the local git-backed wiki engine, and supports edits with preview.
 */
class WikiController extends Controller {

	/** Shows a page fetched from the external MediaWiki site. */
	public function indexAction($page) {
		$urlTemplate = $this->container->getParameter('wiki_url');
		$pageUrl = str_replace('$1', ucfirst($page), $urlTemplate);
		$this->view = array(
			'page' => $page,
			'wiki_page' => $pageUrl,
			'contents' => $this->getFromWiki($pageUrl)
		);
		return $this->display('index');
	}

	/** Shows a page from the local wiki engine (404 status when it does not exist). */
	public function showAction($page) {
		$wikiPage = $this->wikiEngine()->getPage($page);
		if (!$wikiPage->exists()) {
			$this->responseStatusCode = 404;
		}
		return $this->display('show', array(
			'page' => $wikiPage,
		));
	}

	/** Saves a wiki page, recording the current user as the commit author. */
	public function saveAction(Request $request) {
		$params = $request->request;
		$user = $this->getUser();
		$commitAuthor = "{$user->getUsername()} <{$user->getUsername()}@chitanka>";
		$this->wikiEngine()->savePage($params->get('summary'), $params->get('page'), $params->get('content'), $params->get('title'), $commitAuthor);
		return $this->displayJson(1);
	}

	/** Renders an HTML preview of the posted markdown content. */
	public function previewAction(Request $request) {
		$markdown = $request->request->get('content');
		return $this->displayText(WikiEngine::markdownToHtml($markdown));
	}

	/** Shows the commit history of a wiki page. */
	public function historyAction($page) {
		$engine = $this->wikiEngine();
		return $this->display('history', array(
			'page' => $engine->getPage($page),
			'commits' => $engine->getHistory($page),
		));
	}

	/** Creates a wiki engine bound to the wiki content directory. */
	protected function wikiEngine() {
		return new WikiEngine($this->getParameter('content_dir').'/wiki');
	}

	/**
	 * Fetches rendered content from the external wiki, with caching.
	 * The cache can be bypassed through the query parameter cache=0.
	 */
	private function getFromWiki($url, $ttl = 1) {
		$cacheId = md5($url);
		$cacheAction = 'wiki';
		if ($this->get('request')->query->get('cache', 1) == 0) {
			$ttl = 0;
		}
		if (CacheManager::cacheExists($cacheAction, $cacheId, $ttl)) {
			return CacheManager::getCache($cacheAction, $cacheId);
		}
		$content = Legacy::getFromUrl("$url?action=render");
		if (empty($content)) {
			return '';
		}
		return CacheManager::setCache($cacheAction, $cacheId, $content);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Admin;
use Sonata\AdminBundle\Form\FormMapper;
use Sonata\AdminBundle\Datagrid\DatagridMapper;
use Sonata\AdminBundle\Datagrid\ListMapper;
use Sonata\AdminBundle\Show\ShowMapper;
/**
 * Sonata admin configuration for wiki-site records (external wiki sites
 * linked from the library). Declarative only — the field order below is
 * the display order in the admin interface.
 */
class WikiSiteAdmin extends Admin
{
	protected $baseRoutePattern = 'wiki-site';
	protected $baseRouteName = 'admin_wiki_site';
	protected $translationDomain = 'admin';

	// Fields shown on the read-only "show" page
	protected function configureShowField(ShowMapper $showMapper)
	{
		$showMapper
			->add('code')
			->add('name')
			->add('url')
			->add('intro')
		;
	}

	// Columns of the list view; "name" links to the record
	protected function configureListFields(ListMapper $listMapper)
	{
		$listMapper
			->addIdentifier('name')
			->add('url')
			->add('_action', 'actions', array(
				'actions' => array(
					'view' => array(),
					'edit' => array(),
					'delete' => array(),
				)
			))
		;
	}

	// Edit/create form; help texts come from the "admin" translation domain
	protected function configureFormFields(FormMapper $formMapper)
	{
		$formMapper
			->with('General attributes')
				->add('code')
				->add('name')
				->add('url')
				->add('intro', null, array('required' => false))
				->setHelps(array(
					'code' => $this->trans('help.wikisite.code'),
					'url' => $this->trans('help.wikisite.url'),
					'intro' => $this->trans('help.wikisite.intro'),
				))
			->end()
		;
	}

	// Filters available in the list-view datagrid
	protected function configureDatagridFilters(DatagridMapper $datagrid)
	{
		$datagrid
			->add('name')
			->add('url')
			->add('intro')
		;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Sfblib_SfbToHtmlConverter as SfbToHtmlConverter;
/**
 * Sandbox for trying out the SFB markup: shows a form and, when content
 * is posted, the result of converting it from SFB to HTML.
 */
class SandboxController extends Controller
{
	public function indexAction()
	{
		$postParams = $this->get('request')->request;
		$image_dir = $postParams->get('image_dir');
		$content = $postParams->get('content');
		$this->view = array('image_dir' => $image_dir, 'content' => $content);
		if ($content) {
			$converter = new SfbToHtmlConverter($content, $image_dir);
			$this->view['html_content'] = $converter->convert()->getContent();
		}
		return $this->display('index');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
// Admin controller for book links. No customizations — all CRUD behavior
// is inherited unchanged from CRUDController.
class BookLinkController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command refreshing the denormalized field book.title_author
 * (a comma-separated list of author names) for books where it is NULL.
 *
 * TODO Usage of book.title_author is deprecated
 */
class UpdateBookTitleAuthorDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:update-book-title-author')
			->setDescription('Update legacy field book.title_author')
			->addOption('dump-sql', null, InputOption::VALUE_NONE, 'Output SQL queries instead of executing them')
			->setHelp(<<<EOT
The <info>db:update-book-title-author</info> command updates the legacy field book.title_author.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->output = $output;
		$this->dumpSql = $input->getOption('dump-sql') === true;
		$this->updateBookTitleAuthor($this->dumpSql);
		$output->writeln('/*Done.*/');
	}

	/**
	 * Builds (and executes or prints) UPDATE queries filling book.title_author
	 * with a comma-separated list of the book's author names.
	 * Only books where title_author is still NULL and which have at least one
	 * author are touched.
	 *
	 * @param bool $dumpSql Print the queries instead of executing them
	 */
	protected function updateBookTitleAuthor($dumpSql)
	{
		$queries = array();
		$connection = $this->em->getConnection();
		$iterableResult = $this->em->createQuery('SELECT b FROM LibBundle:Book b WHERE b.title_author IS NULL')->iterate();
		foreach ($iterableResult AS $row) {
			$book = $row[0];
			if (count($book->getAuthors()) == 0) {
				continue;
			}
			$authorNames = array();
			foreach ($book->getAuthors() as $author) {
				$authorNames[] = $author->getName();
			}
			// quote() escapes apostrophes in author names (e.g. O'Brien);
			// the previous raw interpolation produced broken SQL for such names
			$titleAuthor = $connection->quote(implode(', ', $authorNames));
			$queries[] = "UPDATE book SET title_author = $titleAuthor WHERE id = ".$book->getId();
			// detach to keep memory usage flat while iterating over all books
			$this->em->detach($book);
		}
		if ($dumpSql) {
			$this->printQueries($queries);
		} else {
			$this->executeUpdates($queries, $connection);
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Doctrine\ORM\NoResultException;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\TextRating;
use Chitanka\LibBundle\Entity\UserTextRead;
use Chitanka\LibBundle\Form\Type\TextRatingType;
use Chitanka\LibBundle\Form\Type\TextLabelType;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Service\TextBookmarkService;
use Chitanka\LibBundle\Service\TextDownloadService;
use Chitanka\LibBundle\Service\TextLabelService;
use Chitanka\LibBundle\Util\String;
/**
 * Controller for texts (works): listings by type/label/letter, display in
 * several formats (html, txt, fb2, sfb, epub, …), downloads (with optional
 * mirror redirect), ratings, labels, bookmarks and read-marks.
 */
class TextController extends Controller {
	// Cacheable responses may be kept for this many seconds
	protected $responseAge = 86400; // 24 hours

	/** Start page of the texts section; HTML variant also shows labels and types. */
	public function indexAction($_format) {
		if ($_format == 'html') {
			$this->view = array(
				'labels' => $this->getLabelRepository()->getAllAsTree(),
				'types' => $this->getTextRepository()->getTypes(),
			);
		}
		return $this->display("index.$_format");
	}

	/** Index of all text types. */
	public function listByTypeIndexAction($_format) {
		return $this->display("list_by_type_index.$_format", array(
			'types' => $this->getTextRepository()->getTypes()
		));
	}

	/** Index of all labels. */
	public function listByLabelIndexAction($_format) {
		return $this->display("list_by_label_index.$_format", array(
			'labels' => $this->getLabelRepository()->getAll()
		));
	}

	/** Index page for browsing texts alphabetically. */
	public function listByAlphaIndexAction($_format) {
		return $this->display("list_by_alpha_index.$_format");
	}

	/** Paginated list of texts of a given type (30 per page). */
	public function listByTypeAction($type, $page, $_format) {
		$textRepo = $this->getTextRepository();
		$limit = 30;
		$this->view = array_merge($this->view, array(
			'type' => $type,
			'texts' => $textRepo->getByType($type, $page, $limit),
			'pager'    => new Pager(array(
					'page'  => $page,
					'limit' => $limit,
					'total' => $textRepo->countByType($type)
				)),
			'route_params' => array('type' => $type),
		));
		return $this->display("list_by_type.$_format");
	}

	/**
	 * Paginated list of texts carrying a given label (or any of its
	 * descendant labels). 404 when the label slug is unknown.
	 */
	public function listByLabelAction($slug, $page, $_format) {
		$textRepo = $this->getTextRepository();
		$limit = 30;
		$slug = String::slugify($slug);
		$label = $this->getLabelRepository()->findBySlug($slug);
		if ($label === null) {
			return $this->notFound("Няма етикет с код $slug.");
		}
		// texts are matched against the label and all of its descendants
		$labels = $label->getDescendantIdsAndSelf();
		return $this->display("list_by_label.$_format", array(
			'label' => $label,
			'parents' => array_reverse($label->getAncestors()),
			'texts' => $textRepo->getByLabel($labels, $page, $limit),
			'pager'    => new Pager(array(
					'page'  => $page,
					'limit' => $limit,
					'total' => $textRepo->countByLabel($labels)
				)),
			'route' => $this->getCurrentRoute(),
			'route_params' => array('slug' => $slug),
		));
	}

	/** Paginated list of texts by initial letter; "-" lists texts with no letter prefix. */
	public function listByAlphaAction($letter, $page, $_format) {
		$textRepo = $this->getTextRepository();
		$limit = 30;
		$prefix = $letter == '-' ? null : $letter;
		return $this->display("list_by_alpha.$_format", array(
			'letter' => $letter,
			'texts' => $textRepo->getByPrefix($prefix, $page, $limit),
			'pager'    => new Pager(array(
					'page'  => $page,
					'limit' => $limit,
					'total' => $textRepo->countByPrefix($prefix)
				)),
			'route_params' => array('letter' => $letter),
		));
	}

	/**
	 * Shows a single text in the requested format. Plain-content formats are
	 * served inline; packaged formats (zip, epub) go through the download
	 * service, possibly redirected to a mirror server first.
	 */
	public function showAction(Request $request, $id, $_format) {
		list($id) = explode('-', $id); // remove optional slug
		try {
			$text = $this->getTextRepository()->get($id);
		} catch (NoResultException $e) {
			return $this->notFound("Няма текст с номер $id.");
		}
		switch ($_format) {
			case 'txt':
				return $this->displayText($text->getContentAsTxt(), array('Content-Type' => 'text/plain'));
			case 'fb2':
				Setup::doSetup($this->container);
				return $this->displayText($text->getContentAsFb2(), array('Content-Type' => 'application/xml'));
			case 'sfb':
				return $this->displayText($text->getContentAsSfb(), array('Content-Type' => 'text/plain'));
			case 'fbi':
				Setup::doSetup($this->container);
				return $this->displayText($text->getFbi(), array('Content-Type' => 'text/plain'));
			case 'data':
				return $this->displayText($text->getDataAsPlain(), array('Content-Type' => 'text/plain'));
			case 'html':
				return $this->showHtml($text, 1);
		}
		// packaged formats may be served by a mirror
		if ($redirect = $this->tryMirrorRedirect($id, $_format)) {
			return $this->urlRedirect($redirect);
		}
		Setup::doSetup($this->container);
		$service = new TextDownloadService($this->getTextRepository());
		switch ($_format) {
			case 'txt.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getTxtZipFile(explode(',', $id), $_format, $request->get('filename')));
			case 'fb2.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getFb2ZipFile(explode(',', $id), $_format, $request->get('filename')));
			case 'sfb.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getSfbZipFile(explode(',', $id), $_format, $request->get('filename')));
			case 'epub':
				return $this->urlRedirect($this->getWebRoot() . $service->getEpubFile(explode(',', $id), $_format, $request->get('filename')));
		}
		throw new \Exception("Неизвестен формат: $_format");
	}

	/** Shows one part (chapter) of a text as HTML. */
	public function showPartAction($id, $part) {
		return $this->showHtml($this->getTextRepository()->get($id), $part);
	}

	/** Renders the HTML view of a text part; the last part also shows similar texts. */
	public function showHtml(Text $text, $part) {
		$nextHeader = $text->getNextHeaderByNr($part);
		$nextPart = $nextHeader ? $nextHeader->getNr() : 0;
		$this->view = array(
			'text' => $text,
			'authors' => $text->getAuthors(),
			'part' => $part,
			'next_part' => $nextPart,
			'obj_count' => 3, /* after annotation and extra info */
		);
		if (empty($nextPart)) {
			$alikes = $text->getAlikes();
			$this->view['similar_texts'] = $alikes ? $this->getTextRepository()->getByIds(array_slice($alikes, 0, 30)) : array();
		}
		$this->view['js_extra'][] = 'text';
		return $this->display('show');
	}

	/** Downloads several texts (comma-separated ids) as one archive. */
	public function showMultiAction(Request $request, $id, $_format) {
		$mirror = $this->tryMirrorRedirect(explode(',', $id), $_format);
		$requestedFilename = $request->get('filename');
		if ($mirror) {
			if ($requestedFilename) {
				$mirror .= '?filename=' . urlencode($requestedFilename);
			}
			return $this->urlRedirect($mirror);
		}
		Setup::doSetup($this->container);
		$service = new TextDownloadService($this->getTextRepository());
		switch ($_format) {
			case 'txt.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getTxtZipFile(explode(',', $id), $_format, $requestedFilename));
			case 'fb2.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getFb2ZipFile(explode(',', $id), $_format, $requestedFilename));
			case 'sfb.zip':
				return $this->urlRedirect($this->getWebRoot() . $service->getSfbZipFile(explode(',', $id), $_format, $requestedFilename));
			case 'epub':
				return $this->urlRedirect($this->getWebRoot() . $service->getEpubFile(explode(',', $id), $_format, $requestedFilename));
		}
		throw new \Exception("Неизвестен формат: $_format");
	}

	/** Redirects to a randomly chosen text. */
	public function randomAction() {
		$id = $this->getTextRepository()->getRandomId();
		return $this->urlRedirect($this->generateUrl('text_show', array('id' => $id)));
	}

	/** Comments for a text (delegated to the legacy page). */
	public function commentsAction($id, $_format) {
		$this->disableCache();
		$_REQUEST['id'] = $id;
		return $this->legacyPage('Comment');
	}

	/** Lists up to 30 texts similar to the given one. */
	public function similarAction($id) {
		$text = $this->findText($id);
		$alikes = $text->getAlikes();
		$this->view = array(
			'text' => $text,
			'similar_texts' => $alikes ? $this->getTextRepository()->getByIds(array_slice($alikes, 0, 30)) : array(),
		);
		return $this->display('similar');
	}

	/**
	 * Shows and handles the rating form for a text. On POST by an
	 * authenticated user the rating is saved and the text's average updated.
	 */
	public function ratingAction(Request $request, $id) {
		$text = $this->findText($id);
		$em = $this->getEntityManager();
		$user = $em->merge($this->getUser());
		$rating = $this->getTextRatingRepository()->getByTextAndUser($text, $user);
		if ( ! $rating) {
			$rating = new TextRating($text, $user);
		}
		$form = $this->createForm(new TextRatingType, $rating);
		// TODO replace with DoctrineListener
		$oldRating = $rating->getRating();
		if ($request->getMethod() == 'POST') {
			$form->bind($request);
			if ($form->isValid() && $this->getUser()->isAuthenticated()) {
				// TODO replace with DoctrineListener
				$text->updateAvgRating($rating->getRating(), $oldRating);
				$this->getEntityManager()->persist($text);
				// TODO bind overwrites the Text object with an id
				$rating->setText($text);
				$rating->setCurrentDate();
				$em->persist($rating);
				$em->flush();
			}
		}
		$this->view = array(
			'text' => $text,
			'form' => $form->createView(),
			'rating' => $rating,
		);
		if ($request->isXmlHttpRequest() || $request->isMethod('GET')) {
			$this->disableCache();
			return $this->display('rating');
		}
		return $this->redirectToText($text);
	}

	/** Attaches a new label to a text (permission-checked). */
	public function newLabelAction(Request $request, $id) {
		$this->disableCache();
		if (!$this->getUser()->canPutTextLabel()) {
			return $this->notAllowed();
		}
		$text = $this->findText($id);
		$service = new TextLabelService($this->getEntityManager(), $this->getSavableUser());
		$textLabel = $service->newTextLabel($text);
		$form = $this->createForm(new TextLabelType, $textLabel);
		$this->view = array(
			'text' => $text,
			'text_label' => $textLabel,
			'form' => $form->createView(),
		);
		if ($request->isMethod('POST') && $form->submit($request)->isValid()) {
			$service->addTextLabel($textLabel, $text);
			if ($request->isXmlHttpRequest()) {
				$this->view['label'] = $textLabel->getLabel();
				return $this->display('label_view');
			}
			return $this->redirectToText($text);
		}
		return $this->display('new_label');
	}

	/** Removes a label from a text (permission-checked). */
	public function deleteLabelAction(Request $request, $id, $labelId) {
		$this->disableCache();
		if (!$this->getUser()->canPutTextLabel()) {
			return $this->notAllowed();
		}
		$text = $this->findText($id);
		$label = $this->findLabel($labelId);
		$service = new TextLabelService($this->getEntityManager(), $this->getSavableUser());
		$service->removeTextLabel($text, $label);
		if ($request->isXmlHttpRequest()) {
			return $this->displayText(1);
		}
		return $this->redirectToText($text);
	}

	/** Shows the label-change log for one text. */
	public function labelLogAction($id) {
		$text = $this->findText($id);
		$log = $this->getRepository('TextLabelLog')->getForText($text);
		return $this->display('label_log', array(
			'text' => $text,
			'log' => $log,
		));
	}

	/** Shows the label-change log for all texts, paginated. */
	public function fullLabelLogAction(Request $request) {
		$page = $request->get('page', 1);
		$limit = 30;
		$repo = $this->getRepository('TextLabelLog');
		return $this->display('label_log', array(
			'log' => $repo->getAll($page, $limit),
			'pager'    => new Pager(array(
					'page'  => $page,
					'limit' => $limit,
					'total' => $repo->count()
				)),
		));
	}

	/** Ratings overview (delegated to the legacy page). */
	public function ratingsAction($id) {
		$_REQUEST['id'] = $id;
		return $this->legacyPage('Textrating');
	}

	/** Renders the "mark as read" form, or a notice if already marked. */
	public function markReadFormAction($id) {
		$this->disableCache();
		if ($this->getUser()->isAuthenticated()) {
			$tr = $this->getUserTextReadRepository()->findOneBy(array('text' => $id, 'user' => $this->getUser()->getId()));
			if ($tr) {
				return new Response('Произведението е отбелязано като прочетено.');
			}
		}
		return $this->render('LibBundle:Text:mark_read_form.html.twig', array('id' => $id));
	}

	/** Marks a text as read by the current (authenticated) user. */
	public function markReadAction(Request $request, $id) {
		$this->disableCache();
		if ( ! $this->getUser()->isAuthenticated()) {
			return $this->notAllowed();
		}
		$text = $this->findText($id);
		$em = $this->getEntityManager();
		$em->persist(new UserTextRead($this->getSavableUser(), $text));
		$em->flush();
		if ($request->isXmlHttpRequest()) {
			return $this->displayJson('Произведението е отбелязано като прочетено.');
		}
		return $this->redirectToText($text);
	}

	/**
	 * Toggles a bookmark on a text for the current user. The Ajax response
	 * instructs the client how to update the bookmark button state.
	 */
	public function addBookmarkAction(Request $request, $id) {
		$this->disableCache();
		if ( ! $this->getUser()->isAuthenticated()) {
			return $this->notAllowed();
		}
		$text = $this->findText($id);
		$service = new TextBookmarkService($this->getEntityManager(), $this->getSavableUser());
		$bookmark = $service->addBookmark($text);
		if ($request->isXmlHttpRequest()) {
			$response = $bookmark
				? array('addClass' => 'active', 'setTitle' => 'Премахване от Избрани')
				: array('removeClass' => 'active', 'setTitle' => 'Добавяне в Избрани');
			return $this->displayJson($response);
		}
		return $this->redirectToText($text);
	}

	/** Form for suggesting corrections of text data (delegated to the legacy page). */
	public function suggestAction($id, $object) {
		$_REQUEST['id'] = $id;
		$_REQUEST['object'] = $object;
		return $this->legacyPage('SuggestData');
	}

	/** Redirects to the show page of a text (entity or id). */
	protected function redirectToText($text) {
		$id = $text instanceof Text ? $text->getId() : $text;
		return $this->urlRedirect($this->generateUrl('text_show', array('id' => $id)));
	}

	/**
	 * Loads a text by id.
	 * NOTE(review): on a miss with $bailIfNotFound this RETURNS the notFound
	 * response instead of throwing — callers use the return value directly.
	 */
	protected function findText($textId, $bailIfNotFound = true) {
		$text = $this->getTextRepository()->find($textId);
		if ($bailIfNotFound && $text === null) {
			return $this->notFound("Няма текст с номер $textId.");
		}
		return $text;
	}

	/** Loads a label by id; same return-instead-of-throw behavior as findText(). */
	protected function findLabel($labelId, $bailIfNotFound = true) {
		$label = $this->getLabelRepository()->find($labelId);
		if ($bailIfNotFound && $label === null) {
			return $this->notFound("Няма етикет с номер $labelId.");
		}
		return $label;
	}

	/**
	 * Builds a mirror-server URL for a download, or false when no mirror
	 * is configured.
	 * @param int|array $ids    One text id or an array of ids
	 * @param string    $format Optional file format suffix
	 * @return string|false
	 */
	private function tryMirrorRedirect($ids, $format = null) {
		$dlSite = $this->getMirrorServer();
		if (!$dlSite) {
			return false;
		}
		$ids = (array) $ids;
		$url = (count($ids) > 1 ? '/text-multi/' : '/text/') . implode(',', $ids);
		if ($format) {
			$url .= '.' . $format;
		}
		return $dlSite . $url;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Join entity linking a text to one of its authors (a Person), with an
 * optional position among co-authors and a year.
 *
 * @ORM\Entity
 * @ORM\Table(name="text_author",
 *	uniqueConstraints={@ORM\UniqueConstraint(name="person_text_uniq", columns={"person_id", "text_id"})},
 *	indexes={
 *		@ORM\Index(name="text_idx", columns={"text_id"})}
 * )
 */
class TextAuthor extends Entity
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;

	/**
	 * @var integer $person
	 * @ORM\ManyToOne(targetEntity="Person", inversedBy="textAuthors")
	 */
	private $person;

	/**
	 * @var integer $text
	 * @ORM\ManyToOne(targetEntity="Text", inversedBy="textAuthors")
	 */
	private $text;

	/**
	 * @var integer $pos
	 * @ORM\Column(type="smallint", nullable=true)
	 */
	private $pos;

	/**
	 * @var integer $year
	 * @ORM\Column(type="smallint", nullable=true)
	 */
	private $year;

	public function getId() { return $this->id; }

	public function setPerson($person) { $this->person = $person; }
	public function getPerson() { return $this->person; }

	public function setText($text) { $this->text = $text; }
	public function getText() { return $this->text; }

	public function setPos($pos) { $this->pos = $pos; }
	public function getPos() { return $this->pos; }

	public function setYear($year) { $this->year = $year; }
	public function getYear() { return $this->year; }

	/**
	 * Add Text
	 *
	 * NOTE(review): appends to $this->texts, which is not a declared property
	 * of this entity (only the singular $text exists). Looks like generated
	 * leftover code — confirm before using.
	 *
	 * @param Chitanka\LibBundle\Entity\Text $text
	 */
	public function addText(\Chitanka\LibBundle\Entity\Text $text)
	{
		$this->texts[] = $text;
	}

	/**
	 * Get Text
	 *
	 * NOTE(review): reads the undeclared $this->texts (see addText) — will be
	 * null unless addText was called first. Presumably generated leftover.
	 *
	 * @return Doctrine\Common\Collections\Collection $text
	 */
	public function getTexts()
	{
		return $this->texts;
	}

	/**
	 * Add Person
	 *
	 * NOTE(review): appends to the undeclared $this->persons (only the
	 * singular $person property exists). Presumably generated leftover.
	 *
	 * @param Chitanka\LibBundle\Entity\Person $person
	 */
	public function addPerson(\Chitanka\LibBundle\Entity\Person $person)
	{
		$this->persons[] = $person;
	}

	/**
	 * Get Person
	 *
	 * NOTE(review): reads the undeclared $this->persons (see addPerson).
	 *
	 * @return Doctrine\Common\Collections\Collection $person
	 */
	public function getPersons()
	{
		return $this->persons;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\NoResultException;
/**
 * Repository for text ratings.
 */
class TextRatingRepository extends EntityRepository
{
	/** Rating values with their display labels, highest first */
	static public $ratings = array(
		6 => '6 — Шедьовър',
		5 => '5 — Много добро',
		4 => '4 — Добро',
		3 => '3 — Посредствено',
		2 => '2 — Лошо',
		1 => '1 — Отвратително',
	);

	/**
	 * Returns the rating a given user has given to a given text,
	 * or null if the user has not rated it.
	 *
	 * @param object|int $text Text entity or its id
	 * @param object|int $user User entity or its id
	 * @return object|null The TextRating entity or null
	 */
	public function getByTextAndUser($text, $user)
	{
		$textId = is_object($text) ? $text->getId() : $text;
		$userId = is_object($user) ? $user->getId() : $user;
		$dql = sprintf('SELECT r FROM %s r WHERE r.text = %d AND r.user = %d', $this->getEntityName(), $textId, $userId);
		try {
			return $this->_em->createQuery($dql)->setMaxResults(1)->getSingleResult();
		} catch (NoResultException $e) {
			return null;
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\File;
use Chitanka\LibBundle\Entity\BaseWork;
use Chitanka\LibBundle\Entity\Book;
use Chitanka\LibBundle\Entity\Text;
class DownloadFile
{
static private $_dlDir = 'cache/dl';
private $_zipFile = null;
public function __construct()
{
$this->_zipFile = new ZipFile;
}
public function getSfbForBook($book)
{
return $this->getDlFileForBook($book, 'sfb', 'addBinariesForSfb');
}
protected function addBinariesForSfb($book, $filename)
{
if ( ($cover = $book->getCover()) ) {
$this->addFileEntry($cover, $filename);
}
foreach ($book->getImages() as $image) {
$this->addFileEntry($image);
}
}
public function getTxtForBook($book)
{
return $this->getDlFileForBook($book, 'txt');
}
public function getFb2ForBook($book)
{
return $this->getDlFileForBook($book, 'fb2');
}
public function getEpubForBook($book)
{
return $this->getDlFileForBook($book, 'epub', 'addEpubEntries');
}
public function getStaticFileForBook(Book $book, $format)
{
$dlFileName = $this->getFullDlFileName($this->createWorkFileName($book, $format));
if (file_exists($dlFileName)) {
return $dlFileName;
}
$sourceFile = $book->getStaticFile($format);
if ( !file_exists($sourceFile)) {
throw new \Exception("$format file for book #{$book->getId()} does not exist: '$sourceFile'");
}
copy($sourceFile, $dlFileName);
return $dlFileName;
}
public function getEpubForText($text)
{
return $this->getDlFileForText($text, 'epub', 'addEpubEntries');
}
public function getEpubForTexts($texts)
{
return $this->getDlFileForTexts($texts, 'epub', 'addEpubEntries');
}
/**
* @param Book $book
* @param string $format
* @param string $binaryCallback
*/
public function getDlFileForBook(Book $book, $format, $binaryCallback = null)
{
$textIds = $book->getTextIds();
// a book with one text is different from the very same text
$textIds[] = "book".$book->getId();
if ( ($dlCache = self::getDlCache($textIds, $format)) ) {
if ( ($dlFile = self::getDlFile($dlCache)) ) {
return $dlFile;
}
}
$filename = $this->createWorkFileName($book);
$getMethod = sprintf('getContentAs%s', ucfirst($format));
if ( method_exists($book, $getMethod) ) {
$cacheKey = "book-".$book->getId().".$format";
$this->addContentEntry($book->$getMethod(), "$filename.$format", $cacheKey);
}
if ($binaryCallback && method_exists($this, $binaryCallback)) {
$this->$binaryCallback($book, $filename);
}
$zipFilename = "$filename.$format";
if ($format != 'epub') {
$zipFilename .= '.zip';
}
// TODO
self::setDlCache($textIds, $zipFilename, $format);
self::setDlFile($zipFilename, $this->_zipFile->file());
return self::getDlFile($zipFilename);
}
public function getDlFileForTexts($texts, $format, $binaryCallback = null)
{
$textIds = array_map(function(Text $text) {
return $text->getId();
}, $texts);
if ( ($dlCache = self::getDlCache($textIds, $format)) ) {
if ( ($dlFile = self::getDlFile($dlCache)) ) {
return $dlFile;
}
}
foreach ($texts as $text) {
$dlFile = new DownloadFile;
$filename = $dlFile->getDlFileForText($text, $format, $binaryCallback);
$this->_zipFile->addNewFileEntry(file_get_contents($filename), basename($filename), 0, false);
}
$zipFilename = sprintf('chitanka-info-%d-files-%s-%s.zip', count($texts), uniqid(), $format);
// TODO
self::setDlCache($textIds, $zipFilename, $format);
self::setDlFile($zipFilename, $this->_zipFile->file());
return self::getDlFile($zipFilename);
}
/**
*
*/
public function getDlFileForText($text, $format, $binaryCallback = null)
{
$textIds = array($text->getId());
if ( ($dlCache = self::getDlCache($textIds, $format)) ) {
if ( ($dlFile = self::getDlFile($dlCache)) ) {
return $dlFile;
}
}
$filename = $this->createWorkFileName($text);
$getMethod = sprintf('getContentAs%s', ucfirst($format));
if ( method_exists($text, $getMethod) ) {
$cacheKey = "text-".$text->getId().".$format";
$this->addContentEntry($text->$getMethod(), "$filename.$format", $cacheKey);
}
if ($binaryCallback && method_exists($this, $binaryCallback)) {
$this->$binaryCallback($text, $filename);
}
$zipFilename = "$filename.$format";
if ($format != 'epub') {
$zipFilename .= '.zip';
}
// TODO
self::setDlCache($textIds, $zipFilename, $format);
self::setDlFile($zipFilename, $this->_zipFile->file());
return self::getDlFile($zipFilename);
}
protected function addEpubEntries($work, $filename)
{
$epubFile = new EpubFile($work);
$file = $epubFile->getMimetypeFile();
$this->addContentEntry($file['content'], $file['name'], null, false);
$file = $epubFile->getContainerFile();
$this->addContentEntry($file['content'], $file['name']);
$file = $epubFile->getCssFile();
$this->addContentEntry($file['content'], $file['name']);
$this->addCoverForEpub($work, $epubFile);
$this->addBackCoverForEpub($work, $epubFile);
$file = $epubFile->getTitlePageFile();
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'pre');
$this->addAnnotationForEpub($work, $epubFile);
$this->addExtraInfoForEpub($work, $epubFile);
$this->addChaptersForEpub($work, $epubFile);
$this->addImagesForEpub($work, $epubFile);
$file = $epubFile->getCreditsFile();
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'post');
$file = $epubFile->getTocFile();
$this->addContentEntry($file['content'], $file['name']);
$file = $epubFile->getContentFile();
$this->addContentEntry($file['content'], $file['name']);
}
protected function addAnnotationForEpub($work, $epubFile)
{
if ( ($file = $epubFile->getAnnotation()) ) {
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'pre');
}
}
protected function addExtraInfoForEpub($work, $epubFile)
{
if ( ($file = $epubFile->getExtraInfo()) ) {
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'post');
}
}
protected function addChaptersForEpub(BaseWork $work, $epubFile)
{
$curObjCount = \Sfblib_SfbConverter::getObjectCount();
$chapters = $work->getEpubChunks($epubFile->getImagesDir(false));
foreach ($chapters as $i => $chapter) {
$file = $epubFile->getItemFileName($i);
$text = $chapter['text'];
if ( $i == 0 && $work->hasTitleNote() ) {
$text = $work->getTitleAsHtml($curObjCount+1) . $text;
}
$text = $epubFile->getXhtmlContent($text, $chapter['title']);
$this->addContentEntry($text, $file);
$epubFile->addItem($file);
}
}
protected function addCoverForEpub($work, $epubFile)
{
if ( ($cover = $work->getCover(400)) ) {
$file = $this->addFileEntry($cover, $epubFile->getCoverFileName());
$epubFile->addCover($file);
}
if ( ($file = $epubFile->getCoverPageFile()) ) {
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'pre');
}
}
protected function addBackCoverForEpub($work, $epubFile)
{
if ( ($cover = $work->getBackCover(400)) ) {
$file = $this->addFileEntry($cover, $epubFile->getBackCoverFileName());
$epubFile->addBackCover($file);
}
if ( ($file = $epubFile->getBackCoverPageFile()) ) {
$this->addContentEntry($file['content'], $file['name']);
$epubFile->addItem($file['name'], $file['title'], 'post');
}
}
protected function addImagesForEpub($work, $epubFile)
{
$imagesDir = $epubFile->getImagesDir();
$thumbs = array();
foreach ($work->getThumbImages() as $i => $image) {
$file = $this->addFileEntry($image, "$imagesDir/thumb/");
$epubFile->addFile("image-thumb-$i", $file);
$thumbs[] = basename($image);
}
foreach ($work->getImages() as $i => $image) {
// for now skip thumbnailed images; may change in the future
if ( ! in_array(basename($image), $thumbs) ) {
$file = $this->addFileEntry($image, "$imagesDir/");
$epubFile->addFile("image-$i", $file);
}
}
$file = $this->addFileEntry(__DIR__ . '/../Resources/public/images/banner/logo_transparent.png', "$imagesDir/chitanka-logo");
$epubFile->addFile('logo-image', $file);
}
protected function addContentEntry($content, $filename, $cacheKey = null, $compress = true)
{
$fEntry = $this->_zipFile->newFileEntry($content, $filename, 0, $compress);
if ($cacheKey) {
CacheManager::setDlCache($cacheKey, serialize($fEntry));
}
$this->_zipFile->addFileEntry($fEntry);
return $filename;
}
protected function addFileEntry($filename, $targetName = null)
{
if ($targetName) {
if ($targetName[strlen($targetName)-1] == '/') {
$targetName .= basename($filename);
} else {
$targetName .= '.' . File::getFileExtension($filename);
}
} else {
$targetName = basename($filename);
}
$fEntry = $this->_zipFile->newFileEntry(file_get_contents($filename), $targetName);
$this->_zipFile->addFileEntry($fEntry);
return $targetName;
}
static public function getDlCache($textIds, $format = '')
{
return self::getDlFileByHash( self::getHashForTextIds($textIds, $format) );
}
static public function setDlCache($textIds, $file, $format = '')
{
$db = Setup::db();
$pk = self::getHashForTextIds($textIds, $format);
$db->insert(DBT_DL_CACHE, array(
"id = $pk",
'file' => $file,
), true, false);
foreach ( (array) $textIds as $textId ) {
$db->insert(DBT_DL_CACHE_TEXT, array(
"dc_id = $pk",
'text_id' => $textId,
), true);
}
return $file;
}
static public function getDlFileByHash($hash)
{
return Setup::db()->getFields(DBT_DL_CACHE, array("id = $hash"), 'file');
}
static protected function getHashForTextIds($textIds, $format = '')
{
if (is_array($textIds)) {
$textIds = implode(',', $textIds);
}
return '0x' . substr(md5($textIds . $format), 0, 16);
}
/**
 * Returns the full path of an existing, non-empty download file, or null.
 * The file is touch()-ed so its mtime reflects the last use — presumably so a
 * cleanup job elsewhere can expire stale files (TODO confirm).
 *
 * @param string $fname Bare file name inside the download directory
 * @return string|null
 */
static public function getDlFile($fname)
{
	$file = self::getFullDlFileName($fname);
	if ( file_exists($file) && filesize($file) ) {
		touch($file);
		return $file;
	}
	return null;
}
/**
 * Writes content to a file in the download directory.
 *
 * @param string $fname Bare file name inside the download directory
 * @param string $fcontent Content to write
 * @return mixed Result of the File::myfile_put_contents() helper
 */
static public function setDlFile($fname, $fcontent)
{
	return File::myfile_put_contents(self::getFullDlFileName($fname), $fcontent);
}
/**
 * Maps a bare download file name to its path inside the download directory.
 *
 * @param string $filename
 * @return string
 */
static public function getFullDlFileName($filename)
{
	return /*BASEDIR .'/'. */self::$_dlDir .'/'. $filename;
}
/**
 * Restores a cached SFB zip entry for the given text and adds it to the
 * current zip file.
 *
 * Bug fix: the format argument '.sfb' was previously passed as a second
 * argument to unserialize() — a parameter that does not accept a string
 * (PHP 5 rejects the extra argument with a warning and returns null;
 * PHP 7+ expects an options array) — instead of to
 * CacheManager::getDlCache($textIds, $format), where it belongs per that
 * method's signature. The misplaced closing parenthesis has been moved.
 *
 * @param int $textId
 * @return string The name of the restored file entry
 */
protected function addSfbToDlFileFromCache($textId)
{
	$fEntry = unserialize( CacheManager::getDlCache($textId, '.sfb') );
	$this->_zipFile->addFileEntry($fEntry);
	return $fEntry['name'];
}
/**
 * Builds a fresh SFB entry for the text and adds it to the zip.
 * Side effect: sets $this->filename, $this->fPrefix and $this->fSuffix from
 * the main file data, which addTextFileEntry() presumably reads (TODO confirm).
 * Also adds the accompanying .fbi info file when $this->withFbi is enabled.
 *
 * @param int $textId
 * @return bool false when no main file data is available for the text
 */
protected function addSfbToDlFileFromNew($textId)
{
	$mainFileData = $this->getMainFileData($textId);
	if ( ! $mainFileData ) {
		return false;
	}
	list($this->filename, $this->fPrefix, $this->fSuffix, $fbi) = $mainFileData;
	$this->addTextFileEntry($textId, '.sfb');
	if ( $this->withFbi ) {
		$this->addMiscFileEntry($fbi, $textId, '.fbi');
	}
	return true;
}
/**
 * Adds the text's binary companions to the zip: cover images (when enabled)
 * and every regular file from the text's image directory.
 * Files whose names start with "." or "_", archives, and sub-directories
 * are skipped.
 *
 * @param int $textId
 * @param string $filename Base name used when renaming the cover files
 */
protected function addBinaryFileEntries($textId, $filename) {
	// add covers
	if ( $this->withCover ) {
		foreach (BaseWork::getCovers($textId) as $file) {
			$ename = BaseWork::renameCover(basename($file), $filename);
			$fEntry = $this->_zipFile->newFileEntry(file_get_contents($file), $ename);
			$this->_zipFile->addFileEntry($fEntry);
		}
	}
	// add images
	$dir = Legacy::getContentFilePath('img', $textId);
	if ( !is_dir($dir) ) { return; }
	if ($dh = opendir($dir)) {
		while (($file = readdir($dh)) !== false) {
			$fullname = "$dir/$file";
			// skip hidden/underscored files, archives and nested directories
			if ( $file[0] == '.' || $file[0] == '_' ||
				File::isArchive($file) || is_dir($fullname) ) { continue; }
			$fEntry = $this->_zipFile->newFileEntry(file_get_contents($fullname), $file);
			$this->_zipFile->addFileEntry($fEntry);
		}
		closedir($dh);
	}
}
/**
 * Builds a download file name for a work: transliterates the Cyrillic name,
 * sanitizes it, caps it at 150 characters and appends the format extension.
 *
 * @param BaseWork $work
 * @param string $format Extension to append, e.g. "epub" (none when empty)
 * @return string
 */
private function createWorkFileName(BaseWork $work, $format = '')
{
	$name = substr(File::cleanFileName(Char::cyr2lat($work->getNameForFile())), 0, 150);
	return $format === '' ? $name : "$name.$format";
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for Book entities; no customizations over the base. */
class BookController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Chitanka\LibBundle\Entity\Person;
/**
 * Controller for translator pages. Reuses PersonController with the person
 * repository restricted to translators and the translator show view.
 */
class TranslatorController extends PersonController
{
	// Delegates to the translator-specific show-view preparation.
	protected function prepareViewForShow(Person $person, $format)
	{
		$this->prepareViewForShowTranslator($person, $format);
	}

	// Narrows the base person repository to persons acting as translators.
	protected function getPersonRepository()
	{
		return parent::getPersonRepository()->asTranslator();
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Doctrine repository for TextHeader entities.
 * No custom queries are defined yet; the base repository behavior is inherited.
 */
class TextHeaderRepository extends EntityRepository
{
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for Question entities; no customizations over the base. */
class QuestionController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Chitanka\LibBundle\Util\File;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Legacy\Legacy;
use Sfblib_SfbToHtmlConverter as SfbToHtmlConverter;
/**
 * Base class for library works (texts, books).
 * Bundles the shared behavior: titles, author IDs, annotation and extra-info
 * files stored on disk, SFB-to-HTML conversion, and EPUB chunking by headers.
 */
abstract class BaseWork extends Entity
{
	const TITLE_NEW_LINE = "<br>\n";

	// Rating scale: numeric value => Bulgarian label (6 = best, 1 = worst)
	static public
		$ratings = array(
			6 => 'Шедьовър',
			5 => 'Много добро',
			4 => 'Добро',
			3 => 'Посредствено',
			2 => 'Лошо',
			1 => 'Отвратително',
		);
	static protected $_minRating = null;
	static protected $_maxRating = null;
	// Content sub-directories where annotation and extra-info files live
	static protected $annotationDir = 'anno';
	static protected $infoDir = 'info';
	protected $_hasTitleNote = null;

	// Document identifier used in generated files.
	public function getDocId()
	{
		return 'http://chitanka.info';
	}

	public function getType()
	{
		return 'work';
	}

	public function getTitle()
	{
		return $this->title;
	}

	public function getSubtitle()
	{
		return $this->subtitle;
	}

	/**
	 * Return title and subtitle if any.
	 * @param string $format Output format: %t1 — title, %t2 — subtitle
	 */
	public function getTitles($format = '%t1 — %t2')
	{
		if ( ($subtitle = $this->getSubtitle()) ) {
			return strtr($format, array(
				'%t1' => $this->getTitle(),
				'%t2' => $subtitle,
			));
		}
		return $this->getTitle();
	}

	// Lazily computed, sorted list of author IDs.
	private $authorIds;
	public function getAuthorIds()
	{
		if ( ! isset($this->authorIds)) {
			$this->authorIds = array();
			foreach ($this->getAuthors() as $author) {
				$this->authorIds[] = $author->getId();
			}
			sort($this->authorIds);
		}
		return $this->authorIds;
	}

	// Language accessors; subclasses are expected to override these defaults.
	public function getLang()
	{
		return '';
	}

	public function getOrigLang()
	{
		return '';
	}

	public function getCover($width = null)
	{
		return null;
	}

	public function getBackCover($width = null)
	{
		return null;
	}

	// A work counts as a translation when its language differs from the original.
	public function isTranslation()
	{
		return $this->getLang() != $this->getOrigLang();
	}

	// Truncates to 200 characters and strips characters unsafe for file names.
	public function normalizeFileName($filename)
	{
		$filename = substr($filename, 0, 200);
		$filename = File::cleanFileName($filename);
		return $filename;
	}

	abstract public function getNameForFile();

	// Plain-text export: optional BOM header plus the SFB content with markers removed.
	public function getContentAsTxt($withBom = true)
	{
		return ($withBom ? self::getBom() : '')
			. self::clearSfbMarkers( $this->getContentAsSfb() );
	}

	abstract public function getContentAsFb2();

	/**
	 * Strips SFB markup markers (block prefixes like ">\t", "A>", "{s}"…)
	 * from the content and converts line endings to Windows style (\r\n).
	 * The replacement tables are order-sensitive; do not reorder.
	 */
	static public function clearSfbMarkers($sfbContent)
	{
		$sfbContent = strtr($sfbContent, array(
			">\t" => "\t",
			">>\t" => "\t",
			">>>\t" => "\t",
			">>>>\t" => "\t",
			">>>>>\t" => "\t",
			"|\n" => "\n",
			"A>\n" => "\n", "A$\n" => "\n",
			"I>\n" => "\n", "I$\n" => "\n",
			"D>\n" => "\n", "D$\n" => "\n",
			"E>\n" => "\n", "E$\n" => "\n",
			"L>\n" => "\n", "L$\n" => "\n",
			"S>\n" => "\n", "S$\n" => "\n", "S\t" => "\t",
			"N>\n" => "\n", "N$\n" => "\n", "N\t" => "\t",
			"P>\n" => "\n", "P$\n" => "\n",
			"M$\n" => "\n",
			"C>\n" => "\n", "C$\n" => "\n",
			"F>\n" => "\n", "F$\n" => "\n", "F\t" => "\t",
			"T>\n" => "\n",
			"T$\n" => "\n",
			"#\t" => "\t",
			"|\t" => "\t",
			"!\t" => "\t",
			"@@\t" => "\t",
			"@\t" => "\t",
			'{s}' => '', '{/s}' => '',
			'{e}' => '', '{/e}' => '',
		));
		$sfbContent = strtr($sfbContent, array(
			"\n" => "\r\n",
		));
		// drop whole "M>"-prefixed lines
		$sfbContent = preg_replace('/M>\t.+/', '', $sfbContent);
		return $sfbContent;
	}

	// Deepest header level present in the work (at least 1).
	public function getMaxHeadersDepth()
	{
		$depth = 1;
		foreach ($this->getHeaders() as $header) {
			if ($depth < $header->getLevel()) {
				$depth = $header->getLevel();
			}
		}
		return $depth;
	}

	public function getHeaders()
	{
		return array();
	}

	/**
	 * Renders the header tree as nested <ul>/<li> markup.
	 * Headers sharing a file position get the same "level" id.
	 * @param bool $allowEmpty When false, an empty tree yields a single
	 *                         default item ("Основен текст")
	 */
	public function getHeadersAsNestedXml($allowEmpty = true)
	{
		$xml = '';
		$prevlev = 0;
		$lastpos = -1;
		$id = -1;
		foreach ($this->getHeaders() as $i => $header) {
			if ($lastpos != $header->getFpos()) {
				$id++;
			}
			$lastpos = $header->getFpos();
			if ($prevlev < $header->getLevel()) {
				// going deeper: open nested lists for each skipped level
				$xml .= "\n<ul>".str_repeat("<li level=$id>\n<ul>", $header->getLevel() - 1 - $prevlev);
			} else if ($prevlev > $header->getLevel()) {
				// going up: close the intermediate lists
				$xml .= '</li>'.str_repeat("\n</ul>\n</li>", $prevlev - $header->getLevel());
			} else {
				$xml .= '</li>';
			}
			$xml .= "\n<li level=$id>";
			$xml .= htmlspecialchars($header->getName());
			$prevlev = $header->getLevel();
		}
		if ($prevlev) {
			$xml .= '</li>'.str_repeat("\n</ul>\n</li>", $prevlev-1)."\n</ul>";
		} else if ( ! $allowEmpty ) {
			$xml = '<li level=0>Основен текст</li>';
		}
		return $xml;
	}

	// Reads the annotation file for a work ID; null when none exists.
	static public function loadAnnotation($id)
	{
		$file = Legacy::getContentFilePath(static::$annotationDir, $id);
		return file_exists($file) ? file_get_contents($file) : null;
	}

	// Lazily loaded annotation text.
	private $annotation;
	public function getAnnotation()
	{
		return isset($this->annotation) ? $this->annotation : $this->annotation = static::loadAnnotation($this->id);
	}

	/**
	 * Writes (or removes, when empty) the annotation file on disk and keeps
	 * the hasAnno flag in sync.
	 */
	public function setAnnotation($annotation)
	{
		$file = Legacy::getContentFilePath(static::$annotationDir, $this->id);
		if ($annotation) {
			file_put_contents($file, String::my_replace($annotation));
			$this->setHasAnno(true);
		} else {
			file_exists($file) && unlink($file);
			$this->setHasAnno(false);
		}
		$this->annotation = $annotation;
		return $this;
	}

	// Annotation wrapped in SFB annotation start/end markers.
	public function getAnnotationAsSfb()
	{
		$text = $this->getAnnotation();
		if ($text) {
			$text = \Sfblib_SfbConverter::ANNO_S . \Sfblib_SfbConverter::EOL
				. rtrim($text) . \Sfblib_SfbConverter::EOL
				. \Sfblib_SfbConverter::ANNO_E . \Sfblib_SfbConverter::EOL
				. \Sfblib_SfbConverter::EOL;
		}
		return $text;
	}

	// Annotation converted to XHTML (content plus footnotes).
	public function getAnnotationAsXhtml($imgDir = null)
	{
		$text = $this->getAnnotation();
		if ($text) {
			$converter = $this->_getSfbConverter($text, $imgDir);
			$converter->convert();
			$text = $converter->getText() . $converter->getNotes(2);
		}
		return $text;
	}

	// Reads the extra-info file for a work ID; null when none exists.
	static public function loadExtraInfo($id) {
		$file = Legacy::getContentFilePath(static::$infoDir, $id);
		return file_exists($file) ? file_get_contents($file) : null;
	}

	// Lazily loaded extra-info text.
	private $extraInfo;
	public function getExtraInfo() {
		return isset($this->extraInfo) ? $this->extraInfo : $this->extraInfo = static::loadExtraInfo($this->id);
	}

	// Writes (or removes, when empty) the extra-info file on disk.
	public function setExtraInfo($extraInfo) {
		$file = Legacy::getContentFilePath(static::$infoDir, $this->id);
		if ($extraInfo) {
			file_put_contents($file, String::my_replace($extraInfo));
		} else {
			file_exists($file) && unlink($file);
		}
		$this->extraInfo = $extraInfo;
		return $this;
	}

	// Extra info converted to XHTML (content plus footnotes).
	public function getExtraInfoAsXhtml($imgDir = null)
	{
		$text = $this->getExtraInfo();
		if ($text) {
			$converter = $this->_getSfbConverter($text, $imgDir);
			$converter->convert();
			$text = $converter->getText() . $converter->getNotes(2);
		}
		return $text;
	}

	public function getHistoryInfo()
	{
		return array();
	}

	abstract public function getEpubChunks($imgDir);

	/**
	 * Splits the work into EPUB chapters along its headers.
	 * When no headers exist, a single catch-all chapter ("Основен текст")
	 * covering the whole text is used. Headers sharing the same file position
	 * are collapsed into one chapter.
	 *
	 * @return array List of ['title' => ..., 'text' => ...] chunks
	 */
	protected function getEpubChunksFrom($input, $imgDir)
	{
		$chapters = array();
		$headers = $this->getHeaders();
		if (count($headers) == 0) {
			$header = new TextHeader;
			$header->setName('Основен текст');
			$header->setFpos(0);
			$header->setLinecnt(1000000);
			$headers = array($header);
		}
		$lastpos = -1;
		foreach ($headers as $header) {
			if ($lastpos != $header->getFpos()) {
				$lastpos = $header->getFpos();
				$converter = $this->_getSfbConverter($input, $imgDir);
				$converter->startpos = $header->getFpos();
				$converter->maxlinecnt = $header->getLinecnt();
				$converter->convert();
				$text = $converter->getText() . $converter->getNotes(2);
				$chapters[] = array('title' => $header->getName(), 'text' => $text);
			}
		}
		return $chapters;
	}

	// Creates an SFB-to-HTML converter; gamebooks get extra section-link markup.
	protected function _getSfbConverter($file, $imgDir)
	{
		$conv = new SfbToHtmlConverter($file, $imgDir);
		if ($this->isGamebook()) {
			// recognize section links
			$conv->patterns['/#(\d+)/'] = '<a href="#l-$1" class="ep" title="Към част $1">$1</a>';
		}
		return $conv;
	}

	public function hasTitleNote()
	{
		return false;
	}

	/**
	 * UTF-8 byte order mark, optionally followed by a legacy encoding banner.
	 */
	static public function getBom($withEncoding = true)
	{
		$bom = "\xEF\xBB\xBF"; // Byte order mark for some windows software
		if ($withEncoding) {
			$bom .= "\t[Kodirane UTF-8]\n\n";
		}
		return $bom;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Admin;
use Sonata\AdminBundle\Form\FormMapper;
use Sonata\AdminBundle\Datagrid\DatagridMapper;
use Sonata\AdminBundle\Datagrid\ListMapper;
use Sonata\AdminBundle\Show\ShowMapper;
use Sonata\AdminBundle\Route\RouteCollection;
use Symfony\Component\Form\FormEvents;
use Symfony\Component\Form\FormEvent;
use Chitanka\LibBundle\Util\Language;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\TextRevision;
/**
 * Sonata Admin definition for Text entities.
 * Configures the show/list/form/filter views, disables creation (texts are
 * imported elsewhere), and keeps owning-side relations in sync on update.
 */
class TextAdmin extends Admin
{
	protected $baseRoutePattern = 'text';
	protected $baseRouteName = 'admin_text';
	protected $translationDomain = 'admin';
	public $extraActions = 'LibBundle:TextAdmin:extra_actions.html.twig';

	// Texts cannot be created through this admin.
	protected function configureRoutes(RouteCollection $collection) {
		$collection->remove('create');
	}

	// Fields shown on the detail (show) page.
	protected function configureShowField(ShowMapper $showMapper)
	{
		$showMapper
			->add('slug')
			->add('title')
			->add('authors')
			->add('translators')
			->add('books')
			->add('subtitle')
			->add('lang')
			->add('trans_year')
			->add('trans_year2')
			->add('orig_title')
			->add('orig_subtitle')
			->add('orig_lang')
			->add('year')
			->add('year2')
			->add('orig_license')
			->add('trans_license')
			->add('type')
			->add('series')
			->add('sernr')
			->add('sernr2')
			->add('headlevel')
			->add('source')
			->add('removed_notice')
			->add('comment_count')
//			->add('dl_count')
//			->add('read_count')
			->add('rating')
			->add('votes')
			->add('is_compilation')
			->add('labels')
			->add('headers')
			//->add('readers')
			->add('userContribs')
			->add('revisions')
			->add('links', null, array('label' => 'Site Links'))
			->add('created_at')
		;
	}

	// Columns of the list view.
	protected function configureListFields(ListMapper $listMapper)
	{
		$listMapper
			->addIdentifier('title')
			->add('slug')
			->add('_action', 'actions', array(
				'actions' => array(
					'view' => array(),
					'edit' => array(),
					'delete' => array(),
				)
			))
		;
	}

	// Edit form: general attributes, extra attributes, content, contributions.
	protected function configureFormFields(FormMapper $formMapper)
	{
		$formMapper
			->with('General attributes')
				->add('slug')
				->add('title')
				->add('lang', 'choice', array('choices' => Language::getLangs()))
				->add('orig_lang', 'choice', array('choices' => Language::getLangs()))
				->add('type', 'choice', array('choices' => array('' => '') + Legacy::workTypes()))
				->add('textAuthors', 'sonata_type_collection', array(
					'by_reference' => false,
					'required' => false,
				), array(
					'edit' => 'inline',
					'inline' => 'table',
					'sortable' => 'pos',
				))
				->add('textTranslators', 'sonata_type_collection', array(
					'by_reference' => false,
					'required' => false,
				), array(
					'edit' => 'inline',
					'inline' => 'table',
					'sortable' => 'pos',
				))
			->end()
			->with('Extra attributes')
				->add('subtitle', null, array('required' => false))
				->add('orig_title', null, array('required' => false))
				->add('orig_subtitle', null, array('required' => false))
				->add('year', null, array('required' => false))
				->add('year2', null, array('required' => false))
				->add('trans_year', null, array('required' => false))
				->add('trans_year2', null, array('required' => false))
				->add('orig_license', null, array('required' => false))
				->add('trans_license', null, array('required' => false))
				->add('series', 'sonata_type_model_list', array('required' => false))
				->add('sernr', null, array('required' => false))
				->add('sernr2', null, array('required' => false))
				->add('note')
				->add('links', 'sonata_type_collection', array(
					'by_reference' => false,
					'required' => false,
					'label' => 'Site Links',
				), array(
					'edit' => 'inline',
					'inline' => 'table',
					'sortable' => 'site_id'
				))
			->end()
			->with('Textual content')
				->add('annotation', 'textarea', array(
					'required' => false,
					'trim' => false,
					'attr' => array(
						'class' => 'span12',
					),
				))
				->add('extra_info', 'textarea', array(
					'required' => false,
					'trim' => false,
					'attr' => array(
						'class' => 'span12',
					),
				))
				->add('content_file', 'file', array('required' => false))
				->add('headlevel', null, array('required' => false))
				->add('revision_comment', 'text', array('required' => false))
				->add('source', null, array('required' => false))
				->add('removed_notice')
			->end()
			->with('Contributions')
				->add('userContribs', 'sonata_type_collection', array(
					'by_reference' => false,
					'required' => false,
				), array(
					'edit' => 'inline',
					//'inline' => 'table',
					'sortable' => 'date',
				))
			->end()
			->setHelps(array(
				'sernr2' => $this->trans('help.text.sernr2'),
			))
		;
		$builder = $formMapper->getFormBuilder();
		// normalize \r\n to \n in all submitted string fields (see Admin::fixNewLines)
		$builder->addEventListener(FormEvents::PRE_BIND, array($this, 'fixNewLines'));
		// show a link to the currently stored content file next to the upload field
		$builder->addEventListener(FormEvents::PRE_SET_DATA, function(FormEvent $event) use ($formMapper) {
			$text = $event->getData();
			if ($text instanceof Text) {
				$formMapper->setHelps(array(
					'content_file' => sprintf('(<a href="/%s">настоящ файл</a>)', Legacy::getContentFilePath('text', $text->getId())),
				));
			}
		});
	}

	// Filters available in the list view.
	protected function configureDatagridFilters(DatagridMapper $datagrid)
	{
		$datagrid
			->add('title')
			->add('subtitle')
			->add('lang')
			->add('trans_year')
			->add('trans_year2')
			->add('orig_title')
			->add('orig_subtitle')
			->add('orig_lang')
			->add('year')
			->add('year2')
			->add('orig_license')
			->add('trans_license')
			->add('type')
			->add('removed_notice')
		;
	}

	/**
	 * Before saving: set the owning side of every collection relation back to
	 * this text, and record a new revision when a revision comment was given.
	 */
	public function preUpdate($text) {
		foreach ($text->getLinks() as $link) {
			$link->setText($text);
		}
		foreach ($text->getTextAuthors() as $textAuthor) {
			if ($textAuthor->getPerson()) {
				$textAuthor->setText($text);
			}
		}
		foreach ($text->getTextTranslators() as $textTranslator) {
			if ($textTranslator->getPerson()) {
				$textTranslator->setText($text);
			}
		}
		foreach ($text->getUserContribs() as $userContrib) {
			if (!$userContrib->getText()) {
				$userContrib->setText($text);
			}
		}
		if ($text->getRevisionComment()) {
			$revision = new TextRevision;
			$revision->setComment($text->getRevisionComment());
			$revision->setText($text);
			$revision->setDate(new \DateTime);
			$revision->setFirst(false);
			$text->addRevision($revision);
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
class SiteNoticeController extends Controller {

	/**
	 * Renders the site-notice stripe.
	 * A random global notice is injected roughly once per six requests
	 * (rand(0, 5) === 0); otherwise the template renders with the default view.
	 */
	public function stripeAction() {
		$showNotice = rand(0, 5) === 0;
		if ($showNotice) {
			$this->view = array(
				'siteNotice' => $this->getSiteNoticeRepository()->getGlobalRandom(),
			);
		}
		return $this->render('LibBundle:SiteNotice:stripe.html.twig', $this->view);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Entity\Site;
use Chitanka\LibBundle\Legacy\Legacy;
/**
 * Console command that scrapes the project wiki's "Links" page and updates the
 * external-site records (name, description) in the database.
 */
class UpdateSiteLinksDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:update-sites')
			->setDescription('Update links to external sites')
			->setHelp(<<<EOT
The <info>db:update-sites</info> command reads data from the wiki and updates the links to external sites.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->updateLinks($this->fetchWikiContent($output), $output, $em);
		$output->writeln('Done.');
	}

	/**
	 * Upserts one Site record per link found in the wiki HTML.
	 * Does nothing when no link data could be extracted.
	 */
	protected function updateLinks($wikiContent, OutputInterface $output, EntityManager $em)
	{
		$linksData = $this->extractLinkData($wikiContent);
		if (empty($linksData)) {
			return;
		}
		$output->writeln('Updating site links...');
		$repo = $em->getRepository('LibBundle:Site');
		foreach ($linksData as $linkData) {
			// $linkData: [1] => url, [2] => name, [3] => trailing description html
			$site = $repo->findOneByUrlOrCreate($linkData[1]);
			$site->setName($linkData[2]);
			$site->setDescription(strip_tags($linkData[3]));
			$em->persist($site);
		}
		$em->flush();
	}

	// Downloads the rendered wiki page content (uncached).
	protected function fetchWikiContent(OutputInterface $output)
	{
		$output->writeln('Fetching wiki content...');
		return Legacy::getMwContent('http://wiki.chitanka.info/Links', $this->getContainer()->get('buzz'), 0);
	}

	/**
	 * Parses "external text" anchors out of the wiki HTML.
	 * @return array|false PREG_SET_ORDER matches, or false when none found
	 */
	protected function extractLinkData($wikiContent)
	{
		if (preg_match_all('|class="external text" href="([^"]+)">([^<]+)</a>(.*)|', $wikiContent, $matches, PREG_SET_ORDER)) {
			return $matches;
		}
		return false;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\User;
use Chitanka\LibBundle\Entity\Bookmark;
use Chitanka\LibBundle\Entity\BookmarkRepository;
use Chitanka\LibBundle\Entity\BookmarkFolderRepository;
/**
 * Toggles text bookmarks for a given user.
 */
class TextBookmarkService {

	// Doctrine entity manager used for all persistence operations
	private $em;
	// The user whose bookmarks are managed
	private $user;

	public function __construct(EntityManager $em, User $user) {
		$this->em = $em;
		$this->user = $user;
	}

	/**
	 * Toggles a bookmark for the text in the given folder:
	 * if the bookmark already exists it is removed and null is returned;
	 * otherwise a new bookmark (and the folder, if new) is persisted.
	 *
	 * @param Text $text
	 * @param string $folder Folder slug (default "favorities" — legacy spelling,
	 *                       kept as-is because it is a stored slug)
	 * @return Bookmark|null The new bookmark, or null when one was removed
	 */
	public function addBookmark(Text $text, $folder = 'favorities') {
		$folder = $this->getBookmarkFolderRepository()->getOrCreateForUser($this->user, $folder);
		$bookmark = $this->getBookmarkRepository()->findOneBy(array(
			'folder' => $folder->getId(),
			'text' => $text->getId(),
			'user' => $this->user->getId(),
		));
		if ($bookmark) { // an existing bookmark, remove it
			$this->em->remove($bookmark);
			$this->em->flush();
			return null;
		}
		$newBookmark = new Bookmark(array(
			'folder' => $folder,
			'text' => $text,
			'user' => $this->user,
		));
		$this->user->addBookmark($newBookmark);
		$this->em->persist($folder);
		$this->em->persist($newBookmark);
		$this->em->persist($this->user);
		$this->em->flush();
		return $newBookmark;
	}

	/** @return BookmarkFolderRepository */
	protected function getBookmarkFolderRepository() {
		return $this->em->getRepository('LibBundle:BookmarkFolder');
	}

	/** @return BookmarkRepository */
	protected function getBookmarkRepository() {
		return $this->em->getRepository('LibBundle:Bookmark');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Admin;
use Sonata\AdminBundle\Admin\Admin as BaseAdmin;
use Symfony\Component\Form\FormEvent;
/**
 * Project base class for Sonata admins: convenience accessors for the entity
 * manager/repository, plus a shared form listener normalizing line endings.
 */
abstract class Admin extends BaseAdmin
{
	/** Returns the Doctrine entity manager handling this admin's entity class. */
	public function getEntityManager()
	{
		return $this->modelManager->getEntityManager($this->getClass());
	}

	/** Returns the Doctrine repository for this admin's entity class. */
	public function getRepository()
	{
		return $this->getEntityManager()->getRepository($this->getClass());
	}

	/**
	 * Form event listener: converts Windows line endings (\r\n) to \n in every
	 * string field of the submitted form data.
	 */
	public function fixNewLines(FormEvent $event)
	{
		$normalized = $event->getData();
		foreach ($normalized as $fieldName => $fieldValue) {
			if ( ! is_string($fieldValue)) {
				continue;
			}
			$normalized[$fieldName] = str_replace("\r\n", "\n", $fieldValue);
		}
		$event->setData($normalized);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for License entities; no customizations over the base. */
class LicenseController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for Site entities; no customizations over the base. */
class SiteController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Util;
/**
 * Small array helpers.
 */
class Ary
{
	/**
	 * Normalizes a key against an associative array.
	 *
	 * @param mixed $key Key to check
	 * @param array $data Associative array
	 * @param mixed $defKey Fallback key
	 * @return mixed $key if it exists as a key in $data, otherwise $defKey
	 */
	static public function normKey($key, $data, $defKey = '') {
		if (array_key_exists($key, $data)) {
			return $key;
		}
		return $defKey;
	}

	/**
	 * Returns $arr[$key] when the key exists (even if its value is null),
	 * otherwise $defVal.
	 */
	static public function arrVal($arr, $key, $defVal = null) {
		if (array_key_exists($key, $arr)) {
			return $arr[$key];
		}
		return $defVal;
	}

	/**
	 * String-concatenation cartesian product: every element of $arr1 joined
	 * with every element of $arr2, in row-major order.
	 */
	static public function cartesianProduct($arr1, $arr2) {
		$combined = array();
		foreach ($arr1 as $left) {
			foreach ($arr2 as $right) {
				$combined[] = $left . $right;
			}
		}
		return $combined;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Util\Ary;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Validator\Constraints\NotSpamValidator;
use Chitanka\LibBundle\Validator\Constraints\NotSpam;
/**
 * Legacy page for suggesting missing metadata about a text (original title,
 * years, translator, annotation). Builds a form and mails the submission to
 * the work e-mail address.
 */
class SuggestDataPage extends MailPage {

	protected
		// form field names
		$FF_SUBACTION = 'object',
		$FF_TEXT_ID = 'id',
		$FF_INFO = 'info', $FF_NAME = 'name', $FF_EMAIL = 'email',
		$action = 'suggestData',
		// subaction key => page title; a leading "+" is replaced by a prefix below
		$subactions = array(
			'orig_title' => '+оригинално заглавие',
			'orig_year' => '+година на написване или първа публикация',
			'translator' => '+преводач',
			'trans_year' => '+година на превод',
			'annotation' => 'Предложение за анотация'
		),
		$defSubaction = 'annotation',
		// the Text entity the suggestion refers to
		$work = null;

	public function __construct($fields) {
		parent::__construct($fields);
		// fall back to the default subaction for unknown keys
		$this->subaction = Ary::normKey(
			$this->request->value($this->FF_SUBACTION, $this->defSubaction, 1),
			$this->subactions, $this->defSubaction);
		$this->title = strtr($this->subactions[$this->subaction],
			array('+' => 'Информация за '));
		$this->textId = (int) $this->request->value($this->FF_TEXT_ID, 0, 2);
		$this->info = $this->request->value($this->FF_INFO);
		// default sender data from the logged-in user
		$this->name = $this->request->value($this->FF_NAME, $this->user->getUsername());
		$this->email = $this->request->value($this->FF_EMAIL, $this->user->getEmail());
		$this->initData();
	}

	/**
	 * Validates the submission (non-empty, spam check for anonymous users)
	 * and hands off to the parent mail delivery.
	 */
	protected function processSubmission() {
		if ( empty($this->info) ) {
			$this->addMessage('Не сте въвели никаква информация.', true);
			return $this->buildContent();
		}
		$notSpamValidator = new NotSpamValidator;
		if ( $this->user->isAnonymous() && !$notSpamValidator->validate($this->info, new NotSpam) ) {
			$this->addMessage('Съобщението ви е определено като спам. Вероятно съдържа прекалено много уеб адреси.', true);
			return $this->buildContent();
		}
		$this->mailToEmail = Setup::setting('work_email');
		$this->mailFromName = $this->name;
		$this->mailFromEmail = $this->email;
		$this->mailSubject = $this->title;
		$this->mailSuccessMessage = 'Съобщението ви беше изпратено. Благодаря ви!';
		$this->mailFailureMessage = 'Изглежда е станал някакъв фал при изпращането на съобщението ви. Ако желаете, пробвайте още веднъж.';
		return parent::processSubmission();
	}

	// Back-link shown after a successful submission.
	protected function makeSubmissionReturn() {
		return '<p>Обратно към „'.
			$this->makeSimpleTextLink($this->work->getTitle(), $this->textId, 1)
			.'“</p>';
	}

	// Renders the suggestion form; empty string when the text does not exist.
	protected function makeForm() {
		if ( empty($this->work) ) {
			return '';
		}
		$intro = $this->makeIntro();
		$info = $this->out->textarea($this->FF_INFO, '', $this->info, 15, 80);
		$name = $this->out->textField($this->FF_NAME, '', $this->name, 50);
		$email = $this->out->textField($this->FF_EMAIL, '', $this->email, 50);
		$submit = $this->out->submitButton('Пращане');
		return <<<EOS
$intro
<p>Посочването на име и електронна поща не е задължително.</p>
<form action="" method="post">
<fieldset style="margin-top:1em; width:30em">
<table summary="table for the layout"><tr>
<td class="fieldname-left"><label for="$this->FF_NAME">Име:</label></td>
<td>$name</td>
</tr><tr>
<td class="fieldname-left"><label for="$this->FF_EMAIL">Е-поща:</label></td>
<td>$email</td>
</tr></table>
<label for="$this->FF_INFO">Информация:</label><br />
$info<br />
$submit
</fieldset>
</form>
EOS;
	}

	// Introductory paragraph tailored to the chosen subaction.
	protected function makeIntro() {
		$ta = '„'. $this->makeSimpleTextLink($this->work->getTitle(), $this->textId, 1) .'“'.
			$this->makeFromAuthorSuffix($this->work);
		switch ($this->subaction) {
			case 'orig_title':
				return "<p>Ако знаете какво е оригиналното заглавие на $ta, можете да го съобщите чрез следния формуляр, за да бъде въведено в базата от данни на библиотеката. Полезна е и всякаква друга допълнителна информация за произведението.</p>";
			case 'translator':
				return "<p>Ако знаете кой е превел $ta, можете да го съобщите чрез следния формуляр.</p>";
			case 'annotation':
				$commentUrl = $this->controller->generateUrl('text_comments', array('id' => $this->textId));
				return <<<EOS
<p>Чрез следния формуляр можете да предложите анотация на $ta. Ако просто искате да оставите коментар към произведението, ползвайте <a href="$commentUrl">страницата за читателски мнения</a>.</p>
<p><strong>Ако сте копирали анотацията, задължително посочете точния източник!</strong></p>
EOS;
			case 'orig_year':
				return "<p>Ако имате информация за годината на написване или първа публикация на $ta, можете да я съобщите чрез следния формуляр.</p>";
			case 'trans_year':
				return "<p>Ако имате информация за годината на превод на $ta, можете да я съобщите чрез следния формуляр.</p>";
		}
	}

	// Body of the e-mail sent to the site staff.
	protected function makeMailMessage() {
		$title = $this->work->getTitle();
		return <<<EOS
Произведение: „{$title}“
http://chitanka.info/admin/text/$this->textId/edit

$this->info
EOS;
	}

	// Loads the referenced text; reports an error when it does not exist.
	protected function initData() {
		$this->work = $this->controller->getRepository('Text')->find($this->textId);
		if ( empty($this->work) ) {
			$this->addMessage("Не съществува текст с номер <strong>$this->textId</strong>.", true);
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for BookSite entities; no customizations over the base. */
class BookSiteController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Symfony\Component\Validator\Constraints as Assert;
use Chitanka\LibBundle\Validator\Constraints as MyAssert;
/**
 * Feedback form model: validated user input plus mail delivery via Swift Mailer.
 * The Assert/MyAssert annotations below are read by the Symfony validator and
 * are part of the behavior — do not edit them casually.
 */
class Feedback
{
	// Referring page URL, if any (not validated)
	public $referer;
	/**
	 * @Assert\Length(min=3)
	 */
	public $name;
	/**
	 * @Assert\Email()
	 */
	public $email;
	/** */
	public $subject = 'Обратна връзка от Моята библиотека';
	/**
	 * @Assert\NotBlank()
	 * @Assert\Length(min=80)
	 * @MyAssert\NotSpam()
	 */
	public $comment;

	// Mailer and target address are injected at construction time
	private $mailer;
	private $recipient;

	public function __construct(\Swift_Mailer $mailer, $recipient)
	{
		$this->mailer = $mailer;
		$this->recipient = $recipient;
	}

	/**
	 * Sends the feedback as an e-mail to the configured recipient.
	 * Falls back to placeholder sender data when name/e-mail were left empty,
	 * and sets a Reply-To header so the recipient can answer directly.
	 */
	public function process()
	{
		$fromEmail = empty($this->email) ? '<EMAIL>' : $this->email;
		$fromName = empty($this->name) ? 'Анонимен' : $this->name;
		$sender = array($fromEmail => $fromName);
		$message = \Swift_Message::newInstance($this->subject)
			->setFrom($sender)
			->setTo($this->recipient)
			->setBody($this->comment);
		$headers = $message->getHeaders();
		$headers->addMailboxHeader('Reply-To', $sender);
		$headers->addTextHeader('X-Mailer', 'Chitanka');
		$this->mailer->send($message);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
/**
 * Recursively copies a directory tree into an existing destination directory.
 */
class DirectoryCopier
{
	/**
	 * Copies everything under $sourceDir into $destDir, recreating the
	 * sub-directory structure. SELF_FIRST iteration guarantees a directory is
	 * visited (and created) before the files it contains. $destDir itself must
	 * already exist; files at the destination are overwritten.
	 *
	 * @param string $sourceDir Existing source directory
	 * @param string $destDir Existing destination directory
	 */
	public function copy($sourceDir, $destDir)
	{
		$walker = new \RecursiveIteratorIterator(
			new \RecursiveDirectoryIterator($sourceDir, \RecursiveDirectoryIterator::SKIP_DOTS),
			\RecursiveIteratorIterator::SELF_FIRST);
		foreach ($walker as $node) {
			$target = $destDir . DIRECTORY_SEPARATOR . $walker->getSubPathName();
			if ( ! $node->isDir()) {
				copy($node, $target);
				continue;
			}
			if ( ! file_exists($target)) {
				mkdir($target);
			}
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for Label entities; no customizations over the base. */
class LabelController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Entity\User;
/**
 * Legacy page that lets a registered user send an e-mail to another user.
 * Requires the sender to be logged in with a valid e-mail address, and the
 * recipient to have opted in to receiving mail.
 */
class EmailUserPage extends MailPage {

	protected
		$action = 'emailUser';

	public function __construct($fields) {
		parent::__construct($fields);
		$this->title = 'Пращане на писмо на потребител';
		$this->username = $this->request->value('username', '', 1);
		$this->mailSubject = $this->request->value('subject', 'Писмо чрез {SITENAME}');
		$this->mailMessage = $this->request->value('message', '');
		$this->mailSuccessMessage = 'Писмото беше изпратено.';
	}

	/**
	 * Validates sender and input; on input errors re-renders the form,
	 * on success fills in the mail fields and delegates delivery to the parent.
	 */
	protected function processSubmission() {
		$err = $this->validateData();
		if ( !empty($err) ) {
			$this->addMessage($err, true);
			return '';
		}
		$err = $this->validateInput();
		if ( !empty($err) ) {
			$this->addMessage($err, true);
			return $this->makeForm();
		}
		// prefer the recipient's real name when available
		$this->mailToName = $this->localUser->getRealname() == ''
			? $this->localUser->getUsername()
			: $this->localUser->getRealname();
		$this->mailToEmail = $this->localUser->getEmail();
		$this->mailFromName = $this->user->getUsername();
		$this->mailFromEmail = $this->user->getEmail();
		return parent::processSubmission();
	}

	// Renders the form when the sender passes the precondition checks.
	protected function buildContent() {
		$err = $this->validateData();
		if ( ! empty($err) ) {
			$this->addMessage($err, true);
			return '';
		}
		return $this->makeForm();
	}

	/**
	 * Precondition checks on the sender: must be registered and must have an
	 * e-mail address. Returns an error message, or an empty string when OK.
	 */
	protected function validateData() {
		if ( $this->user->isAnonymous() ) {
			return 'Необходимо е да се регистрирате и да посочите валидна електронна поща, за да можете да пращате писма на други потребители.';
		}
		if ($this->user->getEmail() == '') {
			$settingslink = $this->controller->generateUrl('user_edit', array('username' => $this->user->getUsername()));
			return "Необходимо е да посочите валидна електронна поща в <a href=\"$settingslink\">настройките си</a>, за да можете да пращате писма на други потребители.";
		}
		return '';
	}

	/**
	 * Validates the submitted recipient/subject/message.
	 * Side effect: loads the recipient into $this->localUser.
	 * Returns an error message, or an empty string when OK.
	 */
	protected function validateInput() {
		if ( empty($this->username) ) {
			return 'Не е избран потребител.';
		}
		$this->localUser = $this->controller->getRepository('User')->loadUserByUsername($this->username);
		if ( ! $this->localUser) {
			return "Не съществува потребител с име <strong>$this->username</strong>.";
		}
		if ($this->localUser->getAllowemail() == 0) {
			return "<strong>$this->username</strong> не желае да получава писма чрез {SITENAME}.";
		}
		if ( empty($this->mailSubject) ) {
			return 'Въведете тема на писмото!';
		}
		if ( empty($this->mailMessage) ) {
			return 'Въведете текст на писмото!';
		}
		return '';
	}

	// Renders the compose form.
	protected function makeForm() {
		$ownsettingslink = $this->controller->generateUrl('user_edit', array('username' => $this->user->getUsername()));
		$fromuserlink = $this->makeUserLink($this->user->getUsername());
		$username = $this->out->textField('username', '', $this->username, 30, 30);
		$subject = $this->out->textField('subject', '', $this->mailSubject, 60, 200);
		$message = $this->out->textarea('message', '', $this->mailMessage, 20, 80);
		$submit = $this->out->submitButton('Изпращане на писмото');
		return <<<EOS
<p>Чрез долния формуляр можете да пратите писмо на потребител по електронната поща. Адресът, записан в <a href="$ownsettingslink">настройките ви</a>, ще се появи в полето „От“ на изпратеното писмо, така че получателят ще е в състояние да ви отговори.</p>
<form action="" method="post">
<fieldset>
<legend>Писмо</legend>
<table border="0"><tr>
	<td class="fieldname-left">От:</td>
	<td>$fromuserlink</td>
</tr><tr>
	<td class="fieldname-left"><label for="username">До:</label></td>
	<td>$username</td>
</tr><tr>
	<td class="fieldname-left"><label for="subject">Относно:</label></td>
	<td>$subject</td>
</tr></table>
<div><label for="message">Съобщение:</label><br />
$message</div>
<p>$submit</p>
</fieldset>
</form>
EOS;
	}

	// Mail body: the user's message plus a signature explaining its origin.
	protected function makeMailMessage() {
		return <<<EOS
$this->mailMessage

----
Това писмо е изпратено от $this->mailFromName <$this->mailFromEmail> чрез $this->sitename (http://chitanka.info).
EOS;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Doctrine repository for BookmarkFolder entities.
 */
class BookmarkFolderRepository extends EntityRepository
{
	/**
	 * Finds the user's bookmark folder by slug, or builds a fresh one with the
	 * given slug and name (name defaults to the slug).
	 * NOTE(review): a newly built folder is NOT persisted here; the caller is
	 * expected to persist it.
	 *
	 * @return BookmarkFolder
	 */
	public function getOrCreateForUser($user, $folderSlug, $folderName = '')
	{
		$folder = $this->findOneBy(array('slug' => $folderSlug, 'user' => $user->getId()));
		if ( ! $folder) {
			$folder = new BookmarkFolder;
			$folder->setSlug($folderSlug);
			$folder->setName($folderName ? $folderName : $folderSlug);
			$folder->setUser($user);
		}
		return $folder;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\String;
/**
 * Legacy (X)HTML generation helper: form widgets, links, images, lists,
 * tables and URL/query-string manipulation. Every method returns markup
 * as a string; nothing is echoed directly.
 */
class OutputMaker {
	// Character encodings used by slugify()'s iconv() transliteration.
	public
		$inencoding = 'utf-8',
		$outencoding = 'utf-8';
	// Query-string building configuration; see addUrlQuery()/getArgSeparator().
	protected
		$defArgSeparator = '&',
		$argSeparator = '&',
		$queryStart = '?';

	/**
	 * Renders a single-line text <input>.
	 * The array union operator (+) keeps the explicitly built values;
	 * $attrs can only add keys that are not already set.
	 */
	public function textField($name, $id = '', $value = '', $size = 30,
		$maxlength = 255, $tabindex = null, $title = '', $attrs = array()) {
		Legacy::fillOnEmpty($id, $name); // default the id to the field name
		$attrs = array(
			'type' => 'text', 'name' => $name, 'id' => $id,
			'size' => $size, 'maxlength' => $maxlength,
			'value' => $value, 'title' => $title, 'tabindex' => $tabindex
		) + $attrs;
		return $this->xmlElement('input', null, $attrs);
	}

	/** Renders a <textarea>; the value is HTML-entity-encoded. */
	public function textarea($name, $id = '', $value = '', $rows = 5, $cols = 80,
		$tabindex = null, $attrs = array()) {
		Legacy::fillOnEmpty($id, $name);
		$attrs = array(
			'name' => $name, 'id' => $id,
			'cols' => $cols, 'rows' => $rows, 'tabindex' => $tabindex
		) + $attrs;
		return $this->xmlElement('textarea', String::myhtmlentities($value), $attrs);
	}

	/** Renders a checkbox <input>, optionally followed by a <label>. */
	public function checkbox($name, $id = '', $checked = false, $label = '',
		$value = null, $tabindex = null, $attrs = array()) {
		Legacy::fillOnEmpty($id, $name);
		$attrs = array(
			'type' => 'checkbox', 'name' => $name, 'id' => $id,
			'value' => $value, 'tabindex' => $tabindex
		) + $attrs;
		if ($checked) { $attrs['checked'] = 'checked'; }
		if ( !empty($label) ) {
			$label = $this->label($label, $id);
		}
		return $this->xmlElement('input', null, $attrs) . $label;
	}

	/** Renders a hidden <input>. */
	public function hiddenField($name, $value = '') {
		$attrs = array('type' => 'hidden', 'name' => $name, 'value' => $value);
		return $this->xmlElement('input', null, $attrs);
	}

	/** Renders a password <input>. */
	public function passField($name, $id = '', $value = '', $size = 30,
		$maxlength = 255, $tabindex = null, $attrs = array()) {
		Legacy::fillOnEmpty($id, $name);
		$attrs = array(
			'type' => 'password', 'name' => $name, 'id' => $id,
			'size' => $size, 'maxlength' => $maxlength, 'value' => $value,
			'tabindex' => $tabindex
		) + $attrs;
		return $this->xmlElement('input', null, $attrs);
	}

	/** Renders a file-upload <input>. */
	public function fileField($name, $id = '', $tabindex = null,
		$title = '', $attrs = array()) {
		Legacy::fillOnEmpty($id, $name);
		$attrs = array(
			'type' => 'file', 'name' => $name, 'id' => $id,
			'title' => $title, 'tabindex' => $tabindex
		) + $attrs;
		return $this->xmlElement('input', null, $attrs);
	}

	/**
	 * Renders the hidden MAX_FILE_SIZE field that accompanies file uploads,
	 * using the PHP upload_max_filesize ini setting as the limit.
	 */
	public function makeMaxFileSizeField() {
		return $this->hiddenField('MAX_FILE_SIZE', Legacy::ini_bytes( ini_get('upload_max_filesize') ));
	}

	/**
	 * Renders a submit button.
	 * $putname: a string is used as the "name" attribute directly;
	 * any other truthy value yields the default name "submitButton".
	 */
	public function submitButton($value, $title = '', $tabindex = null,
		$putname = true, $attrs = array()) {
		$attrs = array(
			'type' => 'submit', 'value' => $value, 'title' => $title,
			'tabindex' => $tabindex
		) + $attrs;
		if ( is_string($putname) ) {
			$attrs['name'] = $putname;
		} else if ($putname) {
			$attrs['name'] = 'submitButton';
		}
		return $this->xmlElement('input', null, $attrs);
	}

	/**
	 * Renders a <select> box.
	 * $opts values may be objects (id/name/title), arrays (value, title)
	 * or plain strings; $selId may be a scalar or an array (multi-select).
	 */
	public function selectBox($name, $id = '', $opts = array(), $selId = 0,
		$tabindex = null, $attrs = array()) {
		$o = '';
		if ( ! is_array( $selId ) ) {
			$selId = (array) $selId; // treat it as a multiple-select box
		}
		foreach ($opts as $key => $opt) {
			if ( is_object($opt) ) {
				$key = $opt->id;
				$val = $opt->name;
				$title = isset($opt->title) ? $opt->title : '';
			} else if ( is_array($opt) ) {
				list($val, $title) = $opt;
			} else {
				$val = $opt;
				$title = '';
			}
			$oattrs = array('value' => $key, 'title' => $title);
			if ( in_array( $key, $selId) ) $oattrs['selected'] = 'selected';
			$o .= "\n\t". $this->xmlElement('option', $val, $oattrs);
		}
		Legacy::fillOnEmpty($id, $name);
		$attrs = array(
			'name' => $name, 'id' => $id, 'tabindex' => $tabindex
		) + $attrs;
		return $this->xmlElement('select', $o, $attrs);
	}

	/**
	 * Renders an <a> element with HTML-escaped link text.
	 * An empty text falls back to the URL itself.
	 */
	public function link(
		$url, $text = '', $title = '', $attrs = array(), $args = array())
	{
		if ($text === '') $text = $url;
		return $this->link_raw($url, $this->escape($text), $title, $attrs, $args);
	}

	/**
	 * Renders an <a> element without escaping the link text.
	 * $args are appended to the URL as query parameters.
	 */
	public function link_raw($url, $text, $title = '', $attrs = array(),
		$args = array()) {
		$q = array();
		foreach ($args as $field => $value) {
			$q[] = $field . Request::PARAM_SEPARATOR . $value;
		}
		if ( !empty($q) ) {
			$url .= implode($this->argSeparator, $q);
		}
		$attrs = array( 'href' => $url ) + $attrs;
		if ( ! empty( $title ) ) $attrs['title'] = $title;
		return $this->xmlElement('a', $text, $attrs);
	}

	/** Renders a single <li> element, prefixed with a newline and a tab. */
	public function listItem($item, $attrs = array())
	{
		return "\n\t" . $this->xmlElement('li', $item, $attrs);
	}

	/** Renders an <img> element; the title defaults to the alt text. */
	public function image($url, $alt, $title = null, $attrs = array()) {
		Legacy::fillOnNull($title, $alt);
		$attrs = array(
			'src' => $url, 'alt' => $alt, 'title' => $title
		) + $attrs;
		return $this->xmlElement('img', null, $attrs);
	}

	/** Renders a <label> element bound to the control with id $for. */
	public function label($text, $for, $title = '', $attrs = array()) {
		$attrs = array(
			'for' => $for, 'title' => $title
		) + $attrs;
		return $this->xmlElement('label', $text, $attrs);
	}

	/**
	 * Renders a <ul> list. Each item is either a string or a pair of
	 * (content, attribute array); empty items are skipped, and an empty
	 * result yields an empty string instead of an empty <ul>.
	 */
	public function ulist($items, $attrs = array())
	{
		$oitems = '';
		foreach ($items as $item) {
			if ( empty( $item ) ) {
				continue;
			}
			$lattrs = array();
			if ( is_array($item) ) {
				// NOTE(review): string-argument assert() is deprecated since
				// PHP 7.2 and removed in PHP 8 — needs migration eventually.
				assert( 'count($item) >= 2' );
				list($item, $lattrs) = $item;
			}
			$oitems .= $this->listItem($item, $lattrs);
		}
		if ( empty($oitems) ) {
			return '';
		}
		return $this->xmlElement('ul', $oitems, $attrs);
	}

	/**
	 * Renders an arbitrary XML element.
	 * A null $content produces a self-closing tag.
	 */
	public function xmlElement($name, $content = '', $attrs = array(), $doEscape = true)
	{
		$end = is_null($content) ? ' />' : ">$content</$name>";
		return '<'.$name . $this->makeAttribs($attrs, $doEscape) . $end;
	}

	/** Like xmlElement(), but returns an empty string for empty content. */
	public function xmlElementOrNone($name, $content, $attrs = array(), $doEscape = true)
	{
		if ( empty($content) ) {
			return '';
		}
		return $this->xmlElement($name, $content, $attrs, $doEscape);
	}

	/** Serializes an associative array into an attribute string. */
	public function makeAttribs($attrs, $doEscape = true) {
		$o = '';
		foreach ($attrs as $attr => $value) {
			$o .= $this->attrib($attr, $value, $doEscape);
		}
		return $o;
	}

	/**
	 * Serializes a single attribute (with a leading space).
	 * Null values and empty titles are dropped; tags are stripped from
	 * the value and it is escaped unless $doEscape is false.
	 */
	public function attrib($attrib, $value, $doEscape = true) {
		if ( is_null($value) || ( empty($value) && $attrib == 'title' ) ) {
			return '';
		}
		$value = strip_tags($value);
		return ' '. $attrib .'="'
			. ( $doEscape ? String::myhtmlspecialchars($value) : $value )
			.'"';
	}

	/**
		Creates an HTML table.
		@param $caption Table caption
		@param $data Array of arrays, i.e.
			array(
				array(CELL, CELL, ...),
				array(CELL, CELL, ...),
				...
			)
			CELL can be:
			— a string — equivalent to a simple table cell
			— an array:
				— first element must be an associative array for cell attributes;
				  if this array contains a key 'type' with the value 'header',
				  then the cell is rendered as a header cell
				— second element must be a string representing the cell content
		@param $attrs Optional associative array for table attributes
	*/
	public function simpleTable($caption, $data, $attrs = array()) {
		$ext = $this->makeAttribs($attrs);
		$t = "\n<table class=\"content\"$ext>";
		if ( !empty($caption) ) {
			$t .= "<caption>$caption</caption>";
		}
		$curRowClass = '';
		foreach ($data as $row) {
			// alternate odd/even row classes for striped styling
			$curRowClass = $this->nextRowClass($curRowClass);
			$t .= "\n<tr class=\"$curRowClass\">";
			foreach ($row as $cell) {
				$ctype = 'd'; // <td> by default, <th> for header cells
				if ( is_array($cell) ) {
					if ( isset( $cell[0]['type'] ) ) {
						$ctype = $cell[0]['type'] == 'header' ? 'h' : 'd';
						unset( $cell[0]['type'] );
					}
					$cattrs = $this->makeAttribs($cell[0]);
					$content = $cell[1];
				} else {
					$cattrs = '';
					$content = $cell;
				}
				$t .= "\n\t<t{$ctype}{$cattrs}>{$content}</t{$ctype}>";
			}
			$t .= "\n</tr>";
		}
		return $t.'</table>';
	}

	/** Alternates between the CSS classes 'odd' and 'even'. */
	public function nextRowClass($curRowClass = '') {
		return $curRowClass == 'even' ? 'odd' : 'even';
	}

	/** Obscures an e-mail address for display (anti-harvesting). */
	public function obfuscateEmail($email) {
		return strtr($email,
			array('@' => ' <span title="при сървъра">(при)</span> '));
	}

	/**
	 * Appends query parameters to a URL, replacing any existing
	 * occurrence of the same parameter first.
	 */
	public function addUrlQuery($url, $args) {
		if ( !empty($this->queryStart) && strpos($url, $this->queryStart) === false ) {
			$url .= $this->queryStart;
		}
		foreach ((array) $args as $key => $val) {
			$sep = $this->getArgSeparator($url);
			// drop a previous "<sep><key>=<value>" pair before re-adding it
			$url = preg_replace("!$sep$key".Request::PARAM_SEPARATOR."[^$sep]*!", '', $url);
			$url .= $sep . $key . Request::PARAM_SEPARATOR . $this->urlencode($val);
		}
		return $url;
	}

	/**
	 * Chooses the argument separator: the default one ('&') wins as soon
	 * as the URL already contains it.
	 */
	public function getArgSeparator($url = '') {
		if ( empty($url) || strpos($url, $this->defArgSeparator) === false ) {
			return $this->argSeparator;
		}
		return $this->defArgSeparator;
	}

	/**
		TODO was done with myhtmlentities() (r1146), check why.
		XHTML Mobile does not have most of the html entities,
		so revert back to myhtmlspecialchars().
	*/
	public function escape($s) {
		return String::myhtmlspecialchars( $s );
	}

	/** URL-encodes a string, but keeps literal slashes readable. */
	public function urlencode($str) {
		$enc = urlencode($str);
		if ( strpos($str, '/') !== false ) {
			$enc = strtr($enc, array('%2F' => '/'));
		}
		return $enc;
	}

	/**
	 * Produces an ASCII slug: transliterates Cyrillic, strips diacritics
	 * and non-word characters, collapses underscores and truncates to
	 * $maxlength characters.
	 */
	public function slugify($name, $maxlength = 40)
	{
		$name = strtr($name, array(
			' ' => '_', '/' => '_',
			'²' => '2', '°' => 'deg',
			'—' => '',
		));
		$name = Char::cyr2lat($name);
		$name = Legacy::removeDiacritics($name);
		$name = iconv($this->inencoding, 'ISO-8859-1//TRANSLIT', $name);
		$name = strtolower($name);
		$name = preg_replace('/__+/', '_', $name);
		$name = preg_replace('/[^\w\d_]/', '', $name);
		$name = rtrim(substr($name, 0, $maxlength), '_');
		return $name;
	}

	/** Renders an opening tag with attributes. */
	public function getStartTag($elm, $attrs = array())
	{
		return '<'. $elm . $this->makeAttribs($attrs) . '>';
	}

	/** Renders a closing tag. */
	public function getEndTag($elm)
	{
		return '</'. $elm . '>';
	}

	/** Renders a self-closing tag; $xml toggles "/>" vs " />". */
	public function getEmptyTag($elm, $attrs = array(), $xml = true)
	{
		$end = $xml ? '/>' : ' />';
		return '<'. $elm . $this->makeAttribs($attrs) . $end;
	}

	/** Renders an RSS 2.0 autodiscovery <link> element for the given feed. */
	public function getRssLink($url, $title = '')
	{
		return $this->getEmptyTag('link', array(
			'rel' => 'alternate',
			'type' => 'application/rss+xml',
			'title' => "$title (RSS 2.0)",
			'href' => $url,
		), false);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/**
 * Admin controller for Person entities.
 * All behaviour is inherited from CRUDController; this empty subclass
 * exists so the admin layer can be customised per entity when needed.
 */
class PersonController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Doctrine\Common\Collections\ArrayCollection;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Util\Ary;
use Sfblib_SfbConverter as SfbConverter;
use Sfblib_SfbToHtmlConverter as SfbToHtmlConverter;
use Sfblib_SfbToFb2Converter as SfbToFb2Converter;
use Symfony\Component\Validator\Constraints as Assert;
/**
* @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\BookRepository")
* @ORM\HasLifecycleCallbacks
* @ORM\Table(name="book",
* indexes={
* @ORM\Index(name="title_idx", columns={"title"}),
* @ORM\Index(name="title_author_idx", columns={"title_author"}),
* @ORM\Index(name="subtitle_idx", columns={"subtitle"}),
* @ORM\Index(name="orig_title_idx", columns={"orig_title"})}
* )
*/
class Book extends BaseWork
{
/**
* @ORM\Column(type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="CUSTOM")
* @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
*/
protected $id;
/**
* @var string $slug
* @ORM\Column(type="string", length=50)
*/
private $slug;
/**
* @var string $title_author
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $title_author;
/**
* @var string $title
* @ORM\Column(type="string", length=255)
*/
private $title = '';
/**
* @var string $subtitle
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $subtitle;
/**
* @var string
* @ORM\Column(type="string", length=1000, nullable=true)
*/
private $title_extra;
/**
* @var string $orig_title
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $orig_title;
/**
* @var string $lang
* @ORM\Column(type="string", length=2)
*/
private $lang;
/**
* @var string $orig_lang
* @ORM\Column(type="string", length=3, nullable=true)
*/
private $orig_lang;
/**
* @var integer $year
* @ORM\Column(type="smallint", nullable=true)
*/
private $year;
/**
* @var integer $trans_year
* @ORM\Column(type="smallint", nullable=true)
*/
private $trans_year;
/**
* @var string $type
* @ORM\Column(type="string", length=10)
*/
private $type;
static private $typeList = array(
'single' => 'Обикновена книга',
'collection' => 'Сборник',
'poetry' => 'Стихосбирка',
'anthology' => 'Антология',
'magazine' => 'Списание',
);
/**
* @var integer
* @ORM\ManyToOne(targetEntity="Sequence", inversedBy="books")
*/
private $sequence;
/**
* @var integer
* @ORM\Column(type="smallint", nullable=true)
*/
private $seqnr;
/**
* @var integer
* @ORM\ManyToOne(targetEntity="Category", inversedBy="books")
*/
private $category;
/**
* @var boolean
* @ORM\Column(type="boolean")
*/
private $has_anno;
/**
* @var boolean
* @ORM\Column(type="boolean")
*/
private $has_cover;
/**
* List of formats in which the book is available
* @ORM\Column(type="array")
*/
private $formats = array();
/**
* @ORM\OneToMany(targetEntity="BookRevision", mappedBy="book", cascade={"persist"})
*/
private $revisions;
/**
* A notice if the content is removed
* @ORM\Column(type="text", nullable=true)
*/
private $removed_notice;
/*
* @ORM\ManyToMany(targetEntity="Person", inversedBy="books")
* @ORM\JoinTable(name="book_author")
*/
private $authors;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="BookAuthor", mappedBy="book", cascade={"persist", "remove"}, orphanRemoval=true)
* @ORM\OrderBy({"pos" = "ASC"})
*/
private $bookAuthors;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="BookText", mappedBy="book", cascade={"persist", "remove"}, orphanRemoval=true)
*/
private $bookTexts;
/** FIXME doctrine:schema:create does not allow this relation
* @ORM\ManyToMany(targetEntity="Text", inversedBy="books")
* @ORM\JoinTable(name="book_text",
* joinColumns={@ORM\JoinColumn(name="book_id", referencedColumnName="id")},
* inverseJoinColumns={@ORM\JoinColumn(name="text_id", referencedColumnName="id")})
*/
private $texts;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="BookLink", mappedBy="book", cascade={"persist", "remove"}, orphanRemoval=true)
*/
private $links;
/**
* @var date
* @ORM\Column(type="date")
*/
private $created_at;
private $fb2CoverWidth = 400;
/**
 * Initializes the collection-valued associations.
 * $this->texts is not initialized here — presumably it is populated by
 * Doctrine through the book_text relation (see the FIXME on its mapping).
 */
public function __construct()
{
	$this->bookAuthors = new ArrayCollection;
	$this->bookTexts = new ArrayCollection;
	$this->links = new ArrayCollection;
}
public function __toString()
{
return $this->title;
}
public function getId() { return $this->id; }
public function setSlug($slug) { $this->slug = String::slugify($slug); }
public function getSlug() { return $this->slug; }
public function setTitleAuthor($titleAuthor) { $this->title_author = $titleAuthor; }
public function getTitleAuthor() { return $this->title_author; }
public function setTitle($title) { $this->title = $title; }
public function getTitle() { return $this->title; }
public function setSubtitle($subtitle) { $this->subtitle = $subtitle; }
public function getSubtitle() { return $this->subtitle; }
public function setTitleExtra($title) { $this->title_extra = $title; }
public function getTitleExtra() { return $this->title_extra; }
public function setOrigTitle($origTitle) { $this->orig_title = $origTitle; }
public function getOrigTitle() { return $this->orig_title; }
public function setLang($lang) { $this->lang = $lang; }
public function getLang() { return $this->lang; }
public function setOrigLang($origLang) { $this->orig_lang = $origLang; }
public function getOrigLang() { return $this->orig_lang; }
public function setYear($year) { $this->year = $year; }
public function getYear() { return $this->year; }
public function setTransYear($transYear) { $this->trans_year = $transYear; }
public function getTransYear() { return $this->trans_year; }
public function setType($type) { $this->type = $type; }
public function getType() { return $this->type; }
public function setFormats($formats) { $this->formats = $formats; }
public function getFormats() { return $this->formats; }
/**
 * Tells whether this book is available in the SFB source format.
 * @return bool
 */
public function isInSfbFormat()
{
	foreach ($this->formats as $availableFormat) {
		if ($availableFormat == 'sfb') {
			return true;
		}
	}
	return false;
}
public function getRevisions() { return $this->revisions; }
public function addRevision(BookRevision $revision)
{
$this->revisions[] = $revision;
}
public function setRemovedNotice($removed_notice) { $this->removed_notice = $removed_notice; }
public function getRemovedNotice() { return $this->removed_notice; }
public function getAuthors()
{
if (!isset($this->authors)) {
$this->authors = array();
foreach ($this->getBookAuthors() as $author) {
if ($author->getPos() >= 0) {
$this->authors[] = $author->getPerson();
}
}
}
return $this->authors;
}
/**
 * Returns the names of all book authors joined into a single string.
 * @param string $separator glue placed between the names
 * @return string
 */
public function getAuthorsPlain($separator = ', ')
{
	$names = array_map(
		function ($person) { return $person->getName(); },
		$this->getAuthors()
	);
	return implode($separator, $names);
}
public function addAuthor($author)
{
$this->authors[] = $author;
}
public function addBookAuthor(BookAuthor $bookAuthor)
{
$this->bookAuthors[] = $bookAuthor;
}
public function removeBookAuthor(BookAuthor $bookAuthor)
{
$this->bookAuthors->removeElement($bookAuthor);
}
// TODO needed by admin; why?
public function addBookAuthors(BookAuthor $bookAuthor) { $this->addBookAuthor($bookAuthor); }
public function setBookAuthors($bookAuthors) { $this->bookAuthors = $bookAuthors; }
public function getBookAuthors() { return $this->bookAuthors; }
public function getBookTexts() { return $this->bookTexts; }
public function setBookTexts($bookTexts) { $this->bookTexts = $bookTexts; }
public function addBookText(BookText $bookText)
{
$this->bookTexts[] = $bookText;
}
public function getTexts() { return $this->texts; }
/**
 * Replaces the texts attached to this book.
 * Existing BookText join entities are reused by their collection key;
 * new ones are created for added texts, and join entities whose keys
 * are no longer present are removed. Finally the pending-update flag
 * (set by setRawTemplate()) is cleared.
 * @param array $texts position key => Text
 */
public function setTexts($texts)
{
	$bookTexts = $this->getBookTexts();
	foreach ($texts as $key => $text) {
		$bookText = $bookTexts->get($key);
		if ($bookText === null) {
			// no join entity at this key yet — create and attach one
			$bookText = new BookText;
			$bookText->setBook($this);
			$bookText->setShareInfo(true);
			$this->addBookText($bookText);
		}
		$bookText->setText($text);
		$bookText->setPos($key);
	}
	// drop join entities whose keys disappeared from the new set
	$keysToRemove = array_diff($bookTexts->getKeys(), array_keys($texts));
	foreach ($keysToRemove as $key) {
		$bookTexts->remove($key);
	}
	$this->textsNeedUpdate = false;
}
private $textsNeedUpdate = false;
public function textsNeedUpdate()
{
return $this->textsNeedUpdate;
}
public function setLinks($links) { $this->links = $links; }
public function getLinks() { return $this->links; }
public function addLink(BookLink $link)
{
$this->links[] = $link;
}
public function removeLink(BookLink $link)
{
$this->links->removeElement($link);
}
// TODO needed by admin; why?
public function addLinks(BookLink $link) { $this->addLink($link); }
public function setHasAnno($has_anno) { $this->has_anno = $has_anno; }
public function getHasAnno() { return $this->has_anno; }
public function hasAnno() { return $this->has_anno; }
public function setHasCover($has_cover) { $this->has_cover = $has_cover; }
public function getHasCover() { return $this->has_cover; }
public function hasCover() { return $this->has_cover; }
public function setSequence($sequence) { $this->sequence = $sequence; }
public function getSequence() { return $this->sequence; }
public function getSequenceSlug()
{
return $this->sequence ? $this->sequence->getSlug() : null;
}
public function setSeqnr($seqnr) { $this->seqnr = $seqnr; }
public function getSeqnr() { return $this->seqnr; }
public function setCategory($category) { $this->category = $category; }
public function getCategory() { return $this->category; }
public function getCategorySlug()
{
return $this->category ? $this->category->getSlug() : null;
}
public function setCreatedAt($created_at) { $this->created_at = $created_at; }
public function getCreatedAt() { return $this->created_at; }
public function getSfbg()
{
return $this->getLink('SFBG');
}
public function getPuk()
{
return $this->getLink('ПУК!');
}
/**
 * Finds an external book link by the name of the site it points to.
 * @param string $name site name, e.g. 'SFBG'
 * @return BookLink|null the first matching link, or null when none matches
 */
public function getLink($name)
{
	foreach ($this->getLinks() as $bookLink) {
		if ($name == $bookLink->getSiteName()) {
			return $bookLink;
		}
	}
	return null;
}
private $textIds = array();
private $textsById = array();
static protected $annotationDir = 'book-anno';
static protected $infoDir = 'book-info';
protected $covers = array();
public function getDocId()
{
return 'http://chitanka.info/book/' . $this->id;
}
//public function getType() { return 'book'; }
public function getAuthor()
{
return $this->title_author;
}
public function getMainAuthors()
{
if ( ! isset($this->mainAuthors) ) {
$this->mainAuthors = array();
foreach ($this->getTextsById() as $text) {
if ( self::isMainWorkType($text->getType()) ) {
foreach ($text->getAuthors() as $author) {
$this->mainAuthors[$author->getId()] = $author;
}
}
}
}
return $this->mainAuthors;
}
public function getAuthorSlugs()
{
$slugs = array();
foreach ($this->getAuthors() as $author/*@var $author Person*/) {
$slugs[] = $author->getSlug();
}
return $slugs;
}
/**
 * Tells whether a text of the given type counts as a main work of the
 * book, as opposed to accompanying matter (intro/outro).
 * Types once considered accompanying as well: 'interview', 'article'.
 * @param string $type
 * @return bool
 */
static public function isMainWorkType($type)
{
	return $type != 'intro' && $type != 'outro';
}
public function getAuthorsBy($type)
{
if ( ! isset($this->authorsBy[$type]) ) {
$this->authorsBy[$type] = array();
foreach ($this->getTextsById() as $text) {
if ($text->getType() == $type) {
foreach ($text->getAuthors() as $author) {
$this->authorsBy[$type][$author['id']] = $author;
}
}
}
}
return $this->authorsBy[$type];
}
public function getTranslators()
{
if ( ! isset($this->translators) ) {
$this->translators = array();
$seen = array();
foreach ($this->getTexts() as $text) {
foreach ($text->getTranslators() as $translator) {
if ( ! in_array($translator->getId(), $seen) ) {
$this->translators[] = $translator;
$seen[] = $translator->getId();
}
}
}
}
return $this->translators;
}
/**
 * Legacy fallback: derives the book language as the most common
 * language among the contained texts. Lazily computed and cached
 * in $this->lang.
 * @return string|null language code, or null when there are no texts
 */
public function getLangOld()
{
	if ( ! isset($this->lang) ) {
		// tally the languages of all texts
		$langs = array();
		foreach ($this->getTextsById() as $text) {
			if ( ! isset($langs[$text->lang]) ) {
				$langs[$text->lang] = 0;
			}
			$langs[$text->lang]++;
		}
		arsort($langs);
		// each() is deprecated since PHP 7.2 and removed in PHP 8.0;
		// reset() + key() yields the same first (most frequent) key.
		reset($langs);
		$this->lang = key($langs);
	}
	return $this->lang;
}
/**
 * Legacy fallback: derives the original language as the most common
 * original language among the contained texts. Lazily computed and
 * cached in $this->orig_lang.
 * @return string|null language code, or null when there are no texts
 */
public function getOrigLangOld()
{
	if ( ! isset($this->orig_lang) ) {
		// tally the original languages of all texts
		$langs = array();
		foreach ($this->getTextsById() as $text) {
			if ( ! isset($langs[$text->orig_lang]) ) {
				$langs[$text->orig_lang] = 0;
			}
			$langs[$text->orig_lang]++;
		}
		arsort($langs);
		// each() is deprecated since PHP 7.2 and removed in PHP 8.0;
		// reset() + key() yields the same first (most frequent) key.
		reset($langs);
		$this->orig_lang = key($langs);
	}
	return $this->orig_lang;
}
public function getYearOld()
{
if ( ! isset($this->year) ) {
$texts = $this->getTextsById();
$text = array_shift($texts);
$this->year = $text->year;
}
return $this->year;
}
public function getTransYearOld()
{
if ( ! isset($this->trans_year) ) {
$texts = $this->getTextsById();
$text = array_shift($texts);
$this->trans_year = $text->trans_year;
}
return $this->trans_year;
}
static public function newFromId($id)
{
$db = Setup::db();
$res = $db->select(DBT_BOOK, array('id' => $id));
$data = $db->fetchAssoc($res);
$book = new Book;
foreach ($data as $field => $value) {
$book->$field = $value;
}
return $book;
}
/**
 * Creates a Book and fills its properties from an associative array.
 * @param array $fields property name => value
 * @return Book
 */
static public function newFromArray($fields)
{
	$instance = new Book;
	foreach ($fields as $property => $fieldValue) {
		$instance->{$property} = $fieldValue;
	}
	return $instance;
}
public function withAutohide()
{
return $this->getTemplate()->hasAutohide();
}
public function getTemplateAsXhtml()
{
return $this->getTemplate()->getAsXhtml();
}
private $template;
/** @return Content\BookTemplate */
public function getTemplate()
{
return $this->template ?: $this->template = new Content\BookTemplate($this);
}
public function getRawTemplate()
{
return $this->getTemplate()->getContent();
}
public function setRawTemplate($template)
{
$this->getTemplate()->setContent($template);
$this->textsNeedUpdate = true;
}
public function getTextIdsFromTemplate()
{
return $this->getTemplate()->getTextIds();
}
public function getCover($width = null)
{
$this->initCovers();
return is_null($width) ? $this->covers['front'] : Legacy::genThumbnail($this->covers['front'], $width);
}
public function getBackCover($width = null)
{
$this->initCovers();
return is_null($width) ? $this->covers['back'] : Legacy::genThumbnail($this->covers['back'], $width);
}
static protected $exts = array('.jpg');
/**
 * Lazily resolves the front and back cover files into $this->covers.
 * The front cover is the first file found by getCovers(); the back
 * cover is looked up as the same file name with a "-back" suffix.
 */
public function initCovers()
{
	if (empty($this->covers)) {
		$this->covers['front'] = $this->covers['back'] = null;
		$covers = self::getCovers($this->id);
		if ( ! empty($covers)) {
			$this->covers['front'] = $covers[0];
		} else {
			// there should not be any covers by texts
			/*foreach ($this->getTextIds() as $textId) {
				$covers = self::getCovers($textId);
				if ( ! empty($covers) ) {
					$this->covers['front'] = $covers[0];
					break;
				}
			}*/
		}
		if ($this->covers['front']) {
			// back cover convention: "NAME.EXT" -> "NAME-back.EXT"
			$back = preg_replace('/(.+)\.(\w+)$/', '$1-back.$2', $this->covers['front']);
			if (file_exists($back)) {
				$this->covers['back'] = $back;
			}
		}
	}
}
/**
 * Collects the existing cover image files for a text or book.
 * For the first existing base file, additional images named
 * "ID-2.EXT", "ID-3.EXT", ... are also gathered until a gap is found.
 * @param int $id Text or book ID
 * @param int|null $defCover Default cover ID used if there aren’t any covers for $id
 * @return array list of existing cover file paths (possibly empty)
 */
static public function getCovers($id, $defCover = null)
{
	$key = 'book-cover-content';
	$bases = array(Legacy::getContentFilePath($key, $id));
	if ( ! empty($defCover)) {
		$bases[] = Legacy::getContentFilePath($key, $defCover);
	}
	// every base path combined with every known extension
	$coverFiles = Ary::cartesianProduct($bases, self::$exts);
	$covers = array();
	foreach ($coverFiles as $file) {
		if (file_exists($file)) {
			$covers[] = $file;
			// search for more images of the form “ID-DIGIT.EXT”
			for ($i = 2; /* infinity */; $i++) {
				$efile = strtr($file, array('.' => "-$i."));
				if (file_exists($efile)) {
					$covers[] = $efile;
				} else {
					break;
				}
			}
			break; // don’t check other extensions
		}
	}
	return $covers;
}
static public function renameCover($cover, $newname) {
$rexts = strtr(implode('|', self::$exts), array('.'=>'\.'));
return preg_replace("/\d+(-\d+)?($rexts)/", "$newname$1$2", $cover);
}
public function getImages()
{
return array_merge($this->getLocalImages(), $this->getTextImages());
}
public function getThumbImages()
{
return $this->getTextThumbImages();
}
public function getLocalImages()
{
$images = array();
$dir = Legacy::getInternalContentFilePath('book-img', $this->id);
foreach (glob("$dir/*") as $img) {
$images[] = $img;
}
return $images;
}
public function getTextImages()
{
$images = array();
foreach ($this->getTexts() as $text) {
$images = array_merge($images, $text->getImages());
}
return $images;
}
public function getTextThumbImages()
{
$images = array();
foreach ($this->getTexts() as $text) {
$images = array_merge($images, $text->getThumbImages());
}
return $images;
}
public function getLabels()
{
$labels = array();
foreach ($this->getTexts() as $text) {
foreach ($text->getLabels() as $label) {
$labels[] = $label->getName();
}
}
$labels = array_unique($labels);
return $labels;
}
public function getContentAsSfb()
{
if (!$this->isInSfbFormat()) {
return null;
}
return $this->getTitleAsSfb() . SfbConverter::EOL
. $this->getAllAnnotationsAsSfb()
. $this->getMainBodyAsSfb()
. $this->getInfoAsSfb();
}
public function getMainBodyAsSfb()
{
return $this->getTemplate()->generateSfb();
}
public function getMainBodyAsSfbFile()
{
if ( isset($this->_mainBodyAsSfbFile) ) {
return $this->_mainBodyAsSfbFile;
}
$this->_mainBodyAsSfbFile = tempnam(sys_get_temp_dir(), 'book-');
file_put_contents($this->_mainBodyAsSfbFile, $this->getMainBodyAsSfb());
return $this->_mainBodyAsSfbFile;
}
/**
* Return the author of a text if he/she is not on the book title
*/
public function getBookAuthorIfNotInTitle($text)
{
$bookAuthorsIds = $this->getAuthorIds();
$authors = array();
foreach ($text->getAuthors() as $author) {
if ( ! in_array($author->getId(), $bookAuthorsIds)) {
$authors[] = $author;
}
}
return $authors;
}
public function getTitleAsSfb()
{
$sfb = '';
$prefix = SfbConverter::HEADER . SfbConverter::CMD_DELIM;
if ('' != $authors = $this->getAuthorsPlain()) {
$sfb .= $prefix . $authors . SfbConverter::EOL;
}
$sfb .= $prefix . $this->title . SfbConverter::EOL;
if ( ! empty($this->subtitle) ) {
$sfb .= $prefix . $this->subtitle . SfbConverter::EOL;
}
return $sfb;
}
public function getAllAnnotationsAsSfb()
{
if ( ($text = $this->getAnnotationAsSfb()) ) {
return $text;
}
return $this->getTextAnnotations();
}
public function getAnnotationAsXhtml($imgDir = null)
{
if ($imgDir === null) {
$imgDir = 'IMG_DIR_PREFIX' . Legacy::getContentFilePath('book-img', $this->id);
}
return parent::getAnnotationAsXhtml($imgDir);
}
/* TODO remove: there should not be any annotations by texts */
public function getTextAnnotations()
{
return '';
$annotations = array();
foreach ($this->getTextsById() as $text) {
$annotation = $text->getAnnotation();
if ($annotation != '') {
$annotations[$text->title] = $annotation;
}
}
if (empty($annotations)) {
return '';
}
$bannotation = '';
$putTitles = count($annotations) > 1;
foreach ($annotations as $title => $annotation) {
if ($putTitles) {
$bannotation .= SfbConverter::EOL . SfbConverter::EOL
. SfbConverter::SUBHEADER . SfbConverter::CMD_DELIM . $title
. SfbConverter::EOL;
}
$bannotation .= $annotation;
}
return SfbConverter::ANNO_S . SfbConverter::EOL
. rtrim($bannotation) . SfbConverter::EOL
. SfbConverter::ANNO_E . SfbConverter::EOL;
}
public function getInfoAsSfb()
{
return SfbConverter::INFO_S . SfbConverter::EOL
. SfbConverter::CMD_DELIM . $this->getOriginMarker() . SfbConverter::EOL
. rtrim($this->getExtraInfo()) . SfbConverter::EOL
. SfbConverter::INFO_E . SfbConverter::EOL;
}
public function getOriginMarker()
{
return sprintf('Свалено от „Моята библиотека“: %s', $this->getDocId());
}
public function getContentAsFb2()
{
if (!$this->isInSfbFormat()) {
return null;
}
$imgdir = $this->initTmpImagesDir();
$conv = new SfbToFb2Converter($this->getContentAsSfb(), $imgdir);
$conv->setObjectCount(1);
$conv->setSubtitle($this->subtitle);
$conv->setGenre($this->getGenresForFb2());
$conv->setKeywords( implode(', ', $this->getLabels()) );
$conv->setTextDate($this->getYear());
if ( ($cover = $this->getCover($this->fb2CoverWidth)) ) {
$conv->addCoverpage($cover);
}
$conv->setLang($this->getLang());
$orig_lang = $this->getOrigLang();
$conv->setSrcLang(empty($orig_lang) ? '?' : $orig_lang);
foreach ($this->getTranslators() as $translator) {
$conv->addTranslator($translator->getName());
}
$conv->setDocId($this->getDocId());
$conv->setDocAuthor('<NAME>');
$conv->enablePrettyOutput();
$content = $conv->convert()->getContent();
return $content;
}
private function getGenresForFb2()
{
$genres = array();
foreach ($this->getTexts() as $text) {
$genres = array_merge($genres, $text->getGenresForFb2());
}
$genres = array_unique($genres);
return $genres;
}
public function getHeaders()
{
if ( isset($this->_headers) ) {
return $this->_headers;
}
require_once __DIR__ . '/../Legacy/SfbParserSimple.php';
$this->_headers = array();
foreach (\Chitanka\LibBundle\Legacy\makeDbRows($this->getMainBodyAsSfbFile(), 4) as $row) {
$header = new TextHeader;
$header->setNr($row[0]);
$header->setLevel($row[1]);
$header->setName($row[2]);
$header->setFpos($row[3]);
$header->setLinecnt($row[4]);
$this->_headers[] = $header;
}
return $this->_headers;
}
public function getEpubChunks($imgDir)
{
return $this->getEpubChunksFrom($this->getMainBodyAsSfbFile(), $imgDir);
}
public function initTmpImagesDir()
{
$dir = sys_get_temp_dir() . '/' . uniqid();
mkdir($dir);
foreach ($this->getImages() as $image) {
copy($image, $dir.'/'.basename($image));
}
return $dir;
}
public function getNameForFile()
{
return trim("$this->title_author - $this->title - $this->subtitle-$this->id-b", '- ');
}
/**
 * Extracts the IDs of all texts referenced by the book template via
 * {text:ID} or {file:ID} placeholders. Cached after the first call.
 * @return array of numeric strings in template order
 */
public function getTextIds()
{
	if ( empty($this->textIds) ) {
		preg_match_all('/\{(text|file):(\d+)/', $this->getTemplate()->getContent(), $matches);
		$this->textIds = $matches[2];
	}
	return $this->textIds;
}
/**
 * Returns the book text with the given ID.
 * @param int|string $textId
 * @return Text|null null when the book does not reference this text
 */
public function getTextById($textId)
{
	$textMap = $this->getTextsById();
	if (isset($textMap[$textId])) {
		return $textMap[$textId];
	}
	return null;
}
/**
 * Returns the book texts keyed by text ID, in template order.
 * IDs referenced by the template but missing from the attached texts
 * are filled with stub Text objects. Cached after the first call.
 * @return array text ID => Text
 */
public function getTextsById()
{
	if ( empty($this->textsById) ) {
		// seed the map with all IDs from the template to fix the order
		foreach ($this->getTextIds() as $id) {
			$this->textsById[$id] = null;
		}
		foreach ($this->getTexts() as $text) {
			$this->textsById[$text->getId()] = $text;
		}
		// replace still-missing entries with stub Text objects
		foreach ($this->textsById as $id => $text) {
			if (is_null($text)) {
				$text = new Text($id);
				$this->textsById[$id] = $text;
			}
		}
	}
	return $this->textsById;
}
public function isGamebook()
{
return false;
}
/**
 * Tells whether the given text has the same authors as the book.
 * @param Text $text
 * @return bool
 */
public function isFromSameAuthor($text)
{
	$bookAuthorIds = $this->getAuthorIds();
	$textAuthorIds = $text->getAuthorIds();
	return $bookAuthorIds == $textAuthorIds;
}
/** TODO set for a books with only one novel */
public function getPlainSeriesInfo()
{
return '';
}
public function getPlainTranslationInfo()
{
$info = array();
foreach ($this->getTranslators() as $translator) {
$info[] = $translator->getName();
}
return sprintf('Превод: %s', implode(', ', $info));
}
/**
 * Serialize the work's metadata as a plain key = value text block
 * (used for the downloadable info file).
 *
 * @return string
 */
public function getDataAsPlain()
{
	// NOTE(review): implode() without a glue string joins the slugs with
	// an empty string, so multiple authors come out as "slug1slug2".
	// Confirm whether a separator (e.g. ',') was intended here.
	$authors = implode($this->getAuthorSlugs());
	return <<<EOS
title = {$this->getTitle()}
subtitle = {$this->getSubtitle()}
title_extra = {$this->getTitleExtra()}
authors = $authors
slug = {$this->getSlug()}
lang = {$this->getLang()}
orig_title = {$this->getOrigTitle()}
orig_lang = {$this->getOrigLang()}
year = {$this->getYear()}
sequence = {$this->getSequenceSlug()}
seq_nr = {$this->getSeqnr()}
category = {$this->getCategorySlug()}
type = {$this->getType()}
id = {$this->getId()}
EOS;
}
/**
 * Collect the paths of all data files belonging to this book.
 * Cover and annotation entries are included only when present.
 *
 * @return array Map of file kind => path
 */
public function getDatafiles()
{
	$paths = array(
		'book' => Legacy::getContentFilePath('book', $this->id),
	);
	if ($this->hasCover()) {
		$paths['book-cover'] = Legacy::getContentFilePath('book-cover', $this->id) . '.max.jpg';
	}
	if ($this->hasAnno()) {
		$paths['book-anno'] = Legacy::getContentFilePath('book-anno', $this->id);
	}
	$paths['book-info'] = Legacy::getContentFilePath('book-info', $this->id);

	return $paths;
}
// No-op setter required by the Sonata admin form layer; the data file
// paths are derived from the entity id and are never stored directly.
public function setDatafiles($f) {} // dummy for sonata admin
/**
 * Path of a pre-generated static file (scan) for this book.
 *
 * @param string $format One of "djvu" or "pdf"
 * @return string
 * @throws \Exception When the format is not a supported static format
 */
public function getStaticFile($format)
{
	$validFormats = array('djvu', 'pdf');
	if ( ! in_array($format, $validFormats)) {
		throw new \Exception("Format $format is not a valid static format for a book.");
	}

	return Legacy::getContentFilePath('book-'.$format, $this->id);
}
##################
# legacy pic stuff
##################
// Constants used by the legacy picture-handling code below.
const
MIRRORS_FILE = 'MIRRORS',              // per-directory file listing mirror URLs (deprecated)
INFO_FILE = 'INFO',                    // per-directory metadata file
THUMB_DIR = 'thumb',                   // subdirectory holding thumbnails
THUMBS_FILE_TPL = 'thumbs-%d.jpg',     // sprintf template for joined-thumbnail files
MAX_JOINED_THUMBS = 50;                // how many thumbnails share one joined image
/**
 * Human-readable series name of the given picture (or of $this when no
 * picture is supplied), optionally prefixed with its series type label.
 *
 * @param object|null $pic Picture to inspect; defaults to $this
 * @return string Empty string when the picture belongs to no series
 */
public function getSeriesName($pic = null) {
	$subject = is_null($pic) ? $this : $pic;
	if (empty($subject->series)) {
		return '';
	}
	$name = $subject->seriesName;
	// picType() is a legacy global helper mapping the numeric series
	// type to a label; when it yields one, wrap the name in quotes.
	$typeLabel = picType($subject->seriesType);
	if ( ! empty($typeLabel)) {
		$name = "$typeLabel „{$name}“";
	}

	return $name;
}
/**
 * String representation of the given picture (or of $this when
 * no picture is supplied).
 *
 * @param object|null $pic Picture to inspect; defaults to $this
 * @return string
 */
public function getIssueName($pic = null) {
	$subject = is_null($pic) ? $this : $pic;

	return $subject->__toString();
}
/**
 * List the picture files in this work's content directory, sorted by
 * name. Dot files and the bookkeeping entries (MIRRORS, thumb/) are
 * excluded. Result is memoized.
 *
 * @return array Sorted file names
 */
public function getFiles()
{
	if ( isset($this->_files) ) {
		return $this->_files;
	}
	$dir = Legacy::getContentFilePath('book-pic', $this->id);
	// The original code called scandir() unconditionally, which raises a
	// warning and returns false (fatal in the foreach) when the work has
	// no picture directory. Treat that case as "no files".
	if ( ! is_dir($dir)) {
		return $this->_files = array();
	}
	$ignore = array(self::MIRRORS_FILE, self::THUMB_DIR);
	$files = array();
	foreach (scandir($dir) as $file) {
		if ( $file[0] == '.' || in_array($file, $ignore) ) {
			continue;
		}
		$files[] = $file;
	}
	sort($files);
	return $this->_files = $files;
}
// deprecated
// Mirrors are no longer supported; kept only because getDocRoot() still
// calls it. Always returns an empty list.
private function getMirrors()
{
	return array();
}
/**
 * Document root (mirror base URL) for this work's files. Picks a random
 * mirror when any are configured; empty string otherwise. Memoized
 * unless $cache is false.
 *
 * @param bool $cache Whether to reuse a previously computed value
 * @return string Either '' or a URL ending in '/'
 */
public function getDocRoot($cache = true)
{
	if ($cache && isset($this->_docRoot)) {
		return $this->_docRoot;
	}
	$mirrors = $this->getMirrors();
	if (empty($mirrors)) {
		$this->_docRoot = '';
		return $this->_docRoot;
	}
	shuffle($mirrors);
	$this->_docRoot = rtrim($mirrors[0], '/') . '/';

	return $this->_docRoot;
}
/**
 * Local directory holding this work's images. Memoized.
 *
 * @return string
 */
public function getImageDir()
{
	if (isset($this->_imageDir)) {
		return $this->_imageDir;
	}
	$this->_imageDir = Legacy::getContentFilePath('book-pic', $this->id);

	return $this->_imageDir;
}
/**
 * Local directory holding this work's thumbnails. Memoized.
 *
 * @return string
 */
public function getThumbDir()
{
	if (isset($this->_thumbDir)) {
		return $this->_thumbDir;
	}
	$this->_thumbDir = $this->getImageDir() .'/'. self::THUMB_DIR;

	return $this->_thumbDir;
}
/**
 * Web-accessible (possibly mirrored) URL of the image directory. Memoized.
 *
 * @return string
 */
public function getWebImageDir()
{
	if (isset($this->_webImageDir)) {
		return $this->_webImageDir;
	}
	$this->_webImageDir = $this->getDocRoot() . $this->getImageDir();

	return $this->_webImageDir;
}
/**
 * Web-accessible (possibly mirrored) URL of the thumbnail directory. Memoized.
 *
 * @return string
 */
public function getWebThumbDir()
{
	if (isset($this->_webThumbDir)) {
		return $this->_webThumbDir;
	}
	$this->_webThumbDir = $this->getDocRoot() . $this->getThumbDir();

	return $this->_webThumbDir;
}
/**
 * Name of the joined-thumbnail image containing the given page.
 * Thumbnails are packed MAX_JOINED_THUMBS per file.
 *
 * @param int $currentPage Zero-based page index
 * @return string e.g. "thumbs-0.jpg"
 */
public function getThumbFile($currentPage)
{
	$joinedFileIndex = floor($currentPage / self::MAX_JOINED_THUMBS);

	return sprintf(self::THUMBS_FILE_TPL, $joinedFileIndex);
}
/**
 * CSS class selecting the given page within its joined-thumbnail image.
 *
 * @param int $currentPage Zero-based page index
 * @return string e.g. "th3"
 */
public function getThumbClass($currentPage)
{
	$offsetInJoinedFile = $currentPage % self::MAX_JOINED_THUMBS;

	return 'th' . $offsetInJoinedFile;
}
/**
 * Fetch all pictures that belong to the same series as this one,
 * ordered by their position in the series. Uses the legacy DB layer
 * (Setup::db() and the DBT_* table-name constants). Memoized.
 *
 * @return array Map of picture id => PicWork
 */
public function getSiblings()
{
	if ( isset($this->_siblings) ) {
		return $this->_siblings;
	}
	// Legacy query-array format consumed by extselect(); joins the series
	// table to get the series name/type alongside each picture row.
	$qa = array(
		'SELECT' => 'p.*, s.name seriesName, s.type seriesType',
		'FROM' => DBT_PIC .' p',
		'LEFT JOIN' => array(
			DBT_PIC_SERIES .' s' => 'p.series = s.id'
		),
		'WHERE' => array(
			'series' => $this->series,
			'p.series' => array('>', 0),
		),
		'ORDER BY' => 'sernr ASC'
	);
	$db = Setup::db();
	$res = $db->extselect($qa);
	$siblings = array();
	while ( $row = $db->fetchAssoc($res) ) {
		$siblings[ $row['id'] ] = new PicWork($row);
	}
	return $this->_siblings = $siblings;
}
/**
 * Fetch the picture that follows this one within its series.
 *
 * @return object|false The next sibling, or false when this picture is
 *                      not in a series or has no successor
 */
public function getNextSibling() {
	if ( empty($this->series) ) {
		return false;
	}
	$dbkey = array('series' => $this->series);
	if ($this->sernr == 0) {
		// unnumbered pictures: fall back to id order within the series
		$dbkey['p.id'] = array('>', $this->id);
	} else {
		// numbered pictures: prefer the next serial number, otherwise any
		// later-numbered picture with a higher id (handles gaps/duplicates)
		$dbkey[] = 'sernr = '. ($this->sernr + 1)
			. " OR (sernr > $this->sernr AND p.id > $this->id)";
	}
	return self::newFromDB($dbkey);
}
/**
 * Whether the given picture is the same database record as this one
 * (loose id comparison, so string vs. int ids still match).
 *
 * @param object $otherPic
 * @return bool
 */
public function sameAs($otherPic)
{
	$hasSameId = ($this->id == $otherPic->id);

	return $hasSameId;
}
/**
 * The static list of known picture/series types.
 *
 * @return array
 */
static public function getTypeList()
{
	return self::$typeList;
}
// Free-text comment attached to the next saved revision of this entity.
// Not persisted as a column here; consumed by the revision-logging code.
private $revisionComment;

/** @return string|null */
public function getRevisionComment()
{
	return $this->revisionComment;
}

/** @param string $comment */
public function setRevisionComment($comment)
{
	$this->revisionComment = $comment;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command that prints the monthly newsletter: the books added in
 * a given month grouped by category, followed by the texts added in the
 * same month. Output goes to stdout in a wiki-like list format.
 */
class GenerateNewsletterCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('lib:generate-newsletter')
			->setDescription('Generate newsletter')
			->addArgument('month', InputArgument::REQUIRED, 'Month (3 or 2011-3)')
			->setHelp(<<<EOT
The <info>lib:generate-newsletter</info> generates the newsletter for a given month.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		// custom hydrators used by the revision repositories below
		$this->em->getConfiguration()->addCustomHydrationMode('id', 'Chitanka\LibBundle\Hydration\IdHydrator');
		$this->em->getConfiguration()->addCustomHydrationMode('key_value', 'Chitanka\LibBundle\Hydration\KeyValueHydrator');
		$this->input = $input;
		$this->output = $output;
		$this->generateNewsletter($input->getArgument('month'));
	}

	/**
	 * Print the newsletter body for the given month: first the books
	 * (grouped and sorted by category), then the standalone texts.
	 */
	protected function generateNewsletter($month)
	{
		$this->output->writeln("\n= Книги =\n");
		$booksByCat = $this->_getBooks($month);
		ksort($booksByCat);
		foreach ($booksByCat as $cat => $bookRows) {
			$this->output->writeln("\n== $cat ==\n");
			ksort($bookRows);
			foreach ($bookRows as $bookRow) {
				$this->output->writeln($bookRow);
			}
		}
		$this->output->writeln("\n\n= Произведения, невключени в книги =\n");
		$textRows = $this->_getTexts($month);
		ksort($textRows);
		foreach ($textRows as $textRow) {
			$this->output->writeln($textRow);
		}
	}

	/**
	 * Collect newsletter lines for books revised in the given month,
	 * keyed by category and then by title+subtitle (used for sorting).
	 *
	 * @return array category => (title key => formatted line)
	 */
	private function _getBooks($month)
	{
		$repo = $this->em->getRepository('LibBundle:BookRevision');
		$booksByCat = array();
		#foreach ($repo->getByDate(array('2011-07-01', '2011-08-31 23:59'), 1, null, false) as $revision) {
		foreach ($repo->getByMonth($month) as $revision) {
			$authors = array();
			foreach ($revision['book']['authors'] as $author) {
				$authors[] = $author['name'];
			}
			$bookKey = $revision['book']['title'] . $revision['book']['subtitle'];
			$cat = $revision['book']['category']['name'];
			$booksByCat[$cat][$bookKey] = sprintf('* „%s“%s%s — http://chitanka.info/book/%d',
				$revision['book']['title'],
				($revision['book']['subtitle'] ? " ({$revision['book']['subtitle']})" : ''),
				($authors ? ' от ' . implode(', ', $authors) : ''),
				$revision['book']['id']);
		}
		return $booksByCat;
	}

	// TODO fetch only texts w/o books
	/**
	 * Collect newsletter lines for texts revised in the given month,
	 * keyed by title+subtitle (used for sorting).
	 *
	 * @return array title key => formatted line
	 */
	private function _getTexts($month)
	{
		$repo = $this->em->getRepository('LibBundle:TextRevision');
		$texts = array();
		#foreach ($repo->getByDate(array('2011-07-01', '2011-08-31 23:59'), 1, null, false) as $revision) {
		foreach ($repo->getByMonth($month) as $revision) {
			$authors = array();
			foreach ($revision['text']['authors'] as $author) {
				$authors[] = $author['name'];
			}
			$key = $revision['text']['title'] . $revision['text']['subtitle'];
			$texts[$key] = sprintf('* „%s“%s%s — http://chitanka.info/text/%d',
				$revision['text']['title'],
				($revision['text']['subtitle'] ? " ({$revision['text']['subtitle']})" : ''),
				($authors ? ' от ' . implode(', ', $authors) : ''),
				$revision['text']['id']);
		}
		return $texts;
	}
}
<file_sep><?php
/**
 * Send a 503 Service Unavailable response with the given static HTML
 * template (error.html / maintenance.html next to this script) and stop
 * execution immediately.
 *
 * @param string $template   Basename of the HTML file to serve
 * @param int    $retryAfter Value for the Retry-After header, in seconds
 */
function exitWithMessage($template = 'error', $retryAfter = 300) {
	header('HTTP/1.0 503 Service Temporarily Unavailable');
	header('Status: 503 Service Temporarily Unavailable');
	header("Retry-After: $retryAfter");
	readfile(__DIR__ . "/$template.html");
	exit;
}
/**
 * Whether the current request may be served from / stored in the simple
 * HTTP cache: only plain GET requests from users without the "mlt"
 * (logged-in marker) cookie qualify.
 *
 * @return bool
 */
function isCacheable() {
	if (array_key_exists('mlt', $_COOKIE)) {
		return false;
	}

	return $_SERVER['REQUEST_METHOD'] == 'GET';
}
/**
 * Minimal request-level HTTP cache. Each request URI is hashed into a
 * CacheFile stored under a three-level directory fan-out; entries carry
 * a TTL and are purged lazily on expired reads.
 */
class Cache {
	private $file;      // CacheFile backing this request's entry
	private $request;   // original request URI, kept for debug logging
	private $debug = false;

	public function __construct($requestUri, $cacheDir) {
		$hash = md5($requestUri);
		$fanout = "$hash[0]/$hash[1]/$hash[2]";
		$this->file = new CacheFile("$cacheDir/$fanout/$hash");
		$this->request = $requestUri;
	}

	/**
	 * @return array|null array('data' => ..., 'ttl' => ...) on a hit;
	 *                    null on a miss or an expired entry
	 */
	public function get() {
		if ($this->file->exists()) {
			$remaining = $this->file->getRemainingTtl();
			if ($remaining > 0) {
				$this->log("=== CACHE HIT");
				return array(
					'data' => $this->file->read(),
					'ttl' => $remaining,
				);
			}
			// expired entry — drop it and report a miss
			$this->purge();
		}
		return null;
	}

	/** Store content with the given TTL; a falsy TTL disables caching. */
	public function set($content, $ttl) {
		if ($ttl) {
			$this->file->write($content);
			$this->file->setTtl($ttl);
			$this->log("+++ CACHE MISS ($ttl)");
		}
	}

	private function purge() {
		$this->file->delete();
		$this->log('--- CACHE PURGE');
	}

	private function log($msg) {
		if ( ! $this->debug) {
			return;
		}
		error_log("$msg - $this->request");
	}
}
/**
 * One cache entry on disk: the deflate-compressed payload lives in
 * $name, its TTL (seconds) in "$name.ttl".
 */
class CacheFile {
	private $name;

	public function __construct($name) {
		$this->name = $name;
	}

	/** @return bool Whether the payload file exists */
	public function exists() {
		return file_exists($this->name);
	}

	/** Write the (left-trimmed, deflated) content, creating parent dirs. */
	public function write($content) {
		$dir = dirname($this->name);
		if ( ! file_exists($dir)) {
			mkdir($dir, 0777, true);
		}
		file_put_contents($this->name, gzdeflate(ltrim($content)));
	}

	/** @return string The inflated payload ('' stays '') */
	public function read() {
		$content = file_get_contents($this->name);
		if (empty($content)) {
			return $content;
		}
		return gzinflate($content);
	}

	/** Remove the payload and TTL files. */
	public function delete() {
		// Guard both unlinks: the original code warned when either file
		// was already gone (e.g. a concurrent purge, or a missing .ttl).
		if (file_exists($this->name)) {
			unlink($this->name);
		}
		if (file_exists("$this->name.ttl")) {
			unlink("$this->name.ttl");
		}
	}

	public function setTtl($value) {
		file_put_contents("$this->name.ttl", $value);
	}

	/** @return int|string TTL in seconds; 0 when the TTL file is missing */
	public function getTtl() {
		// The original returned false (and warned) for a missing TTL file;
		// 0 behaves identically in the arithmetic of getRemainingTtl().
		if ( ! file_exists("$this->name.ttl")) {
			return 0;
		}
		return file_get_contents("$this->name.ttl");
	}

	/** @return int Seconds of validity left (may be negative when expired) */
	public function getRemainingTtl() {
		$origTtl = $this->getTtl() + rand(0, 30) /* guard for race conditions */;
		return $origTtl - $this->getAge();
	}

	/** @return int Age of the payload file in seconds */
	public function getAge() {
		return time() - filemtime($this->name);
	}
}
// --- Front controller entry point -----------------------------------------
// Try the simple file cache first; on a hit the Symfony kernel is never
// booted at all.
$isCacheable = isCacheable();
if ($isCacheable) {
	$requestUri = $_SERVER['REQUEST_URI'];
	// AJAX responses differ from full pages, so cache them under a
	// distinct key.
	if (isset($_SERVER['HTTP_X_REQUESTED_WITH']) && $_SERVER['HTTP_X_REQUESTED_WITH'] == 'XMLHttpRequest') {
		$requestUri .= '.ajax';
	}
	$cache = new Cache($requestUri, __DIR__.'/../app/cache/simple_http_cache');
	if (null !== ($cachedContent = $cache->get())) {
		header("Cache-Control: public, max-age=".$cachedContent['ttl']);
		echo $cachedContent['data'];
		return;
	}
}
// uncomment to enter maintenance mode
// DO NOT remove next line - it is used by the auto-update command
//exitWithMessage('maintenance');
use Symfony\Component\ClassLoader\ApcClassLoader;
use Symfony\Component\HttpFoundation\Request;
// allow generated files (cache, logs) to be world-writable
umask(0000);
$rootDir = __DIR__.'/..';
$loader = require $rootDir.'/app/bootstrap.php.cache';
try {
	// Use APC for autoloading to improve performance
	$apcLoader = new ApcClassLoader('chitanka', $loader);
	$loader->unregister();
	$apcLoader->register(true);
} catch (\RuntimeException $e) {
	// APC not enabled
}
require $rootDir.'/app/AppKernel.php';
//require $rootDir.'/app/AppCache.php';
// Last-resort fatal-error handler: redirect to the installer when the app
// is simply unconfigured, otherwise show the static error page.
register_shutdown_function(function(){
	$error = error_get_last();
	if ($error['type'] == E_ERROR) {
		if (preg_match('/parameters\.yml.+does not exist/', $error['message'])) {
			header('Location: install.php');
			exit;
		}
		ob_clean();
		exitWithMessage('error');
	}
});
$kernel = new AppKernel('prod', false);
$kernel->loadClassCache();
//$kernel = new AppCache($kernel);
// When using the HttpCache, we need to call the method explicitly instead of relying on the configuration parameter
//Request::enableHttpMethodParameterOverride();
$request = Request::createFromGlobals();
$response = $kernel->handle($request);
// Store successful cacheable responses for subsequent requests; cache
// write failures must never break the response itself.
if ($isCacheable && $response->isOk()) {
	try {
		$cache->set($response->getContent(), $response->getTtl());
	} catch (\RuntimeException $e) {
	}
}
$response->send();
$kernel->terminate($request, $response);
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Entity\SearchString;
use Chitanka\LibBundle\Util\String;
/**
 * Handles all search pages: the global search, per-entity searches
 * (persons, texts, books, series, sequences, authors, translators),
 * their OpenSearch descriptors ("osd" format) and search-as-you-type
 * suggestions ("suggest" format).
 */
class SearchController extends Controller
{
	protected $responseAge = 86400; // 24 hours

	// bounds enforced on non-exact queries (in characters)
	private $minQueryLength = 3;
	private $maxQueryLength = 60;

	/** Global search across all entity types at once. */
	public function indexAction($_format)
	{
		if ($_format == 'osd') {
			return $this->display("index.$_format");
		}
		// getQuery() returns a ready Response for empty/invalid queries
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		$lists = array(
			'persons' => $this->getPersonRepository()->getByNames($query['text'], 15),
			'texts' => $this->getTextRepository()->getByTitles($query['text'], 15),
			'books' => $this->getBookRepository()->getByTitles($query['text'], 15),
			'series' => $this->getSeriesRepository()->getByNames($query['text'], 15),
			'sequences' => $this->getSequenceRepository()->getByNames($query['text'], 15),
			//'work_entries' => $this->getWorkEntryRepository()->getByTitleOrAuthor($query['text']),
			'labels' => $this->getLabelRepository()->getByNames($query['text']),
			'categories' => $this->getCategoryRepository()->getByNames($query['text']),
		);
		$found = array_sum(array_map('count', $lists)) > 0;
		if ($found) {
			// successful searches are logged for the "top searches" listing
			$this->logSearch($query['text']);
		} else {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'found' => $found,
		) + $lists;
		return $this->display("index.$_format");
	}

	public function personsAction(Request $request, $_format)
	{
		if ($_format == 'osd') {
			return $this->display("Person:search.$_format");
		}
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getPersonRepository(), 'name', 'person_show', 'slug');
		}
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		if (empty($query['by'])) {
			$query['by'] = 'name,orig_name,real_name,orig_real_name';
		}
		$persons = $this->getPersonRepository()->getByQuery($query);
		if ( ! ($found = count($persons) > 0)) {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'persons' => $persons,
			'found' => $found,
		);
		return $this->display("Person:search.$_format");
	}

	public function authorsAction(Request $request, $_format)
	{
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getPersonRepository()->asAuthor(), 'name', 'author_show', 'slug');
		}
		return $this->display("Author:search.$_format");
	}

	public function translatorsAction(Request $request, $_format)
	{
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getPersonRepository()->asTranslator(), 'name', 'translator_show', 'slug');
		}
		return $this->display("Translator:search.$_format");
	}

	public function textsAction(Request $request, $_format)
	{
		if ($_format == 'osd') {
			return $this->display("Text:search.$_format");
		}
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getTextRepository(), 'title', 'text_show', 'id');
		}
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		if (empty($query['by'])) {
			$query['by'] = 'title,subtitle,orig_title';
		}
		$texts = $this->getTextRepository()->getByQuery($query);
		if ( ! ($found = count($texts) > 0)) {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'texts' => $texts,
			'found' => $found,
		);
		return $this->display("Text:search.$_format");
	}

	public function booksAction(Request $request, $_format)
	{
		if ($_format == 'osd') {
			return $this->display("Book:search.$_format");
		}
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getBookRepository(), 'title', 'book_show', 'id');
		}
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		if (empty($query['by'])) {
			$query['by'] = 'title,subtitle,orig_title';
		}
		$books = $this->getBookRepository()->getByQuery($query);
		if ( ! ($found = count($books) > 0)) {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'books' => $books,
			'found' => $found,
		);
		return $this->display("Book:search.$_format");
	}

	public function seriesAction(Request $request, $_format)
	{
		if ($_format == 'osd') {
			return $this->display("Series:search.$_format");
		}
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getSeriesRepository(), 'name', 'series_show', 'slug');
		}
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		if (empty($query['by'])) {
			$query['by'] = 'name,orig_name';
		}
		$series = $this->getSeriesRepository()->getByQuery($query);
		if ( ! ($found = count($series) > 0)) {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'series' => $series,
			'found' => $found,
		);
		return $this->display("Series:search.$_format");
	}

	public function sequencesAction(Request $request, $_format)
	{
		if ($_format == 'osd') {
			return $this->display("Sequence:search.$_format");
		}
		if ($_format == 'suggest') {
			return $this->suggestByPrefix($request, $this->getSequenceRepository(), 'name', 'sequence_show', 'slug');
		}
		if (($query = $this->getQuery($_format)) instanceof Response) {
			return $query;
		}
		if (empty($query['by'])) {
			$query['by'] = 'name';
		}
		$sequences = $this->getSequenceRepository()->getByQuery($query);
		if ( ! ($found = count($sequences) > 0)) {
			$this->responseStatusCode = 404;
		}
		$this->view = array(
			'query' => $query,
			'sequences' => $sequences,
			'found' => $found,
		);
		return $this->display("Sequence:search.$_format");
	}

	/**
	 * Shared implementation of the "suggest" responses. The original code
	 * repeated this loop verbatim in seven actions; the behavior and the
	 * JSON payload shape (query, items, descriptions, urls) are unchanged.
	 *
	 * @param Request $request    Current request; "q" holds the typed prefix
	 * @param object  $repository Repository exposing getByQuery()
	 * @param string  $field      Field to match and to show as suggestion text
	 * @param string  $route      Route name for the result links
	 * @param string  $routeParam Route parameter name and result-array key
	 */
	private function suggestByPrefix(Request $request, $repository, $field, $route, $routeParam)
	{
		$items = $descs = $urls = array();
		$query = $request->query->get('q');
		$entities = $repository->getByQuery(array(
			'text' => $query,
			'by' => $field,
			'match' => 'prefix',
			'limit' => 10,
		));
		foreach ($entities as $entity) {
			$items[] = $entity[$field];
			$descs[] = '';
			$urls[] = $this->generateUrl($route, array($routeParam => $entity[$routeParam]), true);
		}
		return $this->displayJson(array($query, $items, $descs, $urls));
	}

	/**
	 * Read and validate the search query from the request.
	 *
	 * @return array|Response array(text, by, match) on success; a rendered
	 *         Response for an empty query (top-searches page) or an invalid
	 *         length (error message, HTTP 400)
	 */
	private function getQuery($_format = 'html')
	{
		$request = $this->get('request')->query;
		$query = trim($request->get('q'));
		if (empty($query)) {
			$this->view = array(
				'latest_strings' => $this->getSearchStringRepository()->getLatest(30),
				'top_strings' => $this->getSearchStringRepository()->getTop(30),
			);
			return $this->display("list_top_strings.$_format");
		}
		$query = String::fixEncoding($query);
		$matchType = $request->get('match');
		// exact matches skip the length limits
		if ($matchType != 'exact') {
			try {
				$this->validateQueryLength($query);
			} catch (\Exception $e) {
				$this->view['message'] = $e->getMessage();
				$this->responseStatusCode = 400;
				return $this->display("message.$_format");
			}
		}
		return array(
			'text' => $query,
			'by' => $request->get('by'),
			'match' => $matchType,
		);
	}

	/** @throws \Exception When the query is too short or too long */
	private function validateQueryLength($query) {
		$queryLength = mb_strlen($query, 'utf-8');
		if ($queryLength < $this->minQueryLength) {
			throw new \Exception(sprintf('Трябва да въведете поне %d знака.', $this->minQueryLength));
		}
		if ($queryLength > $this->maxQueryLength) {
			throw new \Exception(sprintf('Не може да въвеждате повече от %d знака.', $this->maxQueryLength));
		}
	}

	/** Record a successful search string (creating or incrementing it). */
	private function logSearch($query)
	{
		$searchString = $this->getSearchStringRepository()->findOneBy(array('name' => $query));
		if ( ! $searchString) {
			$searchString = new SearchString($query);
		}
		$searchString->incCount();
		$this->getEntityManager()->persist($searchString);
		$this->getEntityManager()->flush();
	}

	/** Render the latest search strings (short-lived cache). */
	public function latestAction($limit = 10)
	{
		$this->responseAge = 600; // 10 minutes
		$this->view = array(
			'strings' => $this->getSearchStringRepository()->getLatest($limit),
		);
		return $this->display('top_strings');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\EntityRepository as DoctrineEntityRepository;
/**
 * Common base for all repositories in the bundle. Adds persistence
 * shortcuts, counting, random selection, id-based lookup and a generic
 * field-query facility restricted to each repository's $queryableFields.
 */
abstract class EntityRepository extends DoctrineEntityRepository
{
	// Whitelist of fields a subclass allows in getByQuery(); empty here,
	// overridden by concrete repositories.
	protected $queryableFields = array();

	/** Persist the given entity and flush immediately. */
	public function persist($object)
	{
		$em = $this->getEntityManager();
		$em->persist($object);
		$em->flush();
	}

	public function flush()
	{
		$this->getEntityManager()->flush();
	}

	/** Alias of getCount(). @param string|null $where Extra DQL condition */
	public function count($where = null) {
		return $this->getCount($where);
	}

	/** @return int Number of entities, optionally filtered by $where */
	public function getCount($where = null) {
		$qb = $this->createQueryBuilder('e')->select('COUNT(e.id)');
		if ($where) {
			$qb->andWhere($where);
		}
		return $qb->getQuery()->getSingleScalarResult();
	}

	/** Apply limit/offset to a query; a falsy $limit disables pagination. */
	protected function setPagination($query, $page, $limit)
	{
		if ($limit) {
			$query->setMaxResults($limit)->setFirstResult(($page - 1) * $limit);
		}
		return $query;
	}

	/** @return object A random entity (optionally filtered by $where) */
	public function getRandom($where = null) {
		return $this->getRandomQuery($where)->getSingleResult();
	}

	/** @return mixed The id of a random entity */
	public function getRandomId($where = null) {
		return $this->getRandomQuery($where, 'e.id')->getSingleScalarResult();
	}

	// Random selection via a random offset into the (counted) result set.
	protected function getRandomQuery($where = null, $select = null) {
		$qb = $this->getEntityManager()->createQueryBuilder()
			->select($select ?: 'e')
			->from($this->getEntityName(), 'e');
		if ($where) {
			$qb->andWhere($where);
		}
		$query = $qb->getQuery()
			->setMaxResults(1)
			->setFirstResult(rand(1, $this->getCount($where)) - 1);
		return $query;
	}

	/**
	 * Fetch entities by id list.
	 * NOTE(review): the ids are interpolated straight into the DQL —
	 * safe only while every caller passes integer ids; verify callers.
	 *
	 * @return array Entity objects
	 */
	public function findByIds(array $ids)
	{
		if (empty($ids)) {
			return array();
		}
		return $this->getQueryBuilder()
			->where(sprintf('e.id IN (%s)', implode(',', $ids)))
			->getQuery()->getResult();
	}

	/**
	 * Like findByIds() but returns array-hydrated rows and accepts an
	 * "field dir, field dir" ordering string.
	 */
	public function getByIds($ids, $orderBy = null)
	{
		if (empty($ids)) {
			return array();
		}
		return $this->getQueryBuilder($orderBy)
			->where(sprintf('e.id IN (%s)', implode(',', $ids)))
			->getQuery()->getArrayResult();
	}

	/**
	 * Generic field search used by the search controllers.
	 *
	 * @param array $params text (required), by (comma-separated field
	 *        list, required, filtered against $queryableFields),
	 *        match (exact|prefix|suffix|other=contains), limit (optional)
	 * @return array Array-hydrated rows; empty when nothing is queryable
	 */
	public function getByQuery($params)
	{
		if (empty($params['text']) || empty($params['by'])) {
			return array();
		}
		switch ($params['match']) {
			case 'exact':
				$op = '=';
				$param = $params['text'];
				break;
			case 'prefix':
				$op = 'LIKE';
				$param = "$params[text]%";
				break;
			case 'suffix':
				$op = 'LIKE';
				$param = "%$params[text]";
				break;
			default:
				$op = 'LIKE';
				$param = "%$params[text]%";
				break;
		}
		$tests = array();
		foreach (explode(',', $params['by']) as $field) {
			// only whitelisted fields may be queried
			if (in_array($field, $this->queryableFields)) {
				$tests[] = "e.$field $op ?1";
			}
		}
		if (empty($tests)) {
			return array();
		}
		$query = $this->getQueryBuilder()
			->where(implode(' OR ', $tests))->setParameter(1, $param)
			->getQuery();
		if (isset($params['limit'])) {
			$query->setMaxResults($params['limit']);
		}
		return $query->getArrayResult();
	}

	public function getQueryableFields()
	{
		return $this->queryableFields;
	}

	/**
	 * Query builder for alias "e" with optional ordering given as a
	 * comma-separated list of "field [asc|desc]" items.
	 */
	public function getQueryBuilder($orderBys = null)
	{
		$qb = $this->createQueryBuilder('e');
		if ($orderBys) {
			foreach (explode(',', $orderBys) as $orderBy) {
				$orderBy = ltrim($orderBy);
				if (strpos($orderBy, ' ') === false) {
					$field = $orderBy;
					$order = 'asc';
				} else {
					list($field, $order) = explode(' ', ltrim($orderBy));
				}
				$qb->addOrderBy($field, $order);
			}
		}
		return $qb;
	}

	// Turn "a b" into "%a% %b%" for word-wise LIKE matching.
	protected function stringForLikeClause($s)
	{
		return "%".str_replace(' ', '% ', $s)."%";
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command that regenerates the header (table-of-contents) rows
 * for texts in the database, either for all texts with a head level or
 * for an explicit id list. Can dump the SQL instead of executing it.
 */
class UpdateHeadersDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:update-headers')
			->setDescription('Update text headers in the database')
			->addArgument('texts', InputArgument::OPTIONAL, 'Texts which headers should be updated (comma separated)')
			->addOption('dump-sql', null, InputOption::VALUE_NONE, 'Output SQL queries instead of executing them')
			->setHelp(<<<EOT
The <info>db:update-headers</info> command updates the text headers in the database.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->output = $output;
		$texts = trim($input->getArgument('texts'));
		$dumpSql = $input->getOption('dump-sql') === true;
		$this->updateHeaders($texts, $dumpSql);
		$output->writeln('/*Done.*/');
	}

	/**
	 * Build (and execute or print) the header-update queries for every
	 * matching text, iterating the result set to keep memory bounded.
	 *
	 * @param string $texts   Comma-separated text ids, or '' for all
	 * @param bool   $dumpSql Print the queries instead of running them
	 */
	protected function updateHeaders($texts, $dumpSql)
	{
		$queries = array();
		// only texts that actually have headers (headlevel > 0)
		$dql = 'SELECT t FROM LibBundle:Text t WHERE t.headlevel > 0';
		if ($texts) {
			$dql .= " AND t.id IN ($texts)";
		}
		$iterableResult = $this->em->createQuery($dql)->iterate();
		foreach ($iterableResult AS $row) {
			$text = $row[0];
			if ($text->isCompilation()) {
				// compilations have no single content file — dump the
				// assembled content into a temporary file first
				$file = tempnam(sys_get_temp_dir(), 'text');
				file_put_contents($file, $text->getRawContent());
			} else {
				$file = $this->webDir($text->getMainContentFile());
			}
			$queries = array_merge($queries, $this->buildTextHeadersUpdateQuery($file, $text->getId(), $text->getHeadlevel()));
			// detach to keep the identity map (and memory use) small
			$this->em->detach($text);
		}
		if ($dumpSql) {
			$this->printQueries($queries);
		} else {
			$this->executeUpdates($queries, $this->em->getConnection());
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Input\ArrayInput;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Filesystem\Exception\IOException;
use Chitanka\LibBundle\Service\Mutex;
use Chitanka\LibBundle\Service\DbUpdater;
use Chitanka\LibBundle\Service\FileUpdater;
use Chitanka\LibBundle\Service\SourceUpdater;
class AutoUpdateCommand extends CommonDbCommand
{
protected function configure()
{
parent::configure();
$this
->setName('auto-update')
->setDescription('Execute an auto-update of the whole system')
->addOption('no-wait', null, InputOption::VALUE_NONE, 'Skip waiting time at the beginning. Not recommended for mirror servers.')
->addOption('skip-content', null, InputOption::VALUE_NONE, 'Skip content update')
->addOption('skip-db', null, InputOption::VALUE_NONE, 'Skip database update')
->addOption('skip-src', null, InputOption::VALUE_NONE, 'Skip software update')
->setHelp(<<<EOT
The <info>auto-update</info> updates the whole system - software, database, and content.
EOT
);
}
protected function execute(InputInterface $input, OutputInterface $output)
{
$this->output = $output;
$container = $this->getContainer();
$rootDir = $container->getParameter('kernel.root_dir').'/..';
$updateDir = "$rootDir/update";
$mutex = new Mutex($updateDir);
if ( ! $mutex->acquireLock(1800/*secs*/)) {
return;
}
if ($input->getOption('no-wait') === false) {
// this will spread check requests from mirrors in time
sleep(rand(0, 30));
}
if ($input->getOption('skip-src') === false) {
$this->executeSrcUpdate($container->getParameter('update_src_url'), "$updateDir/src", $rootDir);
}
if ($input->getOption('skip-content') === false) {
$this->executeContentUpdate($container->getParameter('update_content_url'), "$updateDir/content", $this->contentDir());
}
if ($input->getOption('skip-db') === false) {
$this->executeDbUpdate($container->getParameter('update_db_url'), "$updateDir/db");
}
$mutex->releaseLock();
$output->writeln('Done.');
}
private function executeDbUpdate($fetchUrl, $updateDir)
{
$zip = $this->fetchUpdate($fetchUrl, $updateDir, date('Y-m-d/1'));
if ( ! $zip) {
return false;
}
$zip->extractTo($updateDir);
$zip->close();
$sqlImporter = $this->createSqlImporter();
$sqlImporter->importFile("$updateDir/db.sql");
unlink("$updateDir/db.sql");
$this->deleteRemovedNoticesIfDisallowed();
return true;
}
private function createSqlImporter()
{
$c = $this->getContainer();
require_once $c->getParameter('kernel.root_dir').'/../maintenance/sql_importer.lib.php';
$dbhost = $c->getParameter('database_host');
$dbname = $c->getParameter('database_name');
$dbport = $c->getParameter('database_port');
$dbuser = $c->getParameter('database_user');
$dbpassword = $c->getParameter('database_password');
$dsn = "mysql:host=$dbhost;dbname=$dbname";
if ($dbport) {
$dsn .= ";port=$dbport";
}
return new \SqlImporter($dsn, $dbuser, $dbpassword);
}
private function deleteRemovedNoticesIfDisallowed()
{
$c = $this->getContainer();
$param = 'allow_removed_notice';
if ($c->hasParameter($param) && $c->getParameter($param) === false) {
$db = $c->get('doctrine.orm.default_entity_manager')->getConnection();
$db->executeUpdate('UPDATE text SET removed_notice = NULL');
$db->executeUpdate('UPDATE book SET removed_notice = NULL');
}
}
private function executeContentUpdate($fetchUrl, $updateDir, $contentDir)
{
$zip = $this->fetchUpdate($fetchUrl, $updateDir, time());
if ( ! $zip) {
return false;
}
$updater = new FileUpdater($contentDir, $updateDir);
$updater->extractArchive($zip);
return true;
}
private function executeSrcUpdate($fetchUrl, $updateDir, $rootDir)
{
$zip = $this->fetchUpdate($fetchUrl, $updateDir, time());
if ( ! $zip) {
return false;
}
$updater = new SourceUpdater($rootDir, $updateDir);
$updater->lockFrontController();
$updater->extractArchive($zip);
$this->clearAppCache();
$updater->unlockFrontController();
return true;
}
private function clearAppCache()
{
$cacheDir = $this->getApplication()->getKernel()->getCacheDir();
$cacheDirOld = $cacheDir.'_old_'.time();
$fs = new \Symfony\Component\Filesystem\Filesystem;
try {
$fs->rename($cacheDir, $cacheDirOld);
$fs->mkdir($cacheDir);
$this->runCommand('cache:warmup');
$this->runCommand('cache:create-cache-class');
$fs->remove($cacheDirOld);
} catch (IOException $e) {
error_log("Auto-update: ".$e->getMessage());
}
}
private function runCommand($commandName)
{
$php = isset($_SERVER['_']) ? $_SERVER['_'] : PHP_BINDIR.'/php'; // PHP_BINARY available since 5.4
$rootDir = $this->getApplication()->getKernel()->getRootDir();
$environment = $this->getApplication()->getKernel()->getEnvironment();
shell_exec("$php $rootDir/console $commandName --env=$environment");
}
/**
 * Fetches an update archive from the remote server.
 * Returns false (instead of a ZipArchive) when no update is available: on the
 * very first run, on a redirection response (most likely 304 Not Modified),
 * or on any fetch error (which is logged).
 *
 * @return \ZipArchive|false
 */
private function fetchUpdate($fetchUrl, $updateDir, $now)
{
$url = $this->prepareFetchUrl($fetchUrl, $updateDir, $now);
if ($url == null) {
// First run: prepareFetchUrl only records the current time stamp
return false;
}
$this->output->writeln("Fetching update from $url");
try {
$response = $this->downloadUpdate($url, $updateDir);
} catch (\RuntimeException $e) {
error_log("fetch error by $url ({$e->getMessage()})");
return false;
}
if ($response->isRedirection()) { // most probably not modified - 304
return false;
}
if ( ! $response->isSuccessful()) {
error_log("fetch error by $url (code {$response->getStatusCode()})");
return false;
}
return $this->initZipFileFromContent($response->getContent());
}
/**
 * Builds the fetch URL by appending the recorded time stamp of the last check.
 * On the very first call the ".last" file does not exist yet: it is created
 * with the current time and null is returned, so the initial run fetches nothing.
 * NOTE(review): this method only writes the file when it is missing —
 * presumably a later update step refreshes it; verify against the caller.
 *
 * @return string|null the URL to fetch, or null on the first run
 */
private function prepareFetchUrl($fetchUrl, $updateDir, $now)
{
$lastmodFile = "$updateDir/.last";
if ( ! file_exists($lastmodFile)) {
file_put_contents($lastmodFile, $now);
return null;
}
$lastmod = trim(file_get_contents($lastmodFile));
return "$fetchUrl/$lastmod";
}
/**
 * Downloads the update through the "buzz" browser service, using a
 * resume-capable curl client that stores partial downloads in $updateDir.
 *
 * @return object the HTTP response returned by the browser service
 */
private function downloadUpdate($url, $updateDir)
{
$browser = $this->getContainer()->get('buzz');
$client = new \Chitanka\LibBundle\Service\ResumeCurlClient();
$client->setSaveDir($updateDir);
$browser->setClient($client);
// The client also listens to its own transfer events (resume bookkeeping)
$browser->addListener($client);
return $browser->get($url, array('User-Agent: Mylib (http://chitanka.info)'));
}
/**
 * Writes the downloaded archive content into a temporary file and opens it
 * as a zip archive.
 * On success the temporary file intentionally stays on disk — the returned
 * ZipArchive handle reads from it. If the content is empty or not a valid
 * archive, false is returned and the temporary file is cleaned up
 * (previously it was leaked on open failure).
 *
 * @param string $content raw zip data
 * @return \ZipArchive|false
 */
private function initZipFileFromContent($content)
{
	if (empty($content)) {
		return false;
	}
	$tmpfile = sys_get_temp_dir().'/chitanka-'.uniqid().'.zip';
	file_put_contents($tmpfile, $content);
	$zip = new \ZipArchive;
	if ($zip->open($tmpfile) === true) {
		return $zip;
	}
	// Invalid archive — do not leave the temporary file behind
	unlink($tmpfile);
	return false;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Join entity linking a translator (a Person) to a translated Text.
 * The (person, text) pair is unique; an optional position orders multiple
 * translators of the same text, and the year records when it was translated.
 *
 * @ORM\Entity
 * @ORM\Table(name="text_translator",
 *	uniqueConstraints={@ORM\UniqueConstraint(name="person_text_uniq", columns={"person_id", "text_id"})},
 *	indexes={
 *		@ORM\Index(name="text_idx", columns={"text_id"})}
 * )
 */
class TextTranslator extends Entity
{
/**
 * Surrogate primary key, generated by the project's custom ID generator.
 *
 * @ORM\Column(type="integer")
 * @ORM\Id
 * @ORM\GeneratedValue(strategy="CUSTOM")
 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
 */
private $id;
/**
 * The translator.
 *
 * @var integer $person
 * @ORM\ManyToOne(targetEntity="Person", inversedBy="textTranslators")
 */
private $person;
/**
 * The translated text.
 *
 * @var integer $text
 * @ORM\ManyToOne(targetEntity="Text", inversedBy="textTranslators")
 */
private $text;
/**
 * Position of this translator among all translators of the text (optional).
 *
 * @var integer $pos
 * @ORM\Column(type="smallint", nullable=true)
 */
private $pos;
/**
 * Year the translation was made (optional).
 *
 * @var integer $year
 * @ORM\Column(type="smallint", nullable=true)
 */
private $year;
// Two records are considered equal when they share the same ID
public function equals(TextTranslator $textTranslator)
{
return $this->getId() == $textTranslator->getId();
}
public function getId() { return $this->id; }
public function setPerson($person) { $this->person = $person; }
public function getPerson() { return $this->person; }
public function setText($text) { $this->text = $text; }
public function getText() { return $this->text; }
/**
 * Set pos
 *
 * @param integer $pos
 */
public function setPos($pos)
{
$this->pos = $pos;
}
/**
 * Get pos
 *
 * @return integer $pos
 */
public function getPos()
{
return $this->pos;
}
/**
 * Set year
 *
 * @param integer $year
 */
public function setYear($year)
{
$this->year = $year;
}
/**
 * Get year
 *
 * @return integer $year
 */
public function getYear()
{
return $this->year;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Util\String;
/**
 * Controller for book series: index, alphabetical listing and a detail view.
 * Responses are cacheable for a day.
 */
class SeriesController extends Controller
{
	protected $responseAge = 86400; // 24 hours

	public function indexAction($_format)
	{
		return $this->display("index.$_format");
	}

	/**
	 * Lists series whose names start with the given letter ('-' means all).
	 */
	public function listByAlphaAction($letter, $page, $_format)
	{
		$seriesPerPage = 50;
		$namePrefix = ($letter == '-') ? null : $letter;
		$repository = $this->getSeriesRepository();
		$pager = new Pager(array(
			'page' => $page,
			'limit' => $seriesPerPage,
			'total' => $repository->countByPrefix($namePrefix)
		));
		$this->view = array(
			'letter' => $letter,
			'series' => $repository->getByPrefix($namePrefix, $page, $seriesPerPage),
			'pager' => $pager,
			'route_params' => array('letter' => $letter),
		);
		return $this->display("list_by_alpha.$_format");
	}

	/**
	 * Shows one series, addressed by its slug, together with its texts.
	 */
	public function showAction($slug, $_format)
	{
		$slug = String::slugify($slug);
		$series = $this->getSeriesRepository()->findBySlug($slug);
		if ($series === null) {
			throw new NotFoundHttpException("Няма серия с код $slug.");
		}
		$this->view = array(
			'series' => $series,
			'texts' => $this->getTextRepository()->getBySeries($series),
		);
		return $this->display("show.$_format");
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\String;
/**
 * Legacy page that builds and sends an e-mail message (e.g. feedback forms).
 * Subclasses provide the form, the message body and any extra mail headers.
 */
class MailPage extends Page {
protected
$action = 'mail',
$mailSuccessMessage,
$mailFailureMessage,
$mailTo,
$mailFrom,
$mailSubject = '',
$extraMailHeaders = array()
;
public function __construct($fields) {
parent::__construct($fields);
$this->logFile = $this->logDir . '/email.log';
// $this->name = $this->request->value('name', ADMIN);
// $this->email = $this->request->value('email', ADMIN_EMAIL);
$this->name = $this->request->value('name');
$this->email = $this->request->value('email');
// Defaults: mail goes to the site administrator, from the site itself
$this->mailToName = ADMIN;
$this->mailToEmail = ADMIN_EMAIL;
$this->mailFromName = SITENAME;
$this->mailFromEmail = SITE_EMAIL;
$this->mailSubject = 'Тема на писмото';
$this->mailSuccessMessage = 'Съобщението беше изпратено.';
$this->mailFailureMessage = 'Изглежда е станал някакъв фал при
изпращането. Ако желаете, пробвайте още веднъж.';
$this->mailMessage = '';
}
/**
 * Builds the Swift message, logs it and sends it.
 * On any failure the form is rendered again and the error is appended
 * to the e-mail log file.
 */
protected function processSubmission() {
$mailer = $this->controller->get('mailer');
//$mail->setReturnPath($this->mailFromEmail);
$messageBody = $this->makeMailMessage();
Legacy::fillOnEmpty($this->mailFromEmail, '<EMAIL>');
Legacy::fillOnEmpty($this->mailFromName, 'Анонимен');
$from = array($this->mailFromEmail => $this->mailFromName);
try {
$message = \Swift_Message::newInstance($this->mailSubject)
->setFrom($from)
->setTo(array($this->mailToEmail => $this->mailToName))
->setBody($messageBody);
$headers = $message->getHeaders();
//$headers->addPathHeader('Return-Path', $this->mailFromEmail);
$headers->addMailboxHeader('Reply-To', $from);
$headers->addTextHeader('X-Mailer', 'Mylib');
}
// Fix: this file is namespaced, so the previously unqualified "Exception"
// resolved to a namespace-local class and never matched, letting
// message-building errors escape uncaught (the catch below already used \Exception).
catch (\Exception $e) {
$this->addMessage('Станал е някакъв гаф. Може адреса да не е правилен.', true);
file_put_contents($this->logFile, $e, FILE_APPEND);
return $this->buildContent();
}
$this->logEmail($messageBody);
try {
$result = $mailer->send($message);
}
catch (\Exception $e) {
$result = false;
file_put_contents($this->logFile, $e, FILE_APPEND);
}
if ( $result == 0 ) {
$this->addMessage($this->mailFailureMessage /*.
'<br />Съобщението за грешка, между другото, гласи: <code>'.
String::myhtmlentities( $res->getMessage() ) .'</code>'*/, true);
return $this->buildContent();
}
$this->addMessage($this->mailSuccessMessage);
return $this->makeSubmissionReturn();
}
protected function buildContent() {
return $this->makeForm();
}
/** Applies the common and the subclass-specific headers to a mail object. */
protected function setMailHeaders($mail) {
$mail->setFrom($this->mailFromEmail, $this->mailFromName);
$mail->setReturnPath($this->mailFromEmail);
$mail->addHeader('X-Mailer', 'Mylib');
foreach ( $this->extraMailHeaders as $name => $value ) {
$mail->addHeader($name, $value);
}
return $mail;
}
protected function makeSubmissionReturn() {
return $this->mailSuccessMessage;
}
// Hook methods meant to be overridden by subclasses
protected function makeForm() { return ''; }
protected function makeMailMessage() { return $this->mailMessage; }
/** Appends the outgoing message (with optional headers) to the e-mail log. */
protected function logEmail($message, $headers = array()) {
$sheaders = '';
foreach ($headers as $header => $value) {
$sheaders .= "$header: $value\n";
}
$date = date('Y-m-d H:i:s');
$logString = <<<EOS
+++ EMAIL +++
[$date]
$sheaders
Subject: $this->mailSubject
To: $this->mailToName <$this->mailToEmail>
Message:
$message
--- EMAIL ---
EOS;
file_put_contents($this->logFile, $logString, FILE_APPEND);
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Entity\WorkEntry;
use Chitanka\LibBundle\Service\Notifier;
/**
 * Console command that notifies users about work entries which have not been
 * touched for a given number of months. A user is not re-notified more often
 * than once per "stalk interval", and listed users can be skipped entirely.
 */
class NotifyUsersForOldWorkEntriesCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('lib:notify-old-work-entries')
			->setDescription('Notify all users with too old work entries')
			->addArgument('age', InputArgument::REQUIRED, 'Threshold age for notification (in months)')
			->addArgument('stalk-interval', InputArgument::OPTIONAL, 'Number of days between two subsequent notifications. Default: 7', 7)
			->addArgument('skip-users', InputArgument::OPTIONAL, 'List of users by name which should not get notifications. Format: USERNAME1[,USERNAME2]*')
			->setHelp(<<<EOT
Notify all users with too old work entries.
EOT
		);
	}

	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$oldEntries = $this->getRepository('WorkEntry')->findOlderThan($this->getThresholdDate($input));
		$skippedUsers = $this->getSkippedUsers($input);
		$notifier = new Notifier($this->getContainer()->get('mailer'));
		foreach ($oldEntries as $entry) {
			if ($this->shouldSkipEntry($entry, $skippedUsers)) {
				continue;
			}
			$this->sendNotification($notifier, $entry, $input->getArgument('stalk-interval'));
		}
		// Persist all updated last-notification dates in one flush
		$this->getEntityManager()->flush();
		$output->writeln('/*Done.*/');
	}

	/**
	 * Sends a notification for one entry unless its user was already notified
	 * within the last $stalkInterval days, and records the notification time.
	 */
	private function sendNotification(Notifier $notifier, WorkEntry $entry, $stalkInterval)
	{
		if ($entry->isNotifiedWithin("$stalkInterval days")) {
			return;
		}
		$notifier->sendMailByOldWorkEntry($entry);
		$entry->setLastNotificationDate(new \DateTime);
	}

	/** Returns the date (Y-m-d) before which entries count as "too old". */
	private function getThresholdDate(InputInterface $input)
	{
		$age = $input->getArgument('age');
		return date('Y-m-d', strtotime("-$age months"));
	}

	/**
	 * Returns the user names excluded from notification.
	 * Fixed: a missing "skip-users" argument now yields an empty list instead
	 * of feeding null to explode() — which produced array('') and raises a
	 * deprecation warning on newer PHP versions.
	 */
	private function getSkippedUsers(InputInterface $input)
	{
		$skipUsers = $input->getArgument('skip-users');
		if ($skipUsers === null || $skipUsers === '') {
			return array();
		}
		return explode(',', $skipUsers);
	}

	private function shouldSkipEntry(WorkEntry $entry, array $skippedUsers)
	{
		return in_array($entry->getUser()->getUsername(), $skippedUsers);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
/**
 * Controller bridging the news listing to the legacy page implementation.
 */
class NewsController extends Controller
{
public function indexAction($page)
{
// The legacy News page reads its page number from the request superglobal,
// so it must be set before the legacy page is rendered
$_REQUEST['page'] = $page;
return $this->legacyPage('News');
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Util;
/**
 * Small date helpers.
 */
class Datetime
{
	/**
	 * Returns the last second of the given month as "YYYY-MM-DD 23:59:59".
	 *
	 * @param string $month a month in "YYYY-MM" format
	 * @return string
	 */
	static public function endOfMonth($month)
	{
		list($y, $m) = explode('-', $month);
		if ($m == 2) {
			// Gregorian leap year: divisible by 4, except centuries not divisible by 400
			$isLeap = $y % 4 == 0 && ($y % 100 != 0 || $y % 400 == 0);
			$lastday = $isLeap ? 29 : 28;
		} else {
			// Month lengths alternate 31/30; the parity of the alternation flips after July
			$lastday = ($m - 1) % 7 % 2 ? 30 : 31;
		}
		return "$month-$lastday 23:59:59";
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
/**
 * Sanitizes third-party feed content before it is displayed:
 * strips script elements and removes tiny "beacon" tracking images.
 */
class FeedService
{
	/** Applies all cleanup steps to the given markup. */
	public function cleanup($contents)
	{
		return $this->removeImageBeacons($this->removeScriptContent($contents));
	}

	/**
	 * Removes <script>…</script> elements, repeating until a pass removes
	 * nothing more — this also handles nested or reassembled occurrences.
	 */
	public function removeScriptContent($contents)
	{
		$previous = null;
		$current = $contents;
		while ($current !== $previous) {
			$previous = $current;
			$current = preg_replace('|<\s*script[^>]*>.*<\s*/\s*script\s*>|Ums', '', $previous);
		}
		return $current;
	}

	/**
	 * Drops <img> tags whose declared width or height is below four pixels —
	 * the typical shape of tracking beacons.
	 */
	public function removeImageBeacons($contents)
	{
		$threshold = 4;
		$filterImageTag = function($imgMatch) use ($threshold) {
			$tag = $imgMatch[0];
			foreach (explode(' ', $tag) as $attribute) {
				if (strpos($attribute, '=') === false) {
					continue;
				}
				list($attrName, $attrValue) = explode('=', $attribute);
				if ($attrName != 'width' && $attrName != 'height') {
					continue;
				}
				if (trim($attrValue, '\'"') < $threshold) {
					return '';
				}
			}
			return $tag;
		};
		return preg_replace_callback('|<\s*img [^>]+>|', $filterImageTag, $contents);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Listener;
//use Symfony\Bundle\FrameworkBundle\Debug\EventDispatcher;
use Symfony\Bundle\FrameworkBundle\EventDispatcher;
/**
 * Placeholder event listener; run() is intentionally a no-op.
 * NOTE(review): the EventDispatcher imported above is currently unused here.
 */
class Listener
{
public function run()
{
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/**
 * Admin CRUD controller for foreign books.
 * All behavior is inherited unchanged from CRUDController.
 */
class ForeignBookController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/**
 * Admin CRUD controller for texts.
 * All behavior is inherited unchanged from CRUDController.
 */
class TextController extends CRUDController {
}
<file_sep><?php
/*
This tiny script receives base names of javascript files
and serves them as a combined one.
It expects a comma as a delimiter of the file names.
The file names should not have an extension,
".js" is automatically appended.
*/
// Sanitized query string: the comma-separated list of script base names
$query = sanitizeInput($_SERVER['QUERY_STRING']);
$curdir = dirname(__FILE__);
// The cache directory differs depending on whether this script runs from
// the bundle's public copy (under /bundles/) or directly from the sources
$path = strpos($curdir, '/bundles/') === false
? "$curdir/../../../../../../web/cache"
: "$curdir/../../../cache";
// Cache file keyed by the (sanitized) request URI; built on the first request
$combiFile = $path . sanitizeInput($_SERVER['REQUEST_URI']);
if ( ! file_exists($combiFile) ) {
createCombiFile($query, $combiFile);
}
// Serve with far-future caching headers — the URI changes when the file list does
header('Content-Type: application/x-javascript; charset=UTF-8');
header('Expires: Sun, 17-Jan-2038 19:14:07 GMT');
header('Cache-Control: max-age=315360000'); // 10 years
header('Last-Modified: Sun, 01 Jan 2001 00:00:01 GMT');
readfile($combiFile);
/**
 * Builds the combined javascript file out of a comma-separated list of base
 * names (without extension) and stores it under $combiName.
 * Minified variants (NAME.min.js) are preferred unless debug mode is requested;
 * names matching no existing file are silently skipped.
 */
function createCombiFile($query, $combiName)
{
	$extension = '.js';
	$useMinified = !(isset($_REQUEST['debug']) && $_REQUEST['debug']);
	$combined = '';
	foreach (explode(',', strtr($query, array($extension => ''))) as $baseName) {
		// Try the minified variant first (when allowed), then the plain file
		$candidates = array();
		if ($useMinified) {
			$candidates[] = "$baseName.min$extension";
		}
		$candidates[] = "$baseName$extension";
		foreach ($candidates as $candidate) {
			if (file_exists($candidate)) {
				$combined .= "/*=$candidate*/\n" . file_get_contents($candidate) . "\n";
				break;
			}
		}
	}
	$targetDir = dirname($combiName);
	if (!file_exists($targetDir)) {
		mkdir($targetDir, 0777, true);
	}
	file_put_contents($combiName, $combined);
}
/**
 * Whitelists the characters allowed in the query string / request URI and
 * collapses runs of dots, so the result can never contain a ".." sequence.
 * Security fix: the previous single-pass replacement ('..' -> '.') could be
 * defeated — e.g. "...." collapsed to "..", allowing path traversal out of
 * the cache directory. Replacing repeatedly until stable closes that hole.
 */
function sanitizeInput($input) {
	$input = preg_replace('#[^/a-zA-Z\d,._-]#', '', $input);
	// Repeat until stable: one pass can reassemble ".." from longer dot runs
	while (strpos($input, '..') !== false) {
		$input = strtr($input, array('..' => '.'));
	}
	return $input;
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Bridge\Doctrine\Validator\Constraints\UniqueEntity;
use Chitanka\LibBundle\Util\String;
/**
 * A publisher's book sequence: an ordered set of books issued by one
 * publisher under a common name.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\SequenceRepository")
 * @ORM\Table(name="sequence",
 *	indexes={
 *		@ORM\Index(name="name_idx", columns={"name"}),
 *		@ORM\Index(name="publisher_idx", columns={"publisher"})}
 * )
 * @UniqueEntity(fields="slug", message="This slug is already in use.")
 */
class Sequence extends Entity
{
/**
 * Surrogate primary key, generated by the project's custom ID generator.
 *
 * @ORM\Column(type="integer")
 * @ORM\Id
 * @ORM\GeneratedValue(strategy="CUSTOM")
 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
 */
private $id;
/**
 * URL-safe identifier; always normalized through String::slugify().
 *
 * @var string $slug
 * @ORM\Column(type="string", length=50, unique=true)
 */
private $slug = '';
/**
 * Display name of the sequence.
 *
 * @var string $name
 * @ORM\Column(type="string", length=100)
 */
private $name = '';
/**
 * Name of the publisher issuing the sequence (optional).
 *
 * @var string
 * @ORM\Column(type="string", length=100, nullable=true)
 */
private $publisher = '';
/**
 * Whether each book's sequence number should be displayed.
 *
 * @ORM\Column(type="boolean")
 */
private $is_seqnr_visible = true;
/**
 * Books in this sequence, ordered by their sequence number.
 *
 * @var array
 * @ORM\OneToMany(targetEntity="Book", mappedBy="sequence")
 * @ORM\OrderBy({"seqnr" = "ASC"})
 */
private $books;
public function getId() { return $this->id; }
// The slug is normalized on every assignment
public function setSlug($slug) { $this->slug = String::slugify($slug); }
public function getSlug() { return $this->slug; }
public function setName($name) { $this->name = $name; }
public function getName() { return $this->name; }
public function setPublisher($publisher) { $this->publisher = $publisher; }
public function getPublisher() { return $this->publisher; }
public function setIsSeqnrVisible($is_seqnr_visible) { $this->is_seqnr_visible = $is_seqnr_visible; }
public function getIsSeqnrVisible() { return $this->is_seqnr_visible; }
public function isSeqnrVisible() { return $this->is_seqnr_visible; }
public function getBooks() { return $this->books; }
public function __toString()
{
return $this->name;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Number;
use Chitanka\LibBundle\Entity\Text;
/**
 * Legacy page showing text ratings — either all ratings for one text
 * (selected via a text ID) or all ratings given by one user (via ?username).
 */
class TextratingPage extends TextPage {
protected
$action = 'textrating',
$includeUserLinks = true;
public function __construct($fields) {
parent::__construct($fields);
$this->title = 'Оценки';
$this->textId = (int) $this->request->value( self::FF_TEXT_ID, 0, 1 );
$this->username = $this->request->value('username');
}
// A given username takes precedence over a text ID
protected function buildContent() {
if ( ! empty( $this->username ) ) {
return $this->makeListByUser();
}
return $this->makeListByText();
}
/**
 * Builds the table of all ratings for the current text, most recent first.
 * A $limit of 0 means "no limit".
 */
protected function makeListByText($limit = 0, $offset = 0) {
$this->initData();
if ( !is_object($this->work) ) {
return '';
}
$qa = array(
'SELECT' => 'tr.rating, tr.date, u.username',
'FROM' => DBT_TEXT_RATING .' tr',
'LEFT JOIN' => array(
DBT_USER .' u' => 'tr.user_id = u.id',
),
'WHERE' => array('tr.text_id' => $this->textId),
'ORDER BY' => "tr.date DESC",
'LIMIT' => array($offset, $limit),
);
$this->data = array();
// makeListByTextItem() is invoked once per result row and fills $this->data
$this->db->iterateOverResult(
$this->db->extselectQ($qa), 'makeListByTextItem', $this);
$this->title = 'Оценки за ' . $this->makeTextLinkWithAuthor($this->work);
if ( empty($this->data) ) {
return '<p class="no-items">Няма дадени оценки.</p>';
}
// Prepend the header row only when there are actual data rows
$this->data = array_merge(array(
array(
array( array('type' => 'header'), 'Дата'),
array( array('type' => 'header'), 'Потребител'),
array( array('type' => 'header'), 'Оценка'),
)
), $this->data);
return $this->out->simpleTable($this->title, $this->data);
}
// Row callback for makeListByText()
public function makeListByTextItem($dbrow) {
$this->data[] = array(
Legacy::humanDate($dbrow['date']),
$this->includeUserLinks ? $this->makeUserLink($dbrow['username']) : $dbrow['username'],
$dbrow['rating']
);
}
/**
 * Builds the table of all ratings given by the current user, most recent
 * first, with each rated title and its authors.
 */
protected function makeListByUser($limit = 0, $offset = 0) {
$qa = array(
'SELECT' => 'GROUP_CONCAT(a.name ORDER BY aof.pos) author,
t.id textId, t.title, tr.rating, tr.date',
'FROM' => DBT_TEXT_RATING .' tr',
'LEFT JOIN' => array(
DBT_TEXT .' t' => 'tr.text_id = t.id',
DBT_AUTHOR_OF .' aof' => 't.id = aof.text_id',
DBT_PERSON .' a' => 'aof.person_id = a.id',
),
'WHERE' => array('tr.user_id IN ('
. $this->db->selectQ(DBT_USER,
array('username' => $this->username),
'id')
. ')'),
'GROUP BY' => 't.id',
'ORDER BY' => "tr.date DESC",
'LIMIT' => array($offset, $limit),
);
$this->data = array(
array(
array( array('type' => 'header'), 'Дата'),
array( array('type' => 'header'), 'Заглавие'),
array( array('type' => 'header'), 'Оценка'),
)
);
// makeListByUserItem() is invoked once per result row and fills $this->data
$this->db->iterateOverResult(
$this->db->extselectQ($qa), 'makeListByUserItem', $this);
$this->title = 'Оценки от ' . $this->makeUserLink( $this->username );
return $this->out->simpleTable($this->title, $this->data);
}
// Row callback for makeListByUser()
public function makeListByUserItem($dbrow) {
$this->data[] = array(
Legacy::humanDate($dbrow['date']),
$this->makeSimpleTextLink($dbrow['title'], $dbrow['textId'])
. $this->makeFromAuthorSuffix($dbrow),
$dbrow['rating']
);
}
/**
 * Loads the current text into $this->work.
 * @return bool false (after adding an error message) when it does not exist
 */
protected function initData() {
$this->work = $this->controller->getRepository('Text')->find($this->textId);
if ( !is_object($this->work) ) {
$this->addMessage("Няма такова произведение (номер $this->textId).", true);
return false;
}
return true;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Admin;
use Sonata\AdminBundle\Form\FormMapper;
use Sonata\AdminBundle\Datagrid\DatagridMapper;
use Sonata\AdminBundle\Datagrid\ListMapper;
use Sonata\AdminBundle\Show\ShowMapper;
use Sonata\AdminBundle\Route\RouteCollection;
use Symfony\Component\Form\FormEvents;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Entity\Book;
use Chitanka\LibBundle\Entity\BookRevision;
use Chitanka\LibBundle\Util\Language;
/**
 * Sonata admin definition for Book entities: show/list/edit configuration,
 * datagrid filters, and a pre-update hook that keeps derived data in sync.
 */
class BookAdmin extends Admin
{
protected $baseRoutePattern = 'book';
protected $baseRouteName = 'admin_book';
protected $translationDomain = 'admin';
public $extraActions = 'LibBundle:BookAdmin:extra_actions.html.twig';
// Entity manager injected via setEntityManager(); used to resolve the book's texts
private $em;
public function setEntityManager(EntityManager $em)
{
$this->em = $em;
}
// Books cannot be created through the admin — only edited or deleted
protected function configureRoutes(RouteCollection $collection)
{
$collection->remove('create');
}
protected function configureShowField(ShowMapper $showMapper)
{
$showMapper
->add('slug')
->add('title')
->add('authors')
->add('subtitle')
->add('title_extra')
->add('orig_title')
->add('lang')
->add('orig_lang')
->add('year')
//->add('trans_year')
->add('type')
->add('sequence')
->add('seqnr')
->add('category')
->add('removed_notice')
->add('texts')
->add('links', null, array('label' => 'Site Links'))
->add('created_at')
;
}
protected function configureListFields(ListMapper $listMapper)
{
// Several columns are rendered by custom templates (url, sfbg, puk)
$listMapper
->add('url', 'string', array('template' => 'LibBundle:BookAdmin:list_url.html.twig'))
->add('slug')
->addIdentifier('title')
->add('id')
->add('type')
->add('sfbg', 'string', array('template' => 'LibBundle:BookAdmin:list_sfbg.html.twig'))
->add('puk', 'string', array('template' => 'LibBundle:BookAdmin:list_puk.html.twig'))
->add('_action', 'actions', array(
'actions' => array(
'view' => array(),
'edit' => array(),
'delete' => array(),
)
))
;
}
//public $preFormContent = 'LibBundle:BookAdmin:form_datafiles.html.twig';
protected function configureFormFields(FormMapper $formMapper)
{
$formMapper
//->add('sfbg', 'string', array('template' => 'LibBundle:BookAdmin:form_sfbg.html.twig'))
//->add('datafiles', 'string', array('template' => 'LibBundle:BookAdmin:form_datafiles.html.twig'))
->with('General attributes')
->add('slug')
->add('title')
->add('lang', 'choice', array('choices' => Language::getLangs()))
->add('orig_lang', 'choice', array('required' => false, 'choices' => Language::getLangs()))
->add('type', 'choice', array('choices' => Book::getTypeList()))
->add('bookAuthors', 'sonata_type_collection', array(
'by_reference' => false,
'required' => false,
), array(
'edit' => 'inline',
'inline' => 'table',
))
->end()
->with('Extra attributes')
->add('subtitle', null, array('required' => false))
->add('title_extra', null, array('required' => false))
->add('orig_title', null, array('required' => false))
->add('year')
//->add('trans_year', null, array('required' => false))
->add('sequence', null, array('required' => false, 'query_builder' => function ($repo) {
return $repo->createQueryBuilder('e')->orderBy('e.name');
}))
->add('seqnr', null, array('required' => false))
->add('category', null, array('required' => false, 'query_builder' => function ($repo) {
return $repo->createQueryBuilder('e')->orderBy('e.name');
}))
->add('links', 'sonata_type_collection', array(
'by_reference' => false,
'required' => false,
'label' => 'Site Links',
), array(
'edit' => 'inline',
'inline' => 'table',
'sortable' => 'site_id'
))
->end()
->with('Textual content')
->add('raw_template', 'textarea', array(
'label' => 'Template',
'required' => false,
'trim' => false,
'attr' => array(
'class' => 'span12',
),
))
->add('annotation', 'textarea', array(
'required' => false,
'trim' => false,
'attr' => array(
'class' => 'span12',
),
))
->add('extra_info', 'textarea', array(
'required' => false,
'trim' => false,
'attr' => array(
'class' => 'span12',
),
))
->add('revision_comment', 'text', array('required' => false))
->add('removed_notice')
->end()
;
// fixNewLines runs before binding; presumably normalizes submitted
// line endings — handler defined elsewhere, verify there
$formMapper->getFormBuilder()->addEventListener(FormEvents::PRE_BIND, array($this, 'fixNewLines'));
}
protected function configureDatagridFilters(DatagridMapper $datagrid)
{
$datagrid
->add('title')
->add('subtitle')
->add('type')
->add('has_cover')
->add('has_anno')
;
}
/**
 * Keeps derived data in sync before an update is flushed:
 * back-references on links and book authors, the texts resolved from the
 * template, and a fresh revision record when a comment was entered.
 */
public function preUpdate($book)
{
foreach ($book->getLinks() as $link) {
$link->setBook($book);
}
foreach ($book->getBookAuthors() as $bookAuthor) {
if ($bookAuthor->getPerson()) {
$bookAuthor->setBook($book);
}
}
if ($book->textsNeedUpdate()) {
$textRepo = $this->em->getRepository('LibBundle:Text');
$texts = $textRepo->findByIds($book->getTextIdsFromTemplate());
$book->setTexts($texts);
}
if ($book->getRevisionComment()) {
$revision = new BookRevision;
$revision->setComment($book->getRevisionComment());
$revision->setBook($book);
$revision->setDate(new \DateTime);
$revision->setFirst(false);
$book->addRevision($revision);
}
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
/**
 * Controller for the front page and assorted site-wide helper actions.
 */
class MainController extends Controller
{
	/** Front page: shows the current site notices. */
	public function indexAction()
	{
		$this->responseAge = 600;
		$notices = $this->getSiteNoticeRepository()->findForFrontPage();
		$this->view = array('siteNotices' => $notices);
		return $this->display('index');
	}

	public function aboutAction()
	{
		return $this->legacyPage('About');
	}

	public function rulesAction()
	{
		return $this->legacyPage('Rules');
	}

	public function blacklistAction()
	{
		return $this->legacyPage('Blacklist');
	}

	/** Unknown routes fall through to the 404 page. */
	public function defaultAction()
	{
		return $this->notFoundAction();
	}

	/** Renders the "not found" template with a 404 status code. */
	public function notFoundAction()
	{
		$notFoundResponse = $this->display('not_found');
		$notFoundResponse->setStatusCode(404);
		return $notFoundResponse;
	}

	public function redirectAction($route)
	{
		return $this->redirect($route, true);
	}

	/** Renders a box with a randomly chosen related site. */
	public function siteboxAction()
	{
		return $this->render('LibBundle:Main:sitebox.html.twig', array(
			'site' => $this->getSiteRepository()->getRandom()
		));
	}

	/** Shows the latest book revisions (default: three). */
	public function lastBooksAction($limit = 3)
	{
		$latestRevisions = $this->getBookRevisionRepository()->getLatest($limit, 1, false);
		$this->view = array('revisions' => $latestRevisions);
		return $this->display('last_books');
	}

	/** Shows the latest text revisions (default: twenty). */
	public function lastTextsAction($limit = 20)
	{
		$latestRevisions = $this->getTextRevisionRepository()->getLatest($limit, 1, false);
		$this->view = array('revisions' => $latestRevisions);
		return $this->display('last_texts');
	}

	public function catalogAction($_format)
	{
		return $this->display("catalog.$_format");
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\Number;
/**
 * Legacy page that renders an RSS 2.0 feed of recent workroom entries.
 * Delegates the item markup to the underlying "work" page and emits the
 * complete XML document directly (bypassing the normal page rendering).
 */
class FeedPage extends Page {
protected
$action = 'feed',
$validObjs = array('work'),
$defObj = 'work', $defFeedType = 'rss', $defListLimit = 25,
$maxListLimit = 200,
/** See http://validator.w3.org/feed/docs/warning/SecurityRisk.html */
$dangerous_desc_tags = array(
'comment',
'embed',
'link',
'listing',
'meta',
'noscript',
'object',
'plaintext',
'script',
'xmp',
)
;
public function __construct($fields) {
parent::__construct($fields);
$this->title = 'Зоб за новинарски четци';
$this->feedDescription = 'Универсална електронна библиотека';
$this->server = $this->request->server();
$this->root = $this->server . $this->root;
$this->contentType = 'application/rss+xml';
// Requested object type and item count are clamped to the allowed values
$this->obj = Legacy::normVal(
$this->request->value('type', $this->defObj),
$this->validObjs, $this->defObj);
$this->llimit = Number::normInt(
(int) $this->request->value('count', $this->defListLimit),
$this->maxListLimit);
$this->feedtype = 'rss';
$this->langCode = 'bg';
}
public function title() {
return "$this->title — $this->sitename";
}
/**
 * Builds the whole feed and sends it to the client directly, then exits —
 * the regular page layout is deliberately bypassed.
 */
protected function buildContent() {
$ftPref = ucfirst($this->feedtype);
$myfields = array('root' => $this->root);
$pagename = '';
// Method names like makeRssWorkItem/makeRssFeed are composed dynamically
switch ($this->obj) {
case 'work':
$makeItemFunc = 'make'. $ftPref . ucfirst($this->obj) .'Item';
$pagename = $this->obj;
$myfields['objId'] = 0;
break;
}
$bufferq = false;
if ($this->obj == 'work') {
$bufferq = true;
$myfields['showProgressbar'] = false;
}
$this->basepage = Setup::getPage(ucfirst($pagename), $this->controller, $this->container, false);
$this->basepage->setFields($myfields);
$makeFunc = 'make'.$ftPref.'Feed';
$q = $this->basepage->makeSqlQuery($this->llimit, 0, 'DESC');
$this->title = $this->basepage->title();
$this->fullContent = $this->$makeFunc($q, $makeItemFunc, $bufferq);
$this->addTemplates();
$feed = Legacy::expandTemplates($this->fullContent);
$page = <<<FEED
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
$feed
</channel>
</rss>
FEED;
header("Content-Type: $this->contentType; charset=UTF-8");
header('Content-Length: '. strlen($page));
echo $page;
exit;
return '';
}
/** Builds the channel header elements and appends one item per result row. */
protected function makeRssFeed($query, $makeItemFunc, $bufferq) {
$request_uri = $this->request->requestUri(true);
$ch =
$this->makeXmlElement('title', $this->title() ) .
$this->makeXmlElement('link', $this->server) .
$this->makeXmlElement('description', $this->feedDescription) .
$this->makeXmlElement('language', $this->langCode) .
$this->makeXmlElement('lastBuildDate', $this->makeRssDate()) .
$this->makeXmlElement('generator', 'mylib') .
$this->db->iterateOverResult($query, $makeItemFunc, $this, $bufferq);
return <<<EOS
$ch
<atom:link href="$request_uri" rel="self" type="application/rss+xml" />
EOS;
}
/** Row callback: turns one workroom entry into an RSS item. */
public function makeRssWorkItem($dbrow) {
extract($dbrow);
$dbrow['showtitle'] = $dbrow['showtime'] = false;
$dbrow['expandinfo'] = true;
$description = $this->basepage->makeWorkListItem($dbrow, false);
$time = $date;
$link = $this->controller->generateUrl('workroom', array(), true) . "#e$id";
// Guid changes with status/progress so readers surface updated entries
$guid = "$link-$status-$progress";
if ( $type == 1 && $status >= WorkPage::MAX_SCAN_STATUS ) {
$guid .= '-'. $this->formatDateForGuid($date);
}
$creator = $username;
$data = compact('title', 'link', 'time', 'guid', 'description', 'creator');
return $this->makeRssItem($data);
}
/** Renders a single &lt;item&gt; element from the assembled data array. */
public function makeRssItem($data) {
extract($data);
if ( empty($title) ) $title = strtr($time, array(' 00:00:00' => ''));
if (empty($creator)) $creator = $this->sitename;
// unescape escaped ampersands to prevent double escaping them later
// Fix: the replacement map had degraded to a no-op ('&' => '&'),
// contradicting the comment above; restored to '&amp;' => '&'.
$link = strtr($link, array('&amp;' => '&'));
if (empty($guid)) $guid = $link;
$src = empty($source) || strpos($source, 'http') === false ? '' : $source;
$lvl = 2;
// Make relative hrefs absolute so they work inside feed readers
$description = str_replace('href="/', 'href="'.$this->server.'/', $description);
return "\n\t<item>".
$this->makeXmlElement('title', strip_tags($title), $lvl) .
$this->makeXmlElement('dc:creator', $creator, $lvl) .
$this->makeXmlElement('link', $link, $lvl) .
$this->makeXmlElement('pubDate', $this->makeRssDate($time), $lvl) .
$this->makeXmlElement('guid', $guid, $lvl) .
$this->makeXmlElement('description',
$this->escape_element( $description ), $lvl) .
$this->makeXmlElement('source', $src, $lvl, array('url'=>$src)) .
"\n\t</item>";
}
/** Emits an escaped, indented XML element; empty content yields nothing. */
protected function makeXmlElement($name, $content, $level = 1, $attrs = array()) {
if ( empty($content) ) {
return '';
}
$content = String::myhtmlspecialchars($content);
$elem = $this->out->xmlElement($name, $content, $attrs);
return "\n". str_repeat("\t", $level) . $elem;
}
/** Formats a date in RFC 2822 form, defaulting to "now". */
protected function makeRssDate($isodate = NULL) {
$format = 'r';
return empty($isodate) ? date($format) : date($format, strtotime($isodate));
}
protected function formatDateForGuid($date) {
return strtr($date, ' :', '__');
}
/**
 * Remove dangerous elements along with their content
 */
protected function escape_element( $desc ) {
// The regex over all dangerous tags is built once and cached on the instance
if ( ! isset( $this->_esc_desc_re ) ) {
$re = '';
foreach ( $this->dangerous_desc_tags as $tag ) {
$re .= "|<$tag.+</$tag>";
}
$this->_esc_desc_re = '!' . ltrim($re, '|') . '!U';
}
return preg_replace( $this->_esc_desc_re, '', $desc );
}
}
<file_sep><?php namespace Chitanka\LibBundle\Service;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
/**
 * File-based wiki engine: pages are markdown files stored in a directory that
 * is also a git work tree; edits are committed with author and edit summary.
 */
class WikiEngine {
// NOTE(review): relies on a Markdown class resolved in this namespace — verify it is autoloadable
public static function markdownToHtml($markdownContent) {
return Markdown::defaultTransform($markdownContent);
}
// Root directory of the wiki files (also the git work tree)
private $wikiPath;
// Lazily created GitRepository handle (see repo())
private $repo;
public function __construct($wikiPath) {
$this->wikiPath = $wikiPath;
}
/**
 * Loads a page by (unsanitized) file name.
 * A missing file still yields a WikiPage whose content is null, so callers
 * can distinguish it via WikiPage::exists().
 */
public function getPage($filename, $withAncestors = true) {
$filename = $this->sanitizeFileName($filename);
try {
list($metadata, $content) = $this->getPageSections($filename);
} catch (NotFoundHttpException $ex) {
$metadata = '';
$content = null;
}
$ancestors = $withAncestors ? $this->getAncestors($filename) : array();
$page = new WikiPage($filename, $content, $metadata, $ancestors);
return $page;
}
/**
 * Returns the chain of parent pages for a nested page name
 * ("a/b/c" has ancestors "/a" and "/a/b"), outermost first.
 */
public function getAncestors($filename) {
$ancestors = array();
if (strpos($filename, '/') !== false) {
$ancestorNames = explode('/', $filename);
array_pop($ancestorNames);
$currentAncestorName = '';
foreach ($ancestorNames as $ancestorName) {
$currentAncestorName .= '/'.$ancestorName;
// Ancestors are loaded without their own ancestors to avoid recursion
$ancestors[] = $this->getPage($currentAncestorName, false);
}
}
return $ancestors;
}
/**
 * Writes a page to disk (prepending a "Title:" metadata header) and commits
 * it to the repository. Missing intermediate directories are created.
 */
public function savePage($editSummary, $filename, $content, $title = null, $author = null) {
$fullpath = $this->getFullPath($filename);
$title = $title ? trim($title) : $filename;
$content = trim($content) . "\n";
$fullContent = "Title: $title\n\n$content";
if (!file_exists($dir = dirname($fullpath))) {
mkdir($dir, 0755, true);
}
file_put_contents($fullpath, $fullContent);
// Commit messages are prefixed with the sanitized page name
$editSummary = '['.$this->sanitizeFileName($filename).'] '.$editSummary;
$this->repo()->stage($fullpath)->commitWithAuthor($editSummary, $author);
}
/** Returns the git commits which touched the given page. */
public function getHistory($filename) {
$commits = $this->repo()->getLog('master', $this->getFullPath($filename), null);
return $commits;
}
/**
 * Splits a page file into its metadata header and its body, separated by the
 * first blank line; files without a header get an empty metadata section.
 * @throws NotFoundHttpException when the file does not exist
 */
protected function getPageSections($filename) {
$fullpath = $this->getFullPath($filename);
if (!file_exists($fullpath)) {
throw new NotFoundHttpException("Page '$filename' does not exist.");
}
$sections = explode("\n\n", file_get_contents($fullpath), 2);
if (count($sections) < 2) {
array_unshift($sections, '');
}
return $sections;
}
protected function getFullPath($filename) {
return $this->wikiPath .'/'. $this->sanitizeFileName($filename);
}
/**
 * Normalizes a page name: lower-case, dashes instead of underscores, a safe
 * character whitelist, no leading slashes or dots, and ".md" by default.
 */
protected function sanitizeFileName($filename) {
$sanitizedFilename = strtr(strtolower($filename), array(
'_' => '-',
));
$sanitizedFilename = preg_replace('#[^a-z\d/.-]#', '', $sanitizedFilename);
$sanitizedFilename = ltrim($sanitizedFilename, '/.');
if (strpos($sanitizedFilename, '.') === false) {
$sanitizedFilename .= '.md';
}
return $sanitizedFilename;
}
/** @return GitRepository */
protected function repo() {
return $this->repo ?: $this->repo = new GitRepository($this->wikiPath);
}
}
/**
 * Value object representing a single wiki page.
 *
 * Holds the page name (without extension), its format (derived from the file
 * extension, "md" by default), the raw content, the metadata header and the
 * chain of ancestor pages.
 */
class WikiPage {
	private $name;
	private $format = 'md';
	private $content;
	private $metadata;
	private $ancestors = array();

	/**
	 * @param string $name File name, possibly with extension ("notes/todo.md")
	 * @param string|null $content Raw page content; null means the page does not exist
	 * @param string $metadata Metadata header ("Key: value" lines)
	 * @param WikiPage[] $ancestors Parent pages, outermost first
	 */
	public function __construct($name, $content, $metadata, $ancestors) {
		$this->name = $name;
		if (strpos($this->name, '.') !== false) {
			// split off the extension; it determines the page format
			list($this->name, $this->format) = explode('.', $this->name, 2);
		}
		$this->content = $content;
		$this->metadata = $metadata;
		$this->ancestors = $ancestors;
	}

	/** @return bool Whether the page has content (i.e. the file existed) */
	public function exists() {
		return $this->content !== null;
	}

	public function getContent() {
		return $this->content;
	}

	/**
	 * Returns the content rendered as HTML. Markdown pages are converted;
	 * any other format is returned as-is.
	 */
	public function getContentHtml() {
		if ($this->format == 'md') {
			return WikiEngine::markdownToHtml($this->content);
		}
		return $this->content;
	}

	public function getName() {
		return $this->name;
	}

	public function getFormat() {
		return $this->format;
	}

	/** @return string The "Title" metadata value, falling back to the page name */
	public function getTitle() {
		return $this->getMetadata('Title', $this->name);
	}

	public function getAncestors() {
		return $this->ancestors;
	}

	/** @return bool Whether the page has any ancestor pages */
	public function hasAncestors() {
		// fixed: return an explicit bool instead of the raw count
		return count($this->ancestors) > 0;
	}

	/**
	 * Reads a single value from the metadata header.
	 *
	 * @param string $key Metadata key, e.g. "Title"
	 * @param mixed $default Returned when the key is absent
	 */
	protected function getMetadata($key, $default = null) {
		// fixed: preg_quote guards against keys containing regex metacharacters
		if (preg_match('/' . preg_quote($key, '/') . ': (.+)/', $this->metadata, $matches)) {
			return trim($matches[1]);
		}
		return $default;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Chitanka\LibBundle\Form\TextCommentForm;
/**
 * Controller for text comments.
 *
 * Still largely delegates to the legacy page system ("Comment") via
 * legacyPage(); only the RSS output and the "latest" widget are rendered
 * through the new view layer.
 */
class TextCommentController extends Controller
{
	/**
	 * Lists comments, paginated; "rss" format returns the latest 10 as a feed.
	 * Responses are not cached (responseAge = 0).
	 */
	public function indexAction($page, $_format)
	{
		$this->responseAge = 0;

		if ($_format == 'rss') {
			$limit = 10;
			$this->view = array(
				'comments' => $this->getTextCommentRepository()->getLatest($limit),
			);
			return $this->display("index.$_format");
		}

		// the legacy page reads its parameters from the request superglobals
		$_REQUEST['page'] = $page;
		return $this->legacyPage('Comment');
	}

	/**
	 * Shows the comments for a single text.
	 * The new-style form handling is still disabled (kept commented out below).
	 */
	public function listForTextAction($id)
	{
		$this->responseAge = 0;
		$text = $this->getTextRepository()->find($id);
		$_REQUEST['id'] = $id;
		// $form = TextCommentForm::create($this->get('form.context'), 'comment', array('em' => $this->getEntityManager()));
		//
		// $form->bind($this->get('request'));
		//
		// if ($form->isValid()) {
		// $form->process();
		// }
		//
		$this->view = array(
			'text' => $text,
			// 'comments' => $this->getTextCommentRepository()->getByText($text),
			// 'form' => $form,
		);
		// RSS
		// $_REQUEST['obj'] = 'comment';
		// $_REQUEST['limit'] = 10;
		// $_REQUEST[self::FF_TEXT_ID] = $this->textId;
		return $this->legacyPage('Comment', 'TextComment:text_comments');
		// return $this->display('text_comments');
	}

	/**
	 * Renders the latest comments (sidebar/front-page widget).
	 *
	 * @param int $limit Number of comments to show
	 */
	public function latestAction($limit = 5)
	{
		$this->view = array(
			'comments' => $this->getTextCommentRepository()->getLatest($limit),
		);
		return $this->display('latest_comments');
	}
}
<file_sep><?php
// Maintenance script: flush all APC caches.
// Only callable from localhost (IPv4 or IPv6 loopback); everyone else gets "boo".
if (in_array(@$_SERVER['REMOTE_ADDR'], array('127.0.0.1', '::1'))) {
	apc_clear_cache();           // default (system/opcode) cache
	apc_clear_cache('user');     // user data cache
	apc_clear_cache('opcode');   // opcode cache, cleared explicitly as well
	echo '1'; // success marker for the caller
} else {
	die('boo');
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Component\Security\Core\User\UserInterface;
use Symfony\Bridge\Doctrine\Validator\Constraints\UniqueEntity;
#use FOS\UserBundle\Entity\User as BaseUser;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Legacy\Legacy;
/**
 * Site user account.
 *
 * Combines the Doctrine entity (persisted fields, lifecycle callbacks) with
 * legacy session/cookie based authentication and preference handling.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\UserRepository")
 * @ORM\HasLifecycleCallbacks
 * @ORM\Table(name="user",
 *	indexes={
 *		@ORM\Index(name="realname_idx", columns={"realname"}),
 *		@ORM\Index(name="email_idx", columns={"email"})}
 * )
 * @UniqueEntity(fields="username")
 */
class User /*extends BaseUser*/ implements UserInterface
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue
	 */
	private $id;

	/**
	 * @var string $username
	 * @ORM\Column(type="string", length=100, unique=true)
	 */
	private $username = '~anon';

	/**
	 * @var string $realname
	 * @ORM\Column(type="string", length=120, nullable=true)
	 */
	private $realname;

	/**
	 * Password hash (see encodePasswordDB()).
	 * @var string $password
	 * @ORM\Column(type="string", length=40)
	 */
	private $password;

	/**
	 * Legacy hash expression for imported accounts; see validatePassword().
	 * @var string
	 * @ORM\Column(type="string", length=100, nullable=true)
	 */
	private $algorithm;

	/**
	 * Pending password generated by the "forgotten password" workflow.
	 * @var string $newpassword
	 * @ORM\Column(type="string", length=40, nullable=true)
	 */
	private $newpassword;

	/**
	 * @var string $email
	 * @ORM\Column(type="string", length=100, nullable=true)
	 */
	private $email;

	/**
	 * Whether other users may send this user email.
	 * @var boolean $allowemail
	 * @ORM\Column(type="boolean")
	 */
	private $allowemail = false;

	/**
	 * Group names this user belongs to; see self::$groupList.
	 * @var array
	 * @ORM\Column(type="array")
	 */
	private $groups = array();

	// All known group names, in roughly ascending order of privilege
	static private $groupList = array(
		'user',
		'text-label',
		'workroom-supervisor',
		'workroom-admin',
		'admin',
		'god',
	);

	/**
	 * Whether the user subscribed to the newsletter.
	 * @var boolean $news
	 * @ORM\Column(type="boolean")
	 */
	private $news = false;

	/**
	 * Free-form user options; see self::$defOptions for known keys.
	 * @var array $opts
	 * @ORM\Column(type="array")
	 */
	private $opts = array();

	/**
	 * Number of consecutive failed login attempts; reset on login().
	 * @var integer $login_tries
	 * @ORM\Column(type="smallint")
	 */
	private $login_tries = 0;

	/**
	 * @var datetime $registration
	 * @ORM\Column(type="datetime")
	 */
	private $registration;

	/**
	 * Last activity time; see touch().
	 * @var datetime $touched
	 * @ORM\Column(type="datetime")
	 */
	private $touched;

	/**
	 * Token used to access private user lists, e.g. read texts
	 *
	 * @var string
	 * @ORM\Column(type="string", length=40, unique=true)
	 */
	private $token;

	/** FIXME doctrine:schema:create does not allow this relation
	 * @var array
	 * @ORM\ManyToMany(targetEntity="Text", mappedBy="readers")
	 * @ORM\JoinTable(name="user_text_read",
	 *	joinColumns={@ORM\JoinColumn(name="user_id")},
	 *	inverseJoinColumns={@ORM\JoinColumn(name="text_id")})
	 */
	private $readTexts;

	/**
	 * @ORM\OneToMany(targetEntity="Bookmark", mappedBy="user")
	 */
	private $bookmarks;

	public function __construct()
	{
		$this->touch();
	}

	public function getId() { return $this->id; }

	public function setUsername($username) { $this->username = $username; }
	public function getUsername() { return $this->username; }

	public function setRealname($realname) { $this->realname = $realname; }
	public function getRealname() { return $this->realname; }

	/** Display name: real name when set, user name otherwise */
	public function getName()
	{
		return $this->getRealname() ?: $this->getUsername();
	}

	/**
	 * Sets the password.
	 * @param string $password
	 * @param bool $plain When true the value is hashed first; when false it
	 *	is assumed to be an already hashed value
	 */
	public function setPassword($password, $plain = true)
	{
		$this->password = $plain ? $this->encodePasswordDB($password) : $password;
		// any legacy hash algorithm no longer applies once a new hash is stored
		$this->algorithm = null;
	}
	public function getPassword() { return $this->password; }

	// UserInterface::getSalt(); the user name doubles as the salt
	public function getSalt() { return $this->username; }

	public function setNewpassword($password, $plain = true)
	{
		$this->newpassword = $plain ? $this->encodePasswordDB($password) : $password;
	}
	public function getNewpassword() { return $this->newpassword; }

	public function setAlgorithm($algorithm) { $this->algorithm = $algorithm; }
	public function getAlgorithm() { return $this->algorithm; }

	public function setEmail($email) { $this->email = $email; }
	public function getEmail() { return $this->email; }

	public function setAllowemail($allowemail) { $this->allowemail = $allowemail; }
	public function getAllowemail() { return $this->allowemail; }
	public function allowsEmail() { return $this->allowemail; }

	public function setGroups($groups) { $this->groups = $groups; }
	public function getGroups() { return $this->groups; }
	public function addGroup($group) { $this->groups[] = $group; }

	public function addGroups($groupsToAdd) {
		$this->groups = array_merge($this->groups, $groupsToAdd);
	}

	public function removeGroups($groupsToRemove) {
		$this->groups = array_diff($this->groups, $groupsToRemove);
	}

	/**
	 * Tells whether the user is in any of the given groups.
	 * @param string|array $groups
	 * @param bool $orGod When true (default) membership in "god" also counts
	 * @return bool
	 */
	public function inGroup($groups, $orGod = true) {
		$groups = (array) $groups;
		if ($orGod) {
			$groups[] = 'god';
		}
		foreach ($groups as $group) {
			if (in_array($group, $this->groups)) {
				return true;
			}
		}

		return false;
	}

	public function setNews($news) { $this->news = $news; }
	public function getNews() { return $this->news; }

	public function setOpts($opts) { $this->opts = $opts; }
	public function getOpts() { return $this->opts; }

	public function setLoginTries($loginTries) { $this->login_tries = $loginTries; }
	public function getLoginTries() { return $this->login_tries; }
	public function incLoginTries()
	{
		$this->login_tries++;
	}

	public function setRegistration($registration) { $this->registration = $registration; }
	public function getRegistration() { return $this->registration; }

	public function setTouched($touched) { $this->touched = $touched; }
	public function getTouched() { return $this->touched; }

	public function setToken($token) { $this->token = $token; }
	public function getToken() { return $this->token; }

	public function addBookmark($bookmark) { $this->bookmarks[] = $bookmark; }

	/** Extra user-selected stylesheets, stored under opts['css'] */
	public function getExtraStylesheets()
	{
		return isset($this->opts['css']) ? $this->opts['css'] : array();
	}

	/** Extra user-selected javascripts, stored under opts['js'] */
	public function getExtraJavascripts()
	{
		return isset($this->opts['js']) ? $this->opts['js'] : array();
	}

	public function __toString()
	{
		return $this->getUsername();
	}

	// UserInterface::getRoles(): groups mapped to "ROLE_*" security roles
	public function getRoles()
	{
		#return array();
		return array_map(function($group){
			return 'ROLE_' . strtoupper($group);
		}, $this->getGroups());
	}

	public function canPutTextLabel() {
		return $this->inGroup('text-label');
	}

	// UserInterface::eraseCredentials(): reset to an anonymous user and log out
	public function eraseCredentials()
	{
		$this->id = -1;
		$this->username = '~anon';
		$this->password = <PASSWORD>;
		$this->logout();
	}

	// Identity comparison for the security layer; user names are unique
	public function equals(UserInterface $account)
	{
		return $account->getUsername() === $this->username;
	}

	public function isAnonymous()
	{
		return is_null($this->id);
	}

	public function isAuthenticated()
	{
		return ! $this->isAnonymous();
	}

	/** Plain-array snapshot of the account, used for session storage */
	public function toArray()
	{
		return array(
			'id'           => $this->id,
			'username'     => $this->username,
			'realname'     => $this->realname,
			'password'     => $<PASSWORD>,
			'algorithm'    => $this->algorithm,
			'newpassword'  => $this->newpassword,
			'email'        => $this->email,
			'allowemail'   => $this->allowemail,
			'groups'       => $this->groups,
			'news'         => $this->news,
			'opts'         => $this->opts,
			'login_tries'  => $this->login_tries,
			'registration' => $this->registration,
			'touched'      => $this->touched,
			'token'        => $this->token,
		);
	}

	/** @ORM\PrePersist */
	public function preInsert()
	{
		$this->registration = new \DateTime;
		$this->token = $this->generateToken();
		$this->groups[] = 'user';
	}

	/** @ORM\PreUpdate */
	public function preUpdate()
	{
		// without an email address the "allow email" flag is meaningless
		if (empty($this->email)) {
			$this->allowemail = false;
		}
		// drop empty custom css/js entries so they do not accumulate
		if (empty($this->opts['css']['custom'])) {
			unset($this->opts['css']['custom']);
		}
		if (empty($this->opts['js']['custom'])) {
			unset($this->opts['js']['custom']);
		}
	}

	// Default values for the opts array
	static public $defOptions = array(
		'skin' => 'orange',
		'nav' => 'right', // navigation position
		'css' => array(),
		'js'  => array(),
		'news' => false, // receive montly newsletter
		'allowemail' => true, // allow email from other users
		'dlformat' => 'txt.zip', // default format for batch downloading
	);

	// Non-persisted runtime state
	protected
		$rights = array(), $options = array(),
		$dlTexts = array(),
		$isHuman = false;

	/** Cookie name for the user ID */
	const UID_COOKIE = 'mli';
	/** Cookie name for the encrypted user password */
	const TOKEN_COOKIE = 'mlt';
	/** Cookie name for the user options */
	const OPTS_COOKIE = 'mlo';
	/** Session key for the User object */
	const U_SESSION = 'user';

	/**
	 * Builds the current user from the session, falling back to the login
	 * cookies, falling back to an anonymous user.
	 *
	 * @param object $repo User repository used for the cookie lookup
	 * @return User
	 */
	static public function initUser($repo)
	{
		if ( self::isSetSession() ) {
			$user = self::newFromSession();
		} else if ( self::isSetCookie() ) {
			$user = self::newFromCookie($repo);
			$_SESSION[self::U_SESSION] = $user->toArray();
		} else {
			$user = new User;
		}

		return $user;
	}

	/** @return bool */
	static protected function isSetSession() {
		return isset($_SESSION[self::U_SESSION]);
	}

	/** @return bool */
	static protected function isSetCookie() {
		return isset($_COOKIE[self::UID_COOKIE]) && isset($_COOKIE[self::TOKEN_COOKIE]);
	}

	// Rehydrates a User from a toArray() snapshot
	static protected function newFromArray($data)
	{
		$user = new User;
		foreach ($data as $field => $value) {
			$user->$field = $value;
		}

		return $user;
	}

	/** @return User */
	static protected function newFromSession() {
		return self::newFromArray($_SESSION[self::U_SESSION]);
	}

	/** @return User A valid user when the cookie token checks out, an anonymous user otherwise */
	static protected function newFromCookie($repo) {
		$user = $repo->find($_COOKIE[self::UID_COOKIE]);
		if ( $user->validateToken($_COOKIE[self::TOKEN_COOKIE], $user->getPassword()) ) {
			$user->touch();
			$repo->persist($user);
			return $user;
		}

		return new User;
	}

	/**
	 * Generates a random password (lowercase letters and digits, excluding
	 * the easily confused "l" and "0").
	 * NOTE(review): mt_rand() is not a cryptographically secure source.
	 */
	static public function randomPassword($passLength = 16) {
		$chars = 'abcdefghijkmnopqrstuvwxyz123456789';
		$max = strlen($chars) - 1;
		$password = '';
		for ($i = 0; $i < $passLength; $i++) {
			// NOTE(review): the curly-brace string offset syntax was removed in PHP 8
			$password .= $chars{mt_rand(0, $max)};
		}

		return $password;
	}

	/**
	Check a user name for invalid chars.
	@param string $username
	@return mixed true if the user name is ok, or the invalid character
	*/
	static public function isValidUsername($username) {
		$forbidden = '/+#"(){}[]<>!?|~*$&%=\\';
		$len = strlen($forbidden);
		for ($i=0; $i < $len; $i++) {
			if ( strpos($username, $forbidden{$i}) !== false ) {
				return $forbidden{$i};
			}
		}

		return true;
	}

	static public function getDataByName($username) {
		return self::getData( array('username' => $username) );
	}

	static public function getDataById($userId) {
		return self::getData( array('id' => $userId) );
	}

	// Raw user row fetched through the legacy database layer
	static public function getData($dbkey) {
		$db = Setup::db();
		$res = $db->select(DBT_USER, $dbkey);
		if ( $db->numRows($res) == 0) return array();

		return $db->fetchAssoc($res);
	}

	static public function getGroupList()
	{
		return self::$groupList;
	}

	// Legacy alias of getName()
	public function showName() {
		return empty($this->realname) ? $this->username : $this->realname;
	}

	// Legacy alias of getUsername()
	public function userName() {
		return $this->username;
	}

	// Sets an already-initialized property; silently ignores unknown/unset fields
	public function set($field, $val) {
		if ( !isset($this->$field) ) return;
		$this->$field = $val;
	}

	public function options() {
		return $this->opts;
	}

	/** Returns a single option, or $default when it is not set */
	public function option($opt, $default = '') {
		return isset($this->opts[$opt]) ? $this->opts[$opt] : $default;
	}

	public function setOption($name, $val) {
		$this->opts[$name] = $val;
	}

	// NOTE(review): permission check is currently a stub that allows everything
	public function canExecute($action) {
		return true;
	}

	public function isGod() {
		return $this->inGroup('god');
	}

	public function isHuman() {
		return $this->isHuman;
	}

	public function setIsHuman($isHuman) {
		$this->isHuman = $isHuman;
	}

	/**
	 * Encode a password in order to save it in the database.
	 *
	 * @param string $plainPassword
	 * @return string Encoded password (sha1 of password+username repeated twice)
	 */
	public function encodePasswordDB($plainPassword)
	{
		return sha1(str_repeat($plainPassword . $this->username, 2));
	}

	/**
	 * Encode a password in order to save it in a cookie.
	 *
	 * @param string $password
	 * @param bool $plainpass Is this a real password or one already stored
	 *	encoded in the database
	 * @return string Encoded password
	 */
	public function encodePasswordCookie($password, $plainpass = true)
	{
		if ($plainpass) {
			$password = $this->encodePasswordDB($password);
		}

		return Legacy::sha1_loop(str_repeat($password, 10), 10);
	}

	/**
	 * Validate an entered password.
	 * Encodes an entered password and compares it to the password from the database.
	 *
	 * @param string $inputPass The password from the input
	 * @return bool
	 */
	public function validatePassword($inputPass)
	{
		if (empty($this->algorithm)) {
			$encodedPass = $this->encodePasswordDB($inputPass);
		} else {
			// SECURITY(review): eval() on a stored hash expression with the raw
			// input interpolated — the algorithm column must never be
			// user-controlled; consider replacing with a whitelist of hashers
			eval('$encodedPass = ' . preg_replace('/\$\w+/', "'$inputPass'", $this->algorithm) . ';');
		}
		return strcmp($encodedPass, $this->password) === 0;
	}

	public function validateNewPassword($inputPass)
	{
		return strcmp($this->encodePasswordDB($inputPass), $this->newpassword) === 0;
	}

	/**
	 * Validate a token from a cookie.
	 * Properly encodes the password from the database and compares it to the token.
	 *
	 * @param string $cookieToken The token from the cookie
	 * @param string $dbPass The password stored in the database
	 * @return bool
	 */
	public function validateToken($cookieToken, $dbPass) {
		$enc = $this->encodePasswordCookie($dbPass, false);
		return strcmp($enc, $cookieToken) === 0;
	}

	/** Promotes the pending "forgotten password" value to the active password */
	public function activateNewPassword()
	{
		$this->setPassword($this->getNewpassword(), true);
	}

	public function generateToken()
	{
		return strtoupper(sha1(str_repeat(uniqid() . $this->username, 2)));
	}

	/**
	 * Marks the user as logged in: clears any pending password and failed
	 * login counter, sets the ID and token cookies and stores the account
	 * snapshot in the session.
	 *
	 * @param bool $remember Whether the cookies should outlive the browser session
	 */
	public function login($remember = false)
	{
		// delete a previously generated new password, login_tries
		$this->setNewpassword(null, false);
		$this->setLoginTries(0);
		$this->touch();

		$_COOKIE[self::UID_COOKIE] = $this->getId();
		$_COOKIE[self::TOKEN_COOKIE] = $this->encodePasswordCookie($this->getPassword(), false);

		$cookieExpire = $remember ? null /* default */ : 0 /* end of session */;
		$request = Setup::request();
		$request->setCookie(self::UID_COOKIE, $_COOKIE[self::UID_COOKIE], $cookieExpire);
		$request->setCookie(self::TOKEN_COOKIE, $_COOKIE[self::TOKEN_COOKIE], $cookieExpire);

		return $_SESSION[self::U_SESSION] = $this->toArray();
	}

	/** Removes the session entry and the login cookies */
	public function logout() {
		unset($_SESSION[self::U_SESSION]);
		unset($_COOKIE[self::UID_COOKIE]);
		unset($_COOKIE[self::TOKEN_COOKIE]);
		$request = Setup::request();
		$request->deleteCookie(self::UID_COOKIE);
		$request->deleteCookie(self::TOKEN_COOKIE);
	}

	/** Updates the last-activity timestamp */
	public function touch()
	{
		$this->setTouched(new \DateTime);
	}

	/** Re-syncs the session snapshot with the current object state */
	public function updateSession() {
		$_SESSION[self::U_SESSION] = $this->toArray();
	}

	static public function packOptions( $options ) {
		return serialize($options);
	}

	static public function unpackOptions( $opts_data ) {
		if ( ! empty($opts_data) ) {
			return unserialize($opts_data);
		}

		return array();
	}

	/** Skin and menu-position preferences with their defaults */
	public function getSkinPreference()
	{
		return array(
			'skin' => $this->option('skin', 'orange'),
			'menu' => $this->option('nav', 'right'),
		);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
// Admin CRUD screen for user text contributions; all behavior is inherited
// from the generic CRUDController, which presumably derives the entity from
// the controller name — TODO confirm.
class UserTextContribController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Repository for SiteNotice entities.
 */
class SiteNoticeRepository extends EntityRepository {

	/**
	 * Returns all notices flagged for the front page.
	 * NOTE(review): does not filter on isActive — confirm this is intended.
	 */
	public function findForFrontPage() {
		return $this->findBy(array('isForFrontPage' => true));
	}

	/**
	 * Returns a random active notice that is not a front-page notice.
	 * getRandom() is presumably provided by the parent EntityRepository — confirm.
	 */
	public function getGlobalRandom() {
		return $this->getRandom('e.isActive = 1 AND e.isForFrontPage = 0');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Entity\User;
/**
 * "Forgotten password" page.
 *
 * Renders a form asking for a user name; on submission generates a random
 * new password, stores it as the user's pending password (newpassword) and
 * mails it to the address on record. The pending password only becomes
 * active after the user logs in with it (handled elsewhere).
 */
class SendNewPasswordPage extends MailPage {

	protected
		$action = 'sendNewPassword';

	public function __construct($fields) {
		parent::__construct($fields);
		$this->title = 'Изпращане на нова парола';
		$this->username = $this->request->value('username');
	}

	/**
	 * Validates the user name, generates and persists the new pending
	 * password, then delegates the actual mailing to MailPage.
	 */
	protected function processSubmission() {
		$key = array('username' => $this->username);
		$res = $this->db->select(DBT_USER, $key, 'email');
		$data = $this->db->fetchAssoc($res);
		if ( empty($data) ) {
			// unknown user name
			$this->addMessage("Не съществува потребител с име <strong>$this->username</strong>.", true);
			return $this->buildContent();
		}
		// brings $email into scope from the fetched row
		extract($data);
		if ( empty($email) ) {
			// the account has no email address to send the password to
			$this->addMessage("За потребителя <strong>$this->username</strong> не е посочена електронна поща.", true);
			return $this->buildContent();
		}
		$this->mailToName = $this->username;
		$this->mailToEmail = $email;
		$this->newPassword = User::randomPassword();

		// store the pending password on the entity (hashed by setNewpassword)
		$user = $this->controller->getRepository('User')->findOneBy(array('username' => $this->username));
		$user->setNewpassword($this->newPassword);
		$em = $this->controller->getEntityManager();
		$em->persist($user);
		$em->flush();

		$this->mailSubject = "Нова парола за $this->sitename";
		$loginurl = $this->controller->generateUrl('login');
		$this->mailSuccessMessage = "Нова парола беше изпратена на електронната поща на <strong>$this->username</strong>. Моля, <a href=\"$loginurl\">влезте отново</a>, след като я получите.";
		$this->mailFailureMessage = 'Изпращането на новата парола не сполучи.';
		return parent::processSubmission();
	}

	/** Builds the HTML form asking for the user name. */
	protected function makeForm() {
		$username = $this->out->textField('username', '', $this->username, 25, 255, 2);
		$submit = $this->out->submitButton('Изпращане на нова парола', '', 3);

		return <<<EOS

<p>Чрез долния формуляр можете да поискате нова парола за влизане в <em>$this->sitename</em>, ако сте забравили сегашната си. Такава обаче може да ви бъде изпратена само ако сте посочили валидна електронна поща в потребителските си данни.</p>
<form action="" method="post">
<fieldset>
<legend>Нова парола</legend>
<label for="username">Потребителско име:</label>
$username
$submit
</fieldset>
</form>
EOS;
	}

	/** Plain-text body of the notification email. */
	protected function makeMailMessage() {
		return <<<EOS
Здравейте!
Някой (най-вероятно вие) поиска да ви изпратим нова парола за
влизане в $this->sitename (http://chitanka.info). Ако все пак
не сте били вие, можете да не обръщате внимание на това писмо и да
продължите да ползвате сегашната си парола.
Новата ви парола е {$this->newPassword}
След като влезете с нея в $this->sitename, е препоръчително да я
смените с някоя по-лесно запомняща се, за да не се налага пак да
прибягвате до функцията „Изпращане на нова парола“. ;-)
$this->sitename
EOS;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\File;
/**
 * Static facade over the file-based page cache and the download (zip) cache.
 *
 * Download cache entries are additionally tracked in the database
 * (DBT_DL_CACHE / DBT_DL_CACHE_TEXT) so they can be invalidated per text ID.
 */
class CacheManager
{
	const ONEDAYSECS = 86400; // 60*60*24

	static private
		$cacheDir = 'cache/',
		$dlDir = 'cache/dl/',
		$zipDir = 'zip/',
		/** Time to Live for download cache (in hours) */
		$dlTtl = 168;

	/**
	Tells whether a given cache file exists.
	If file age is given, older than that files are discarded.
	@param $action Page action
	@param $id File ID
	@param $age File age in days
	*/
	static public function cacheExists($action, $id, $age = null)
	{
		$file = self::getPath($action, $id);
		if ( ! file_exists($file) ) {
			return false;
		}
		if ( is_null($age) ) {
			return true;
		}
		return (time() - filemtime($file)) < $age * self::ONEDAYSECS;
	}

	/** Reads a cache entry; pass $compressed = true for entries stored deflated */
	static public function getCache($action, $id, $compressed = false) {
		$c = file_get_contents( self::getPath($action, $id) );
		return $compressed ? gzinflate($c) : $c;
	}

	/** Writes a cache entry and returns the (uncompressed) content */
	static public function setCache($action, $id, $content, $compressed = false) {
		$file = self::getPath($action, $id);
		File::myfile_put_contents($file, $compressed ? gzdeflate($content) : $content);
		return $content;
	}

	/** Removes a cache entry; succeeds trivially when the file does not exist */
	static public function clearCache($action, $id) {
		$file = self::getPath($action, $id);
		return file_exists($file) ? unlink($file) : true;
	}

	static public function dlCacheExists($id, $ext = '') {
		return file_exists( self::getDlCachePath($id, $ext) );
	}

	static public function getDlCache($id, $ext = '') {
		$file = self::getDlCachePath($id, $ext);
		touch($file); // mark it as fresh
		return file_get_contents($file);
	}

	static public function setDlCache($id, $content, $ext = '') {
		return File::myfile_put_contents(self::getDlCachePath($id, $ext), $content);
	}

	/**
	 * Removes the download cache for a text in every known format and
	 * propagates the invalidation to the mirror sites.
	 */
	static public function clearDlCache($id) {
		$file = self::getDlCachePath($id);
		@unlink($file . '.fbi');
		@unlink($file . '.fb2');
		@unlink($file . '.txt');
		self::clearDl($id);
		self::clearMirrorCache($id);
		return file_exists($file) ? unlink($file) : true;
	}

	/** Asks every configured mirror site to clear its cache for this text */
	static public function clearMirrorCache($id)
	{
		foreach (Setup::setting('mirror_sites') as $mirror) {
			$url = sprintf('%s/clearCache?texts=%d', $mirror, $id);
			Legacy::getFromUrl($url);
		}
	}

	/** Returns the path of a download file (which may not exist) */
	static public function getDlFile($fname) {
		$file = self::$dlDir . $fname;
		// commented, file can be non-existant
		#touch($file); // mark it as fresh
		return $file;
	}

	static public function setDlFile($fname, $fcontent) {
		return File::myfile_put_contents(self::$dlDir . $fname, $fcontent);
	}

	static public function dlFileExists($fname) {
		return file_exists(self::$dlDir . $fname);
	}

	/**
	Deletes all download files older than the time to live.
	*/
	static public function deleteOldDlFiles()
	{
		// disable until synced with the database
		// NOTE(review): the early return below intentionally dead-codes the rest
		return;
		$thresholdTime = time() - self::$dlTtl * 3600;
		$dh = opendir(self::$dlDir);
		if (!$dh) return;
		while (($file = readdir($dh)) !== false) {
			if ( $file[0] == '.' ) { continue; }
			$fullname = self::$dlDir . $file;
			if (filemtime($fullname) < $thresholdTime) {
				unlink($fullname);
			}
		}
		closedir($dh);
	}

	/**
	 * Builds the cache path for an entry, sharding by the first three
	 * characters of the ID (assumes IDs are at least 3 characters — confirm).
	 */
	static public function getPath($action, $id) {
		$subdir = $action . '/';
		settype($id, 'string');
		$subsubdir = $id[0] . '/' . $id[1] . '/' . $id[2] . '/';
		return self::$cacheDir . $subdir . $subsubdir . $id;
	}

	static public function getDlCachePath($id, $ext = '') {
		return self::$cacheDir . self::$zipDir . Legacy::makeContentFilePath($id) . $ext;
	}

	/** Looks up the cached download file for a set of text IDs and format */
	static public function getDl($textIds, $format = '')
	{
		return self::getDlFileByHash( self::getHashForTextIds($textIds, $format) );
	}

	/**
	 * Registers a download file in the database for a set of text IDs, so it
	 * can later be invalidated per text.
	 */
	static public function setDl($textIds, $file, $format = '')
	{
		$db = Setup::db();
		$pk = self::getHashForTextIds($textIds, $format);
		// "id = $pk" is a raw-expression key of the legacy DB layer ($pk is a hex literal)
		$db->insert(DBT_DL_CACHE, array(
			"id = $pk",
			'file' => $file,
		), true, false);
		foreach ( (array) $textIds as $textId ) {
			$db->insert(DBT_DL_CACHE_TEXT, array(
				"dc_id = $pk",
				'text_id' => $textId,
			), true, false);
		}
		return $file;
	}

	/** Invalidates every cached download containing any of the given texts */
	static public function clearDl($textIds)
	{
		$db = Setup::db();
		$dctKey = array(
			'text_id' => is_array($textIds) ? array('IN', $textIds) : $textIds
		);
		$hashes = $db->getFieldsMulti(DBT_DL_CACHE_TEXT, $dctKey, 'dc_id');
		if ( ! empty($hashes) ) {
			self::clearDlFiles($hashes);
			$db->delete(DBT_DL_CACHE, array('id' => array('IN', $hashes)));
			$db->delete(DBT_DL_CACHE_TEXT, array('dc_id' => array('IN', $hashes)));
		}
	}

	static protected function clearDlFiles($hashes)
	{
		$files = Setup::db()->getFieldsMulti(DBT_DL_CACHE, array('id' => array('IN', $hashes)), 'file');
		foreach ($files as $file) {
			self::clearDlFile($file);
		}
	}

	static protected function clearDlFile($file)
	{
		$file = self::getDlFile($file);
		if ( file_exists($file) ) {
			return unlink($file);
		}
		return true;
	}

	static public function getDlFileByHash($hash)
	{
		return Setup::db()->getFields(DBT_DL_CACHE, array("id = $hash"), 'file');
	}

	/**
	 * 64-bit hex hash identifying a (text IDs, format) combination;
	 * used as the primary key in the download-cache tables.
	 */
	static protected function getHashForTextIds($textIds, $format = '')
	{
		if ( is_array($textIds) ) {
			$textIds = implode(',', $textIds);
		}
		return '0x' . substr(md5($textIds . $format), 0, 16);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Entity\Person;
/**
 * Console command that imports/updates Person entries from a wiki page.
 *
 * Reads the "Готови автори" ("ready authors") section of a wiki work page,
 * parses the {{Нов автор}} templates inside it, persists the persons, and
 * writes any errors back into that section of the wiki page.
 */
class UpdatePersonsFromWikiDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();

		$this
			->setName('db:update-persons-from-wiki')
			->setDescription('Update persons from wiki data')
			->setHelp(<<<EOT
The <info>db:update-persons-from-wiki</info> command reads data from the wiki and updates or adds new person entries.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->output = $output;
		$this->errors = array();
		$this->processWikiPage('Работно ателие/Нови автори');
		$output->writeln('Done.');
	}

	/**
	 * Fetches the wiki page, imports the templates from its "ready" section,
	 * and replaces that section with the accumulated error messages.
	 */
	protected function processWikiPage($pageName)
	{
		$this->output->writeln('Fetching and processing wiki content...');
		$wikiPage = $this->_wikiBot()->fetch_page($pageName);
		if (preg_match('/== Готови автори ==(.+)== За попълване ==/ms', $wikiPage->text, $m)) {
			$personTemplates = trim($m[1]);
			if ($personTemplates && $this->updatePersons($personTemplates)) {
				$errors = implode("\n\n", $this->errors);
				$wikiPage->text = preg_replace('/(== Готови автори ==\n).+(\n== За попълване ==)/ms', "$1$errors\n$2", $wikiPage->text);
				$this->_wikiBot()->submit_page($wikiPage, '/* Готови автори */ пренасяне в базата на библиотеката');
			}
		}
	}

	/**
	 * Persists every person parsed from the wiki content.
	 *
	 * @return int Number of parsed person templates
	 */
	protected function updatePersons($wikiContent)
	{
		$persons = $this->_getPersonsDataFromWikiContent($wikiContent);
		$this->output->writeln('Updating persons...');
		foreach ($persons as $personData) {
			// skip templates with neither a slug nor a name
			if ($personData['slug'] || $personData['name']) {
				$person = $this->createPerson($personData);
				if ($this->isNewPersonWithTakenSlug($person)) {
					$this->errors[] = "При $personData[name] се генерира идентификатор ({$person->getSlug()}), който вече присъства в базата.";
					continue;
				}
				$this->em->persist($person);
				try {
					$this->em->flush();
				} catch (\PDOException $e) {
					$this->errors[] = $e->getMessage();
				}
			}
		}
		return count($persons);
	}

	/**
	 * Creates a new Person or updates the existing one with the same slug.
	 * Only non-empty fields overwrite existing data.
	 */
	protected function createPerson($data)
	{
		if ($data['slug']) {
			$person = $this->em->getRepository('LibBundle:Person')->getBySlug($data['slug']);
			if ( ! $person) {
				$person = new Person;
				$person->setSlug($data['slug']);
			}
		} else {
			$person = new Person;
		}
		if ( ! empty($data['orig_name'])) $person->setOrigName($data['orig_name']);
		if ( ! empty($data['name'])) $person->setName($data['name']);
		if ( ! empty($data['real_name'])) $person->setRealName($data['real_name']);
		if ( ! empty($data['oreal_name'])) $person->setOrealName($data['oreal_name']);
		if ( ! empty($data['country'])) $person->setCountry($data['country']);
		if ( ! empty($data['info'])) $person->setInfo($data['info']);

		return $person;
	}

	/** A person without an ID whose (auto-generated) slug is already taken */
	protected function isNewPersonWithTakenSlug($person)
	{
		return !$person->getId() && $this->em->getRepository('LibBundle:Person')->getBySlug($person->getSlug());
	}

	private $_wikiBot;

	/** Lazily constructs the Apibot wiki client */
	private function _wikiBot()
	{
		if ($this->_wikiBot == null) {
			error_reporting(E_WARNING);
			require_once $this->getContainer()->getParameter('kernel.root_dir') . '/../vendor/apibot/apibot.php';
			// NOTE(review): $logins is expected to be defined by the required
			// apibot.php — confirm; otherwise this is an undefined variable
			$this->_wikiBot = new \Apibot($logins['chitanka'], array('dump_mode' => 0));
		}
		return $this->_wikiBot;
	}

	private function _getPersonsDataFromWikiContent($wikiContent)
	{
		$templates = $this->_getPersonTemplatesFromWikiContent($wikiContent);
		$persons = array();
		foreach ($templates as $template) {
			$persons[] = $this->_getPersonDataFromWikiContent($template);
		}
		return $persons;
	}

	/** Maps the Bulgarian template variables onto Person field names */
	private function _getPersonDataFromWikiContent($template)
	{
		$wikiVars = $this->_getPersonVarsFromWikiContent($template);
		return array(
			'slug' => @$wikiVars['идентификатор'],
			'name' => @$wikiVars['име'],
			'orig_name' => @$wikiVars['оригинално име'],
			'real_name' => @$wikiVars['истинско име'],
			'oreal_name' => @$wikiVars['оригинално истинско име'],
			'country' => @$wikiVars['държава'],
			'info' => str_replace('_', ' ', @$wikiVars['уики']),
		);
	}

	/**
	 * Parses "| key = value" template rows into a key => value map.
	 */
	private function _getPersonVarsFromWikiContent($template)
	{
		$wikiVars = array();
		foreach (explode("\n", trim($template)) as $row) {
			// fixed: limit the split to 2 so values containing "=" are kept whole,
			// and pad so rows without "=" do not raise an undefined-offset notice
			list($wikiVar, $value) = array_pad(explode('=', ltrim($row, '| '), 2), 2, '');
			$wikiVars[trim($wikiVar)] = trim($value);
		}
		return $wikiVars;
	}

	/** Extracts the bodies of all {{Нов автор ...}} templates */
	private function _getPersonTemplatesFromWikiContent($wikiContent)
	{
		if (preg_match_all('|\{\{Нов автор(.+)\}\}|Ums', $wikiContent, $matches)) {
			return $matches[1];
		}
		return array();
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command that adds users to and/or removes them from groups.
 *
 * Usage: sys:change-user-groups "user1,user2" "+workroom-admin,-admin"
 * A "+" (or no prefix) adds the group; a "-" removes it.
 */
class ChangeUserGroupsCommand extends CommonDbCommand {
	protected function configure() {
		parent::configure();
		$this->setName('sys:change-user-groups')
			->setDescription('Change groups for given users')
			->addArgument('users', InputArgument::REQUIRED, 'Users which groups should be modified (comma separated)')
			->addArgument('groups', InputArgument::REQUIRED, 'Groups to add or remove (comma separated). Ex.: "+workroom-admin,-admin" adds the user to "workroom-admin" and removes him from "admin"')
		;
	}

	/** @inheritdoc */
	protected function execute(InputInterface $input, OutputInterface $output) {
		$userNames = $this->readUsers($input);
		list($groupsToAdd, $groupsToRemove) = $this->readGroups($input);
		$users = $this->getUserRepository()->findByUsernames($userNames);
		$this->modifyUserGroups($users, $groupsToAdd, $groupsToRemove);
		$output->writeln("Done.");
	}

	/**
	 * Applies the group additions and removals to every user and flushes once.
	 */
	protected function modifyUserGroups($users, $groupsToAdd, $groupsToRemove) {
		$em = $this->getEntityManager();
		foreach ($users as $user) {
			$user->addGroups($groupsToAdd);
			$user->removeGroups($groupsToRemove);
			$em->persist($user);
		}
		$em->flush();
	}

	/**
	 * Parses the comma-separated "users" argument into a list of user names.
	 * Fixed: empty entries (e.g. from a trailing comma) are dropped.
	 */
	protected function readUsers(InputInterface $input) {
		$names = array_map('trim', explode(',', $input->getArgument('users')));
		return array_values(array_filter($names, 'strlen'));
	}

	/**
	 * Parses the comma-separated "groups" argument.
	 *
	 * @return array [groupsToAdd, groupsToRemove]
	 */
	protected function readGroups(InputInterface $input) {
		$groupsToAdd = $groupsToRemove = array();
		foreach (array_map('trim', explode(',', $input->getArgument('groups'))) as $groupIdent) {
			if ($groupIdent === '') {
				// fixed: guard against trailing/double commas, which previously
				// raised an undefined string-offset notice on $groupIdent[0]
				continue;
			}
			switch ($groupIdent[0]) {
				case '-':
					$groupsToRemove[] = substr($groupIdent, 1);
					break;
				case '+':
					$groupsToAdd[] = substr($groupIdent, 1);
					break;
				default:
					$groupsToAdd[] = $groupIdent;
			}
		}
		return array($groupsToAdd, $groupsToRemove);
	}

	/** @return \Chitanka\LibBundle\Entity\UserRepository */
	protected function getUserRepository() {
		return $this->getRepository('User');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Console command which recomputes cached aggregate counters in the database,
 * e.g. the number of texts per label or the number of books per category.
 */
class UpdateCountsDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:update-counts')
			->setDescription('Update some total counts in the database')
			->setHelp(<<<EOT
The <info>db:update-counts</info> command updates some total counts in the database. For example number of texts by every label, or number of books by every category.
EOT
		);
	}
	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		// use the parent helper for consistency with the other db commands
		// (ChangeUserGroupsCommand) instead of a raw container lookup
		$this->updateCounts($output, $this->getEntityManager());
		$output->writeln('Done.');
	}
	/**
	 * Runs the individual count updates (implemented in CommonDbCommand).
	 *
	 * @param OutputInterface $output
	 * @param \Doctrine\ORM\EntityManager $em
	 */
	protected function updateCounts(OutputInterface $output, $em)
	{
		$this->updateTextCountByLabels($output, $em);
		$this->updateTextCountByLabelsParents($output, $em);
		$this->updateCommentCountByTexts($output, $em);
		$this->updateBookCountByCategories($output, $em);
		// disable for now, TODO fix pagination by parent categories
		//$this->updateBookCountByCategoriesParents($output, $em);
	}
}
<file_sep><?php
// Bulgarian section labels keyed by "library_*" identifiers.
// Presumably used as menu/section headings — verify against the consumer of this file.
return array(
	'library_main' => 'Основни',
	'library_secondary' => 'Помощни',
	'library_links' => 'Връзки',
);
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Join entity between a Book and a Text: one record for every text
 * contained in a book.
 *
 * @ORM\Entity
 * @ORM\Table(name="book_text")
 */
class BookText extends Entity
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;
	/**
	 * The book side of the link.
	 * @var integer $book
	 * @ORM\ManyToOne(targetEntity="Book", inversedBy="bookTexts")
	 */
	private $book;
	/**
	 * The text side of the link.
	 * @var integer $text
	 * @ORM\ManyToOne(targetEntity="Text", inversedBy="bookTexts")
	 */
	private $text;
	/**
	 * Position of the text inside the book.
	 * @var integer $pos
	 * @ORM\Column(type="smallint", nullable=true)
	 */
	private $pos;
	/**
	 * When true, the book's extra info is shared with (attached to) the text;
	 * see Text::getBook() and Text::getBookExtraInfo() which check this flag.
	 * @var boolean $share_info
	 * @ORM\Column(type="boolean")
	 */
	private $share_info = true;
	public function getId() { return $this->id; }
	public function setBook($book) { $this->book = $book; }
	public function getBook() { return $this->book; }
	public function setText($text) { $this->text = $text; }
	public function getText() { return $this->text; }
	public function setPos($pos) { $this->pos = $pos; }
	public function getPos() { return $this->pos; }
	public function setShareInfo($shareInfo) { $this->share_info = $shareInfo; }
	public function getShareInfo() { return $this->share_info; }
}
<file_sep><?php
namespace Chitanka\LibBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
// use Symfony\Component\DependencyInjection\ContainerInterface;
// use Symfony\Component\DependencyInjection\ParameterBag\ParameterBagInterface;
/**
 * The main bundle of the Chitanka library application.
 */
class LibBundle extends Bundle
{
	/**
	 * {@inheritdoc}
	 */
	public function getNamespace()
	{
		return __NAMESPACE__;
	}
	/**
	 * {@inheritdoc}
	 */
	public function getPath()
	{
		// normalize Windows directory separators to forward slashes
		return str_replace('\\', '/', __DIR__);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * An entry in the workroom: a book or text which a user is currently
 * preparing (scanning / proofreading) for the library.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\WorkEntryRepository")
 * @ORM\Table(name="work_entry",
 *	indexes={
 *		@ORM\Index(name="title_idx", columns={"title"}),
 *		@ORM\Index(name="author_idx", columns={"author"}),
 *		@ORM\Index(name="status_idx", columns={"status"}),
 *		@ORM\Index(name="date_idx", columns={"date"})}
 * )
 */
class WorkEntry extends Entity
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;
	/**
	 * @var integer $type
	 * @ORM\Column(type="smallint")
	 */
	private $type;
	/**
	 * @var string $title
	 * @ORM\Column(type="string", length=100)
	 */
	private $title;
	/**
	 * @var string $author
	 * @ORM\Column(type="string", length=100, nullable=true)
	 */
	private $author;
	/**
	 * Year of publication on paper or in e-format
	 * @ORM\Column(type="smallint", nullable=true)
	 */
	private $pubYear;
	/**
	 * Publisher of the book
	 * @ORM\Column(type="string", length=100, nullable=true)
	 */
	private $publisher;
	/**
	 * The user who works on this entry.
	 * @var integer $user
	 * @ORM\ManyToOne(targetEntity="User")
	 */
	private $user;
	/**
	 * @var string $comment
	 * @ORM\Column(type="text")
	 */
	private $comment;
	/**
	 * @var datetime $date
	 * @ORM\Column(type="datetime")
	 */
	private $date;
	/**
	 * @var integer $status
	 * @ORM\Column(type="smallint")
	 */
	private $status = 0;
	/**
	 * Work progress in percent.
	 * @var integer $progress
	 * @ORM\Column(type="smallint")
	 */
	private $progress = 0;
	/**
	 * @var boolean $is_frozen
	 * @ORM\Column(type="boolean")
	 */
	private $is_frozen = false;
	/**
	 * @var string $tmpfiles
	 * @ORM\Column(type="string", length=255, nullable=true)
	 */
	private $tmpfiles;
	/**
	 * @var integer $tfsize
	 * @ORM\Column(type="smallint", nullable=true)
	 */
	private $tfsize;
	/**
	 * @var string $uplfile
	 * @ORM\Column(type="string", length=255, nullable=true)
	 */
	private $uplfile;
	/**
	 * Every user gets an automatic e-mail if his entry reaches some predefined
	 * period without updates. Here we track the date of the most recent notification.
	 * @ORM\Column(type="datetime", nullable=true)
	 */
	private $last_notification_date;
	/**
	 * A status managed and seen only from the administrator
	 * @ORM\Column(type="string", length=100, nullable=true)
	 */
	private $admin_status;
	/**
	 * A comment managed and seen only from the administrator
	 * @ORM\Column(type="text", nullable=true)
	 */
	private $admin_comment;
	/**
	 * Soft-delete marker; null means the entry is not deleted.
	 * @var datetime
	 * @ORM\Column(type="datetime", nullable=true)
	 */
	private $deleted_at;
	/**
	 * @ORM\OneToMany(targetEntity="WorkContrib", mappedBy="entry")
	 */
	private $contribs;
	/**
	 * @ORM\OneToOne(targetEntity="Thread", inversedBy="workEntry")
	 */
	private $comment_thread;
	public function __toString()
	{
		return $this->getTitle();
	}
	public function getId() { return $this->id; }
	public function setType($type) { $this->type = $type; }
	public function getType() { return $this->type; }
	public function setTitle($title) { $this->title = $title; }
	public function getTitle() { return $this->title; }
	public function setAuthor($author) { $this->author = $author; }
	public function getAuthor() { return $this->author; }
	public function setPublisher($publisher) { $this->publisher = $publisher; }
	public function getPublisher() { return $this->publisher; }
	public function setPubYear($pubYear) { $this->pubYear = $pubYear; }
	public function getPubYear() { return $this->pubYear; }
	public function setUser($user) { $this->user = $user; }
	/** @return User */
	public function getUser() { return $this->user; }
	public function setComment($comment) { $this->comment = $comment; }
	public function getComment() { return $this->comment; }
	public function setDate($date) { $this->date = $date; }
	public function getDate() { return $this->date; }
	public function setStatus($status) { $this->status = $status; }
	public function getStatus() { return $this->status; }
	public function setProgress($progress) { $this->progress = $progress; }
	public function getProgress() { return $this->progress; }
	public function setIsFrozen($isFrozen) { $this->is_frozen = $isFrozen; }
	public function getIsFrozen() { return $this->is_frozen; }
	public function setTmpfiles($tmpfiles) { $this->tmpfiles = $tmpfiles; }
	public function getTmpfiles() { return $this->tmpfiles; }
	public function setTfsize($tfsize) { $this->tfsize = $tfsize; }
	public function getTfsize() { return $this->tfsize; }
	public function setUplfile($uplfile) { $this->uplfile = $uplfile; }
	public function getUplfile() { return $this->uplfile; }
	public function setLastNotificationDate($date) { $this->last_notification_date = $date; }
	public function getLastNotificationDate() { return $this->last_notification_date; }
	public function setAdminStatus($admin_status) { $this->admin_status = $admin_status; }
	public function getAdminStatus() { return $this->admin_status; }
	public function setAdminComment($admin_comment) { $this->admin_comment = $admin_comment; }
	public function getAdminComment() { return $this->admin_comment; }
	/**
	 * Tells whether a stale-entry notification was already sent within the
	 * given interval (a strtotime()-compatible string, e.g. "2 weeks").
	 */
	public function isNotifiedWithin($interval)
	{
		if ($this->getLastNotificationDate() === null) {
			return false;
		}
		return $this->getLastNotificationDate() > new \DateTime("-$interval");
	}
	public function setCommentThread(Thread $thread)
	{
		$this->comment_thread = $thread;
		return $this;
	}
	public function getCommentThread() { return $this->comment_thread; }
	public function getDeletedAt() { return $this->deleted_at; }
	public function setDeletedAt($deleted_at) { $this->deleted_at = $deleted_at; }
	// Soft delete: only marks the entry as deleted by stamping the current time.
	public function delete()
	{
		$this->setDeletedAt(new \DateTime);
	}
	public function isDeleted()
	{
		return $this->deleted_at !== null;
	}
	public function getContribs() { return $this->contribs; }
	/**
	 * Returns the contributions which are neither finished nor deleted.
	 */
	public function getOpenContribs()
	{
		$openContribs = array();
		foreach ($this->getContribs() as $contrib/*@var $contrib WorkContrib*/) {
			if ( ! $contrib->isFinished() && ! $contrib->isDeleted()) {
				$openContribs[] = $contrib;
			}
		}
		return $openContribs;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Listener;
use Doctrine\ORM\Event\PreUpdateEventArgs;
use Chitanka\LibBundle\Entity\TextRating;
/**
 * Doctrine lifecycle listener which keeps the cached average rating of a
 * text in sync when a user's rating of it changes.
 */
class DoctrineListener
{
	public function preUpdate(PreUpdateEventArgs $eventArgs)
	{
		// NOTE: a leftover debug statement ("echo '#####';exit;") used to abort
		// the process here on every preUpdate event; it has been removed.
		if ($eventArgs->getEntity() instanceof TextRating) {
			if ($eventArgs->hasChangedField('rating')) {
				$text = $eventArgs->getEntity()->getText();
				$text->updateAvgRating($eventArgs->getNewValue('rating'), $eventArgs->getOldValue('rating'));
				$eventArgs->getEntityManager()->persist($text);
			}
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
/**
 * Page which lists content files modified during the last N days and can
 * optionally copy them into an "./update" directory.
 */
class LsPage extends Page {
	protected
		$action = 'ls',
		$maxSaveSize = 20000000,
		$dirs = array(
			// key => last_dir_level
			'text' => 1,
			'text-info' => 1,
			'text-anno' => 1,
			'cover' => 1,
			'book' => 1,
			'book-anno' => 1,
			'book-info' => 1,
			'book-img' => 2,
			'book-cover' => 1,
			'wiki' => 0,
			'info' => 0,
			'user' => 0,
			'img' => 2,
			'pic' => 2,
		)
	;
	public function __construct() {
		parent::__construct();
		$this->title = 'Преглед на файлове';
		$this->dir = $this->request->value('dir', 'text', 1);
		$this->days = (float) $this->request->value('days', 7, 2);
		$this->copy = (int) $this->request->value('copy', 0);
		if ( $this->copy ) {
			set_time_limit(600); // allow up to 10 minutes for copying files
		}
	}
	/**
	 * Builds the file listing for the selected content dir(s),
	 * newest modification times first.
	 */
	protected function buildContent() {
		global $contentDirs;
		$dirs = $this->dir == '*'
			? $this->dirs
			: array( $this->dir => $this->dirs[ $this->dir ] );
		$o = '';
		foreach ($dirs as $dir => $last_dir_level) {
			$dir = $contentDirs[$dir];
			$this->files = array();
			$this->starttime = time() - $this->days * 24*60*60;
			$this->processDir($dir, $last_dir_level);
			ksort($this->files, SORT_NUMERIC);
			$files = array_reverse($this->files, true);
			$o .= "<h2>$dir</h2>";
			foreach ($this->files as $mtime => $tfiles) {
				$date = date('Y-m-d H:i:s', $mtime);
				foreach ($tfiles as $file) {
					$link = $this->out->link($this->rootd.'/'.$file, $file);
					$o .= "$date $link\n";
				}
			}
		}
		return $this->makeForm() . '<pre>'. $o .'</pre>';
	}
	/**
	 * Recursively collects into $this->files the files under $dir which were
	 * modified after $this->starttime, optionally copying them.
	 *
	 * @param string $dir
	 * @param int $last_dir_level Directory level at which an unmodified
	 *                            directory may be skipped as a whole
	 * @param int $level Current recursion level
	 */
	protected function processDir($dir, $last_dir_level = 1, $level = 1) {
		$dir = rtrim($dir, '/');
		$tfiles = scandir($dir);
		foreach ($tfiles as $tfile) {
			if ($tfile[0] == '.') { continue; }
			$fullname = $dir .'/'. $tfile;
			$mtime = filemtime($fullname);
			if ( is_dir($fullname) ) {
				if ( $level == $last_dir_level && $mtime < $this->starttime ) {
					// there are no changed files in this directory, skip it
					continue;
				}
				// Fixed: previously $level + 1 was passed as $last_dir_level
				// (second argument), which reset $level to 1 on every recursion
				// and effectively disabled the skip optimization above.
				$this->processDir($fullname, $last_dir_level, $level + 1);
				continue;
			}
			if ( $mtime > $this->starttime ) {
				$this->files[$mtime][] = $fullname;
				if ( $this->copy ) {
					$destfile = './update'.strstr($fullname, '/');
					File::mycopy($fullname, $destfile);
				}
			}
		}
	}
	/**
	 * Copies $srcfile to $destfile.N chunks, each at most $this->maxSaveSize
	 * bytes (split on line boundaries).
	 */
	protected function splitCopyFile($srcfile, $destfile) {
		$fp = fopen($srcfile, 'r');
		$i = 1;
		$cursize = 0;
		$cont = '';
		while ( !feof($fp) ) {
			$line = fgets($fp);
			$cursize += strlen($line);
			$cont .= $line;
			if ( $cursize > $this->maxSaveSize ) {
				file_put_contents($destfile.'.'.$i, $cont);
				$cont = '';
				$cursize = 0;
				$i++;
			}
		}
		fclose($fp);
		if ( !empty($cont) ) {
			file_put_contents($destfile.'.'.$i, $cont);
		}
	}
	/**
	 * Renders the "days" filter form.
	 * Fixed: the input had an empty type attribute (type=""); it is now
	 * an explicit text input.
	 */
	protected function makeForm() {
		return <<<EOS
<form action="" method="get">
<div>
	Файловете, променени през последните
	<input type="text" id="days" name="days" size="2" value="$this->days" />
	<label for="days">дни</label>
</div>
</form>
EOS;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Doctrine\Common\Collections\ArrayCollection;
use Symfony\Component\Validator\Constraints as Assert;
use Symfony\Component\HttpFoundation\File\UploadedFile;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\File;
use Chitanka\LibBundle\Util\Language;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Legacy\SfbParserSimple;
use Sfblib_SfbConverter as SfbConverter;
use Sfblib_SfbToHtmlConverter as SfbToHtmlConverter;
use Sfblib_SfbToFb2Converter as SfbToFb2Converter;
/**
* @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\TextRepository")
* @ORM\HasLifecycleCallbacks
* @ORM\Table(name="text",
* indexes={
* @ORM\Index(name="title_idx", columns={"title"}),
* @ORM\Index(name="subtitle_idx", columns={"subtitle"}),
* @ORM\Index(name="orig_title_idx", columns={"orig_title"}),
* @ORM\Index(name="orig_subtitle_idx", columns={"orig_subtitle"}),
* @ORM\Index(name="type_idx", columns={"type"}),
* @ORM\Index(name="lang_idx", columns={"lang"}),
* @ORM\Index(name="orig_lang_idx", columns={"orig_lang"})}
* )
*/
class Text extends BaseWork
{
/**
* @ORM\Column(type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="CUSTOM")
* @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
*/
protected $id;
/**
* @var string $slug
* @ORM\Column(type="string", length=50)
*/
private $slug;
/**
* @var string $title
* @ORM\Column(type="string", length=255)
*/
private $title = '';
/**
* @var string $subtitle
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $subtitle;
/**
* @var string $lang
* @ORM\Column(type="string", length=2)
*/
private $lang = 'bg';
/**
* @var integer $trans_year
* @ORM\Column(type="smallint", nullable=true)
*/
private $trans_year;
/**
* @var integer $trans_year2
* @ORM\Column(type="smallint", nullable=true)
*/
private $trans_year2;
/**
* @var string $orig_title
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $orig_title;
/**
* @var string $orig_subtitle
* @ORM\Column(type="string", length=255, nullable=true)
*/
private $orig_subtitle;
/**
* @var string $orig_lang
* @ORM\Column(type="string", length=3)
*/
private $orig_lang;
/**
* @var integer $year
* @ORM\Column(type="smallint", nullable=true)
*/
private $year;
/**
* @var integer $year2
* @ORM\Column(type="smallint", nullable=true)
*/
private $year2;
/**
* @var integer $orig_license
* @ORM\ManyToOne(targetEntity="License")
*/
private $orig_license;
/**
* @var integer $trans_license
* @ORM\ManyToOne(targetEntity="License")
*/
private $trans_license;
/**
* @var string $type
* @ORM\Column(type="string", length=14)
*/
private $type;
/**
* @var integer $series
* @ORM\ManyToOne(targetEntity="Series", inversedBy="texts")
*/
private $series;
/**
* @var integer $sernr
* @ORM\Column(type="smallint", nullable=true)
*/
private $sernr;
/**
* @var integer $sernr2
* @ORM\Column(type="smallint", nullable=true)
*/
private $sernr2;
/**
* @var integer $headlevel
* @ORM\Column(type="smallint")
*/
private $headlevel = 0;
/**
* @var integer $size
* @ORM\Column(type="integer")
*/
private $size;
/**
* @var integer $zsize
* @ORM\Column(type="integer")
*/
private $zsize;
/**
* @var date
* @ORM\Column(type="date")
*/
private $created_at;
/**
* @var string
* @ORM\Column(type="string", length=1000, nullable=true)
*/
private $source;
/**
* @var integer $cur_rev
* @ORM\ManyToOne(targetEntity="TextRevision")
*/
private $cur_rev;
/**
* @var integer $dl_count
* @ORM\Column(type="integer")
*/
private $dl_count = 0;
/**
* @var integer $read_count
* @ORM\Column(type="integer")
*/
private $read_count = 0;
/**
* @var integer $comment_count
* @ORM\Column(type="integer")
*/
private $comment_count = 0;
/**
* @var float $rating
* @ORM\Column(type="float")
*/
private $rating = 0;
/**
* @var integer $votes
* @ORM\Column(type="integer")
*/
private $votes = 0;
/**
* @var boolean $has_anno
* @ORM\Column(type="boolean")
*/
private $has_anno = false;
/**
* @var boolean
* @ORM\Column(type="boolean")
*/
private $has_cover = false;
/*
* @var boolean
* @ORM\Column(type="boolean")
*/
private $has_title_note;
/**
* @var boolean
* @ORM\Column(type="boolean")
*/
private $is_compilation = false;
/**
* An extra note about the text
* @ORM\Column(type="string", length=100, nullable=true)
*/
private $note;
/**
* A notice if the content is removed
* @ORM\Column(type="text", nullable=true)
*/
private $removed_notice;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="TextAuthor", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
* @ORM\OrderBy({"pos" = "ASC"})
*/
private $textAuthors;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="TextTranslator", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
* @ORM\OrderBy({"pos" = "ASC"})
*/
private $textTranslators;
/**
* @var ArrayCollection
*/
private $authors;
/**
* Comma separated list of author names
*/
private $authorNames;
/**
* Comma separated list of author original names
*/
private $authorOrigNames;
/**
* @var ArrayCollection
*/
private $translators;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="BookText", mappedBy="text")
*/
private $bookTexts;
/** FIXME doctrine:schema:create does not allow this relation
* @var ArrayCollection
* @ORM\ManyToMany(targetEntity="Book", mappedBy="texts")
* @ORM\JoinTable(name="book_text",
* joinColumns={@ORM\JoinColumn(name="text_id", referencedColumnName="id")},
* inverseJoinColumns={@ORM\JoinColumn(name="book_id", referencedColumnName="id")})
* @ORM\OrderBy({"title" = "ASC"})
*/
private $books;
/**
* @var ArrayCollection
* @ORM\ManyToMany(targetEntity="Label", inversedBy="texts")
* @ORM\OrderBy({"name" = "ASC"})
*/
private $labels;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="TextHeader", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
* @ORM\OrderBy({"nr" = "ASC"})
*/
private $headers;
/** FIXME doctrine:schema:create does not allow this relation
* @var ArrayCollection
* @ORM\ManyToMany(targetEntity="User", inversedBy="readTexts")
* @ORM\JoinTable(name="user_text_read",
* joinColumns={@ORM\JoinColumn(name="text_id")},
* inverseJoinColumns={@ORM\JoinColumn(name="user_id")})
*/
private $readers;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="UserTextContrib", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
*/
private $userContribs;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="TextRevision", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
*/
private $revisions;
/**
* @var ArrayCollection
* @ORM\OneToMany(targetEntity="TextLink", mappedBy="text", cascade={"persist", "remove"}, orphanRemoval=true)
*/
private $links;
/**
* @ORM\Column(type="array", nullable=true)
*/
private $alikes;
	/**
	 * @param int|null $id Explicit identifier; normally left null and
	 *                     generated by CustomIdGenerator
	 */
	public function __construct($id = null)
	{
		$this->id = $id;
		// initialize all to-many associations as empty collections
		$this->textAuthors = new ArrayCollection;
		$this->textTranslators = new ArrayCollection;
		$this->authors = new ArrayCollection;
		$this->translators = new ArrayCollection;
		$this->bookTexts = new ArrayCollection;
		$this->books = new ArrayCollection;
		$this->labels = new ArrayCollection;
		$this->headers = new ArrayCollection;
		$this->readers = new ArrayCollection;
		$this->userContribs = new ArrayCollection;
		$this->links = new ArrayCollection;
//		if ( empty($this->year) ) {
//			$this->year = $this->author_year;
//		}
//		if ( empty($this->trans_year) ) {
//			$this->trans_year = $this->translator_year;
//		}
//		$this->subtitle = strtr($this->subtitle, array('\n' => self::TITLE_NEW_LINE));
	}
public function __toString()
{
return $this->getTitle();
//return "$this->id";
}
public function getId() { return $this->id; }
public function setSlug($slug) { $this->slug = String::slugify($slug); }
public function getSlug() { return $this->slug; }
public function setTitle($title) { $this->title = $title; }
public function getTitle() { return $this->title; }
public function setSubtitle($subtitle) { $this->subtitle = $subtitle; }
public function getSubtitle() { return $this->subtitle; }
public function setLang($lang) { $this->lang = $lang; }
public function getLang() { return $this->lang; }
public function setTransYear($transYear) { $this->trans_year = $transYear; }
public function getTransYear() { return $this->trans_year; }
public function trans_year() { return $this->trans_year; }
public function setTransYear2($transYear2) { $this->trans_year2 = $transYear2; }
public function getTransYear2() { return $this->trans_year2; }
public function trans_year2() { return $this->trans_year2; }
public function setOrigTitle($origTitle) { $this->orig_title = $origTitle; }
public function getOrigTitle() { return $this->orig_title; }
public function orig_title() { return $this->orig_title; }
public function setOrigSubtitle($origSubtitle) { $this->orig_subtitle = $origSubtitle; }
public function getOrigSubtitle() { return $this->orig_subtitle; }
public function orig_subtitle() { return $this->orig_subtitle; }
public function setOrigLang($origLang) { $this->orig_lang = $origLang; }
public function getOrigLang() { return $this->orig_lang; }
public function orig_lang() { return $this->orig_lang; }
public function setYear($year) { $this->year = $year; }
public function getYear() { return $this->year; }
public function setYear2($year2) { $this->year2 = $year2; }
public function getYear2() { return $this->year2; }
public function setOrigLicense($origLicense) { $this->orig_license = $origLicense; }
public function getOrigLicense() { return $this->orig_license; }
public function orig_license() { return $this->orig_license; }
public function getOrigLicenseCode()
{
return $this->orig_license ? $this->orig_license->getCode() : null;
}
public function setTransLicense($transLicense) { $this->trans_license = $transLicense; }
public function getTransLicense() { return $this->trans_license; }
public function trans_license() { return $this->trans_license; }
public function getTransLicenseCode()
{
return $this->trans_license ? $this->trans_license->getCode() : null;
}
public function setType($type) { $this->type = $type; }
public function getType() { return $this->type; }
public function setCover($cover) { $this->cover = $cover; }
//public function getCover() { return $this->cover; }
public function hasCover() {
return false;
}
public function setSeries($series) { $this->series = $series; }
public function getSeries() { return $this->series; }
public function getSeriesSlug()
{
return $this->series ? $this->series->getSlug() : null;
}
public function setSernr($sernr) { $this->sernr = $sernr; }
public function getSernr() { return $this->sernr; }
public function setSernr2($sernr2) { $this->sernr2 = $sernr2; }
public function getSernr2() { return $this->sernr2; }
public function setCreatedAt($created_at) { $this->created_at = $created_at; }
public function getCreatedAt() { return $this->created_at; }
public function setSource($source) { $this->source = $source; }
public function getSource() { return $this->source; }
public function setCurRev($curRev) { $this->cur_rev = $curRev; }
public function getCurRev() { return $this->cur_rev; }
public function setDlCount($dlCount) { $this->dl_count = $dlCount; }
public function getDlCount() { return $this->dl_count; }
public function setReadCount($readCount) { $this->read_count = $readCount; }
public function getReadCount() { return $this->read_count; }
public function setCommentCount($commentCount) { $this->comment_count = $commentCount; }
public function getCommentCount() { return $this->comment_count; }
public function setRating($rating) { $this->rating = $rating; }
public function getRating() { return $this->rating; }
public function setVotes($votes) { $this->votes = $votes; }
public function getVotes() { return $this->votes; }
public function setHasAnno($hasAnno) { $this->has_anno = $hasAnno; }
public function getHasAnno() { return $this->has_anno; }
// public function setHasTitleNote($hasTitleNote) { $this->has_title_note = $hasTitleNote; }
// public function getHasTitleNote() { return $this->has_title_note; }
public function isCompilation() { return $this->is_compilation; }
public function setNote($note) { $this->note = $note; }
public function getNote() { return $this->note; }
public function setRemovedNotice($removed_notice) { $this->removed_notice = $removed_notice; }
public function getRemovedNotice() { return $this->removed_notice; }
public function getUserContribs() { return $this->userContribs; }
public function setUserContribs($userContribs) { $this->userContribs = $userContribs; }
public function addUserContrib(UserTextContrib $userContrib) {
$this->userContribs[] = $userContrib;
}
public function removeUserContrib(UserTextContrib $userContrib) {
$this->userContribs->removeElement($userContrib);
}
public function addAuthor(Person $author) { $this->authors[] = $author; }
	/**
	 * Returns the authors of the text: the persons of all TextAuthor links
	 * with a non-negative position.
	 *
	 * NOTE(review): the constructor initializes $this->authors to an empty
	 * ArrayCollection, so the lazy build below only runs when the object was
	 * created without the constructor (e.g. hydrated by Doctrine) — confirm.
	 *
	 * @return array|\Doctrine\Common\Collections\ArrayCollection
	 */
	public function getAuthors()
	{
		if (!isset($this->authors)) {
			$this->authors = array();
			foreach ($this->getTextAuthors() as $author) {
				if ($author->getPos() >= 0) {
					$this->authors[] = $author->getPerson();
				}
			}
		}
		return $this->authors;
	}
public function addTranslator(Person $translator) { $this->translators[] = $translator; }
public function getTranslators()
{
if (!isset($this->translators)) {
$this->translators = array();
foreach ($this->getTextTranslators() as $translator) {
if ($translator->getPos() >= 0) {
$this->translators[] = $translator->getPerson();
}
}
}
return $this->translators;
}
public function addTextAuthor(TextAuthor $textAuthor)
{
$this->textAuthors[] = $textAuthor;
}
public function removeTextAuthor(TextAuthor $textAuthor)
{
$this->textAuthors->removeElement($textAuthor);
}
// TODO needed by admin; why?
public function addTextAuthors(TextAuthor $textAuthor) { $this->addTextAuthor($textAuthor); }
public function setTextAuthors($textAuthors) { $this->textAuthors = $textAuthors; }
public function getTextAuthors() { return $this->textAuthors; }
public function addTextTranslator(TextTranslator $textTranslator)
{
$this->textTranslators[] = $textTranslator;
}
public function removeTextTranslator(TextTranslator $textTranslator)
{
$this->textTranslators->removeElement($textTranslator);
}
// TODO needed by admin; why?
public function addTextTranslators(TextTranslator $textTranslator) { $this->addTextTranslator($textTranslator); }
public function setTextTranslators($textTranslators) { $this->textTranslators = $textTranslators; }
public function getTextTranslators() { return $this->textTranslators; }
public function addBook(Book $book) { $this->books[] = $book; }
public function getBooks() { return $this->books; }
public function getRevisions() { return $this->revisions; }
public function addRevision(TextRevision $revision)
{
$this->revisions[] = $revision;
}
public function setLinks($links) { $this->links = $links; }
public function getLinks() { return $this->links; }
public function addLink(TextLink $link)
{
$this->links[] = $link;
}
// needed by SonataAdmin
public function addLinks(TextLink $link)
{
$this->addLink($link);
}
public function removeLink(TextLink $link)
{
$this->links->removeElement($link);
}
public function setAlikes($alikes) { $this->alikes = $alikes; }
public function getAlikes() { return $this->alikes; }
	/**
	 * Return the main book for the text.
	 *
	 * The main book is the first linked book whose BookText record has
	 * share_info enabled. Returns false when there is no such book.
	 * The result is cached in $this->_book.
	 */
	public function getBook()
	{
		if ( ! isset($this->_book)) {
			$this->_book = false;
			foreach ($this->bookTexts as $bookText) {
				if ($bookText->getShareInfo()) {
					$this->_book = $bookText->getBook();
					break;
				}
			}
		}
		return $this->_book;
	}
public function addLabel(Label $label) { $this->labels[] = $label; }
public function getLabels() { return $this->labels; }
public function addReader(User $reader) { $this->readers[] = $reader; }
public function getReaders() { return $this->readers; }
static protected $annotationDir = 'text-anno';
static protected $infoDir = 'text-info';
public function getDocId()
{
return 'http://chitanka.info/text/' . $this->id;
}
public function getYearHuman() {
$year2 = empty($this->year2) ? '' : '–'. abs($this->year2);
return $this->year >= 0
? $this->year . $year2
: abs($this->year) . $year2 .' пр.н.е.';
}
public function getTransYearHuman() {
return $this->trans_year . (empty($this->trans_year2) ? '' : '–'.$this->trans_year2);
}
public function getAuthorNameEscaped()
{
if ( preg_match('/[a-z]/', $this->getAuthorOrigNames()) ) {
return Legacy::removeDiacritics( Char::cyr2lat($this->getAuthorOrigNames()) );
}
return Char::cyr2lat($this->getAuthorNames());
}
public function isGamebook()
{
return $this->type == 'gamebook';
}
public function isTranslation()
{
return $this->lang != $this->orig_lang;
}
public function getAuthorNames()
{
if ( ! isset($this->authorNames)) {
$this->authorNames = '';
foreach ($this->getAuthors() as $author) {
$this->authorNames .= $author->getName() . ', ';
}
$this->authorNames = rtrim($this->authorNames, ', ');
}
return $this->authorNames;
}
public function getAuthorsPlain()
{
return $this->getAuthorNames();
}
public function getAuthorOrigNames()
{
if ( ! isset($this->authorOrigNames)) {
$this->authorOrigNames = '';
foreach ($this->getAuthors() as $author) {
$this->authorOrigNames .= $author->getOrigName() . ', ';
}
$this->authorOrigNames = rtrim($this->authorOrigNames, ', ');
}
return $this->authorOrigNames;
}
public function getAuthorSlugs()
{
if ( ! isset($this->authorSlugs)) {
$this->authorSlugs = array();
foreach ($this->getAuthors() as $author) {
$this->authorSlugs[] = $author->getSlug();
}
}
return $this->authorSlugs;
}
public function getTranslatorSlugs()
{
if ( ! isset($this->translatorSlugs)) {
$this->translatorSlugs = array();
foreach ($this->getTranslators() as $translator) {
$this->translatorSlugs[] = $translator->getSlug();
}
}
return $this->translatorSlugs;
}
	/**
	 * Returns the title (and subtitle, if any) formatted as SFB title lines
	 * ("|<TAB>…"). A trailing "*" is appended when the text has a title note.
	 */
	public function getTitleAsSfb() {
		$title = "|\t" . $this->escapeForSfb($this->title);
		if ( !empty($this->subtitle) ) {
			// both the internal TITLE_NEW_LINE marker and a literal "\n"
			// sequence start a new SFB title line
			$title .= "\n|\t" . strtr($this->escapeForSfb($this->subtitle), array(
				self::TITLE_NEW_LINE => "\n|\t",
				'\n' => "\n|\t",
			));
		}
		if ( $this->hasTitleNote() ) {
			$title .= '*';
		}
		return $title;
	}
public function getTitleAsHtml($cnt = 0)
{
$title = $this->getTitle();
if ( $this->hasTitleNote() ) {
$suffix = SfbConverter::createNoteIdSuffix($cnt, 0);
$title .= sprintf('<sup id="ref_%s" class="ref"><a href="#note_%s">[0]</a></sup>', $suffix, $suffix);
}
return "<h1>$title</h1>";
}
public function escapeForSfb($string)
{
return strtr($string, array(
'*' => '\*',
));
}
	/**
	 * Tells whether the text content contains a note attached to its title.
	 * The answer is computed once by the SFB-to-HTML converter and cached
	 * in $this->_hasTitleNote.
	 *
	 * @return bool
	 */
	public function hasTitleNote()
	{
		if ( ! is_null( $this->_hasTitleNote ) ) {
			return $this->_hasTitleNote;
		}
		$conv = new SfbToHtmlConverter( Legacy::getInternalContentFilePath( 'text', $this->id ) );
		return $this->_hasTitleNote = $conv->hasTitleNote();
	}
public function getOrigTitleAsSfb() {
if ( $this->orig_lang == $this->lang ) {
return '';
}
$authors = '';
foreach ($this->authors as $author) {
$authors .= ', '. $author->getOrigName();
}
$authors = ltrim($authors, ', ');
$orig_title = $this->orig_title;
if ( !empty($this->orig_subtitle) ) {
$orig_title .= " ({$this->orig_subtitle})";
}
$orig_title .= ', '. $this->getYearHuman();
$orig_title = ltrim($orig_title, ', ');
return rtrim("\t$authors\n\t$orig_title");
}
// TODO remove
public function getCover($width = null)
{
return null;
}
public function getImages()
{
return $this->getImagesFromDir(Legacy::getInternalContentFilePath('img', $this->id));
}
public function getThumbImages()
{
return $this->getImagesFromDir(Legacy::getInternalContentFilePath('img', $this->id) . '/thumb');
}
/**
 * Collect the files directly inside a directory, treating them as images.
 * Hidden entries (dot-prefixed), archives and sub-directories are skipped.
 * @param string $dir
 * @return array Full paths of the found files, in directory order
 */
public function getImagesFromDir($dir)
{
	$found = array();
	if ( ! is_dir($dir)) {
		return $found;
	}
	$handle = opendir($dir);
	if ( ! $handle) {
		return $found;
	}
	while (($entry = readdir($handle)) !== false) {
		$path = "$dir/$entry";
		$isHidden = $entry[0] == '.';
		if ( ! $isHidden && ! File::isArchive($entry) && ! is_dir($path)) {
			$found[] = $path;
		}
	}
	closedir($handle);
	return $found;
}
public function getFullExtraInfo()
{
return $this->getExtraInfo() . $this->getBookExtraInfo();
}
public function getFullExtraInfoForHtml($imgDirPrefix = '')
{
return $this->_getContentHtml($this->getFullExtraInfo(), $imgDirPrefix);
}
public function getAnnotationHtml($imgDirPrefix = '')
{
return $this->_getContentHtml($this->getAnnotation(), $imgDirPrefix);
}
protected function _getContentHtml($content, $imgDirPrefix)
{
$imgDir = $imgDirPrefix . Legacy::getContentFilePath('img', $this->id);
$conv = new SfbToHtmlConverter($content, $imgDir);
return $conv->convert()->getContent();
}
public function getBookExtraInfo() {
$info = '';
foreach ($this->bookTexts as $bookText) {
if ($bookText->getShareInfo()) {
$file = Legacy::getInternalContentFilePath('book-info', $bookText->getBook()->getId());
if ( file_exists($file) ) {
$info .= "\n\n" . file_get_contents($file);
}
}
}
return $info;
}
public function getPlainTranslationInfo()
{
if ($this->lang == $this->orig_lang) {
return '';
}
$lang = Language::langName($this->orig_lang, false);
if ( ! empty($lang) ) $lang = ' от '.$lang;
$translator = empty($this->translator_name) ? '[Неизвестен]' : $this->translator_name;
$year = $this->getTransYearHuman();
if (empty($year)) $year = '—';
return sprintf('Превод%s: %s, %s', $lang, $translator, $year);
}
public function getPlainSeriesInfo()
{
if (empty($this->series)) {
return null;
}
return sprintf('Част %d от „%s“', $this->sernr, $this->series->getName());
}
public function getNextFromSeries() {
if ( empty($this->series) ) {
return false;
}
$dbkey = array('series_id' => $this->seriesId);
if ($this->sernr == 0) {
$dbkey['t.id'] = array('>', $this->id);
} else {
$dbkey[] = 'sernr = '. ($this->sernr + 1)
. " OR (sernr > $this->sernr AND t.id > $this->id)";
}
return self::newFromDB($dbkey);
}
public function getNextFromBooks() {
$nextWorks = array();
foreach ($this->books as $id => $book) {
$nextWorks[$id] = $this->getNextFromBook($id);
}
return $nextWorks;
}
public function getNextFromBook($book) {
if ( empty($this->books[$book]) ) {
return false;
}
$bookDescr = Legacy::getContentFile('book', $book);
if ( preg_match('/\{'. $this->id . '\}\n\{(\d+)\}/m', $bookDescr, $m) ) {
return self::newFromId($m[1]);
}
return false;
}
public function getPrefaceOfBook($book) {
if ( empty($this->books[$book]) || $this->type == 'intro' ) {
return false;
}
$subkey = array('book_id' => $book);
$subquery = Setup::db()->selectQ(DBT_BOOK_TEXT, $subkey, 'text_id');
$dbkey = array("t.id IN ($subquery)", 't.type' => 'intro');
return self::newFromDB($dbkey);
}
/**
Return fiction book info for this work
*/
public function getFbi()
{
return $this->getFbiMain()
. "\n" . $this->getFbiOriginal()
. "\n" . $this->getFbiDocument()
//. "\n" . $this->getFbiEdition() // not implemented
;
}
protected function getFbiMain()
{
$authors = '';
foreach ($this->getAuthors() as $author) {
$authors .= "\n|Автор = " . $author->getName();
}
$title = $this->title;
if ( ! empty( $this->subtitle ) ) {
$subtitle = strtr($this->subtitle, array(self::TITLE_NEW_LINE => ', '));
$title .= ' (' . trim($subtitle, '()') . ')';
}
$anno = $this->getAnnotation();
$translators = '';
foreach ($this->getTextTranslators() as $textTranslator) {
$year = $textTranslator->getYear() ?: $this->trans_year;
$name = $textTranslator->getPerson()->getName();
$translators .= "\n|Преводач = $name [&$year]";
}
$series = empty($this->series) ? Legacy::workType($this->type, false) : $this->series->getName();
if ( ! empty($this->series) && ! empty( $this->sernr ) ) {
$series .= " [$this->sernr]";
}
$keywords = implode(', ', $this->getLabelsNames());
$origLangView = $this->lang == $this->orig_lang ? '' : $this->orig_lang;
return <<<EOS
{Произведение:$authors
|Заглавие = $title
{Анотация:
$anno
}
|Дата = $this->year
|Корица =
|Език = $this->lang
|Ориг.език = $origLangView$translators
|Поредица = $series
|Жанр =
|Ключови-думи = $keywords
}
EOS;
}
protected function getFbiOriginal()
{
if ( $this->lang == $this->orig_lang ) {
return '';
}
$authors = '';
foreach ($this->getAuthors() as $author) {
$name = $author->getOrigName();
$authors .= "\n|Автор = $name";
}
$title = $this->orig_title;
$subtitle = $this->orig_subtitle;
if ( ! empty( $subtitle ) ) {
$title .= ' (' . trim($subtitle, '()') . ')';
}
if ($this->series) {
$series = $this->series->getOrigName();
if ( ! empty($series) && ! empty( $this->sernr ) ) {
$series .= " [$this->sernr]";
}
} else {
$series = '';
}
return <<<EOS
{Оригинал:$authors
|Заглавие = $title
|Дата = $this->year
|Език = $this->orig_lang
|Поредица = $series
}
EOS;
}
protected function getFbiDocument()
{
$date = date('Y-m-d H:i:s');
list($history, $version) = $this->getHistoryAndVersion();
$history = "\n\t" . implode("\n\t", $history);
return <<<EOS
{Документ:
|Автор =
|Програми =
|Дата = $date
|Източник =
|Сканирал =
|Разпознал =
|Редактирал =
|Идентификатор = mylib-$this->id
|Версия = $version
{История:$history
}
|Издател =
}
EOS;
}
/**
 * Build the human-readable edit history lines and the resulting version.
 * Versions are sequential — 0.1, 0.2, … one per history row; "0" is
 * returned when there is no history at all.
 * @return array array(string[] $historyLines, string $lastVersion)
 */
public function getHistoryAndVersion()
{
$history = array();
$historyRows = $this->getHistoryInfo();
$verNo = 1;
/* if ( "$this->created_at 24" < $historyRows[0]['date'] ) {
$ver = '0.' . ($verNo++);
$vdate = $this->created_at == '0000-00-00' ? LIB_OPEN_DATE : $this->created_at;
$history[] = "$ver ($vdate) — Добавяне";
}*/
$ver = '0';
foreach ( $historyRows as $data ) {
$ver = '0.' . ($verNo++);
$history[] = "$ver ($data[date]) — $data[comment]";
}
return array($history, $ver);
}
protected function getFbiEdition()
{
return <<<EOS
{Издание:
|Заглавие =
|Издател =
|Град =
|Година =
|ISBN =
|Поредица =
}
EOS;
}
public function getDataAsPlain()
{
$authors = implode($this->getAuthorSlugs());
$translators = implode($this->getTranslatorSlugs());
$labels = implode($this->getLabelSlugs());
$contributors = array();
foreach ($this->getUserContribs() as $userContrib/*@var $userContrib UserTextContrib*/) {
$contributors[] = implode(',', array(
$userContrib->getUsername(),
$userContrib->getPercent(),
'"'.$userContrib->getComment().'"',
$userContrib->getHumandate(),
));
}
$contributors = implode(';', $contributors);
return <<<EOS
title = {$this->getTitle()}
subtitle = {$this->getSubtitle()}
authors = $authors
slug = {$this->getSlug()}
type = {$this->getType()}
lang = {$this->getLang()}
year = {$this->getYear()}
orig_license = {$this->getOrigLicenseCode()}
orig_title = {$this->getOrigTitle()}
orig_subtitle = {$this->getOrigsubtitle()}
orig_lang = {$this->getOrigLang()}
translators = $translators
trans_license = {$this->getTransLicenseCode()}
series = {$this->getSeriesSlug()}
ser_nr = {$this->getSernr()}
source = {$this->getSource()}
labels = $labels
toc_level = {$this->getHeadlevel()}
users = $contributors
id = {$this->getId()}
EOS;
}
public function getNameForFile()
{
$filename = strtr(Setup::setting('download_file'), array(
'AUTHOR' => $this->getAuthorNameEscaped(),
'SERIES' => empty($this->series) ? '' : Legacy::getAcronym(Char::cyr2lat($this->series->getName())),
'SERNO' => empty($this->sernr) ? '' : $this->sernr,
'TITLE' => Char::cyr2lat($this->title),
'ID' => $this->id,
));
$filename = $this->normalizeFileName($filename);
return $filename;
}
static public function getMinRating() {
if ( is_null( self::$_minRating ) ) {
self::$_minRating = min( array_keys( self::$ratings ) );
}
return self::$_minRating;
}
static public function getMaxRating() {
if ( is_null( self::$_maxRating ) ) {
self::$_maxRating = max( array_keys( self::$ratings ) );
}
return self::$_maxRating;
}
static public function getRatings($id) {
return Setup::db()->getFields(DBT_TEXT,
array('id' => $id),
array('rating', 'votes'));
}
/**
 * Return the edit history of this text, ordered oldest first.
 *
 * Each row is an associative array from the edit-history table (at least
 * "date" and "comment"). If the text entered the library before the first
 * recorded edit, a synthetic "Добавяне" (initial addition) row is
 * prepended, dated with the creation date — or LIB_OPEN_DATE when the
 * creation date is unknown.
 *
 * @return array
 */
public function getHistoryInfo()
{
	$db = Setup::db();
	$res = $db->select(DBT_EDIT_HISTORY, array('text_id' => $this->id), '*', 'date ASC');
	$rows = array();
	while ( $row = $db->fetchAssoc($res) ) {
		$rows[] = $row;
	}
	if ($rows) {
		// FIX: check for a missing creation date BEFORE calling format().
		// The original code dereferenced $this->created_at first, so the
		// later is_null() fallback was dead code and a null creation date
		// caused a fatal error.
		if (is_null($this->created_at)) {
			$isoEntryDate = null;
			$date = LIB_OPEN_DATE;
		} else {
			$isoEntryDate = $this->created_at->format('Y-m-d');
			$date = $isoEntryDate;
		}
		// " 24" sorts after any time-of-day, i.e. "created strictly before
		// the day of the first recorded edit"
		if ( "$isoEntryDate 24" < $rows[0]['date'] ) {
			array_unshift($rows, array('date' => $date, 'comment' => 'Добавяне'));
		}
	}
	return $rows;
}
/**
* @Assert\File
* @var UploadedFile
*/
private $content_file;
public function getContentFile()
{
return $this->content_file;
}
/** @param UploadedFile $file */
public function setContentFile(UploadedFile $file = null)
{
$this->content_file = $file;
if ($file) {
$this->setSize($file->getSize() / 1000);
$this->rebuildHeaders($file->getRealPath());
}
}
public function isContentFileUpdated()
{
return $this->getContentFile() !== null;
}
public function setHeadlevel($headlevel)
{
$this->headlevel = $headlevel;
if ( !$this->isContentFileUpdated()) {
$this->rebuildHeaders();
}
}
public function getHeadlevel() { return $this->headlevel; }
public function setSize($size)
{
$this->size = $size;
$this->setZsize($size / 3.5);
}
public function getSize() { return $this->size; }
public function setZsize($zsize) { $this->zsize = $zsize; }
public function getZsize() { return $this->zsize; }
/**
* @ORM\PostPersist()
* @ORM\PostUpdate()
*/
public function postUpload()
{
$this->moveUploadedContentFile($this->getContentFile());
}
private function moveUploadedContentFile(UploadedFile $file = null) {
if ($file) {
$filename = Legacy::getContentFilePath('text', $this->id);
$file->move(dirname($filename), basename($filename));
}
}
private $revisionComment;
public function getRevisionComment()
{
return $this->revisionComment;
}
public function setRevisionComment($comment)
{
$this->revisionComment = $comment;
}
public function getContentAsSfb()
{
$sfb = $this->getFullTitleAsSfb() . "\n\n\n";
$anno = $this->getAnnotation();
if ( ! empty($anno) ) {
$sfb .= "A>\n$anno\nA$\n\n";
}
$sfb .= $this->getRawContent();
$extra = $this->getExtraInfoForDownload();
$extra = preg_replace('/\n\n+/', "\n\n", $extra);
$sfb .= "\nI>\n".trim($extra, "\n")."\nI$\n";
return $sfb;
}
/**
 * Return the raw SFB content of the text.
 *
 * For regular texts this is the stored content file (or just its path
 * when $asFileName is true). For compilations the stored file is a
 * template containing "{file:ID-...}" placeholders, each of which is
 * expanded with the referenced content file; $asFileName is ignored in
 * that case and the expanded content is returned.
 *
 * @param bool $asFileName Return the file path instead of the content
 * @return string Content, or file path for non-compilations when requested
 */
public function getRawContent($asFileName = false)
{
if ( ! $this->is_compilation) {
if ($asFileName) {
return Legacy::getContentFilePath('text', $this->id);
} else {
return Legacy::getContentFile('text', $this->id);
}
}
$template = Legacy::getContentFile('text', $this->id);
if (preg_match_all('/\t\{file:(\d+-.+)\}/', $template, $matches, PREG_SET_ORDER)) {
foreach ($matches as $match) {
list($row, $filename) = $match;
$template = str_replace($row, Legacy::getContentFile('text', $filename), $template);
}
}
// TODO cache the full output
return $template;
}
public function getFullTitleAsSfb()
{
$sfb = '';
$sfb .= "|\t" . ($this->getAuthorNames() ?: '(неизвестен автор)') . "\n";
$sfb .= $this->getTitleAsSfb();
return $sfb;
}
public function getExtraInfoForDownload()
{
return $this->getOrigTitleAsSfb() . "\n\n"
. $this->getFullExtraInfo() . "\n\n"
. "\tСвалено от „Моята библиотека“: ".$this->getDocId()."\n"
. "\tПоследна корекция: ".Legacy::humanDate($this->cur_rev->getDate())."\n";
}
/**
 * Convert this text to FB2 (FictionBook) format.
 *
 * Seeds an SfbToFb2Converter with all bibliographic metadata of the text
 * (genres, keywords, translators, series, original-language data, document
 * id/version/history) and returns the converted content.
 *
 * @return string The FB2 content
 */
public function getContentAsFb2()
{
	$conv = new SfbToFb2Converter($this->getContentAsSfb(), Legacy::getInternalContentFilePath('img', $this->id));
	$conv->setObjectCount(1);
	$conv->setSubtitle(strtr($this->subtitle, array('\n' => ' — ')));
	$conv->setGenre($this->getGenresForFb2());
	$conv->setKeywords($this->getKeywordsForFb2());
	$conv->setTextDate($this->year);
	$conv->setLang($this->lang);
	$conv->setSrcLang(empty($this->orig_lang) ? '?' : $this->orig_lang);
	foreach ($this->getTranslators() as $translator) {
		$conv->addTranslator($translator->getName());
	}
	if ($this->series) {
		$conv->addSequence($this->series->getName(), $this->sernr);
	}
	// original-work metadata only makes sense for translated texts
	if ( $this->lang != $this->orig_lang ) {
		foreach ($this->authors as $author) {
			if ($author->getOrigName() == '') {
				$conv->addSrcAuthor('(no original name for '.$author->getName().')', false);
			} else {
				$conv->addSrcAuthor($author->getOrigName());
			}
		}
		// FIX: the non-empty branch used to pass an empty string, which
		// silently dropped the original title for every translated text.
		$conv->setSrcTitle(empty($this->orig_title) ? '(no data for original title)' : $this->orig_title);
		$conv->setSrcSubtitle($this->orig_subtitle);
		if ($this->series && $this->series->getOrigName()) {
			$conv->addSrcSequence($this->series->getOrigName(), $this->sernr);
		}
	}
	$conv->setDocId($this->getDocId());
	list($history, $version) = $this->getHistoryAndVersion();
	$conv->setDocVersion($version);
	$conv->setHistory($history);
	$conv->setDocAuthor('Моята библиотека');
	if ($this->type == 'gamebook') {
		// recognize section links
		$conv->addRegExpPattern('/#(\d+)/', '<a l:href="#l-$1">$1</a>');
	}
	$conv->enablePrettyOutput();
	return $conv->convert()->getContent();
}
// TODO move this to a proper generation class
private $labelsToGenres = array(
'Алтернативна история' => 'sf_history',
'Антиутопия' => 'sf_social',
'Антична литература' => 'antique_ant',
'Антропология' => 'science',
'Археология' => 'science',
'Биография' => 'nonf_biography',
'Будизъм' => 'religion',
'Военна фантастика' => 'sf_action',
'Втора световна война' => 'sci_history',
'Готварство' => 'home_cooking',
'Готически роман' => 'sf_horror',
'Дамска проза (чиклит)' => 'love_contemporary',
'Даоизъм' => 'religion',
'Детска литература' => 'child_prose',
'Документална литература' => array('sci_history', 'nonfiction'),
'Древен Египет' => 'sci_history',
'Древен Рим' => 'sci_history',
'Древна Гърция' => 'sci_history',
'Епос' => 'antique_myths',
'Еротика' => 'love_erotica',
'Идеи и идеали' => 'sci_philosophy',
'Икономика' => 'sci_business',
'Индианска литература' => 'adv_indian',
'Индия' => 'sci_culture',
'Исторически роман' => 'prose_history',
'История' => 'sci_history',
'Киберпънк' => 'sf_cyberpunk',
'Китай' => 'sci_culture',
'Комедия' => 'humor',
'Контракултура' => 'prose_counter',
'Криминална литература' => 'detective',
'Културология' => 'sci_culture',
'Любовен роман' => 'love_contemporary',
'Любовна лирика' => 'poetry',
'Магически реализъм' => 'sf_horror',
'Медицина' => 'sci_medicine',
'Мемоари' => 'prose_history',
'Мистика' => 'sf_horror',
'Митология' => 'sci_culture',
'Модернизъм' => array('sci_culture', 'design'),
'Морска тематика' => 'adv_maritime',
'Музика' => array('sci_culture', 'design'),
'Народно творчество' => array('sci_culture', 'design'),
'Научна фантастика' => 'sf',
'Научнопопулярна литература' => 'science',
'Окултизъм' => 'religion',
'Организирана престъпност' => 'det_political',
'Паралелни вселени' => array('sf', 'sf_epic', 'sf_heroic'),
'Политология' => 'sci_politics',
'Полусвободна литература' => 'home',
'Постапокалипсис' => 'sf_history',
'Приключенска литература' => 'adventure',
'Психология' => 'sci_psychology',
'Психофактор' => 'sci_philosophy',
'Пътешествия' => 'adv_geo',
'Реализъм' => array('sci_culture', 'design'),
'Религия' => 'religion_rel',
'Ренесанс' => 'sci_history',
'Рицарски роман' => 'adv_history',
'Робинзониада' => 'sf_heroic',
'Родителство' => array('home_health', 'home'),
'Романтизъм' => array('sci_culture', 'design'),
'Руска класика' => 'prose_rus_classic',
'Сатанизъм' => 'religion',
'Сатира' => 'humor',
'Световна класика' => 'prose_classic',
'Секс' => 'home_sex',
'Символизъм' => array('sci_culture', 'design'),
'Средновековие' => 'antique',
'Средновековна литература' => 'antique_european',
'Старобългарска литература' => 'antique',
'Съвременен роман (XX–XXI век)' => 'prose_contemporary',
'Съвременна проза' => 'prose_contemporary',
'Тайни и загадки' => 'sf_horror',
'Трагедия' => 'antique',
'Трилър' => 'thriller',
'Уестърн' => 'adv_western',
'Ужаси' => 'sf_horror',
'Утопия' => 'sf_social',
'Фантастика' => 'sf',
'Фентъзи' => 'sf_fantasy',
'Философия' => 'sci_philosophy',
'Флора' => 'sci_biology',
'Хумор' => 'humor',
'Човек и бунт' => 'sci_philosophy',
'Шпионаж' => 'det_espionage',
'Япония' => 'sci_culture',
// 'Любовен роман+Исторически роман' => 'love_history',
// 'Детска литература+Фантастика' => 'child_sf',
// 'type play' => 'dramaturgy',
// 'type poetry' => 'poetry',
// 'type poetry+Детска литература' => 'child_verse',
// 'type tale+Детска литература' => 'child_tale',
);
public function getGenresForFb2()
{
$genres = array();
$labels = $this->getLabelsNames();
foreach ($labels as $label) {
if (array_key_exists($label, $this->labelsToGenres)) {
$genres = array_merge($genres, (array) $this->labelsToGenres[$label]);
}
}
$genres = array_unique($genres);
if (empty($genres)) {
switch ($this->getType()) {
case 'poetry': $genres[] = 'poetry'; break;
default: $genres[] = 'prose_contemporary';
}
}
return $genres;
}
private function getKeywordsForFb2()
{
return implode(', ', $this->getLabelsNames());
}
/**
 * Return the names of all labels attached to this text.
 * @return array
 */
public function getLabelsNames()
{
	$labelNames = array();
	foreach ($this->getLabels() as $textLabel) {
		$labelNames[] = $textLabel->getName();
	}
	return $labelNames;
}
/**
 * Return the slugs of all labels attached to this text.
 * @return array
 */
public function getLabelSlugs()
{
	$labelSlugs = array();
	foreach ($this->getLabels() as $textLabel) {
		$labelSlugs[] = $textLabel->getSlug();
	}
	return $labelSlugs;
}
static public function newFromId($id, $reader = 0) {
return self::newFromDB( array('t.id' => $id), $reader );
}
static public function newFromTitle($title, $reader = 0) {
return self::newFromDB( array('t.title' => $title), $reader );
}
/**
 * Increment the read counter of a text.
 * NOTE(review): the early return disables this feature — the UPDATE
 * below is dead code. Presumably switched off on purpose; confirm
 * before re-enabling.
 */
static public function incReadCounter($id) {
return; // disable
Setup::db()->update(DBT_TEXT, array('read_count=read_count+1'), compact('id'));
}
/**
 * Increment the download counter of a text.
 * NOTE(review): the early return disables this feature — the UPDATE
 * below is dead code, same as incReadCounter(). Confirm before
 * re-enabling.
 */
static public function incDlCounter($id) {
return; // disable
Setup::db()->update(DBT_TEXT, array('dl_count=dl_count+1'), compact('id'));
}
/**
 * Load a single Text from the database, fully hydrated.
 *
 * Fetches the text row joined with its series, both licenses (original
 * and translation), the given reader's read-status and the last edit
 * date; then loads authors, translators and containing books with extra
 * queries. Field values are pushed into a new Text instance through its
 * set* mutators (fields without a matching mutator are skipped).
 *
 * @param array $dbkey WHERE conditions identifying the text
 * @param int $reader User id used for the read-status join (0 = anonymous)
 * @return Text|null null when no row matches
 */
static protected function newFromDB($dbkey, $reader = 0) {
$db = Setup::db();
//$dbkey['mode'] = 'public';
$qa = array(
'SELECT' => 't.*,
s.id seriesId,
s.name series, s.orig_name seriesOrigName,
lo.code lo_code, lo.fullname lo_name, lo.copyright lo_copyright, lo.uri lo_uri,
lt.code lt_code, lt.fullname lt_name, lt.copyright lt_copyright, lt.uri lt_uri,
r.user_id isRead, h.date lastedit',
'FROM' => DBT_TEXT .' t',
'LEFT JOIN' => array(
DBT_SERIES .' s' => 't.series_id = s.id',
DBT_LICENSE .' lo' => 't.orig_license_id = lo.id',
DBT_LICENSE .' lt' => 't.trans_license_id = lt.id',
DBT_READER_OF .' r' => "t.id = r.text_id AND r.user_id = ".((int)$reader),
DBT_EDIT_HISTORY .' h' => 't.cur_rev_id = h.id',
),
'WHERE' => $dbkey,
'ORDER BY' => 't.sernr ASC',
'LIMIT' => 1,
);
$fields = $db->fetchAssoc( $db->extselect($qa) );
if ( empty($fields) ) {
return null;
}
// Author(s), translator(s)
// For each role, build both the raw person rows and comma-joined
// convenience strings (name, original name, year).
$tables = array('author' => DBT_AUTHOR_OF, 'translator' => DBT_TRANSLATOR_OF);
foreach ($tables as $role => $table) {
$qa = array(
'SELECT' => 'p.*, of.year',
'FROM' => $table .' of',
'LEFT JOIN' => array(DBT_PERSON .' p' => "of.person_id = p.id"),
'WHERE' => array('of.text_id' => $fields['id']),
'ORDER BY' => 'of.pos ASC',
);
$res = $db->extselect($qa);
$persons = array();
$string_name = $string_orig_name = $string_year = '';
while ( $data = $db->fetchAssoc($res) ) {
$persons[] = $data;
$string_name .= ', '. $data['name'];
$string_orig_name .= ', '. $data['orig_name'];
$string_year .= ', '. $data['year'];
}
$fields[$role.'s'] = $persons;
$fields[$role.'_name'] = ltrim($string_name, ', ');
$fields[$role.'_orig_name'] = ltrim($string_orig_name, ', ');
// the "0" in the trim list also strips a leading zero year
$fields[$role.'_year'] = ltrim($string_year, ', 0');
}
// Books
$qa = array(
'SELECT' => 'b.*, bt.*',
'FROM' => DBT_BOOK_TEXT .' bt',
'LEFT JOIN' => array(DBT_BOOK .' b' => 'bt.book_id = b.id'),
'WHERE' => array('bt.text_id' => $fields['id']),
);
$res = $db->extselect($qa);
$fields['books'] = array();
while ( $data = $db->fetchAssoc($res) ) {
$fields['books'][$data['id']] = $data;
}
// hydrate via set* mutators; unknown fields are silently ignored
$text = new Text;
foreach ($fields as $field => $value) {
$mutator = 'set'.ucfirst($field);
if (is_callable(array($text, $mutator))) {
$text->$mutator($value);
}
}
return $text;
}
public function getHeaders()
{
return $this->headers;
}
public function setHeaders(ArrayCollection $headers)
{
$this->headers = $headers;
}
public function addHeader(TextHeader $header)
{
$this->headers[] = $header;
}
public function clearHeaders()
{
$this->clearCollection($this->getHeaders());
}
/**
 * Re-parse the content file and rebuild the header (chapter) records.
 * Existing headers are cleared first; one TextHeader entity is created
 * per heading found up to the text's configured head level.
 * @param string|null $file Content file to parse; defaults to this
 *                          text's stored content file
 */
public function rebuildHeaders($file = null)
{
if ($file === null) $file = Legacy::getContentFilePath('text', $this->id);
$headlevel = $this->getHeadlevel();
$this->clearHeaders();
$parser = new SfbParserSimple($file, $headlevel);
$parser->convert();
foreach ($parser->headersFlat() as $headerData) {
$header = new TextHeader;
$header->setNr($headerData['nr']);
$header->setLevel($headerData['level']);
$header->setLinecnt($headerData['line_count']);
$header->setName($headerData['title']);
$header->setFpos($headerData['file_pos']);
$header->setText($this);
$this->addHeader($header);
}
}
public function getEpubChunks($imgDir)
{
return $this->getEpubChunksFrom($this->getRawContent(true), $imgDir);
}
public function getContentHtml($imgDirPrefix = '', $part = 1, $objCount = 0)
{
$imgDir = $imgDirPrefix . Legacy::getContentFilePath('img', $this->id);
$conv = new SfbToHtmlConverter($this->getRawContent(true), $imgDir);
// TODO do not hardcode it; inject it through parameter
$internalLinkTarget = "/text/$this->id/0";
if ( ! empty( $objCount ) ) {
$conv->setObjectCount($objCount);
}
$header = $this->getHeaderByNr($part);
if ($header) {
$conv->startpos = $header->getFpos();
$conv->maxlinecnt = $header->getLinecnt();
} else {
$internalLinkTarget = '';
}
if ($this->type == 'gamebook') {
// recognize section links
$conv->patterns['/#(\d+)/'] = '<a href="#l-$1" class="ep" title="Към епизод $1">$1</a>';
}
$conv->setInternalLinkTarget($internalLinkTarget);
return $conv->convert()->getContent();
}
/**
 * Find a text header (chapter) by its sequential number.
 * @param int $nr
 * @return TextHeader|null The matching header, or null if none exists
 */
public function getHeaderByNr($nr)
{
	foreach ($this->getHeaders() as $textHeader) {
		if ($textHeader->getNr() != $nr) {
			continue;
		}
		return $textHeader;
	}
	return null;
}
public function getNextHeaderByNr($nr)
{
if ($nr > 0) {
foreach ($this->getHeaders() as $header) {
if ($header->getNr() == $nr + 1) {
return $header;
}
}
}
return null;
}
public function getTotalRating()
{
return $this->rating * $this->votes;
}
/**
 * Update the average rating with a newly given vote.
 *
 * Without $oldRating a brand-new vote is added (vote count grows by one).
 * With $oldRating the caller is changing an existing vote, so the old
 * value is swapped for the new one and the vote count stays the same.
 *
 * NOTE(review): when $oldRating is given while $this->votes is 0 this
 * divides by zero — presumably callers only pass $oldRating for texts
 * that already have votes; confirm at the call sites.
 *
 * @param int $newRating Newly given rating
 * @param int|null $oldRating An old rating to be overwritten by the new one
 * @return Text $this, for chaining
 */
public function updateAvgRating($newRating, $oldRating = null)
{
if ( is_null($oldRating) ) {
$this->rating = ($this->getTotalRating() + $newRating) / ($this->votes + 1);
$this->votes += 1;
} else {
$this->rating = ($this->getTotalRating() - $oldRating + $newRating) / $this->votes;
}
return $this;
}
public function getMainContentFile()
{
return Legacy::getContentFilePath('text', $this->id);
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Bundle\FrameworkBundle\Command\ContainerAwareCommand;
use Symfony\Component\Console\Output\OutputInterface;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Util\String;
/**
 * Base console command bundling database maintenance helpers shared by
 * the concrete db:* commands (denormalized counter updates, legacy-DB
 * access, common path helpers). Running it directly does nothing.
 */
class CommonDbCommand extends ContainerAwareCommand
{
protected function configure()
{
parent::configure();
$this
->setName('db:common')
->setDescription('Does nothing. Only encapsulates common database stuff')
;
}
/**
* Recompute label.nr_of_texts from the text_label join table.
* @RawSql
*/
protected function updateTextCountByLabels(OutputInterface $output, EntityManager $em)
{
$output->writeln('Updating texts count by labels');
$update = $this->maintenanceSql('UPDATE label l SET nr_of_texts = (SELECT COUNT(*) FROM text_label WHERE label_id = l.id)');
$em->getConnection()->executeUpdate($update);
}
// Propagate per-label text counts up the label hierarchy.
protected function updateTextCountByLabelsParents(OutputInterface $output, EntityManager $em)
{
$output->writeln('Updating texts count by labels parents');
$this->_updateCountByParents($em, 'LibBundle:Label', 'NrOfTexts');
}
// Propagate per-category book counts up the category hierarchy.
protected function updateBookCountByCategoriesParents(OutputInterface $output, EntityManager $em)
{
$output->writeln('Updating books count by categories parents');
$this->_updateCountByParents($em, 'LibBundle:Category', 'NrOfBooks');
}
/**
* For every item in a tree-shaped entity, add its own count (get$field)
* to all of its ancestors (inc$field). Items whose counters were already
* modified in this run ("dirty") are re-fetched so the fresh value is used.
*/
protected function _updateCountByParents(EntityManager $em, $entity, $field)
{
$dirty = array();
$repo = $em->getRepository($entity);
foreach ($repo->findAll() as $item) {
if (in_array($item->getId(), $dirty)) {
$item = $repo->find($item->getId());
}
$parent = $item->getParent();
if ($parent) {
$count = call_user_func(array($item, "get{$field}"));
do {
call_user_func(array($parent, "inc{$field}"), $count);
$em->persist($parent);
$dirty[] = $parent->getId();
} while (null !== ($parent = $parent->getParent()));
}
}
$em->flush();
}
/**
* Recompute text.comment_count from the text_comment table.
* @RawSql
*/
protected function updateCommentCountByTexts(OutputInterface $output, EntityManager $em)
{
$output->writeln('Updating comments count by texts');
$update = $this->maintenanceSql('UPDATE text t SET comment_count = (SELECT COUNT(*) FROM text_comment WHERE text_id = t.id)');
$em->getConnection()->executeUpdate($update);
}
/**
* Recompute category.nr_of_books from the book table.
* @RawSql
*/
protected function updateBookCountByCategories(OutputInterface $output, EntityManager $em)
{
$output->writeln('Updating books count by categories');
$update = $this->maintenanceSql('UPDATE category c SET nr_of_books = (SELECT COUNT(*) FROM book WHERE category_id = c.id)');
$em->getConnection()->executeUpdate($update);
}
// Run a batch of UPDATE statements inside a single transaction.
protected function executeUpdates($updates, \Doctrine\DBAL\Connection $connection)
{
$connection->beginTransaction();
foreach ($updates as $update) {
$connection->executeUpdate($update);
}
$connection->commit();
}
/**
* Build the SQL queries that replace the text_header rows for a text:
* a DELETE of the old rows plus a multi-INSERT of the headers parsed
* from the given content file.
* NOTE(review): "String" is a reserved class name as of PHP 7 — the
* Util\String dependency needs renaming before upgrading.
* @return array SQL query strings
*/
public function buildTextHeadersUpdateQuery($file, $textId, $headlevel)
{
require_once __DIR__ . '/../Legacy/SfbParserSimple.php';
$data = array();
foreach (\Chitanka\LibBundle\Legacy\makeDbRows($file, $headlevel) as $row) {
$name = $row[2];
$name = strtr($name, array('_'=>''));
$name = $this->olddb()->escape(String::my_replace($name));
$data[] = array($textId, $row[0], $row[1], $name, $row[3], $row[4]);
}
$qs = array();
$qs[] = $this->olddb()->deleteQ('text_header', array('text_id' => $textId));
if ( !empty($data) ) {
$fields = array('text_id', 'nr', 'level', 'name', 'fpos', 'linecnt');
$qs[] = $this->olddb()->multiinsertQ('text_header', $data, $fields);
}
return $qs;
}
// Print queries joined with ";" — but keep "*/" comment markers unterminated.
public function printQueries($queries)
{
echo str_replace('*/;', '*/', implode(";\n", $queries) . ";\n");
}
// Absolute path of the web directory (optionally of a file inside it).
public function webDir($file = null)
{
return __DIR__ . '/../../../../web' . ($file ? "/$file" : '');
}
// Absolute path of the web content directory (optionally of a file inside it).
public function contentDir($file = null)
{
return __DIR__ . '/../../../../web/content' . ($file ? "/$file" : '');
}
private $_olddb;
/**
* Lazily bootstrap and return the legacy database layer.
* @return mlDatabase
*/
protected function olddb()
{
if ( ! $this->_olddb) {
Setup::doSetup($this->getContainer());
$this->_olddb = Setup::db();
}
return $this->_olddb;
}
// Tag raw SQL so downstream tooling can recognize maintenance statements.
private function maintenanceSql($sql)
{
return '/*MAINTENANCESQL*/'.$sql;
}
// Fetch a repository; bare names default to the LibBundle namespace.
protected function getRepository($entityName)
{
if (strpos($entityName, ':') === false) {
$entityName = "LibBundle:$entityName";
}
return $this->getEntityManager()->getRepository($entityName);
}
/** @return \Doctrine\ORM\EntityManager */
protected function getEntityManager()
{
return $this->getContainer()->get('doctrine.orm.default_entity_manager');
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/**
 * Admin CRUD controller for Category entities.
 * All behavior is inherited from CRUDController; this subclass only binds
 * the generic CRUD actions to the Category entity by naming convention.
 */
class CategoryController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Util;
use Chitanka\LibBundle\Legacy\Legacy;
/**
 * Static file-system helpers: copying with auto-created parents,
 * MIME-type guessing, extension checks and file-name sanitizing.
 */
class File
{
	/**
	 * Copy a file, or recursively copy a directory (skipping dot entries).
	 * Missing parent directories of the destination are created.
	 * @return bool Success flag of the final copy (true for directories)
	 */
	static public function mycopy($source, $dest) {
		if ( is_dir($source) ) {
			foreach ( scandir($source) as $file ) {
				if ( $file[0] == '.' ) continue;
				self::mycopy("$source/$file", "$dest/$file");
			}
			return true;
		}
		self::make_parent($dest);
		return copy($source, $dest);
	}

	/**
	 * file_put_contents() which creates missing parent directories and
	 * refuses to overwrite an existing directory.
	 * @return int|false Bytes written, or false on failure
	 */
	static public function myfile_put_contents($filename, $data, $flags = null) {
		if (is_dir($filename)) {
			return false;
		}
		self::make_parent($filename);
		$res = file_put_contents($filename, $data, $flags);
		return $res;
	}

	/** move_uploaded_file() which creates missing parent directories. */
	static public function mymove_uploaded_file($tmp, $dest) {
		self::make_parent($dest);
		return move_uploaded_file($tmp, $dest);
	}

	/**
	 * Ensure the parent directory of a file exists; an already existing
	 * directory is touched (its mtime updated) instead.
	 */
	static public function make_parent( $filename ) {
		$dir = dirname( $filename );
		if ( file_exists( $dir ) ) {
			@touch( $dir );
		} else {
			mkdir( $dir, 0755, true );
		}
	}

	/**
	 * Guess the MIME type of a file: common image extensions are mapped
	 * directly; anything else goes through the fileinfo extension.
	 * @param string $file File path
	 * @return string MIME type, e.g. "image/png"
	 */
	static public function guessMimeType($file)
	{
		switch ( strtolower(self::getFileExtension($file)) ) {
			case 'png' : return 'image/png';
			case 'gif' : return 'image/gif';
			case 'jpg' :
			case 'jpeg': return 'image/jpeg';
		}
		// FIX: finfo lives in the global namespace (this file is namespaced),
		// and the original code passed an undefined variable ($href).
		$finfo = new \finfo(FILEINFO_MIME_TYPE);
		return $finfo->file($file);
	}

	/** Tell whether a file name contains an archive extension. */
	static public function isArchive($file) {
		$exts = array('zip', 'tgz', 'tar.gz', 'bz2', 'tar.bz2');
		foreach ($exts as $ext) {
			if ( strpos($file, '.'.$ext) !== false ) {
				return true;
			}
		}
		return false;
	}

	/** Return the extension of a file name (empty string if it has none). */
	static public function getFileExtension($filename)
	{
		return ltrim(strrchr($filename, '.'), '.');
	}

	/**
	 * Sanitize a string for use as a file name: strip disallowed
	 * characters, collapse spaces, turn spaces into underscores.
	 * @param string $fname
	 * @param bool $woDiac Also strip diacritic marks
	 * @return string
	 */
	static public function cleanFileName($fname, $woDiac = true)
	{
		$fname = preg_replace('![^a-zA-Z0-9_. -]!u', '', $fname);
		if ( $woDiac ) {
			$fname = Legacy::removeDiacritics($fname);
		}
		$fname = preg_replace('/ +/', ' ', $fname);
		$fname = str_replace('- -', '-', $fname); // from empty entities
		$fname = trim($fname, '.- ');
		$fname = strtr($fname, array(
			' .' => '', // from empty series number
			' ' => '_',
		));
		return $fname;
	}

	/**
	 * Heuristically tell whether a file is in SFB format: the name must
	 * contain ".sfb" and the first bytes must contain the "|<TAB>" marker.
	 */
	static public function isSFB($file)
	{
		if ( (strpos($file, '.sfb') !== false) && file_exists($file) ) {
			$cont = file_get_contents( $file, false, NULL, -1, 10 );
			if ( strpos($cont, chr(124).chr(9)) !== false )
				return true;
		}
		return false;
	}

	/**
	 * Tell whether a file name ends in one of the given extensions
	 * (case-insensitive).
	 * @param string $filename
	 * @param array $validExtensions Extensions without the leading dot
	 * @return bool
	 */
	static public function hasValidExtension($filename, $validExtensions)
	{
		foreach ($validExtensions as $validExtension) {
			// FIX: quote the extension so regex metacharacters in it cannot
			// break the pattern or widen the match.
			if (preg_match('/\.' . preg_quote($validExtension, '/') . '$/i', $filename)) {
				return true;
			}
		}
		return false;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Entity\User;
/**
 * User settings page.
 *
 * Lets an already-registered user change password, real name, e-mail,
 * newsletter subscription and appearance options (skin, navigation position,
 * optional site-provided and custom CSS/JS). Reuses the form machinery
 * inherited from RegisterPage.
 */
class SettingsPage extends RegisterPage {
protected
$action = 'settings',
// the username field is rendered read-only on this page
$canChangeUsername = false,
// option keys read from the request and persisted in the user's options
$optKeys = array('skin', 'nav', 'css', 'js'),
$defEcnt = 10,
$nonEmptyFields = array();
/**
 * @param array $fields construction fields, copied onto the page by Page
 */
public function __construct($fields) {
parent::__construct($fields);
$this->title = 'Настройки';
$this->userId = $this->user->getId();
$this->allowemail = $this->request->checkbox('allowemail');
// seed display options from the request, falling back to the user defaults
foreach ($this->optKeys as $key) {
$this->opts[$key] = $this->request->value($key, User::$defOptions[$key]);
}
$this->tabindex = 2;
}
/** POST handler: delegates to the shared settings-update routine. */
protected function processSubmission() {
return $this->processRegUserRequest();
}
/**
 * Accepts the current password as "valid" so an unchanged (autofilled)
 * password field does not trigger a change; otherwise defers to the
 * parent's validation.
 */
protected function isValidPassword() {
// sometimes browsers automatically fill the first password field
// so the user does NOT want to change it
if ( $this->user->validatePassword($this->password) ) {
return true;
}
return parent::isValidPassword();
}
/**
 * Validates the submitted data and, on success, persists the changes to
 * the user entity and refreshes the session. Always re-renders the form.
 */
protected function processRegUserRequest()
{
$err = $this->validateInput();
$this->attempt++;
if ( !empty($err) ) {
$this->addMessage($err, true);
return $this->makeRegUserForm();
}
// NOTE(review): the username is passed to emailExists() — looks odd;
// confirm whether this should check $this->email instead (emailExists()
// is defined in the unseen parent class).
if ( $this->emailExists($this->user->getUsername()) ) {
return $this->makeRegUserForm();
}
$user = $this->controller->getRepository('User')->find($this->user->getId());
$user->setRealname($this->realname);
$user->setEmail($this->email);
$user->setAllowemail((int) $this->allowemail);
$user->setNews((int) $this->news);
$user->setOpts($this->makeOptionsOutput());
if ( !empty($this->password) && !empty($this->passwordRe) ) { // change password
$user->setPassword($this->password);
}
$em = $this->controller->getEntityManager();
$em->persist($user);
$em->flush();
$this->addMessage("Данните ви бяха променени.");
// keep the page, controller and session in sync with the saved entity
$this->user = $user;
$this->controller->setUser($user);
$this->user->updateSession();
return $this->makeRegUserForm();
}
/** GET handler: pre-fills the form with the user's current data. */
protected function buildContent() {
$this->initRegUserData();
return $this->makeRegUserForm();
}
/** Renders the full settings form (Bootstrap horizontal form markup). */
protected function makeRegUserForm() {
$username = $this->canChangeUsername
? $this->out->textField('username', '', $this->username, 25, 60, $this->tabindex++, '', array('class' => 'form-control'))
: '<span id="username" class="form-control">'.$this->user->getUsername()."</span>";
$password = $this->out->passField('password', '', '', 25, 40, $this->tabindex++, array('class' => 'form-control'));
$passwordRe = $this->out->passField('passwordRe', '', '', 25, 40, $this->tabindex++, array('class' => 'form-control'));
$realname = $this->out->textField('realname', '', $this->realname, 25, 60, $this->tabindex++, '', array('class' => 'form-control'));
$email = $this->out->textField('email', '', $this->email, 25, 60, $this->tabindex++, '', array('class' => 'form-control'));
$allowemail = $this->out->checkbox('allowemail', '', $this->allowemail, '', null, $this->tabindex++);
$common = $this->makeCommonInput();
$customInput = $this->makeCustomInput();
$news = $this->out->checkbox('news', '', $this->news, '', null, $this->tabindex++);
$historyLink = $this->controller->generateUrl('new');
return <<<EOS
<form action="" method="post" class="form-horizontal" role="form">
	<input type="hidden" name="attempt" value="$this->attempt">
<div class="form-group">
	<label for="username" class="col-sm-4 control-label">Потребителско име:</label>
	<div class="col-sm-8">
		$username
	</div>
</div>
<div class="form-group">
	<label for="password" class="col-sm-4 control-label">Нова парола<a id="nb1" href="#n1">*</a>:</label>
	<div class="col-sm-8">
		$password
		<span class="help-block">Въведете нова парола само ако искате да смените сегашната си.</span>
	</div>
	<label for="passwordRe" class="col-sm-4 control-label">Новата парола още веднъж:</label>
	<div class="col-sm-8">
		$passwordRe
	</div>
</div>
<div class="form-group">
	<label for="realname" class="col-sm-4 control-label">Истинско име:</label>
	<div class="col-sm-8">
		$realname
	</div>
</div>
<div class="form-group">
	<label for="email" class="col-sm-4 control-label">Е-поща:</label>
	<div class="col-sm-8">
		$email
	</div>
	<div class="col-sm-offset-4 col-sm-8">
		<div class="checkbox">
			<label>
				$allowemail Разрешаване на писма от другите потребители
			</label>
		</div>
	</div>
</div>
$common
<div class="form-group">
	<div class="col-sm-offset-4 col-sm-8">
		<div class="checkbox">
			<label>
				$news Получаване на месечен бюлетин
			</label>
		</div>
		<span class="help-block">Алтернативен начин да следите новото в библиотеката предлага страницата <a href="$historyLink">Новодобавено</a>.</span>
	</div>
</div>
$customInput
<div class="form-group">
	<div class="col-sm-offset-4 col-sm-8">
		{$this->out->submitButton('Запис', '', $this->tabindex++, false, array('class' => 'btn btn-primary'))}
	</div>
</div>
</form>
EOS;
}
/** Renders the skin and navigation-position select boxes. */
protected function makeCommonInput() {
$skin = $this->makeSkinInput($this->tabindex++);
$nav = $this->makeNavPosInput($this->tabindex++);
return <<<EOS
<div class="form-group">
	<label for="skin" class="col-sm-4 control-label">Облик:</label>
	<div class="col-sm-8">
		$skin
	</div>
</div>
<div class="form-group">
	<label for="nav" class="col-sm-4 control-label">Навигация:</label>
	<div class="col-sm-8">
		$nav
	</div>
</div>
EOS;
}
/** Select box for the skin; live-previews via changeStyleSheet(). */
protected function makeSkinInput($tabindex) {
return $this->out->selectBox('skin', '', Setup::setting('skins'),
$this->opts['skin'], $tabindex,
array('class' => 'form-control', 'onchange' => 'skin=this.value; changeStyleSheet()'));
}
/** Select box for the navigation position; live-previews as above. */
protected function makeNavPosInput($tabindex) {
return $this->out->selectBox('nav', '', Setup::setting('navpos'),
$this->opts['nav'], $tabindex,
array('class' => 'form-control', 'onchange' => 'nav=this.value; changeStyleSheet()'));
}
/**
 * Renders checkboxes for the site-provided CSS/JS files (from container
 * parameters) plus free-text fields for custom stylesheet/script URLs.
 */
protected function makeCustomInput() {
$inputs = '';
$inputs .= '<div class="form-group">';
$files = $this->container->getParameter('user_css');
foreach ($files as $file => $title) {
$inputs .= sprintf(<<<HTML
	<div class="col-sm-offset-4 col-sm-8">
		<div class="checkbox">
			<label>
				<input type="checkbox" name="css[%s]" value="%s" %s> %s
			</label>
		</div>
	</div>
HTML
,
$file,
$file,
(isset($this->opts['css'][$file]) ? 'checked="checked"' : ''),
$title);
}
$files = $this->container->getParameter('user_js');
foreach ($files as $file => $title) {
$inputs .= sprintf(<<<HTML
	<div class="col-sm-offset-4 col-sm-8">
		<div class="checkbox">
			<label>
				<input type="checkbox" name="js[%s]" value="%s" %s> %s
			</label>
		</div>
	</div>
HTML
,
$file,
$file,
(isset($this->opts['js'][$file]) ? 'checked="checked"' : ''),
$title);
}
$inputs .= '</div>';
$inputs .= '<div class="form-group">';
$cssCustomValue = isset($this->opts['css']['custom']) ? htmlspecialchars($this->opts['css']['custom']) : '';
$inputs .= <<<HTML
	<label for="css_custom" class="col-sm-4 control-label">Собствени стилове:</label>
	<div class="col-sm-8">
		<input type="text" id="css_custom" class="form-control" name="css[custom]" value="$cssCustomValue" placeholder="http://mydomain.info/chitanka.css">
	</div>
HTML;
$jsCustomValue = isset($this->opts['js']['custom']) ? htmlspecialchars($this->opts['js']['custom']) : '';
$inputs .= <<<HTML
	<label for="js_custom" class="col-sm-4 control-label">Собствени скриптове:</label>
	<div class="col-sm-8">
		<input type="text" id="js_custom" class="form-control" name="js[custom]" value="$jsCustomValue" placeholder="http://mydomain.info/chitanka.js">
	</div>
HTML;
$inputs .= '</div>';
return $inputs;
}
/**
 * Options array to store on the user; page-scoped keys ("p_*") can be
 * stripped when $with_page_fields is false.
 */
protected function makeOptionsOutput( $with_page_fields = true ) {
//$opts = array_merge( $this->user->options(), $this->opts );
$opts = $this->opts;
if ( ! $with_page_fields ) {
foreach ( $opts as $k => $_ ) {
if ( strpos( $k, 'p_' ) === 0 ) {
unset( $opts[$k] );
}
}
}
return $opts;
}
/** Pre-fills the form fields from the current user entity. */
protected function initRegUserData() {
$this->username = $this->user->getUsername();
$this->password = $this->user->getPassword();
$this->realname = $this->user->getRealname();
$this->email = $this->user->getEmail();
$this->opts = array_merge($this->opts, $this->user->getOpts());
$this->allowemail = $this->user->getAllowemail();
$this->news = $this->user->getNews();
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Entity\Person;
/**
 * Controller for person (author/translator) pages: alphabetical listings
 * and individual person pages with their texts, books and external info.
 */
class PersonController extends Controller
{
protected $responseAge = 86400; // 24 hours
/** Landing page of the persons section. */
public function indexAction($_format)
{
return $this->display("index.$_format");
}
/** Alphabetical index (letter chooser) for first- or last-name ordering. */
public function listByAlphaIndexAction($by, $_format)
{
$this->view = array(
'by' => $by,
);
return $this->display("list_by_alpha_index.$_format");
}
/**
 * Paginated list of persons whose name starts with $letter, ordered by
 * $by (first/last name), optionally filtered by the "country" query param.
 */
public function listByAlphaAction($by, $letter, $page, $_format)
{
$request = $this->get('request')->query;
$country = $request->get('country', '');
$limit = 100;
$repo = $this->getPersonRepository();
$filters = array(
'by' => $by,
'prefix' => $letter,
'country' => $country,
);
$this->view = array(
'by' => $by,
'letter' => $letter,
'country' => $country,
'persons' => $repo->getBy($filters, $page, $limit),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countBy($filters)
)),
'route' => $this->getCurrentRoute(),
'route_params' => array('letter' => $letter, 'by' => $by),
);
return $this->display("list_by_alpha.$_format");
}
/** Shows a person page with works, books and wiki-sourced info. */
public function showAction($slug, $_format)
{
$person = $this->tryToFindPerson($slug);
if ( ! $person instanceof Person) {
return $person; // a redirect Response produced by tryToFindPerson()
}
$this->prepareViewForShow($person, $_format);
$this->view['person'] = $person;
$this->putPersonInfoInView($person);
return $this->display("show.$_format");
}
/** Shows only the info section of a person page. */
public function showInfoAction($slug, $_format)
{
$person = $this->tryToFindPerson($slug);
if ( ! $person instanceof Person) {
return $person;
}
$this->view = array(
'person' => $person,
);
return $this->display("show_info.$_format");
}
/**
 * Finds a person by slug; falls back to a name lookup and redirects to
 * the canonical slug URL.
 *
 * @return Person|\Symfony\Component\HttpFoundation\Response
 * @throws NotFoundHttpException when no person matches
 */
protected function tryToFindPerson($slug)
{
$person = $this->getPersonRepository()->findBySlug(String::slugify($slug));
if ($person) {
return $person;
}
$person = $this->getPersonRepository()->findOneBy(array('name' => $slug));
if ($person) {
return $this->urlRedirect($this->generateUrl('person_show', array('slug' => $person->getSlug())), true);
}
throw new NotFoundHttpException("Няма личност с код $slug.");
}
/** Fills the view with the person's works as author and as translator. */
protected function prepareViewForShow(Person $person, $format)
{
$this->prepareViewForShowAuthor($person, $format);
$this->prepareViewForShowTranslator($person, $format);
}
/** Texts and books the person authored (grouped by series for HTML). */
protected function prepareViewForShowAuthor(Person $person, $format)
{
$groupBySeries = $format == 'html';
$this->view['texts_as_author'] = $this->getTextRepository()->findByAuthor($person, $groupBySeries);
$this->view['books'] = $this->getBookRepository()->getByAuthor($person);
}
/** Texts the person translated. */
protected function prepareViewForShowTranslator(Person $person, $format)
{
$this->view['texts_as_translator'] = $this->getTextRepository()->findByTranslator($person);
}
/**
 * Fetches external (wiki) info for the person, if an info reference of
 * the form "prefix:name" is present, and puts it in the view.
 */
protected function putPersonInfoInView(Person $person)
{
if ($person->getInfo() != '') {
// TODO move this in the entity
list($prefix, $name) = explode(':', $person->getInfo(), 2);
$site = $this->getWikiSiteRepository()->findOneBy(array('code' => $prefix));
if ($site === null) {
// unknown wiki prefix — skip the info box instead of fataling on null
return;
}
$url = $site->getUrl($name);
$this->view['info'] = Legacy::getMwContent($url, $this->container->get('buzz'));
$this->view['info_intro'] = strtr($site->getIntro(), array(
'$1' => $person->getName(),
'$2' => $url,
));
}
}
/** Suggestion page; rendered through the legacy page machinery. */
public function suggest($slug)
{
// Fixed typo: was $this->lecacyPage('Info') — an undefined method that
// would fatal on call; the base Controller defines legacyPage().
return $this->legacyPage('Info');
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Entity\Label;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\TextLabel;
use Chitanka\LibBundle\Entity\TextLabelLog;
use Chitanka\LibBundle\Entity\User;
/**
 * Service for attaching and detaching labels on texts, keeping an audit
 * trail (TextLabelLog) of every change made by the acting user.
 */
class TextLabelService {
private $em; // Doctrine entity manager used for persistence
private $user; // user recorded as the author of each label change
/**
 * @param EntityManager $em
 * @param User $user the acting user, stored in the change log
 */
public function __construct(EntityManager $em, User $user) {
$this->em = $em;
$this->user = $user;
}
/** Creates a fresh TextLabel already bound to the given text. */
public function newTextLabel(Text $text) {
$textLabel = new TextLabel;
$textLabel->setText($text);
return $textLabel;
}
/** Attaches the label carried by $textLabel to $text and logs a "+" entry. */
public function addTextLabel(TextLabel $textLabel, Text $text) {
// TODO Form::bind() overwrites the Text object with an id
$textLabel->setText($text);
$text->addLabel($textLabel->getLabel());
$log = new TextLabelLog($text, $textLabel->getLabel(), $this->user, '+');
$this->em->persist($text);
$this->em->persist($log);
$this->em->flush();
}
/** Detaches $label from $text and logs a "-" entry. */
public function removeTextLabel(Text $text, Label $label) {
// Use a parameterized statement instead of interpolating the ids into
// the SQL string — safer and consistent with Doctrine DBAL conventions.
$this->em->getConnection()->executeUpdate(
'DELETE FROM text_label WHERE text_id = ? AND label_id = ?',
array($text->getId(), $label->getId())
);
$log = new TextLabelLog($text, $label, $this->user, '-');
$this->em->persist($log);
$this->em->flush();
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller as SymfonyController;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\HttpFoundation\RedirectResponse;
use Symfony\Component\HttpKernel\Exception\HttpException;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Entity\User;
/**
 * Base controller for the library.
 *
 * Wraps legacy (pre-Symfony) page rendering, template display with HTTP
 * cache headers, redirect helpers, a legacy user/session shim, and typed
 * repository accessors used throughout the bundle.
 */
abstract class Controller extends SymfonyController {
/** The unqualified name of the controller: Main for MainController */
protected $name = null;
/** Data to send to the view */
protected $view = array();
/** The format of the response */
protected $responseFormat = 'html';
/** The max cache time of the response (in seconds) */
protected $responseAge = 600;
/** The status code of the response */
protected $responseStatusCode = null;
/**
 * Response headers. Used to overwrite default or add new ones
 */
protected $responseHeaders = array();
private $em;
/**
 * Renders a page built by the legacy page classes (see Legacy\Setup).
 * $page may embed a format as "name.format"; legacy pages can request a
 * redirect, which short-circuits rendering.
 */
protected function legacyPage($page, $controller = ':legacy')
{
if (strpos($page, '.') === false) {
$format = $this->responseFormat;
} else {
list($page, $format) = explode('.', $page);
}
$page = Setup::getPage($page, $this, $this->container);
if ($page->redirect) {
return $this->urlRedirect($page->redirect);
}
$data = $this->getDisplayVariables() + array('page' => $page);
if ($page->inlineJs) {
$data['inline_js'] = $page->inlineJs;
}
$response = $this->render("LibBundle:$controller.$format.twig", $this->view + $data);
$this->setCacheStatusByResponse($response);
return $response;
}
/**
 * Renders "LibBundle:Controller:action.format.twig" with the accumulated
 * view data, extra $params and the shared display variables.
 * $action may embed a format ("show.xml") and a controller ("Text:show").
 * OPDS responses get extra globals and whitespace normalization; OSD
 * responses are cached for a year.
 */
protected function display($action, array $params = array())
{
if (strpos($action, '.') === false) {
$format = $this->responseFormat;
} else {
list($action, $format) = explode('.', $action);
}
if (strpos($action, ':') !== false) {
list($controller, $action) = explode(':', $action);
} else {
$controller = $this->getName();
}
$this->getRequest()->setFormat('osd', 'application/opensearchdescription+xml');
$globals = $this->getDisplayVariables();
if ($format == 'opds') {
$textsUpdatedAt = $this->getTextRevisionRepository()->getMaxDate();
$booksUpdatedAt = $this->getBookRevisionRepository()->getMaxDate();
$globals += array(
'texts_updated_at' => $textsUpdatedAt,
'books_updated_at' => $booksUpdatedAt,
'updated_at' => max($textsUpdatedAt, $booksUpdatedAt),
);
} else if ($format == 'osd') {
$this->responseAge = 31536000; // an year
}
$response = $this->render("LibBundle:$controller:$action.$format.twig", $this->view + $params + $globals);
if ($format == 'opds') {
// collapse template indentation into one-tag-per-line output
$normalizedContent = $response->getContent();
$normalizedContent = strtr($normalizedContent, array(
"\t" => ' ',
"\n" => ' ',
));
$normalizedContent = preg_replace('/ +/', ' ', $normalizedContent);
$normalizedContent = preg_replace('/> </', ">\n<", $normalizedContent);
$normalizedContent = strtr($normalizedContent, array(
'> ' => '>',
' <' => '<',
));
$response->setContent($normalizedContent);
}
$this->setCacheStatusByResponse($response);
if ($this->responseStatusCode) {
$response->setStatusCode($this->responseStatusCode);
}
return $response;
}
/** Variables made available to every rendered template. */
protected function getDisplayVariables()
{
return array(
'navlinks' => $this->renderNavLinks(),
'navextra' => array(),
'footer_links' => $this->renderFooterLinks(),
'current_route' => $this->getCurrentRoute(),
'script_library' => $this->container->getParameter('script_library'),
'global_info_message' => $this->container->getParameter('global_info_message'),
'analytics_snippet' => $this->container->getParameter('analytics_snippet'),
'environment' => $this->container->get('kernel')->getEnvironment(),
'ajax' => $this->getRequest()->isXmlHttpRequest(),
);
}
/** Sidebar navigation; overridable through the wiki page "sidebar-menu". */
protected function renderNavLinks() {
return $this->renderLayoutComponent('sidebar-menu', 'LibBundle::navlinks.html.twig');
}
/** Footer links; overridable through the wiki page "footer-menu". */
protected function renderFooterLinks() {
return $this->renderLayoutComponent('footer-menu', 'LibBundle::footer_links.html.twig');
}
/**
 * Returns the contents of a special wiki page if present (body after the
 * first blank line), otherwise renders the fallback Twig template.
 */
protected function renderLayoutComponent($wikiPage, $fallbackTemplate) {
$wikiPagePath = $this->getParameter('content_dir')."/wiki/special/$wikiPage.html";
if (file_exists($wikiPagePath)) {
list(, $content) = explode("\n\n", file_get_contents($wikiPagePath));
return $content;
}
return $this->renderView($fallbackTemplate);
}
/**
 * Stylesheet URL with the user's skin preferences appended as a query
 * string, or false when no style URL is configured.
 */
protected function getStylesheet()
{
$url = $this->container->getParameter('style_url');
if ( ! $url) {
return false;
}
return $url . http_build_query($this->getUser()->getSkinPreference());
}
/** Wraps plain text in a cacheable Response with optional extra headers. */
protected function displayText($text, $headers = array())
{
$response = new Response($text);
foreach ($headers as $header => $value) {
$response->headers->set($header, $value);
}
$this->setCacheStatusByResponse($response);
return $response;
}
/** JSON-encodes $content and returns it via displayText(). */
protected function displayJson($content, $headers = array())
{
return $this->displayText(json_encode($content), $headers);
}
/** Applies the shared max-age to the response when HTTP caching is on. */
protected function setCacheStatusByResponse(Response $response)
{
if ($this->responseAge && $this->container->getParameter('use_http_cache')) {
$response->setSharedMaxAge($this->responseAge);
}
return $response;
}
/** The unqualified controller name, derived from the class name. */
public function getName()
{
if (is_null($this->name) && preg_match('/([\w]+)Controller$/', get_class($this), $m)) {
$this->name = $m[1];
}
return $this->name;
}
/** Name of the currently matched route. */
protected function getCurrentRoute()
{
return $this->get('request')->attributes->get('_route');
}
/** @return \Doctrine\ORM\EntityManager */
public function getEntityManager()
{
if (!isset($this->em)) {
// TODO do this in the configuration
$this->em = $this->get('doctrine.orm.entity_manager');
$this->em->getConfiguration()->addCustomHydrationMode('id', 'Chitanka\LibBundle\Hydration\IdHydrator');
$this->em->getConfiguration()->addCustomHydrationMode('key_value', 'Chitanka\LibBundle\Hydration\KeyValueHydrator');
}
return $this->em;
}
/** Repository for the given unqualified entity name (e.g. "Text"). */
public function getRepository($entityName = null)
{
return $this->getEntityManager()->getRepository($this->getEntityName($entityName));
}
/** Qualifies an entity name with the bundle prefix. */
protected function getEntityName($entityName)
{
return 'LibBundle:'.$entityName;
}
private $user;
/** @return User */
public function getUser() {
// TODO remove
// Legacy auth shim: initializes the user from the legacy session and
// plants a token into the security context when authenticated.
if ( ! isset($this->user)) {
$this->user = User::initUser($this->getUserRepository());
if ($this->user->isAuthenticated()) {
$token = new \Symfony\Component\Security\Core\Authentication\Token\UsernamePasswordToken($this->user, $this->user->getPassword(), 'User', $this->user->getRoles());
$this->get('security.context')->setToken($token);
}
}
return $this->user;
//return $this->get('security.context')->getToken()->getUser();
}
/** The current user merged into the entity manager, ready to persist. */
protected function getSavableUser() {
return $this->getEntityManager()->merge($this->getUser());
}
/** Replaces the cached current user (used after a settings update). */
public function setUser($user) {
$this->user = $user;
}
/**
 * Redirects to another route.
 *
 * It expects a route path parameter.
 * By default, the response status code is 301.
 *
 * If the route empty, the status code will be 410.
 * If the permanent path parameter is set, the status code will be 302.
 * (copied from Symfony\Bundle\FrameworkBundle\Controller\RedirectController)
 *
 * NOTE(review): this signature differs from Symfony's base
 * Controller::redirect($url, $status) — confirm the override is intentional.
 *
 * @param string $route The route pattern to redirect to
 * @param Boolean $permanent Whether the redirect is permanent or not
 *
 * @return Response A Response instance
 */
public function redirect($route, $permanent = false) {
if (!$route) {
return new Response(null, 410);
}
$attributes = $this->container->get('request')->attributes->all();
unset($attributes['_route'], $attributes['route'], $attributes['permanent'] );
return new RedirectResponse($this->container->get('router')->generate($route, $attributes), $permanent ? 301 : 302);
}
/**
 * Redirects to a URL.
 *
 * It expects a url path parameter.
 * By default, the response status code is 301.
 *
 * If the url is empty, the status code will be 410.
 * If the permanent path parameter is set, the status code will be 302.
 *
 * @param string $url The url to redirect to
 * @param Boolean $permanent Whether the redirect is permanent or not
 *
 * @return Response A Response instance
 */
public function urlRedirect($url, $permanent = false) {
if (!$url) {
return new Response(null, 410);
}
return new RedirectResponse($url, $permanent ? 301 : 302);
}
/** Aborts the request with 401 Unauthorized. */
protected function notAllowed($message = null) {
throw new HttpException(401, $message);
}
/** Aborts the request with 404 Not Found. */
protected function notFound($message = null) {
throw new NotFoundHttpException($message);
}
// TODO refactor: move to separate class
/**
 * Picks a mirror site at random according to the configured percentage
 * weights; returns false for the main site (or when none configured).
 */
protected function getMirrorServer() {
$mirrorSites = $this->container->getParameter('mirror_sites');
if ( empty($mirrorSites) ) {
return false;
}
$ri = rand(1, 100);
$curFloor = 0;
foreach ($mirrorSites as $site => $prob) {
$curFloor += $prob;
if ( $ri <= $curFloor ) {
return $site;
}
}
return false; // main site
}
/**
 * Sets the response cache lifetime; accepts seconds or a strtotime()
 * string such as "+1 day".
 */
protected function enableCache($responseLifetime) {
if (is_string($responseLifetime)) {
$responseLifetime = strtotime($responseLifetime) - strtotime('now');
}
$this->responseAge = $responseLifetime;
}
/** Disables HTTP caching for the current response. */
protected function disableCache() {
$this->responseAge = 0;
}
/** Web root path derived from the executing script's location. */
protected function getWebRoot() {
return dirname($_SERVER['SCRIPT_NAME']);
}
/** Shortcut for a container parameter. */
protected function getParameter($param) {
return $this->container->getParameter($param);
}
/** @return \Chitanka\LibBundle\Entity\BookRepository */
protected function getBookRepository() { return $this->getRepository('Book'); }
/** @return \Chitanka\LibBundle\Entity\BookmarkRepository */
protected function getBookmarkRepository() { return $this->getRepository('Bookmark'); }
/** @return \Chitanka\LibBundle\Entity\BookmarkFolderRepository */
protected function getBookmarkFolderRepository() { return $this->getRepository('BookmarkFolder'); }
/** @return \Chitanka\LibBundle\Entity\BookRevisionRepository */
protected function getBookRevisionRepository() { return $this->getRepository('BookRevision'); }
/** @return \Chitanka\LibBundle\Entity\CategoryRepository */
protected function getCategoryRepository() { return $this->getRepository('Category'); }
/** @return \Chitanka\LibBundle\Entity\FeaturedBookRepository */
protected function getFeaturedBookRepository() { return $this->getRepository('FeaturedBook'); }
/** @return \Chitanka\LibBundle\Entity\ForeignBookRepository */
protected function getForeignBookRepository() { return $this->getRepository('ForeignBook'); }
/** @return \Chitanka\LibBundle\Entity\LabelRepository */
protected function getLabelRepository() { return $this->getRepository('Label'); }
/** @return \Chitanka\LibBundle\Entity\PersonRepository */
protected function getPersonRepository() { return $this->getRepository('Person'); }
/** @return \Chitanka\LibBundle\Entity\SearchStringRepository */
protected function getSearchStringRepository() { return $this->getRepository('SearchString'); }
/** @return \Chitanka\LibBundle\Entity\SequenceRepository */
protected function getSequenceRepository() { return $this->getRepository('Sequence'); }
/** @return \Chitanka\LibBundle\Entity\SeriesRepository */
protected function getSeriesRepository() { return $this->getRepository('Series'); }
/** @return \Chitanka\LibBundle\Entity\SiteRepository */
protected function getSiteRepository() { return $this->getRepository('Site'); }
/** @return \Chitanka\LibBundle\Entity\SiteNoticeRepository */
protected function getSiteNoticeRepository() { return $this->getRepository('SiteNotice'); }
/** @return \Chitanka\LibBundle\Entity\TextRepository */
protected function getTextRepository() { return $this->getRepository('Text'); }
/** @return \Chitanka\LibBundle\Entity\TextCommentRepository */
protected function getTextCommentRepository() { return $this->getRepository('TextComment'); }
/** @return \Chitanka\LibBundle\Entity\TextRatingRepository */
protected function getTextRatingRepository() { return $this->getRepository('TextRating'); }
/** @return \Chitanka\LibBundle\Entity\TextRevisionRepository */
protected function getTextRevisionRepository() { return $this->getRepository('TextRevision'); }
/** @return \Chitanka\LibBundle\Entity\UserRepository */
protected function getUserRepository() { return $this->getRepository('User'); }
/** @return \Chitanka\LibBundle\Entity\UserTextContribRepository */
protected function getUserTextContribRepository() { return $this->getRepository('UserTextContrib'); }
/** @return \Chitanka\LibBundle\Entity\UserTextReadRepository */
protected function getUserTextReadRepository() { return $this->getRepository('UserTextRead'); }
/** @return \Chitanka\LibBundle\Entity\WikiSiteRepository */
protected function getWikiSiteRepository() { return $this->getRepository('WikiSite'); }
/** @return \Chitanka\LibBundle\Entity\WorkEntryRepository */
protected function getWorkEntryRepository() { return $this->getRepository('WorkEntry'); }
/** @return \Chitanka\LibBundle\Entity\WorkContribRepository */
protected function getWorkContribRepository() { return $this->getRepository('WorkContrib'); }
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Number;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\Char;
abstract class Page
{
const
FF_ACTION = 'action',
FF_QUERY = 'q',
FF_TEXT_ID = 'id',
FF_CHUNK_ID = 'part',
FF_LIMIT = 'plmt',
FF_OFFSET = 'page',
FF_SORTBY = 'sortby',
FF_CQUESTION = 'captchaQuestion',
FF_CQUESTION_T = 'captchaQuestionT',
FF_CANSWER = 'captchaAnswer',
FF_CTRIES = 'captchaTries',
// do not save any changed page settings
FF_SKIP_SETTINGS = 'sss';
public
$redirect = '',
$inlineJs = '';
protected
$action = '',
$title,
$outencoding,
$contentType,
$request,
$user,
$db,
$content,
$messages,
$fullContent,
$outputLength,
$allowCaching,
$query_escapes = array(
'"' => '', '\'' => '’'
),
$llimit = 0, $loffset = 0,
$maxCaptchaTries = 2, $defListLimit = 10, $maxListLimit = 50,
$includeFirstHeading = true,
$includeJumptoLinks = true,
$includeOpenSearch = false,#true, // TODO
$includeNavigation = true,
$includeNavigationLinks = true,
$includeNavigationExtraLinks = true,
$includePersonalTools = true,
$includeSearch = true,
$includeFooter = true,
$includeDataSuggestionLinks = true,
$includeDownloadLinks = true,
$includeMultiDownloadForm = true,
$includeUserLinks = true,
$includeFeedLinks = true,
$includeFilters = true,
$includeCommentLinkByNone = true,
$includeRatingLinkByNone = true,
$includeInfoLinkByNone = true,
$sendFiles = true;
/**
 * @param array $fields name => value pairs copied verbatim onto the page
 */
public function __construct($fields)
{
foreach ($fields as $f => $v) {
$this->$f = $v;
}
$this->save_settings = $this->request->value(self::FF_SKIP_SETTINGS, 1);
$this->inencoding = 'utf-8';
$this->doIconv = true;
$this->allowCaching = true;
$this->encfilter = '';
// TODO
$this->root = '/';
$this->rootPage = $this->root;
$this->rootd = $this->root;
$this->sitename = Setup::setting('sitename');
$this->messages = $this->content = $this->fullContent = '';
$this->contentType = 'text/html';
$this->isMobile = $this->request->value('mobile', 0) == 1;
$this->outputDone = false;
$this->title = $this->sitename;
// the mobile view strips most of the chrome, except on the main page
if ( $this->isMobile ) {
if ( $this->action != 'main' ) {
$this->includeNavigation = false;
}
$this->includeNavigationLinks = false;
$this->includeNavigationExtraLinks = false;
$this->includePersonalTools = false;
}
}
/**
 * Generates the page content according to the submission type:
 * POST requests go through processSubmission(), GET through buildContent().
 * The result is also stored in $this->content.
 */
public function execute() {
$this->content = $this->request->wasPosted()
? $this->processSubmission()
: $this->buildContent();
return $this->content;
}
/** Returns the page title. */
public function title() {
return $this->title;
}
/** Returns the generated page content (set by execute()). */
public function content() {
return $this->content;
}
/** Generic property getter; returns null for unknown fields. */
public function get($field) {
return isset($this->$field) ? $this->$field : null;
}
/** Generic property setter. */
public function set($field, $value) {
$this->$field = $value;
}
/** Mass-assigns properties from an array (or array-castable value). */
public function setFields($data) {
foreach ((array) $data as $field => $value) {
$this->$field = $value;
}
}
/**
 * Queues a user-visible flash message; for AJAX requests the message is
 * sent as a response header instead of a session flash.
 *
 * @param string $message
 * @param bool $isError styles the message as an error when true
 */
public function addMessage($message, $isError = false) {
$class = $isError ? 'error' : 'notice';
if ($this->controller->getRequest()->isXmlHttpRequest()) {
header("X-Message-$class: ".rawurlencode($message));
} else {
$this->controller->get('request')->getSession()->getFlashBag()->set($class, $message);
}
}
/** Appends a snippet of inline JavaScript to the page. */
protected function addJs($js) {
$this->inlineJs .= $js . "\n";
}
/**
 * TODO replace
 * Disabled: returns immediately; the remainder is dead legacy code kept
 * for reference.
 */
public function addRssLink($title = null, $actionOrUrl = null) {
return;
Legacy::fillOnEmpty($title, $this->title());
$title = strip_tags($title);
if ( Legacy::isUrl($actionOrUrl) ) {
$url = $actionOrUrl;
} else {
Legacy::fillOnEmpty($actionOrUrl, $this->action);
$params = array(
self::FF_ACTION => 'feed',
'obj' => $actionOrUrl,
);
$url = '';#url($params, 2);
}
$feedlink = <<<EOS
	<link rel="alternate" type="application/rss+xml" title="RSS 2.0 — $title" href="$url" />
EOS;
$this->addHeadContent($feedlink);
}
/** Builds a standalone RSS feed link element for the given route. */
public function getInlineRssLink($route, $data = array(), $title = null) {
Legacy::fillOnEmpty($title, $this->title());
$link = sprintf('<div class="feed-standalone"><a href="%s" title="RSS 2.0 — %s" rel="feed"><span class="fa fa-rss"></span> <span>RSS</span></a></div>', $this->controller->generateUrl($route, $data), $title);
return $link;
}
/** Intentionally empty (legacy no-op). */
public function addScript($file, $debug = false) {
}
/** Whether the page output may be cached. */
public function allowCaching() {
return $this->allowCaching;
}
/**
 * Output page content.
 * Prints the full page, building it first when not yet assembled; does
 * nothing if output has already happened.
 */
public function output() {
if ( $this->outputDone ) { // already outputted
return;
}
if ( empty($this->fullContent) ) {
$this->getFullContent();
}
print $this->fullContent;
}
/** Checks whether iconv can convert from the input encoding to $enc. */
public function isValidEncoding($enc) {
return @iconv($this->inencoding, $enc, '') !== false;
}
/**
 * Build full page content.
 * Wraps accumulated messages, concatenates them with the content and
 * expands the legacy templates; frees $this->content afterwards.
 * @return string
 */
public function getFullContent()
{
$this->messages = empty( $this->messages ) ? ''
: "<div id='messages'>\n$this->messages\n</div>";
$this->fullContent = $this->messages . $this->content;
unset($this->content); // free some memory
$this->addTemplates();
$this->fullContent = Legacy::expandTemplates($this->fullContent);
return $this->fullContent;
}
/** The page heading: the title, or the site name when no title is set. */
private function getFirstHeading()
{
return empty($this->title) ? $this->sitename : $this->title;
}
/**
 * Builds an OpenSearch <link> element for actions that support search.
 * NOTE(review): relies on $this->searchOptions, which is not declared in
 * this class — presumably provided by a subclass; confirm.
 */
public function getOpenSearch()
{
$opensearch = '';
if ( array_key_exists($this->action, $this->searchOptions) ) {
$opensearch = "\n\t" . $this->out->xmlElement('link', null, array(
'rel' => 'search',
'type' => 'application/opensearchdescription+xml',
'href' => 'action=opensearchdesc',
'title' => "$this->sitename ({$this->searchOptions[$this->action]})"
));
}
return $opensearch;
}
/**
 * Process POST forms if there are any.
 * Override this function if your page contains POST forms.
 */
protected function processSubmission() {
return $this->buildContent();
}
/**
 * Create page content.
 * Override this function to include content in your page.
 */
protected function buildContent() {
return '';
}
/** Registers the basic legacy template placeholders. */
protected function addTemplates() {
Legacy::addTemplate('ROOT', $this->root);
Legacy::addTemplate('DOCROOT', $this->rootd.'/');
Legacy::addTemplate('SITENAME', $this->sitename);
}
/**
 * Builds an <a> element pointing to a text, optionally to a specific
 * chunk; the link carries "text text-<id>" CSS classes.
 */
protected function makeSimpleTextLink(
$title, $textId, $chunkId = 1, $linktext = '',
$attrs = array(), $data = array(), $params = array()
) {
$p = array(
self::FF_TEXT_ID => $textId,
//'slug' => $this->out->slugify(preg_replace('/^(\d+\.)+ /', '', $title)),
);
if ($chunkId != 1) {
$p[self::FF_CHUNK_ID] = $chunkId;
}
if ( empty($linktext) ) {
$linktext = '<em>'. $title .'</em>';
}
$attrs['class'] = ltrim(@$attrs['class'] . " text text-$textId");
$attrs['href'] = $this->controller->generateUrl('text_show_part', $p);
return $this->out->xmlElement('a', $linktext, $attrs);
}
/** Quoted text link followed by an " от <authors>" suffix. */
protected function makeTextLinkWithAuthor($work) {
return '„' . $this->makeSimpleTextLink($work->getTitle(), $work->getId()) . '“'
. $this->makeFromAuthorSuffix($work);
}
/**
 * Builds person-page links for a comma-separated list of names, each
 * optionally wrapped with $pref/$suf and extra $query URL parameters.
 * Names containing "/" are emitted as plain text.
 */
public function makeAuthorLink(
$name, $sortby='first', $pref='', $suf='', $query=array()
) {
$name = rtrim($name, ',');
if ( empty($name) ) {
return '';
}
settype($query, 'array');
$o = '';
foreach ( explode(',', $name) as $lname ) {
$text = empty($sortby)
? 'Произведения от ' . $name
: $this->formatPersonName($lname, $sortby);
$lname = str_replace('.', '', $lname);
$link = strpos($lname, '/') !== false // contains not allowed chars
? $lname
: sprintf('<a href="%s">%s</a>', $this->controller->generateUrl('person_show', array('slug' => trim($lname))+$query), $text);
$o .= ', ' . $pref . $link . $suf;
}
// strip the leading ", " separator
return substr($o, 2);
}
/**
 * Returns an " от <authors>" suffix for a text, or an empty string.
 * Accepts either a legacy array with an "author" key or an object
 * exposing getAuthors() (each author an array or entity-like object).
 */
public function makeFromAuthorSuffix($text) {
if ( is_array($text) ) {
if ( isset($text['author']) && trim($text['author'], ', ') != '' ) {
return ' от '.$text['author'];
}
} else {
$authors = array();
foreach ($text->getAuthors() as $author) {
if (is_array($author)) {
$slug = $author['slug'];
$name = $author['name'];
} else {
$slug = $author->getSlug();
$name = $author->getName();
}
$authors[] = sprintf('<a href="%s">%s</a>', $this->controller->generateUrl('person_show', array('slug' => $slug)), $name);
}
if (! empty($authors)) {
return ' от '. implode(', ', $authors);
}
}
return '';
}
/** Link to a user's personal page. */
protected function makeUserLink($name) {
return sprintf('<a href="%s" class="user" title="Към личната страница на %s">%s</a>', $this->controller->generateUrl('user_show', array('username' => $name)), $name, $name);
}
/** User link plus an e-mail icon when the user has an address and allows mail. */
protected function makeUserLinkWithEmail($username, $email, $allowemail) {
$mlink = '';
if ( ! empty($email) && $allowemail) {
$mlink = sprintf('<a href="%s" title="Пращане на писмо на %s"><span class="fa fa-envelope-o"></span><span class="sr-only">Е-поща</span></a>',
$this->controller->generateUrl('email_user', array('username' => $username)),
String::myhtmlentities($username));
}
return $this->makeUserLink($username) .' '. $mlink;
}
/**
 * Wraps the last name of "First [Middle] Last[, suffix]" in a
 * <span class='lastname'> element, ordered by $sortby ('first' or 'last').
 * Names that do not match the expected shape are returned unchanged.
 */
protected function formatPersonName($name, $sortby = 'first') {
// group 1: everything up to the last space (greedy), group 2: last name,
// group 3: optional ", ..." suffix
if (!preg_match('/([^,]+) ([^,]+)(, .+)?/', $name, $match) || !isset($match[2])) {
return $name;
}
$wrappedLast = "<span class='lastname'>{$match[2]}</span>";
$suffix = isset($match[3]) ? $match[3] : '';
if ($sortby == 'last') {
return $wrappedLast . ', ' . $match[1] . $suffix;
}
return $match[1] . ' ' . $wrappedLast . $suffix;
}
/**
 * Read pagination parameters from the request and derive the list offset.
 * The user-supplied limit is normalized so it cannot exceed maxListLimit.
 */
public function initPaginationFields() {
$this->lpage = (int) $this->request->value( self::FF_OFFSET, 1 );
$this->llimit = (int) $this->request->value(self::FF_LIMIT, $this->defListLimit );
// Clamp the limit to the configured maximum
$this->llimit = Number::normInt( $this->llimit, $this->maxListLimit );
$this->loffset = ($this->lpage - 1) * $this->llimit;
}
/**
 * Verify the user's captcha answer against the stored question.
 *
 * Users who have already proven themselves human (or are logged in) pass
 * automatically. A correct answer marks the user as human; a wrong one is
 * logged and optionally shown as a warning message.
 *
 * @param bool $showWarning Add a user-visible warning on failure
 * @param int|null $_question Question ID (defaults to the submitted form field)
 * @param string|null $_answer Given answer (defaults to the submitted form field)
 * @return bool True when no captcha is required or the answer is correct
 */
protected function verifyCaptchaAnswer($showWarning = false,
$_question = null, $_answer = null) {
if ( !$this->showCaptchaToUser() ) {
return true;
}
$this->captchaTries++;
Legacy::fillOnEmpty($_question, $this->captchaQuestion);
Legacy::fillOnEmpty($_answer, $this->captchaAnswer);
$res = $this->db->select(DBT_QUESTION, array('id' => $_question));
if ( $this->db->numRows($res) == 0 ) { // invalid question
return false;
}
$row = $this->db->fetchAssoc($res);
// A question may accept several comma-separated answers
$answers = explode(',', $row['answers']);
$_answer = Char::mystrtolower(trim($_answer));
foreach ($answers as $answer) {
if ($_answer == $answer) {
$this->user->setIsHuman(true);
return true;
}
}
if ($showWarning) {
$this->addMessage($this->makeCaptchaWarning(), true);
}
$this->logFailedCaptcha("$row[question] [$row[answers]] -> \"$_answer\"");
return false;
}
/**
 * Build the HTML form fragment for a captcha question.
 *
 * Re-renders the previously asked question (after a failed try) or picks a
 * random one from the database. Returns '' when the user needs no captcha.
 *
 * @return string HTML with hidden state fields, the label and the answer input
 */
protected function makeCaptchaQuestion() {
if (!$this->showCaptchaToUser()) {
return '';
}
if ( empty($this->captchaQuestion) ) {
// extract() pulls the row's columns into locals — provides $id and $question here
extract( $this->db->getRandomRow(DBT_QUESTION) );
} else {
$id = $this->captchaQuestion;
$question = $this->captchaQuestionT;
}
// Hidden fields carry the question and try counter across form submissions
$qid = $this->out->hiddenField(self::FF_CQUESTION, $id);
$qt = $this->out->hiddenField(self::FF_CQUESTION_T, $question);
$tr = $this->out->hiddenField(self::FF_CTRIES, $this->captchaTries);
$q = '<label for="'.self::FF_CANSWER.'" class="control-label">'.$question.'</label>';
$answer = $this->out->textField(self::FF_CANSWER, '', $this->captchaAnswer, 30, 60, 0, '', array('class' => 'form-control'));
return '<div>' . $qid . $qt . $tr . $q .' '. $answer . '</div>';
}
/**
 * Populate the captcha state from the submitted request fields.
 */
protected function initCaptchaFields() {
$this->captchaQuestion = (int) $this->request->value(self::FF_CQUESTION, 0);
$this->captchaQuestionT = $this->request->value(self::FF_CQUESTION_T);
$this->captchaAnswer = $this->request->value(self::FF_CANSWER);
$this->captchaTries = (int) $this->request->value(self::FF_CTRIES, 0);
}
/** Reset all captcha state back to its initial, empty values. */
protected function clearCaptchaQuestion() {
    $this->captchaQuestion = 0;
    $this->captchaQuestionT = '';
    $this->captchaAnswer = '';
    $this->captchaTries = 0;
}
/**
 * Build the warning message shown after a wrong captcha answer,
 * telling the user how many tries remain (if any).
 *
 * @return string
 */
protected function makeCaptchaWarning() {
if ( $this->hasMoreCaptchaTries() ) {
$rest = $this->maxCaptchaTries - $this->captchaTries;
// Pick the grammatically correct singular/plural form
$tries = Legacy::chooseGrammNumber($rest, 'един опит', $rest.' опита');
return "Отговорили сте грешно на въпроса „{$this->captchaQuestionT}“. Имате право на още $tries.";
}
return "Вече сте направили $this->maxCaptchaTries неуспешни опита да отговорите на въпроса „{$this->captchaQuestionT}“. Нямате право на повече.";
}
/** @return bool True while the user has not exhausted the allowed captcha attempts */
protected function hasMoreCaptchaTries() {
    $remaining = $this->maxCaptchaTries - $this->captchaTries;
    return $remaining > 0;
}
/** The captcha is shown only to anonymous visitors not yet verified as human. */
protected function showCaptchaToUser() {
    if (!$this->user->isAnonymous()) {
        return false;
    }
    return !$this->user->isHuman();
}
/**
 * Append a failed captcha attempt to the log file.
 *
 * @param string $msg Description of the question, its accepted answers and the given answer
 */
private function logFailedCaptcha($msg) {
    // LOCK_EX prevents interleaved lines when several requests fail concurrently
    file_put_contents(__DIR__."/../../../../app/logs/failed_captcha.log", date('Y-m-d H:i:s').": $msg\n", FILE_APPEND | LOCK_EX);
}
/**
 * Append query-string arguments to the current request URI.
 *
 * @param array $args Key/value pairs to add
 * @return string The resulting URL
 */
protected function addUrlQuery($args) {
return $this->out->addUrlQuery($this->request->requestUri(), $args);
}
/**
 * Serve a content file to the client.
 *
 * Either redirects to the file's public URL (when $sendFiles is enabled)
 * or loads its contents into the response buffer.
 *
 * @param string $file Path relative to the web root
 */
protected function sendFile($file)
{
$this->outputLength = filesize($file);
if ($this->sendFiles) {
// Let the web server deliver the file directly
header('Location: '. $this->rootd . '/' . $file);
} else {
$this->fullContent = file_get_contents($file);
}
$this->outputDone = true;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
use Symfony\Component\HttpKernel\Exception\HttpException;
use Sonata\AdminBundle\Controller\CRUDController as BaseController;
use Chitanka\LibBundle\Entity\User;
/**
 * Base admin CRUD controller: restricts all admin actions to members
 * of the "admin" group.
 */
class CRUDController extends BaseController {
/** @throws HttpException 401 when the current user is not in the "admin" group */
public function configure() {
if ( ! $this->getUser()->inGroup('admin')) {
throw new HttpException(401);
}
parent::configure();
}
/**
 * Lazily initialized current user.
 * NOTE(review): caches into an undeclared $_user property — consider declaring it.
 */
public function getUser() {
if ( ! isset($this->_user)) {
$this->_user = User::initUser($this->getRepository('User'));
}
return $this->_user;
}
/** Shortcut to a LibBundle Doctrine repository by entity name. */
public function getRepository($entityName) {
return $this->get('doctrine.orm.entity_manager')->getRepository('LibBundle:'.$entityName);
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\File;
class UpdateLibCommand extends CommonDbCommand
{
/**
 * Configure the console command: name, description, argument and options.
 */
protected function configure()
{
parent::configure();
$this
->setName('lib:update')
->setDescription('Add or update new texts and books')
->addArgument('input', InputArgument::REQUIRED, 'Directory with input files or other input directories')
->addOption('save', null, InputOption::VALUE_NONE, 'Save generated files in corresponding directories')
->addOption('dump-sql', null, InputOption::VALUE_NONE, 'Output SQL queries instead of executing them')
->setHelp(<<<EOT
The <info>lib:update</info> command adds or updates texts and books.
EOT
);
}
/**
 * Executes the command: reads options, processes the input directory and
 * prints the generated SQL queries.
 *
 * @param InputInterface $input An InputInterface instance
 * @param OutputInterface $output An OutputInterface instance
 *
 * @return void
 */
protected function execute(InputInterface $input, OutputInterface $output)
{
$this->input = $input;
$this->output = $output;
$this->saveFiles = $input->getOption('save') === true;
$this->dumpSql = $input->getOption('dump-sql') === true;
$queries = $this->conquerTheWorld($this->getContainer()->get('doctrine.orm.default_entity_manager'));
$this->printQueries($queries);
$output->writeln('/*Done.*/');
}
/**
 * Initialize per-run state: entity manager, run dates, content directory
 * and the work/book/error buffers.
 *
 * @param \Doctrine\ORM\EntityManager $em
 */
private function defineVars($em)
{
$this->em = $em;
$this->overwrite = false; // overwrite existing files?
$this->entrydate = date('Y-m-d');
$this->modifDate = $this->entrydate . ' ' . date('H:i:s');
$this->contentDir = $this->getContainer()->getParameter('kernel.root_dir').'/../web/content';
$this->works = $this->books = array();
$this->errors = array();
}
/**
 * Entry point of the update run. Processes either a single packet directory
 * (one directly containing *.data files) or a directory of packet directories.
 *
 * @return array All SQL queries, prefixed with "SET NAMES utf8" and followed
 *               by the next-ID bookkeeping updates
 */
private function conquerTheWorld($em)
{
$this->defineVars($em);
$queries = array();
$dir = $this->input->getArgument('input');
// No .data files directly inside: treat every subdirectory as a packet
if (count(glob("$dir/*.data")) == 0) {
foreach (glob("$dir/*", GLOB_ONLYDIR) as $dir) {
$queries = array_merge($queries, $this->processPacket($dir));
}
} else {
$queries = $this->processPacket($dir);
}
array_unshift($queries, 'SET NAMES utf8');
$queries = array_merge($queries, $this->getNextIdUpdateQueries());
return $queries;
}
/**
 * Process one packet directory: first all work files, then all book files
 * (books may reference the works parsed just before them).
 *
 * @param string $dir Packet directory
 * @return array SQL queries generated for the packet contents
 */
private function processPacket($dir)
{
    $queries = array();
    foreach (self::sortDataFiles(glob("$dir/work.*.data")) as $workFile) {
        $work = $this->processWorkFiles($workFile);
        $queries = array_merge($queries, $this->insertWork($work));
    }
    foreach (self::sortDataFiles(glob("$dir/book.*.data")) as $bookFile) {
        $book = $this->processBookFiles($bookFile);
        $queries = array_merge($queries, $this->insertBook($book));
    }
    return $queries;
}
/**
 * Parse one work.*.data file (plus its sibling .tmpl/.text/.anno/.info files
 * and image directory) into a normalized work descriptor array.
 *
 * A negative ID in the packet marks a new work; it is replaced with the next
 * free database ID, while the packet ID remains the key in $this->works.
 *
 * @param string $dataFile Path to the work.*.data file
 * @return array The normalized work descriptor
 */
private function processWorkFiles($dataFile)
{
$work = array();
foreach (file($dataFile) as $line) {
$work += self::_extractVarFromLineData($line);
}
$packetId = $work['id'];
$work['is_new'] = $packetId < 0;
if ($work['is_new']) {
$work['id'] = $this->getNextId('text');
}
$work['revision_id'] = $this->getNextId('text_revision');
// A parenthesized subtitle is stored without the parentheses
if (isset($work['subtitle']) && $work['subtitle'][0] == '(') {
$work['subtitle'] = trim($work['subtitle'], '()');
}
if (isset($work['authors'])) {
$authors = array();
foreach (explode(',', $work['authors']) as $slug) {
$authors[] = $this->getObjectId('person', $slug);
}
$work['authors'] = $authors;
}
// "YYYY-YYYY" means a writing period
if (isset($work['year']) && strpos($work['year'], '-') !== false) {
list($work['year'], $work['year2']) = explode('-', $work['year']);
}
if (isset($work['translators'])) {
// Format: "slug,year;slug,year;…" — '?' marks an unknown slug or year
$translators = array();
foreach (explode(';', $work['translators']) as $slugYear) {
list($slug, $transYear) = explode(',', $slugYear);
if ($transYear == '?') $transYear = null;
if ($slug != '?') {
$translators[] = array($this->getObjectId('person', $slug), $transYear);
}
if (strpos($transYear, '-') !== false) {
list($work['trans_year'], $work['trans_year2']) = explode('-', $transYear);
} else {
$work['trans_year'] = $transYear;
}
}
$work['translators'] = $translators;
} else if ($work['is_new'] && $work['lang'] != $work['orig_lang']) {
// Translated work without named translators: assume a free-content license
$work['trans_license'] = 'fc';
}
if (isset($work['labels'])) {
$work['labels'] = explode(',', $work['labels']);
}
if (isset($work['users'])) {
// A leading '*' means: replace all existing user contributions
if ($work['users'][0] == '*') {
$work['users_as_new'] = true;
$work['users'] = substr($work['users'], 1);
}
$users = array();
foreach (explode(';', $work['users']) as $userContrib) {
// username, percent, comment, date
$parts = str_getcsv($userContrib, ',');
if ($parts[0] == '-') {
$parts[0] = '?';
$parts[] = null;
} else {
if (strpos($parts[0], '(') !== false) {
throw new \Exception("Username contains parentheses: '$parts[0]' (ID $work[id])");
}
try {
$parts[] = $this->getObjectId('user', $parts[0], 'username');
} catch (\Exception $e) {
// Unknown username: keep the name but store no user ID
$parts[] = null;
}
}
$users[] = $parts;
}
$work['users'] = $users;
}
// Attach sibling files: a template takes precedence over a plain text file
if (file_exists($file = str_replace('.data', '.tmpl', $dataFile))) {
$work['tmpl'] = $file;
$work = self::prepareWorkTemplate($work);
} else if (file_exists($file = str_replace('.data', '.text', $dataFile))) {
$work['text'] = $file;
}
if (file_exists($file = str_replace('.data', '.anno', $dataFile))) {
$work['anno'] = $file;
}
if (file_exists($file = str_replace('.data', '.info', $dataFile))) {
$work['info'] = $file;
}
if (file_exists($dir = strtr($dataFile, array('work.' => '', '.data' => ''))) && is_dir($dir)) {
$work['img'] = $dir;
}
return $this->works[$packetId] = $work;
}
/**
 * Parse one book.*.data file (plus its sibling .tmpl/.anno/.info/.covr.jpg/
 * .djvu/.pdf files and image directory) into a normalized book descriptor.
 *
 * A negative ID in the packet marks a new book, replaced with the next free
 * database ID. For new books without an explicit format list the formats are
 * derived from the attached files.
 *
 * @param string $dataFile Path to the book.*.data file
 * @return array The normalized book descriptor
 */
private function processBookFiles($dataFile)
{
$book = array();
foreach (file($dataFile) as $line) {
$book += self::_extractVarFromLineData($line);
}
$packetId = $book['id'];
$book['is_new'] = $packetId < 0;
if ($book['is_new']) {
$book['id'] = $this->getNextId('book');
}
$book['revision_id'] = $this->getNextId('book_revision');
// A parenthesized subtitle is stored without the parentheses
if (isset($book['subtitle']) && $book['subtitle'][0] == '(') {
$book['subtitle'] = trim($book['subtitle'], '()');
}
if (isset($book['authors'])) {
$authors = array();
foreach (explode(',', $book['authors']) as $slug) {
$authors[] = $this->getObjectId('person', $slug);
}
$book['authors'] = $authors;
}
if (file_exists($file = str_replace('.data', '.tmpl', $dataFile))) {
// The template also reveals which works this book contains
list($book['tmpl'], $book['works']) = $this->getBookTemplate($file, $this->works);
}
if (file_exists($file = str_replace('.data', '.anno', $dataFile))) {
$book['anno'] = $file;
}
if (file_exists($file = str_replace('.data', '.info', $dataFile))) {
$book['info'] = $file;
}
if (file_exists($file = str_replace('.data', '.covr.jpg', $dataFile))) {
$book['cover'] = $file;
}
if (file_exists($dir = strtr($dataFile, array('.data' => ''))) && is_dir($dir)) {
$book['img'] = $dir;
}
if (file_exists($file = str_replace('.data', '.djvu', $dataFile))) {
$book['djvu'] = $file;
}
if (file_exists($file = str_replace('.data', '.pdf', $dataFile))) {
$book['pdf'] = $file;
}
if (isset($book['formats'])) {
$book['formats'] = array_map('trim', explode(',', $book['formats']));
} else if ($book['is_new']) {
// No explicit format list: infer formats from the attached files
$book['formats'] = array();
if ( ! empty($book['works'])) {
$book['formats'][] = 'sfb';
}
if ( ! empty($book['djvu'])) {
$book['formats'][] = 'djvu';
}
if ( ! empty($book['pdf'])) {
$book['formats'][] = 'pdf';
}
}
return $book;
}
/**
 * Parse one "key = value" line of a .data file.
 *
 * @param string $line
 * @return array One-element [key => value] map; empty array for empty values;
 *               the placeholders '-' and '?' yield a null value
 */
static private function _extractVarFromLineData($line)
{
    // Split only on the first '=' so values may themselves contain '='
    $pieces = array_pad(explode('=', $line, 2), 2, '');
    $var = trim($pieces[0]);
    $value = trim($pieces[1]);
    if ($value === '') {
        return array();
    }
    // '-' and '?' are placeholders meaning "no value"
    if ($value == '-' || $value == '?') {
        $value = null;
    }
    return array($var => $value);
}
/**
 * Order data files by the numeric ID embedded in their names
 * (e.g. "work.12.data"). Files without such an ID are dropped.
 *
 * @param array $files
 * @return array Files keyed and ascendingly sorted by their numeric ID
 */
static public function sortDataFiles($files)
{
    $byId = array();
    foreach ($files as $file) {
        if (!preg_match('/\.(\d+)\.data/', $file, $match)) {
            continue;
        }
        $byId[$match[1]] = $file;
    }
    ksort($byId);
    return $byId;
}
/**
 * Read a book template and resolve which works it contains.
 *
 * Positive {file:N}/{text:N} references point to works already in the
 * database; negative ones are packet-local IDs of newly parsed works and are
 * rewritten to their real database IDs.
 *
 * @param string $file Path to the book template file
 * @param array $works Works parsed from the current packet, keyed by packet ID
 * @return array array($templateContents, $bookWorks)
 */
static private function getBookTemplate($file, $works)
{
$bookTmpl = file_get_contents($file);
$bookWorks = array();
if (preg_match_all('/\{(file|text):(\d+)/', $bookTmpl, $m)) {
// already existing in the database works included in this book
foreach ($m[2] as $oldWork) {
$bookWorks[] = array('id' => $oldWork, 'is_new' => false);
}
}
foreach ($works as $packetId => $work) {
if (strpos($bookTmpl, ":$packetId") !== false) {
// Rewrite the packet-local (negative) ID to the real database ID
$bookTmpl = strtr($bookTmpl, array(
":$packetId}" => ":$work[id]}",
":$packetId-" => ":$work[id]-",
":$packetId|" => ":$work[id]|",
));
$bookWorks[] = $work;
}
}
return array($bookTmpl, $bookWorks);
}
/**
 * Rewrite a work template's negative packet IDs to the work's real ID and
 * collect the referenced part files keyed by their final file name.
 *
 * @param array $work Work descriptor with 'id' and 'tmpl' (template path) set
 * @return array The work with 'text' (part files map) and 'tmpl' (contents)
 */
static private function prepareWorkTemplate($work)
{
$files = array();
$template = file_get_contents($work['tmpl']);
if (preg_match_all('/\{(text|file):-\d+(-.+)\}/', $template, $matches)) {
foreach ($matches[2] as $match) {
// Map the final part file name to the source .text file in the packet
$files["$work[id]$match"] = str_replace('.tmpl', "$match.text", $work['tmpl']);
$template = preg_replace("/(text|file):-\d+-/", "$1:$work[id]-", $template);
}
}
$work['text'] = $files;
$work['tmpl'] = $template;
return $work;
}
/** Cache of looked-up object IDs, keyed by table and query value */
private $_objectsIds = array();
/**
 * Fetch (and cache) the ID of the row matching $column = $query in $table.
 * Slug lookups are normalized through String::slugify() first.
 *
 * NOTE(review): $query is interpolated directly into the SQL; safe for
 * slugified values, but verify that callers never pass untrusted data
 * for other columns.
 *
 * @throws \Exception when no matching row exists
 * @return int|string The matching row's ID
 */
private function getObjectId($table, $query, $column = 'slug')
{
if ($column == 'slug') {
$query = String::slugify($query);
}
if ( ! isset($this->_objectsIds[$table][$query])) {
$sql = "SELECT id FROM $table WHERE $column = '$query'";
$result = $this->em->getConnection()->fetchAssoc($sql);
if (empty($result['id'])) {
throw new \Exception("Няма запис за $table.$column = '$query'");
}
$this->_objectsIds[$table][$query] = $result['id'];
}
return $this->_objectsIds[$table][$query];
}
/** Last ID handed out per table (lazily seeded from MAX(id) in the database) */
private $_curIds = array();
/** Pre-reserved ID pools per table; consumed before new IDs are generated */
private $_ids = array(
'text' => array(),
'book' => array(),
);
/**
 * Return the next free ID for $table: first from a pre-reserved pool,
 * otherwise by incrementing past the table's current maximum ID.
 *
 * @param string $table Table name in snake_case (mapped to its entity class)
 * @return int
 */
private function getNextId($table)
{
if (isset($this->_ids[$table]) && count($this->_ids[$table])) {
return array_shift($this->_ids[$table]);
}
if ( ! isset($this->_curIds[$table])) {
// snake_case table name -> CamelCase entity class
$tableClass = 'Chitanka\LibBundle\Entity\\'. str_replace(' ', '', ucwords(str_replace('_', ' ', $table)));
$this->_curIds[$table] = $this->em->createQuery(sprintf('SELECT MAX(e.id) FROM %s e', $tableClass))->getSingleScalarResult() + 1;
} else {
$this->_curIds[$table]++;
}
return $this->_curIds[$table];
}
/**
 * Build the queries that resynchronize the next_id bookkeeping table with
 * the actual MAX(id) of every table this run may have inserted into.
 *
 * @return array SQL UPDATE statements
 */
private function getNextIdUpdateQueries()
{
$tables = array(
'text_revision',
'book_revision',
'text_translator',
'text_author',
'book_author',
'book_text',
'series_author',
);
$queries = array();
foreach ($tables as $table) {
// snake_case table name -> CamelCase entity class name
$entityName = str_replace(' ', '', ucwords(str_replace('_', ' ', $table)));
$queries[] = "UPDATE next_id SET value=(SELECT max(id)+1 FROM $table) WHERE id='Chitanka\\\\LibBundle\\\\Entity\\\\$entityName'";
}
return $queries;
}
/**
 * Generate the SQL queries (and optionally write the content files) for one
 * work descriptor produced by processWorkFiles().
 *
 * Covers the main text row, its revision, author/translator/label/user
 * relations, and — when --save is active — the text, annotation, info and
 * image files under the content directory.
 *
 * @param array $work Normalized work descriptor
 * @return array SQL queries for this work
 */
private function insertWork(array $work)
{
$qs = array();
$set = array(
'id' => $work['id'],
);
if (isset($work['title'])) {
$set += array(
'slug' => (isset($work['slug']) ? String::slugify($work['slug']) : String::slugify($work['title'])),
'title' => String::my_replace($work['title']),
);
}
if (isset($work['toc_level'])) {
$set['headlevel'] = $work['toc_level'];
} else if (isset($work['text'])) {
// No explicit TOC level: guess it from the heading markers in the text
$set['headlevel'] = $work['toc_level'] = self::guessTocLevel(file_get_contents($work['text']));
}
if ( ! empty($work['type'])) $set['type'] = $work['type'];
if ( ! empty($work['lang'])) $set['lang'] = $work['lang'];
if ( ! empty($work['orig_lang'])) $set['orig_lang'] = $work['orig_lang'];
if (isset($work['text'])) {
$size = self::getFileSize($work['text']) / 1000;
$set += array(
'size' => $size,
'zsize' => ($size / 3.5),
);
}
if ($work['is_new']) {
// Defaults for a brand new text row
$set += array(
'created_at' => $this->entrydate,
'dl_count' => 0,
'read_count' => 0,
'comment_count' => 0,
'rating' => 0,
'votes' => 0,
'has_anno' => 0,
'has_cover' => 0,
'is_compilation' => isset($work['tmpl']),
'orig_title' => (empty($work['orig_title']) ? '' : self::fixOrigTitle($work['orig_title'])),
);
if (isset($work['ser_nr'])) {
$set['sernr'] = $work['ser_nr'];
}
}
if (isset($work['subtitle'])) $set['subtitle'] = String::my_replace($work['subtitle']);
if (isset($work['orig_subtitle'])) $set['orig_subtitle'] = self::fixOrigTitle($work['orig_subtitle']);
if (isset($work['year'])) $set['year'] = $work['year'];
if (isset($work['year2'])) $set['year2'] = $work['year2'];
if (isset($work['trans_year'])) $set['trans_year'] = $work['trans_year'];
if (isset($work['anno'])) $set['has_anno'] = filesize($work['anno']) ? 1 : 0;
if (isset($work['series'])) $set['series_id'] = $this->getObjectId('series', $work['series']);
if (isset($work['orig_license'])) $set['orig_license_id'] = $this->getObjectId('license', $work['orig_license'], 'code');
if (isset($work['trans_license'])) $set['trans_license_id'] = $this->getObjectId('license', $work['trans_license'], 'code');
if (isset($work['source'])) $set['source'] = $work['source'];
if ($work['is_new']) {
$qs[] = $this->olddb()->replaceQ(DBT_TEXT, $set);
} else if (count($set) > 1) {
$qs[] = $this->olddb()->updateQ(DBT_TEXT, $set, array('id' => $work['id']));
}
if (isset($work['revision'])) {
$set = array(
'id' => $work['revision_id'],
'text_id' => $work['id'],
'user_id' => 1,
'comment' => $work['revision'],
'date' => $this->modifDate,
'first' => ($work['is_new'] ? 1 : 0),
);
$qs[] = $this->olddb()->replaceQ(DBT_EDIT_HISTORY, $set);
$qs[] = $this->olddb()->updateQ(DBT_TEXT, array('cur_rev_id' => $work['revision_id']), array('id' => $work['id']));
} else {
$qs[] = "/* no revision for text $work[id] */";
}
if ( ! empty($work['authors'])) {
// Replace the author relations wholesale
$qs[] = $this->olddb()->deleteQ(DBT_AUTHOR_OF, array('text_id' => $work['id']));
foreach ($work['authors'] as $pos => $author) {
$set = array(
'id' => $this->getNextId(DBT_AUTHOR_OF),
'person_id' => $author,
'text_id' => $work['id'],
'pos' => $pos,
);
$qs[] = $this->olddb()->insertQ(DBT_AUTHOR_OF, $set, false, false);
}
if (isset($set['series_id'])) {
foreach ($work['authors'] as $pos => $author) {
$set = array(
'id' => $this->getNextId(DBT_SER_AUTHOR_OF),
'person_id' => $author,
'series_id' => $set['series_id'],
);
$qs[] = $this->olddb()->insertQ(DBT_SER_AUTHOR_OF, $set, true, false);
}
}
}
if ( ! empty($work['translators'])) {
$qs[] = $this->olddb()->deleteQ(DBT_TRANSLATOR_OF, array('text_id' => $work['id']));
foreach ($work['translators'] as $pos => $translator) {
list($personId, $transYear) = $translator;
$set = array(
'id' => $this->getNextId(DBT_TRANSLATOR_OF),
'person_id' => $personId,
'text_id' => $work['id'],
'pos' => $pos,
'year' => $transYear,
);
$qs[] = $this->olddb()->insertQ(DBT_TRANSLATOR_OF, $set, false, false);
}
}
if ( ! empty($work['labels'])) {
$qs[] = $this->olddb()->deleteQ('text_label', array('text_id' => $work['id']));
foreach ($work['labels'] as $label) {
$qs[] = $this->olddb()->insertQ('text_label', array(
'label_id' => $this->getObjectId('label', $label),
'text_id' => $work['id']
));
}
}
if (isset($work['text']) && isset($work['users'])) {
if (isset($work['users_as_new']) && $work['users_as_new']) {
$qs[] = $this->olddb()->deleteQ(DBT_USER_TEXT, array('text_id' => $work['id']));
}
foreach ($work['users'] as $user) {
list($username, $percent, $comment, $date, $userId) = $user;
// A contributor's share of the work size, proportional to the percent
$usize = $percent/100 * $size;
$set = array(
'id' => $this->getNextId(DBT_USER_TEXT),
'text_id' => $work['id'],
'size' => $usize,
'percent' => $percent,
'comment' => $comment,
'date' => $this->modifDate,
'humandate' => $date,
);
if ($userId) $set['user_id'] = $userId;
if ($username) $set['username'] = $username;
$qs[] = $this->olddb()->insertQ(DBT_USER_TEXT, $set, false, false);
}
}
if ($this->saveFiles) {
$path = Legacy::makeContentFilePath($work['id']);
if (isset($work['tmpl'])) {
File::myfile_put_contents("$this->contentDir/text/$path", String::my_replace($work['tmpl']));
$fullText = $work['tmpl'];
foreach ($work['text'] as $key => $textFile) {
$entryFile = dirname("$this->contentDir/text/$path") . "/$key";
$this->copyTextFile($textFile, $entryFile);
$fullText = str_replace("\t{file:$key}", String::my_replace(file_get_contents($textFile)), $fullText);
}
// Assemble the full text in a temp file to extract its headers
$tmpname = 'text.'.uniqid();
file_put_contents($tmpname, $fullText);
if (isset($work['toc_level'])) {
$qs = array_merge($qs, $this->buildTextHeadersUpdateQuery($tmpname, $work['id'], $work['toc_level']));
}
unlink($tmpname);
} else if (isset($work['text'])) {
$entryFile = "$this->contentDir/text/$path";
$this->copyTextFile($work['text'], $entryFile);
if (isset($work['toc_level'])) {
$qs = array_merge($qs, $this->buildTextHeadersUpdateQuery($entryFile, $work['id'], $work['toc_level']));
}
}
if (isset($work['anno'])) {
$this->copyTextFile($work['anno'], "$this->contentDir/text-anno/$path");
}
if (isset($work['info'])) {
$this->copyTextFile($work['info'], "$this->contentDir/text-info/$path");
}
if (isset($work['img'])) {
$dir = "$this->contentDir/img/$path";
if ( ! file_exists($dir)) {
mkdir($dir, 0755, true);
}
`touch $work[img]/*`;
`cp $work[img]/* $dir`;
// TODO check if images are referenced from the text file
}
}
return $qs;
}
/**
 * Generate the SQL queries (and optionally write the content files) for one
 * book descriptor produced by processBookFiles().
 *
 * Covers the main book row, its revision, author and book-text relations,
 * and — when --save is active — the template, annotation, info, cover,
 * djvu/pdf and image files under the content directory.
 *
 * @param array $book Normalized book descriptor
 * @return array SQL queries for this book
 */
private function insertBook(array $book)
{
$qs = array();
$set = array(
'id' => $book['id'],
);
if (isset($book['title'])) {
$set += array(
'slug' => (isset($book['slug']) ? String::slugify($book['slug']) : String::slugify($book['title'])),
'title' => String::my_replace($book['title']),
);
}
if ( ! empty($book['title_extra'])) $set['title_extra'] = String::my_replace($book['title_extra']);
if ( ! empty($book['lang'])) $set['lang'] = $book['lang'];
if ( ! empty($book['orig_lang'])) $set['orig_lang'] = $book['orig_lang'];
if ($book['is_new']) {
// Defaults for a brand new book row
$set += array(
'created_at' => $this->entrydate,
'has_anno' => 0,
'has_cover' => 0,
);
}
if (isset($book['type'])) $set['type'] = $book['type'];
if (isset($book['orig_title'])) $set['orig_title'] = self::fixOrigTitle($book['orig_title']);
if (isset($book['seq_nr'])) $set['seqnr'] = $book['seq_nr'];
if (isset($book['anno'])) $set['has_anno'] = filesize($book['anno']) ? 1 : 0;
if (isset($book['cover'])) $set['has_cover'] = filesize($book['cover']) ? 1 : 0;
if (isset($book['subtitle'])) $set['subtitle'] = String::my_replace($book['subtitle']);
if (isset($book['year'])) $set['year'] = $book['year'];
if (isset($book['trans_year'])) $set['trans_year'] = $book['trans_year'];
if (isset($book['formats'])) $set['formats'] = serialize($book['formats']);
if (isset($book['sequence'])) $set['sequence_id'] = $this->getObjectId('sequence', $book['sequence']);
if (isset($book['category'])) $set['category_id'] = $this->getObjectId('category', $book['category']);
if ($book['is_new']) {
$qs[] = $this->olddb()->replaceQ(DBT_BOOK, $set);
} else if (count($set) > 1) {
$qs[] = $this->olddb()->updateQ(DBT_BOOK, $set, array('id' => $book['id']));
}
if (isset($book['revision'])) {
$set = array(
'id' => $book['revision_id'],
'book_id' => $book['id'],
'comment' => $book['revision'],
'date' => $this->modifDate,
'first' => ($book['is_new'] ? 1 : 0),
);
$qs[] = $this->olddb()->replaceQ('book_revision', $set);
} else {
$qs[] = "/* no revision for book $book[id] */";
}
if ( ! empty($book['authors'])) {
// Replace the author relations wholesale, then refresh the cached title_author
$qs[] = $this->olddb()->deleteQ('book_author', array('book_id' => $book['id']));
foreach ($book['authors'] as $pos => $author) {
$set = array(
'id' => $this->getNextId('book_author'),
'person_id' => $author,
'book_id' => $book['id'],
);
$qs[] = $this->olddb()->insertQ('book_author', $set, false, false);
}
$qs[] = $this->buildBookTitleAuthorQuery($book['id']);
}
if ( ! empty($book['works'])) {
$bookTextRepo = $this->em->getRepository('LibBundle:BookText');
foreach ($book['works'] as $work) {
$key = 'book_text'.$book['id'].'_'.$work['id'];
if ($book['is_new'] || $work['is_new']) {
$set = array(
'id' => $this->getNextId('book_text'),
'book_id' => $book['id'],
'text_id' => $work['id'],
'share_info' => (int) $work['is_new'],
);
$qs[$key] = $this->olddb()->insertQ('book_text', $set, false, false);
} else {
// Existing book + existing work: only link them if not already linked
$relationExists = $bookTextRepo->findOneBy(array(
'book' => $book['id'],
'text' => $work['id'],
));
if (!$relationExists) {
$set = array(
'id' => $this->getNextId('book_text'),
'book_id' => $book['id'],
'text_id' => $work['id'],
'share_info' => 0,
);
$qs[$key] = $this->olddb()->insertQ('book_text', $set, false, false);
}
}
}
}
if ($this->saveFiles) {
$path = Legacy::makeContentFilePath($book['id']);
if (isset($book['tmpl'])) {
File::myfile_put_contents("$this->contentDir/book/$path", String::my_replace($book['tmpl']));
}
if (isset($book['anno'])) {
$this->copyTextFile($book['anno'], "$this->contentDir/book-anno/$path");
}
if (isset($book['info'])) {
$this->copyTextFile($book['info'], "$this->contentDir/book-info/$path");
}
if (isset($book['cover'])) {
self::copyFile($book['cover'], "$this->contentDir/book-cover/$path.jpg");
}
if (isset($book['djvu'])) {
self::copyFile($book['djvu'], "$this->contentDir/book-djvu/$path");
}
if (isset($book['pdf'])) {
self::copyFile($book['pdf'], "$this->contentDir/book-pdf/$path");
}
if (isset($book['img'])) {
self::copyDir($book['img'], "$this->contentDir/book-img/$path");
}
}
return $qs;
}
/**
 * Build the query that refreshes a book's cached title_author column from
 * its book_author relations (comma-separated author names).
 *
 * @param int $bookId
 * @return string Single-line SQL UPDATE statement
 */
private function buildBookTitleAuthorQuery($bookId)
{
return str_replace("\n", ' ', <<<QUERY
UPDATE book b
SET title_author = (
	SELECT GROUP_CONCAT(p.name SEPARATOR ', ')
	FROM book_author ba
	LEFT JOIN person p ON p.id = ba.person_id
	WHERE b.id = $bookId AND b.id = ba.book_id
	GROUP BY b.id
)
WHERE id = $bookId
QUERY
);
}
/**
 * Copy a text file to $dest, optionally normalizing its characters.
 * An empty source file means "remove the destination" (wipes stale content).
 *
 * @param string $source Path of the file to copy
 * @param string $dest Destination path
 * @param bool $replaceChars Whether to run the contents through String::my_replace()
 */
private function copyTextFile($source, $dest, $replaceChars = true)
{
    if (filesize($source) == 0) {
        // Guard against a warning when the destination does not exist yet
        if (file_exists($dest)) {
            unlink($dest);
        }
        return;
    }
    $contents = file_get_contents($source);
    if ($replaceChars) {
        $enhancedContents = String::my_replace($contents);
        if (empty($enhancedContents)) {
            // Keep the raw contents instead of writing an empty file
            $this->output->writeln(sprintf('<error>CharReplace failed by %s</error>', $source));
        } else {
            $contents = $enhancedContents;
        }
    }
    File::myfile_put_contents($dest, $contents);
}
/**
 * Copy a binary file to $dest (or into $dest when it is a directory).
 * An empty source file means "remove the destination".
 *
 * @param string $source
 * @param string $dest File or directory path
 */
static private function copyFile($source, $dest)
{
    if (is_dir($dest)) {
        $dest .= '/'.basename($source);
    }
    if (filesize($source) == 0) {
        // Guard against a warning when the destination does not exist yet
        if (file_exists($dest)) {
            unlink($dest);
        }
        return;
    }
    File::make_parent($dest);
    copy($source, $dest);
    touch($dest); // refresh the mtime so consumers notice the update
}
/**
 * Copy every file from $sourceDir into $destDir, creating it when missing.
 *
 * @param string $sourceDir
 * @param string $destDir
 */
static private function copyDir($sourceDir, $destDir)
{
    if (!file_exists($destDir)) {
        mkdir($destDir, 0755, true);
    }
    $sources = glob("$sourceDir/*");
    foreach ($sources as $sourceFile) {
        self::copyFile($sourceFile, $destDir);
    }
}
/**
 * Guess the table-of-contents depth from the heading markers in the text.
 *
 * @param string $text Source text with ">"/">>" heading lines
 * @return int 2 when ">>" headings exist, 1 when only ">" exist, else 0
 */
static public function guessTocLevel($text)
{
    // Check the deeper marker first; its presence implies level 2
    foreach (array(2 => "\n>>", 1 => "\n>") as $level => $marker) {
        if (strpos($text, $marker) !== false) {
            return $level;
        }
    }
    return 0;
}
/**
 * Total size in bytes of one file or several files.
 *
 * @param string|array $files A single path or an array of paths
 * @return int
 */
static private function getFileSize($files)
{
    $total = 0;
    // filesize() avoids loading whole files into memory just to measure them
    foreach ((array) $files as $file) {
        $total += filesize($file);
    }
    return $total;
}
/** Replace straight apostrophes with typographic ones in original titles. */
static private function fixOrigTitle($title)
{
    return str_replace("'", '’', $title);
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Admin;
use Sonata\AdminBundle\Form\FormMapper;
use Sonata\AdminBundle\Datagrid\DatagridMapper;
use Sonata\AdminBundle\Datagrid\ListMapper;
use Sonata\AdminBundle\Show\ShowMapper;
/**
 * Sonata admin configuration for the Sequence entity:
 * show, list, edit form and filter definitions.
 */
class SequenceAdmin extends Admin
{
protected $baseRoutePattern = 'sequence';
protected $baseRouteName = 'admin_sequence';
protected $translationDomain = 'admin';
// Extra action buttons rendered in the admin list view
public $extraActions = 'LibBundle:SequenceAdmin:extra_actions.html.twig';
/** Fields shown on the detail (show) page */
protected function configureShowField(ShowMapper $showMapper)
{
$showMapper
->add('name')
->add('slug')
->add('publisher')
->add('is_seqnr_visible')
->add('books')
;
}
/** Columns of the list view, with per-row view/edit/delete actions */
protected function configureListFields(ListMapper $listMapper)
{
$listMapper
->addIdentifier('name')
->add('slug')
->add('_action', 'actions', array(
'actions' => array(
'view' => array(),
'edit' => array(),
'delete' => array(),
)
))
;
}
/** Fields of the create/edit form */
protected function configureFormFields(FormMapper $formMapper)
{
$formMapper
->with('General attributes')
->add('name')
->add('slug')
->add('publisher', null, array('required' => false))
->add('is_seqnr_visible', null, array('required' => false))
->end()
;
}
/** Filters available in the list view sidebar */
protected function configureDatagridFilters(DatagridMapper $datagrid)
{
$datagrid
->add('slug')
->add('name')
->add('publisher')
->add('is_seqnr_visible')
;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/** Admin CRUD controller for site notices; inherits all behavior from CRUDController. */
class SiteNoticeController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Twig;
use Chitanka\LibBundle\Util\Number;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Entity\Text;
class Extension extends \Twig_Extension
{
/** @return string The unique name of this Twig extension */
public function getName()
{
return 'chitanka';
}
/** Twig functions exposed by this extension. */
public function getFunctions()
{
return array(
'anchor_name' => new \Twig_Function_Method($this, 'getAnchorName'),
'cover' => new \Twig_Function_Method($this, 'getCover'),
'progressbar' => new \Twig_Function_Method($this, 'getProgressbar'),
);
}
/** Twig filters exposed by this extension, mapped to their methods below. */
public function getFilters()
{
return array(
'rating_class' => new \Twig_Filter_Method($this, 'getRatingClass'),
'rating_format' => new \Twig_Filter_Method($this, 'formatRating'),
'name_format' => new \Twig_Filter_Method($this, 'formatPersonName'),
'acronym' => new \Twig_Filter_Method($this, 'getAcronym'),
'first_char' => new \Twig_Filter_Method($this, 'getFirstChar'),
'email' => new \Twig_Filter_Method($this, 'obfuscateEmail'),
'doctitle' => new \Twig_Filter_Method($this, 'getDocTitle'),
'lower' => new \Twig_Filter_Method($this, 'strtolower'),
'json' => new \Twig_Filter_Method($this, 'getJson'),
'repeat' => new \Twig_Filter_Method($this, 'repeatString'),
'join_lists' => new \Twig_Filter_Method($this, 'joinLists'),
'humandate' => new \Twig_Filter_Method($this, 'getHumanDate'),
'nl2br' => new \Twig_Filter_Method($this, 'nl2br', array('pre_escape' => 'html', 'is_safe' => array('html'))),
'dot2br' => new \Twig_Filter_Method($this, 'dot2br'),
'user_markup' => new \Twig_Filter_Method($this, 'formatUserMarkup'),
'striptags' => new \Twig_Filter_Method($this, 'stripTags'),
'domain' => new \Twig_Filter_Method($this, 'getDomain'),
'link' => new \Twig_Filter_Method($this, 'formatLinks'),
'encoding' => new \Twig_Filter_Method($this, 'changeEncoding'),
'urlencode' => new \Twig_Filter_Method($this, 'getUrlEncode'),
'qrcode' => new \Twig_Filter_Method($this, 'getQrCode'),
'put_text_in_template' => new \Twig_Filter_Method($this, 'putTextInBookTemplate'),
);
}
/** Twig tests exposed by this extension. */
public function getTests()
{
return array(
'url' => new \Twig_Test_Method($this, 'isUrl'),
);
}
/**
 * Map a rating value to the CSS class describing the filled arc of the
 * rating circle ("degree-30" … "degree-360", with "gt-half" above 180°).
 *
 * @param float $rating
 * @return string|int CSS class string, or 0 when the rating is below 1.0
 */
public function getRatingClass($rating)
{
    // Thresholds are checked from highest to lowest; the first match wins
    $classByThreshold = array(
        '5.6' => 'degree-360 gt-half',
        '5.2' => 'degree-330 gt-half',
        '4.8' => 'degree-300 gt-half',
        '4.4' => 'degree-270 gt-half',
        '4.0' => 'degree-240 gt-half',
        '3.6' => 'degree-210 gt-half',
        '3.2' => 'degree-180',
        '2.8' => 'degree-150',
        '2.4' => 'degree-120',
        '2.0' => 'degree-90',
        '1.5' => 'degree-60',
        '1.0' => 'degree-30',
    );
    foreach ($classByThreshold as $threshold => $class) {
        if ($rating >= (float) $threshold) {
            return $class;
        }
    }
    return 0;
}
/**
 * Format a rating with one decimal place, dropping a trailing ".0".
 *
 * @param float $rating
 * @return string
 */
public function formatRating($rating)
{
return Legacy::rmTrailingZeros( Number::formatNumber($rating, 1) );
}
/**
 * Wrap the last name of "First Last[, extra]" in a styled span.
 *
 * @param string $name
 * @param string $sortby 'last-name' yields "Last, First"; anything else "First Last"
 * @return string HTML-formatted name, or the input unchanged when it does not match
 */
public function formatPersonName($name, $sortby = 'first-name')
{
    if (empty($name)) {
        return $name;
    }
    if (!preg_match('/([^,]+) ([^,]+)(, .+)?/', $name, $match)) {
        return $name;
    }
    $first = $match[1];
    $last = "<span class=\"lastname\">$match[2]</span>";
    $tail = isset($match[3]) ? $match[3] : '';
    if ($sortby == 'last-name') {
        return $last . ', ' . $first . $tail;
    }
    return $first . ' ' . $last . $tail;
}
/**
 * Build an uppercase acronym from the first letter/digit of every word.
 *
 * @param string $title
 * @return string
 */
public function getAcronym($title)
{
    // Prepending a space lets the first word match the " X" pattern too.
    // (The original assigned preg_match_all's return to an unused variable.)
    preg_match_all('/ ([a-zA-Zа-яА-Я\d])/u', ' '.$title, $matches);
    $acronym = implode('', $matches[1]);
    return Char::mystrtoupper($acronym);
}
/** @return string The first UTF-8 character of $string ('' for an empty input) */
public function getFirstChar($string)
{
return mb_substr($string, 0, 1, 'UTF-8');
}
/**
 * UTF-8-aware lowercase, used as the Twig "lower" filter.
 * (Intentionally shadows PHP's built-in strtolower within this class.)
 */
public function strtolower($string)
{
return mb_strtolower($string, 'UTF-8');
}
/** JSON-encode arbitrary content for embedding in templates. */
public function getJson($content)
{
return json_encode($content);
}
/** Replace the @ sign so harvesters do not see a raw e-mail address. */
public function obfuscateEmail($email)
{
    $replacement = ' <span title="при сървъра">(при)</span> ';
    return str_replace('@', $replacement, $email);
}
/**
 * Turn an HTML title into plain text suitable for a document <title>:
 * collapses whitespace, converts <br> to an em-dash separator, strips tags.
 *
 * @param string $title
 * @return string
 */
public function getDocTitle($title)
{
    $plain = preg_replace('/\s\s+/', ' ', $title);
    $plain = strtr($plain, array(
        '<br>' => ' — ',
        '&' => '&', // will be escaped afterwards by Twig
    ));
    return trim(strip_tags($plain));
}
/**
 * Twig filter wrapper around str_repeat().
 */
public function repeatString($string, $count)
{
	return str_repeat($string, $count);
}
/**
 * Replaces {text:ID[-part]|Custom Title} and {text:ID[-part]} (or
 * {file:...}) placeholders in a book template with the rendered text view.
 * TEXT_TITLE inside $htmlTextView is substituted with either the custom
 * title from the placeholder or the text's own title.
 *
 * @param string $template     Book template markup with placeholders
 * @param Text   $text         The text entity being inserted
 * @param string $htmlTextView Rendered HTML containing a TEXT_TITLE marker
 */
public function putTextInBookTemplate($template, Text $text, $htmlTextView)
{
	$textId = $text->getId();
	// First handle placeholders that carry an explicit title after "|".
	$regexp = "/\{(text|file):$textId(-[^|}]+)?\|(.+)\}/";
	if (preg_match($regexp, $template, $matches)) {
		$template = preg_replace($regexp, str_replace('TEXT_TITLE', $matches[3], $htmlTextView), $template);
	}
	// Then the plain form, falling back to the text's own title.
	$template = preg_replace("/\{(text|file):$textId(-.+)?\}/", str_replace('TEXT_TITLE', $text->getTitle(), $htmlTextView), $template);
	return $template;
}
/**
 * Merges adjacent <ul> lists by removing the closing/opening tag pair
 * between them.
 */
public function joinLists($string)
{
	return preg_replace('|</ul>\n<ul[^>]*>|', "\n", $string);
}
/**
 * Formats a date for human consumption via the legacy helper.
 */
public function getHumanDate($date)
{
	return Legacy::humanDate($date);
}
// Lazily-created helper used by getAnchorName(); see below.
private $_xmlElementCreator = null;
/**
 * Generate an anchor name for a given string.
 *
 * @param string $text A string
 * @param boolean $unique Always generate a unique name
 * (consider all previously generated names)
 * @return string The generated anchor name
 */
public function getAnchorName($text, $unique = true)
{
	if (is_null($this->_xmlElementCreator)) {
		$this->_xmlElementCreator = new \Sfblib_XmlElement;
	}
	return $this->_xmlElementCreator->getAnchorName($text, $unique);
}
/**
 * Returns the path to a book cover image, e.g. ".../ID.200.jpg".
 *
 * @param int    $id     Book identifier
 * @param int    $width  Requested cover width in pixels
 * @param string $format Image extension (default "jpg")
 */
public function getCover($id, $width = 200, $format = 'jpg')
{
	return Legacy::getContentFilePath('book-cover', $id) . ".$width.$format";
}
/**
 * Renders a 20-character text progress bar with the percentage label in
 * the middle; the completed portion is wrapped in span.done.
 *
 * @param int|float $progressInPerc Progress in percent (0–100)
 */
public function getProgressbar($progressInPerc)
{
	$perc = $progressInPerc .'%';
	$progressBarWidth = '20';
	// Start with a blank bar and overlay the label in the middle.
	$bar = str_repeat(' ', $progressBarWidth);
	$bar = substr_replace($bar, $perc, $progressBarWidth/2-1, strlen($perc));
	$curProgressWidth = ceil($progressBarWidth * $progressInPerc / 100);
	// done bar end
	$bar = substr_replace($bar, '</span>', $curProgressWidth, 0);
	// NOTE(review): this strtr maps ' ' to itself (a no-op); the original
	// presumably replaced spaces with '&nbsp;' and the entity was lost in
	// this copy — verify against upstream before relying on the output.
	$bar = strtr($bar, array(' ' => ' '));
	return "<pre style=\"display:inline\"><span class=\"progressbar\"><span class=\"done\">$bar</span></pre>";
}
/**
 * Inserts $sep before every newline, keeping the newline character
 * itself. Unlike PHP's built-in nl2br() the separator is configurable.
 */
public function nl2br($value, $sep = '<br>')
{
	$replacement = $sep . "\n";
	return str_replace("\n", $replacement, $value);
}
/**
 * Inserts a line break after every sentence, i.e. after ". " followed by
 * an upper-case Latin or Cyrillic letter.
 */
public function dot2br($value)
{
	return preg_replace('/\. (?=[A-ZА-Я])/u', "<br>\n", $value);
}
/**
 * Escapes raw user input and converts it to simple display HTML
 * (links, paragraphs) via the String utility helpers.
 */
public function formatUserMarkup($content)
{
	return String::pretifyInput(String::escapeInput($content));
}
/**
 * Twig filter wrapper around strip_tags().
 */
public function stripTags($content)
{
	return strip_tags($content);
}
/**
 * Converts a UTF-8 string to the given target encoding using iconv().
 */
public function changeEncoding($string, $encoding)
{
	return iconv('UTF-8', $encoding, $string);
}
/**
 * Twig filter wrapper around urlencode().
 */
public function getUrlEncode($string)
{
	return urlencode($string);
}
/**
 * Builds a Google Chart API URL rendering a QR code for the given URL.
 * NOTE(review): the Google Image Charts endpoint has since been shut
 * down — consider replacing with a self-hosted QR generator.
 *
 * @param string $url   Target the QR code should encode
 * @param int    $width QR image width/height in pixels
 */
public function getQrCode($url, $width = 100)
{
	return "http://chart.apis.google.com/chart?cht=qr&chs={$width}x{$width}&chld=H|0&chl=". urlencode($url);
}
/**
 * Extracts the host part of a URL (e.g. "example.com").
 */
public function getDomain($url)
{
	return parse_url($url, PHP_URL_HOST);
}
// TODO unit test
/**
 * Converts wiki-style links ([[url|label]]) and bare http(s) URLs into
 * HTML anchors; bare URLs are labeled with their host name.
 *
 * The bare-URL pattern previously relied on the /e (eval) preg modifier,
 * which was deprecated in PHP 5.5 and removed in PHP 7; it now uses
 * preg_replace_callback() with the same pattern. (The old eval'd code
 * also passed a stray '$2' argument to getDomain(), which PHP silently
 * ignored — it is dropped here.)
 */
public function formatLinks($text)
{
	// Wiki-style links first, as in the original ordered pattern array.
	$text = preg_replace('/\[\[(.+)\|(.+)\]\]/Us', '<a href="$1">$2</a>', $text);
	// Bare URLs; (?<!") avoids re-linking href="..." attributes.
	$self = $this; // PHP 5.3-compatible closure binding
	$text = preg_replace_callback('|(?<!")https?://\S+[^,.\s]|', function ($m) use ($self) {
		return '<a href="'.$m[0].'">'.$self->getDomain($m[0]).'</a>';
	}, $text);
	return $text;
}
/**
 * Twig test: tells whether a string looks like a URL, i.e. starts with
 * "http" (covers both http:// and https://).
 */
public function isUrl($string)
{
	return strncmp($string, 'http', 4) === 0;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Util;
use Chitanka\LibBundle\Legacy\Legacy;
/**
 * Assorted string helpers used across the bundle.
 *
 * NOTE(review): "String" became a reserved word in PHP 7.0; renaming the
 * class would break every caller, so the name is kept for compatibility.
 */
class String
{
	/**
	 * Truncates $str to at most $len characters, appending an ellipsis.
	 * NOTE(review): byte-based strlen/substr may split a multibyte
	 * character — confirm whether UTF-8 input reaches this method.
	 */
	static public function limitLength($str, $len = 80) {
		if ( strlen($str) > $len ) {
			return substr($str, 0, $len - 1) . '…';
		}
		return $str;
	}

	/**
	 * Escape meta-characters used in regular expressions
	 * (similar to preg_quote(), with custom backslash treatment).
	 */
	static public function prepareStringForPreg($string)
	{
		return strtr($string, array(
			// in a regexp a backslash can be escaped with four backslashes - \\\\
			'\\' => '\\\\\\\\',
			'^' => '\^',
			'$' => '\$',
			'.' => '\.',
			'[' => '\[',
			']' => '\]',
			'|' => '\|',
			'(' => '\(',
			')' => '\)',
			'?' => '\?',
			'*' => '\*',
			'+' => '\+',
			'{' => '\{',
			'}' => '\}',
			'-' => '\-',
		));
	}

	// Tags allowed to survive escapeInput()
	static private $allowableTags = array('em', 'strong');

	/**
	 * HTML-escapes user input, then re-allows a small whitelist of tags.
	 * NOTE(review): in this copy the replacement maps "<em>" to itself;
	 * the original presumably mapped the escaped form (&lt;em&gt;) back
	 * to the real tag and the entities were lost — verify upstream.
	 */
	static public function escapeInput($text) {
		$text = self::myhtmlentities($text);
		$repl = array();
		foreach (self::$allowableTags as $allowable) {
			$repl["<$allowable>"] = "<$allowable>";
			$repl["</$allowable>"] = "</$allowable>";
		}
		$text = strtr($text, $repl);
		return $text;
	}

	/** htmlentities() preset for UTF-8 with quote escaping. */
	static public function myhtmlentities( $text ) {
		return htmlentities( $text, ENT_QUOTES, 'UTF-8');
	}

	/** htmlspecialchars() preset for UTF-8 (double quotes only). */
	static public function myhtmlspecialchars( $text ) {
		return htmlspecialchars($text, ENT_COMPAT, 'UTF-8');
	}

	/** Converts Windows-1251 input to UTF-8 when necessary. */
	static public function fixEncoding($string)
	{
		if ('UTF-8' != ($enc = mb_detect_encoding($string, 'UTF-8, Windows-1251'))) {
			$string = iconv($enc, 'UTF-8', $string);
		}
		return $string;
	}

	/**
	 * Converts plain user input into simple HTML: linkifies http:// URLs
	 * (in and out of brackets), wraps lines in <p> tags and supports
	 * forum-style [url=...]...[/url] links.
	 *
	 * The URL patterns previously relied on the /e (eval) preg modifier,
	 * deprecated in PHP 5.5 and removed in PHP 7; they now use
	 * preg_replace_callback() with the same patterns, applied in the
	 * original order.
	 */
	static public function pretifyInput($text) {
		$text = "\n$text";
		// link in brackets
		$text = preg_replace_callback('!(?<=[\s>])\((http://[^]\s,<]+)\)!', function ($m) {
			$decoded = urldecode($m[1]);
			return '(<a href="'.$m[1].'" title="'.$decoded.'">'.$decoded.'</a>)';
		}, $text);
		// bare link
		$text = preg_replace_callback('!(?<=[\s>])(http://[^]\s,<]+)!', function ($m) {
			$decoded = urldecode($m[1]);
			return '<a href="'.$m[1].'" title="'.$decoded.'">'.$decoded.'</a>';
		}, $text);
		// paragraphs
		$text = preg_replace('/\n([^\n]*)/', "<p>$1</p>\n", $text);
		// forum-style links
		$text = preg_replace('!\[url=([^]]+)\]([^]]+)\[/url\]!', '<a href="$1">$2</a>', $text);
		return $text;
	}

	/**
	 * Splits "First Last[, suffix]" into first/last name parts; a name
	 * that does not match returns only 'firstname'.
	 */
	static public function splitPersonName($name)
	{
		preg_match('/([^,]+) ([^,]+)(, .+)?/', $name, $m);
		if ( ! isset($m[2]) ) {
			return array('firstname' => $name);
		}
		return array(
			'firstname' => $m[1] . (@$m[3]),
			'lastname' => $m[2]
		);
	}

	/** Returns the sortable "Last, First" form of a person name. */
	static public function getMachinePersonName($name)
	{
		$parts = self::splitPersonName($name);
		$machineName = isset($parts['lastname'])
			? $parts['lastname'] . ', ' . $parts['firstname']
			: $parts['firstname'];
		return $machineName;
	}

	/**
	 * Builds a URL slug: transliterates Cyrillic, removes diacritics,
	 * lower-cases and collapses non-alphanumerics to single dashes.
	 */
	static public function slugify($name, $maxlength = 60)
	{
		$name = strtr($name, array(
			'²' => '2', '°' => 'deg',
		));
		$name = Char::cyr2lat($name);
		$name = Legacy::removeDiacritics($name);
		$name = iconv('UTF-8', 'ISO-8859-1//TRANSLIT', $name);
		$name = strtolower($name);
		$name = preg_replace('/[^a-z\d]/', '-', $name);
		$name = preg_replace('/--+/', '-', $name);
		$name = rtrim(substr($name, 0, $maxlength), '-');
		return $name;
	}

	/**
	 * preg_replace_callback() helper for my_replace(): wraps a quoted
	 * span in Bulgarian quotes, swapping any inner quote levels.
	 */
	static public function cb_quotes($matches) {
		return '„'. strtr($matches[1], array('„'=>'«', '“'=>'»', '«'=>'„', '»'=>'“')) .'“';
	}

	/**
	 * Typographic clean-up of text content: normalizes quotes, dashes,
	 * ellipses, multiplication signs and paragraph markup.
	 * NOTE(review): several identity mappings below (e.g. '"' => '"')
	 * look like HTML entities lost in this copy — verify upstream.
	 */
	static public function my_replace($cont) {
		$chars = array("\r" => '',
			'„' => '"', '“' => '"', '”' => '"', '«' => '"', '»' => '"', '"' => '"',
			'„' => '"', '“' => '"', '”' => '"', '«' => '"',
			'»' => '"', '„' => '"', '“' => '"', '”' => '"',
			'<' => '&lt;', '>' => '&gt;', ' ' => '&nbsp;',
			"'" => '’', '...' => '…',
			'</p>' => '', '</P>' => '',
			#"\n " => "<p>", "\n" => ' ',
			'<p>' => "\n\t", '<P>' => "\n\t",
		);
		$reg_chars = array(
			'/(\s| )(-|–|){1,2}(\s)/' => '$1—$3', # mdash
			'/([\s(][\d,.]*)-([\d,.]+[\s)])/' => '$1–$2', # ndash between digits
			'/(\d)x(\d)/' => '$1×$2', # multiplication sign
			'/\n +/' => "\n\t", # paragraphs
			'/(?<!\n)\n\t\* \* \*\n(?!\n)/' => "\n\n\t* * *\n\n",
		);
		$cont = preg_replace('/([\s(]\d+ *)-( *\d+[\s),.])/', '$1–$2', "\n".$cont);
		$cont = str_replace(array_keys($chars), array_values($chars), $cont);
		#$cont = html_entity_decode($cont, ENT_NOQUOTES, 'UTF-8');
		$cont = preg_replace(array_keys($reg_chars), array_values($reg_chars), $cont);
		# quotation marks — replaced level by level, inner-most last
		$qreg = '/(?<=[([\s|\'"_\->\/])"(\S?|\S[^"]*[^\s"([])"/m';
		#$cont = preg_replace($qreg, '„$1“', $cont);
		$i = 0;
		$maxIters = 6;
		while ( strpos($cont, '"') !== false ) {
			if ( ++$i > $maxIters ) {
				self::log_error("ВЕРОЯТНА ГРЕШКА: Повече от $maxIters итерации при вътрешните кавички.");
				break;
			}
			$cont = preg_replace_callback($qreg, 'Chitanka\LibBundle\Util\String::cb_quotes', $cont);
		}
		return ltrim($cont, "\n");
	}

	/** Internal error logger (file logging currently disabled). */
	static private function log_error($s, $loud = false) {
		#file_put_contents('./log/error', date('d-m-Y H:i:s'). " $s\n", FILE_APPEND);
		if ($loud) { echo $s."\n"; }
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Doctrine repository for book categories (a parent/child tree).
 */
class CategoryRepository extends EntityRepository
{
	/** @return Category */
	public function findBySlug($slug)
	{
		return $this->findOneBy(array('slug' => $slug));
	}
	/**
	 * Returns all categories with children nested under their parents.
	 * RAW_SQL
	 */
	public function getAllAsTree()
	{
		$categories = $this->convertArrayToTree($this->getAll());
		return $categories;
	}
	/**
	 * Returns all categories as flat rows, ordered by name.
	 * RAW_SQL
	 */
	public function getAll()
	{
		$categories = $this->_em->getConnection()->fetchAll('SELECT * FROM category ORDER BY name');
		return $categories;
	}
	/**
	 * Returns only top-level categories (no parent), ordered by name.
	 * RAW_SQL
	 */
	public function getRoots()
	{
		$categories = $this->_em->getConnection()->fetchAll('SELECT * FROM category WHERE parent_id IS NULL ORDER BY name');
		return $categories;
	}
	/**
	 * Nests child rows under their parents via a 'children' key.
	 * Works by reference, so every row still appears at the top level too.
	 * TODO move to some utility class
	 */
	protected function convertArrayToTree($labels)
	{
		$labelsById = array();
		foreach ($labels as $i => $label) {
			$labelsById[ $label['id'] ] =& $labels[$i];
		}
		foreach ($labels as $i => $label) {
			if ($label['parent_id']) {
				$labelsById[$label['parent_id']]['children'][] =& $labels[$i];
			}
		}
		return $labels;
	}
	/**
	 * Finds categories whose name matches the given string (LIKE search).
	 */
	public function getByNames($name)
	{
		return $this->getQueryBuilder()
			->where('e.name LIKE ?1')
			->setParameter(1, $this->stringForLikeClause($name))
			->getQuery()
			->getArrayResult();
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * A user's bookmark: links a text to a bookmark folder for a given user.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\BookmarkRepository")
 * @ORM\HasLifecycleCallbacks
 * @ORM\Table(name="bookmark",
 *	uniqueConstraints={@ORM\UniqueConstraint(name="uniq_key", columns={"folder_id", "text_id", "user_id"})}
 * )
 */
class Bookmark extends Entity
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;
	/**
	 * @var BookmarkFolder
	 * @ORM\ManyToOne(targetEntity="BookmarkFolder")
	 */
	private $folder;
	/**
	 * @var Text
	 * @ORM\ManyToOne(targetEntity="Text")
	 */
	private $text;
	/**
	 * @var User
	 * @ORM\ManyToOne(targetEntity="User", inversedBy="bookmarks")
	 */
	private $user;
	/**
	 * @var \DateTime
	 * @ORM\Column(type="datetime")
	 */
	private $created_at;
	/**
	 * @param array $fields Any of 'folder', 'text', 'user'; other keys are ignored.
	 */
	public function __construct($fields)
	{
		foreach (array('folder', 'text', 'user') as $field) {
			if (isset($fields[$field])) {
				$setter = 'set' . ucfirst($field);
				$this->$setter($fields[$field]);
			}
		}
	}
	public function getId() { return $this->id; }
	public function setFolder($folder) { $this->folder = $folder; }
	public function getFolder() { return $this->folder; }
	public function setText($text) { $this->text = $text; }
	public function getText() { return $this->text; }
	public function setUser($user) { $this->user = $user; }
	public function getUser() { return $this->user; }
	public function setCreatedAt($created_at) { $this->created_at = $created_at; }
	public function getCreatedAt() { return $this->created_at; }
	/**
	 * Stamps the creation time just before the first INSERT.
	 * @ORM\PrePersist
	 */
	public function preInsert()
	{
		$this->setCreatedAt(new \DateTime);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Util;
/**
 * Small numeric formatting helpers.
 */
class Number
{
	/**
	 * Clamps $val into [$min, $max] and casts the result to int.
	 * The upper bound is checked first, matching the original behavior
	 * when the bounds are inverted.
	 */
	static public function normInt($val, $max, $min = 1)
	{
		if ($val > $max) {
			return (int) $max;
		}
		if ($val < $min) {
			return (int) $min;
		}
		return (int) $val;
	}

	/**
	 * Formats a number for display. With the default decimal point (",")
	 * a Bulgarian typographic rule applies: the thousands delimiter is
	 * only used for numbers of 10000 and above.
	 */
	static public function formatNumber($num, $decPl = 2, $decPoint = ',', $tousandDelim = ' ')
	{
		$formatted = number_format($num, $decPl, $decPoint, $tousandDelim);
		if ($decPoint == ',' && $num < 10000) {
			// bulgarian extra rule: put a $tousandDelim only after 9999
			$formatted = preg_replace('/^(\d) (\d\d\d)/', '$1$2', $formatted);
		}
		return $formatted;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Entity\Person;
/**
 * Controller for author pages: country listings and per-author
 * book/text listings. Shares most logic with PersonController.
 */
class AuthorController extends PersonController
{
	/** Index of countries with authors, for the by-country listing. */
	public function listByCountryIndexAction($by, $_format)
	{
		$this->view = array(
			'by' => $by,
			'countries' => $this->getPersonRepository()->getCountryList()
		);
		return $this->display("list_by_country_index.$_format");
	}
	/** Paginated list of authors from a given country. */
	public function listByCountryAction($country, $by, $page, $_format)
	{
		$limit = 100;
		$repo = $this->getPersonRepository();
		$filters = array(
			'by' => $by,
			'country' => $country,
		);
		$this->view = array(
			'by' => $by,
			'country' => $country,
			'persons' => $repo->getBy($filters, $page, $limit),
			'pager' => new Pager(array(
				'page' => $page,
				'limit' => $limit,
				'total' => $repo->countBy($filters)
			)),
			'route' => $this->getCurrentRoute(),
			'route_params' => array('country' => $country, 'by' => $by, '_format' => $_format),
		);
		return $this->display("list_by_country.$_format");
	}
	/** Books by a given author; a non-Person result is a redirect/404 response. */
	public function showBooksAction($slug, $_format)
	{
		$person = $this->tryToFindPerson($slug);
		if ( ! $person instanceof Person) {
			return $person;
		}
		$this->view = array(
			'person' => $person,
			'books' => $this->getBookRepository()->getByAuthor($person),
		);
		return $this->display("show_books.$_format");
	}
	/** Texts by a given author, grouped by series for the HTML view. */
	public function showTextsAction($slug, $_format)
	{
		$person = $this->tryToFindPerson($slug);
		if ( ! $person instanceof Person) {
			return $person;
		}
		$groupBySeries = $_format == 'html';
		$this->view = array(
			'person' => $person,
			'texts' => $this->getTextRepository()->findByAuthor($person, $groupBySeries),
		);
		return $this->display("show_texts.$_format");
	}
	/** Hooks the shared person page into the author-specific view setup. */
	protected function prepareViewForShow(Person $person, $format)
	{
		$this->prepareViewForShowAuthor($person, $format);
	}
	/** Restricts the person repository to authors only. */
	protected function getPersonRepository()
	{
		return parent::getPersonRepository()->asAuthor();
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Tests;
/**
 * Common base class for the bundle's PHPUnit test cases.
 */
abstract class TestCase extends \PHPUnit_Framework_TestCase
{
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Doctrine\ORM\NoResultException;
use Chitanka\LibBundle\Entity\Book;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Legacy\DownloadFile;
use Chitanka\LibBundle\Util\String;
/**
 * Controller for book pages: category/alphabetical listings, the book
 * page itself in many formats (html, opds, sfb, fb2, epub, …) and
 * download handling with optional mirror redirection.
 */
class BookController extends Controller
{
	// Repository name used by the base Controller
	protected $repository = 'Book';
	// Client-side cache lifetime for these pages
	protected $responseAge = 86400; // 24 hours
	/** Landing page; the HTML variant shows the category tree. */
	public function indexAction($_format)
	{
		if ($_format == 'html') {
			$this->view = array(
				'categories' => $this->getCategoryRepository()->getAllAsTree(),
			);
		}
		return $this->display("index.$_format");
	}
	/** Category index; tree for HTML, flat list for OPDS. */
	public function listByCategoryIndexAction($_format)
	{
		switch ($_format) {
			case 'html':
				$categories = $this->getCategoryRepository()->getAllAsTree();
				break;
			case 'opds':
				$categories = $this->getCategoryRepository()->getAll();
				break;
		}
		$this->view['categories'] = $categories;
		return $this->display("list_by_category_index.$_format");
	}
	/** Index page for the alphabetical listing. */
	public function listByAlphaIndexAction($_format)
	{
		return $this->display("list_by_alpha_index.$_format");
	}
	/** Paginated book list for one category (looked up by slug). */
	public function listByCategoryAction($slug, $page, $_format)
	{
		$slug = String::slugify($slug);
		$bookRepo = $this->getBookRepository();
		$category = $this->getCategoryRepository()->findBySlug($slug);
		if ($category === null) {
			throw new NotFoundHttpException("Няма категория с код $slug.");
		}
		$limit = 30;
		$this->view = array(
			'category' => $category,
			'parents' => array_reverse($category->getAncestors()),
			'books' => $bookRepo->getByCategory($category, $page, $limit),
			'pager' => new Pager(array(
				'page' => $page,
				'limit' => $limit,
				'total' => $category->getNrOfBooks()
			)),
			'route_params' => array('slug' => $slug),
		);
		return $this->display("list_by_category.$_format");
	}
	/** Paginated book list by title prefix; "-" means "no letter". */
	public function listByAlphaAction($letter, $page, $_format)
	{
		$bookRepo = $this->getBookRepository();
		$limit = 30;
		$prefix = $letter == '-' ? null : $letter;
		$this->view = array(
			'letter' => $letter,
			'books' => $bookRepo->getByPrefix($prefix, $page, $limit),
			'pager' => new Pager(array(
				'page' => $page,
				'limit' => $limit,
				'total' => $bookRepo->countByPrefix($prefix)
			)),
			'route_params' => array('letter' => $letter),
		);
		return $this->display("list_by_alpha.$_format");
	}
	/** Maintenance listing: books that have no cover image yet (HTML only). */
	public function listWoCoverAction($page)
	{
		$limit = 30;
		$bookRepo = $this->getBookRepository();
		$_format = 'html';
		$this->view = array(
			'books' => $bookRepo->getWithMissingCover($page, $limit),
			'pager' => new Pager(array(
				'page' => $page,
				'limit' => $limit,
				'total' => $this->getBookRepository()->getCountWithMissingCover()
			)),
		);
		return $this->display("list_wo_cover.$_format");
	}
	/**
	 * Shows a book or serves it in a download format. Archive/binary
	 * formats redirect to a generated (or mirrored) file; text formats
	 * are streamed inline; everything else renders a template.
	 */
	public function showAction($id, $_format)
	{
		// FIXME
		// very big books need too much memory, so give it to them
		ini_set('memory_limit', '128M');
		list($id) = explode('-', $id); // remove optional slug
		try {
			$book = $this->getBookRepository()->get($id);
		} catch (NoResultException $e) {
			throw new NotFoundHttpException("Няма книга с номер $id.");
		}
		switch ($_format) {
			case 'sfb.zip':
			case 'txt.zip':
			case 'fb2.zip':
			case 'epub':
				Setup::doSetup($this->container);
				return $this->urlRedirect($this->processDownload($book, $_format));
			case 'djvu':
			case 'pdf':
				// Static files — no legacy setup needed before redirecting
				return $this->urlRedirect($this->processDownload($book, $_format));
			case 'txt':
				Setup::doSetup($this->container);
				return $this->displayText($book->getContentAsTxt(), array('Content-Type' => 'text/plain'));
			case 'fb2':
				Setup::doSetup($this->container);
				return $this->displayText($book->getContentAsFb2(), array('Content-Type' => 'application/xml'));
			case 'sfb':
				Setup::doSetup($this->container);
				return $this->displayText($book->getContentAsSfb(), array('Content-Type' => 'text/plain'));
			case 'data':
				return $this->displayText($book->getDataAsPlain(), array('Content-Type' => 'text/plain'));
			case 'opds':
				break;
			case 'pic':
				// Intentional fall-through: 'pic' needs the legacy setup,
				// then renders like the default HTML view below.
				Setup::doSetup($this->container);
			case 'html':
			default:
		}
		$this->view = array(
			'book' => $book,
			'authors' => $book->getAuthors(),
			'template' => $book->getTemplateAsXhtml(),
			'info' => $book->getExtraInfoAsXhtml(),
		);
		return $this->display("show.$_format");
	}
	/** Redirects to a randomly chosen book page. */
	public function randomAction()
	{
		$id = $this->getBookRepository()->getRandomId();
		return $this->urlRedirect($this->generateUrl('book_show', array('id' => $id)));
	}
	/**
	 * Resolves the download URL for a book in the given format: either a
	 * mirror server URL or a locally generated/static file path.
	 */
	protected function processDownload(Book $book, $format) {
		$dlSite = $this->getMirrorServer();
		if ( $dlSite !== false ) {
			return "$dlSite/book/{$book->getId()}.$format";
		}
		$dlFile = new DownloadFile;
		switch ($format) {
			case 'sfb.zip':
			default:
				return $this->getWebRoot() . $dlFile->getSfbForBook($book);
			case 'txt.zip':
				return $this->getWebRoot() . $dlFile->getTxtForBook($book);
			case 'fb2.zip':
				return $this->getWebRoot() . $dlFile->getFb2ForBook($book);
			case 'epub':
				return $this->getWebRoot() . $dlFile->getEpubForBook($book);
			case 'djvu':
			case 'pdf':
				return $this->getWebRoot() . $dlFile->getStaticFileForBook($book, $format);
		}
	}
}
<file_sep><?php
use Symfony\Component\HttpKernel\Kernel;
use Symfony\Component\Config\Loader\LoaderInterface;
/**
 * Symfony application kernel: registers the bundles and points the
 * container at the per-environment configuration file.
 */
class AppKernel extends Kernel
{
	/** Registers all bundles; profiler/debug bundles only outside prod. */
	public function registerBundles()
	{
		$bundles = array(
			new Symfony\Bundle\FrameworkBundle\FrameworkBundle(),
			new Symfony\Bundle\SecurityBundle\SecurityBundle(),
			new Symfony\Bundle\TwigBundle\TwigBundle(),
			new Symfony\Bundle\MonologBundle\MonologBundle(),
			new Symfony\Bundle\SwiftmailerBundle\SwiftmailerBundle(),
			new Symfony\Bundle\AsseticBundle\AsseticBundle(),
			new Doctrine\Bundle\DoctrineBundle\DoctrineBundle(),
			new Sensio\Bundle\FrameworkExtraBundle\SensioFrameworkExtraBundle(),
			// new JMS\AopBundle\JMSAopBundle(),
			// new JMS\DiExtraBundle\JMSDiExtraBundle($this),
			// new JMS\SecurityExtraBundle\JMSSecurityExtraBundle(),
			//new FOS\UserBundle\FOSUserBundle(),
			new Sonata\CoreBundle\SonataCoreBundle(),
			new Sonata\AdminBundle\SonataAdminBundle(),
			new Sonata\BlockBundle\SonataBlockBundle(),
			//new Sonata\CacheBundle\SonataCacheBundle(),
			new Sonata\jQueryBundle\SonatajQueryBundle(),
			new Sonata\DoctrineORMAdminBundle\SonataDoctrineORMAdminBundle(),
			new SimpleThings\EntityAudit\SimpleThingsEntityAuditBundle(),
			new Knp\Bundle\MenuBundle\KnpMenuBundle(),
			new JMS\SerializerBundle\JMSSerializerBundle($this),
			new FOS\RestBundle\FOSRestBundle(),
			new FOS\CommentBundle\FOSCommentBundle(),
			new Sensio\Bundle\BuzzBundle\SensioBuzzBundle(),
			new Chitanka\LibBundle\LibBundle(),
		);
		if ($this->getEnvironment() != 'prod') {
			$bundles[] = new Symfony\Bundle\WebProfilerBundle\WebProfilerBundle();
			// $bundles[] = new Sensio\Bundle\GeneratorBundle\SensioGeneratorBundle();
		}
		return $bundles;
	}
	/** Loads the environment-specific configuration into the container. */
	public function registerContainerConfiguration(LoaderInterface $loader)
	{
		$loader->load($this->getConfigurationFile($this->getEnvironment()));
	}
	/**
	 * Returns the configuration file for the given environment and format: config_{environment}.{format}.
	 *
	 * @param string $environment Application environment
	 * @param string $format File format (default: yml)
	 * @return string The configuration file path
	 */
	protected function getConfigurationFile($environment, $format = 'yml')
	{
		return __DIR__."/config/config_$environment.$format";
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\Common\Collections\Collection;
/**
 * An abstract class for all entities in this bundle.
 * Provides collection clearing and magic snake_case getter access.
 */
abstract class Entity
{
	/** Empties a Doctrine collection in place by removing every element. */
	public function clearCollection(Collection $collection)
	{
		$collection->forAll(function($key) use ($collection) {
			$collection->remove($key);
			return true;
		});
	}
	/**
	 * Magic fallback: maps snake_case calls (e.g. $e->created_at()) to the
	 * corresponding camelCase getter (getCreatedAt()). Any call arguments
	 * are ignored.
	 *
	 * @throws \Exception When no matching getter exists
	 */
	public function __call($method, $arguments)
	{
		$getter = 'get' . str_replace(' ', '', ucwords(str_replace('_', ' ', $method)));
		if (method_exists($this, $getter)) {
			return $this->$getter();
		}
		throw new \Exception(sprintf('Method "%s" for entity "%s" does not exist', $method, get_class($this)));
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Chitanka\LibBundle\Util\String;
/**
 * A named, per-user folder grouping bookmarks; identified by a slug
 * unique within each user.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\BookmarkFolderRepository")
 * @ORM\HasLifecycleCallbacks
 * @ORM\Table(name="bookmark_folder",
 *	uniqueConstraints={@ORM\UniqueConstraint(name="uniq_key", columns={"slug", "user_id"})},
 *	indexes={
 *		@ORM\Index(name="slug_idx", columns={"slug"})}
 * )
 */
class BookmarkFolder extends Entity
{
	/**
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;
	/**
	 * @var string
	 * @ORM\Column(type="string", length=40)
	 */
	private $slug = '';
	/**
	 * @var string
	 * @ORM\Column(type="string", length=80)
	 */
	private $name = '';
	/**
	 * @var User
	 * @ORM\ManyToOne(targetEntity="User")
	 */
	private $user;
	/**
	 * @var \DateTime
	 * @ORM\Column(type="datetime")
	 */
	private $created_at;
	public function getId() { return $this->id; }
	// The slug is always normalized through String::slugify()
	public function setSlug($slug) { $this->slug = String::slugify($slug); }
	public function getSlug() { return $this->slug; }
	public function setName($name) { $this->name = $name; }
	public function getName() { return $this->name; }
	public function setUser($user) { $this->user = $user; }
	public function getUser() { return $this->user; }
	public function setCreatedAt($created_at) { $this->created_at = $created_at; }
	public function getCreatedAt() { return $this->created_at; }
	/**
	 * Stamps the creation time just before the first INSERT.
	 * @ORM\PrePersist
	 */
	public function preInsert()
	{
		$this->setCreatedAt(new \DateTime);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
/**
 * Placeholder service for database update logic; no behavior yet.
 */
class DbUpdater
{
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Symfony\Component\Security\Core\User\UserProviderInterface;
use Symfony\Component\Security\Core\User\UserInterface;
use Symfony\Component\Security\Core\Exception\UsernameNotFoundException;
/**
 * Doctrine repository for users; also acts as the Symfony security
 * user provider.
 */
class UserRepository extends EntityRepository implements UserProviderInterface
{
	/**
	 * Security-provider lookup by username.
	 * @throws UsernameNotFoundException When no such user exists
	 */
	public function loadUserByUsername($username)
	{
		$user = $this->findOneBy(array('username' => $username));
		if ( ! $user) {
			throw new UsernameNotFoundException;
		}
		return $user;
	}
	/** Plain lookup by username; returns null when not found. */
	public function findByUsername($username)
	{
		return $this->findOneBy(array('username' => $username));
	}
	/** Returns all users matching any of the given usernames. */
	public function findByUsernames(array $usernames)
	{
		return $this->findBy(array('username' => $usernames));
	}
	// Returns the user unchanged instead of re-fetching it from storage.
	public function refreshUser(UserInterface $user)
	{
		return $user;
	}
	// NOTE(review): always returning false claims no class is supported by
	// this provider — confirm this is intentional for the security setup.
	public function supportsClass($class)
	{
		return false;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
/**
 * Controller for foreign (external) books: latest list and a random pick.
 */
class ForeignBookController extends Controller
{
	/** Shows the 100 most recent foreign books. */
	public function indexAction($_format)
	{
		$this->view = array(
			'books' => $this->getForeignBookRepository()->getLatest(100)
		);
		return $this->display("index.$_format");
	}
	/** Shows a single random foreign book. */
	public function bookAction()
	{
		$this->view = array(
			'book' => $this->getForeignBookRepository()->getRandom()
		);
		return $this->display('book');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Tests\Entity\Content;
use Chitanka\LibBundle\Tests\TestCase;
use Chitanka\LibBundle\Entity\Content\BookTemplate;
/**
 * Tests BookTemplate::extractTextIds(): it should collect the numeric
 * IDs from {text:ID}, {text:ID-part} and {file:ID} placeholders.
 */
class BookTemplateTest extends TestCase {
	public function testExtractTextIds()
	{
		$template = <<<TPL
> {text:123}
> {text:456-part1}
{file:789}
TPL;
		$ids = BookTemplate::extractTextIds($template);
		$expectedIds = array(
			'123',
			'456',
			'789',
		);
		$this->assertEquals($expectedIds, $ids);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
/**
 * Bridges the "send e-mail to user" form to the legacy EmailUser page.
 */
class EmailController extends Controller {
	public function newAction($username) {
		// Never cache this form response
		$this->responseAge = 0;
		// The legacy page reads its input from the $_REQUEST superglobal.
		$_REQUEST['username'] = $username;
		return $this->legacyPage('EmailUser');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
/**
 * Bootstrapper bridging the legacy code base with the Symfony container.
 * Holds lazily-created legacy singletons (request, db, output maker) and
 * defines the global constants the legacy pages expect.
 */
class Setup {
	static private
		$setupDone = false,
		$config = null; // holds the Symfony DI container after doSetup()

	static private
		/** @var Request */ $request,
		/** @var mlDatabase */ $db,
		/** @var OutputMaker */ $outputMaker;

	/**
	 * Instantiates a legacy page class by its short name, wiring in the
	 * legacy services and the current user.
	 *
	 * @param string $name       Short page name, e.g. "ClearCache"
	 * @param object $controller Symfony controller (provides getUser())
	 * @param object $container  Symfony DI container
	 * @param bool   $execute    Whether to run the page immediately
	 * @return object The created page
	 */
	static public function getPage($name, $controller, $container, $execute = true)
	{
		self::doSetup($container);
		$class = 'Chitanka\LibBundle\Legacy\\'.$name.'Page';
		$page = new $class(array(
			'request' => self::request(),
			'db' => self::db(),
			'out' => self::outputMaker(),
			'controller' => $controller,
			'container' => $container,
			//'newRequest' => $controller->get('request'),
			'user' => $controller->getUser(),
			'logDir' => __DIR__ . '/../../../../app/logs',
		));
		if ($execute) {
			$page->execute();
		}
		return $page;
	}

	/** One-time global setup: stores the container, defines constants. */
	static public function doSetup($container)
	{
		if ( self::$setupDone ) {
			return;
		}
		self::$config = $container;
		self::defineConstants();
		ini_set('date.timezone', self::setting('default_timezone'));
		self::$setupDone = true;
	}

	/** Defines the global constants expected by the legacy code. */
	static public function defineConstants()
	{
		define('BASEDIR', __DIR__ . '/../../../../web'); // TODO remove
		define('SESSION_NAME', 'mls');
		self::defineDbTableConsts();
		$admin_email = self::setting('admin_email');
		// each() was deprecated in PHP 7.2 and removed in PHP 8.0;
		// read the first key (address) / value (name) pair directly.
		reset($admin_email);
		$email = key($admin_email);
		$admin = current($admin_email);
		define('ADMIN', $admin);
		define('ADMIN_EMAIL', $email);
		define('SITENAME', self::setting('sitename'));
		define('SITE_EMAIL', self::setting('site_email'));
	}

	/** Reads a parameter from the Symfony container. */
	static public function setting($settingName)
	{
		return self::$config->getParameter($settingName);
	}

	/** @return Request The shared legacy request object */
	static public function request()
	{
		return self::setupRequest();
	}

	/** @return mlDatabase The shared legacy database wrapper */
	static public function db()
	{
		return self::setupDb();
	}

	/** @return OutputMaker The shared (or a fresh) legacy output maker */
	static public function outputMaker($forceNew = false)
	{
		return self::setupOutputMaker($forceNew);
	}

	// Lazily builds the legacy DB wrapper from the Doctrine DBAL connection.
	static private function setupDb()
	{
		if ( ! isset(self::$db) ) {
			$conn = self::$config->get('doctrine.dbal.default_connection');
			self::$db = new mlDatabase($conn->getHost(), $conn->getUsername(), $conn->getPassword(), $conn->getDatabase());
		}
		return self::$db;
	}

	// Lazily builds the legacy request singleton.
	static private function setupRequest()
	{
		if ( ! isset(self::$request) ) {
			self::$request = new Request();
		}
		return self::$request;
	}

	// Lazily builds (or forcibly replaces) the output maker singleton.
	static private function setupOutputMaker($forceNew)
	{
		if ( $forceNew || ! isset(self::$outputMaker) ) {
			self::$outputMaker = new OutputMaker();
		}
		return self::$outputMaker;
	}

	/**
	 * Defines one DBT_* constant per legacy table name, with an optional
	 * table-name prefix.
	 */
	static private function defineDbTableConsts($prefix = '')
	{
		$tables = array(
			'AUTHOR_OF' => 'text_author',
			'BOOK' => 'book',
			'BOOK_AUTHOR' => 'book_author',
			'BOOK_TEXT' => 'book_text',
			'COMMENT' => 'text_comment',
			'DL_CACHE' => 'download_cache',
			'DL_CACHE_TEXT' => 'download_cache_text',
			'EDIT_HISTORY' => 'text_revision',
			'HEADER' => 'header',
			'LABEL' => 'label',
			'LABEL_LOG' => 'label_log',
			'LICENSE' => 'license',
			'PERSON' => 'person',
			'QUESTION' => 'question',
			'READER_OF' => 'user_text_read',
			'SER_AUTHOR_OF' => 'series_author',
			'SERIES' => 'series',
			'TEXT' => 'text',
			'TEXT_LABEL' => 'text_label',
			'TEXT_RATING' => 'text_rating',
			'TRANSLATOR_OF' => 'text_translator',
			'USER' => 'user',
			'USER_TEXT' => 'user_text_contrib',
			'WORK' => 'work_entry',
			'WORK_MULTI' => 'work_contrib',
		);
		foreach ($tables as $constant => $table) {
			define('DBT_' . $constant, $prefix . $table);
		}
	}
}
<file_sep>chitanka.info core
==================
Това е уеб софтуер, който задвижва [„Моята библиотека“](http://chitanka.info).
Изграден е с помощта на Symfony2, Doctrine 2 и други прекрасни свободни проекти.
За инструкции по инсталацията вижте [INSTALL.md](INSTALL.md).
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
/**
 * Admin page for purging cached download copies of texts, either by an
 * explicit list of text IDs or by an inclusive ID range.
 */
class ClearCachePage extends Page {
	protected
		$action = 'clearCache';
	public function __construct() {
		parent::__construct();
		$this->title = 'Прочистване на склада';
		// One text ID per line in the "texts" field
		$this->texts = str_replace("\r", '', $this->request->value('texts'));
		$this->texts = explode("\n", $this->texts);
		$this->start = (int) $this->request->value('start');
		$this->end = (int) $this->request->value('end');
	}
	/** Performs the purge (list and/or range) and re-renders the form. */
	protected function buildContent() {
		if ( !empty($this->texts) ) {
			$this->texts = array_unique($this->texts);
			foreach ($this->texts as $key => $textId) {
				$textId = trim($textId);
				if ( empty($textId) ) {
					unset($this->texts[$key]);
					continue;
				}
				CacheManager::clearDlCache($textId);
			}
			$this->addMessage('Копията на следните текстове бяха изтрити: '.
				implode(', ', $this->texts));
		}
		if ( !empty($this->start) && !empty($this->end) ) {
			// Swap inverted bounds so the loop below always runs
			if ( $this->start > $this->end ) {
				$t = $this->start;
				$this->start = $this->end;
				$this->end = $t;
			}
			for ($i = $this->start; $i <= $this->end; $i++) {
				CacheManager::clearDlCache($i);
			}
			$this->addMessage("Копията на текстовете с номера от
				$this->start до $this->end бяха изтрити.");
		}
		return $this->makeForm();
	}
	/** Renders the ID-list form. */
	protected function makeForm() {
		$texts = $this->out->textarea('texts', '', '', 25, 8);
		$submit = $this->out->submitButton('Прочистване');
		return <<<EOS
<form action="" method="post">
<div>
<label for="texts">Номера на текстове (по един на ред):</label><br />
$texts<br />
$submit
</div>
</form>
EOS;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Legacy\Setup;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\File;
class LegacyUpdateLibCommand extends CommonDbCommand
{
/**
 * Declares the command name, arguments and options:
 * input (required text file), globals (optional variables file),
 * --save (write generated files), --dump-sql (print instead of execute).
 */
protected function configure()
{
	parent::configure();
	$this
		->setName('lib:legacy-update')
		->setDescription('Add or update new texts and books (legacy variant)')
		->addArgument('input', InputArgument::REQUIRED, 'Input text file')
		->addArgument('globals', InputArgument::OPTIONAL, 'File with global variables')
		->addOption('save', null, InputOption::VALUE_NONE, 'Save generated text files in corresponding directories')
		->addOption('dump-sql', null, InputOption::VALUE_NONE, 'Output SQL queries instead of executing them')
		->setHelp(<<<EOT
The <info>lib:legacy-update</info> command adds or updates texts and books.
EOT
);
}
/**
 * Executes the current command: processes the input file and prints the
 * resulting SQL queries (and any collected errors) to standard output.
 *
 * @param InputInterface  $input  An InputInterface instance
 * @param OutputInterface $output An OutputInterface instance
 *
 * @return integer 0 if everything went fine, or an error code
 */
protected function execute(InputInterface $input, OutputInterface $output)
{
	$this->saveFiles = $input->getOption('save') === true;
	$this->dumpSql = $input->getOption('dump-sql') === true;
	$this->defineVars();
	list($queries, $errors) = $this->conquerTheWorld($input, $output, $this->getContainer()->get('doctrine.orm.default_entity_manager'));
	if ( ! empty($errors) ) {
		// Errors are emitted as an SQL comment so the dump stays runnable
		$output->writeln("/* ###########!!! ГРЕШКИ:\n\n"
			. implode("\n", $errors) . "*/\n\n");
	}
	echo implode(";\n", $queries), ";\n";
	$output->writeln('/*Done.*/');
}
/**
 * Initializes the per-run state: legacy setup, auto-increment counters,
 * and the (hard-coded) editorial metadata for the batch being imported —
 * author, license, language, year, contributors, etc.
 * NOTE(review): these defaults are edited by hand per import run.
 */
private function defineVars()
{
	Setup::doSetup($this->getContainer());
	$this->db = Setup::db();
	$this->overwrite = true; // overwrite existing files?
	//$this->curTextId = $this->getNextId('text');
	$this->curEditId = $this->db->autoIncrementId(DBT_EDIT_HISTORY);
	$this->curBookRev = $this->db->autoIncrementId('book_revision');
	$this->entrydate = date('Y-m-d');
	$this->modifDate = $this->entrydate . date(' H:i:s');
	$this->orig_lang = 'bg';
	$this->series = 0;
	$this->sernr = null;
	$this->book = 0;
	$this->book_author = false; // link the book with the author
	$this->author = 'hristo-smirnenski';
	$this->license_orig = 1; // 1: PD, 2: FC
	$this->license_trans = null;
	$this->translator = 0;
	$this->lang = 'bg';
	$this->year = 1920;
	$this->trans_year = null;
	$this->type = 'poetry';
	$this->comment = 'Добавяне (от Събрани съчинения, том 4. Български писател, 1979. Съставителство, редакция и бележки: Веска Иванова.)';
	// 'USERNAME' => array(PERCENT, 'Сканиране, разпознаване и корекция', date('Y'))
	$this->users = array('zelenkroki' => array(30, 'Форматиране и последна редакция', date('Y')));
	$this->year2 = null;
	$this->trans_year2 = null;
	$this->subtitle = $this->orig_title = $this->orig_subtitle = null;
	$this->labels = array();
	$this->errors = array();
	$this->contentDir = $this->getContainer()->getParameter('kernel.root_dir').'/../web/content';
	$this->books = array();
}
/**
 * Dispatches the import depending on the input format and collects the
 * generated SQL statements together with any accumulated errors.
 *
 * A tab character anywhere in the input marks the "many texts in one file"
 * format; otherwise the input is treated as a list of file names.
 *
 * @return array array(SQL query strings, error messages)
 */
function conquerTheWorld(InputInterface $input, OutputInterface $output, $em)
{
$inputFile = $input->getArgument('input');
$raw = file_get_contents($inputFile);
$queries = strpos($raw, "\t") === false
? $this->insertManyFromManyFiles(explode("\n", $raw))
: $this->insertManyFromOneFile($inputFile);
// Make sure the dump is executed with the right connection charset.
array_unshift($queries, 'SET NAMES utf8');
return array($queries, $this->errors);
}
/**
 * Imports every listed file as a separate text.
 * Expected file layout: an author header line, a title header line, then
 * the text content. A second-level header (">>") in the content bumps the
 * stored header level to 2.
 *
 * @param array $files File names, one text per file; empty entries are skipped
 * @return array SQL query strings
 */
function insertManyFromManyFiles(array $files)
{
$queries = array();
foreach ($files as $file) {
if (empty($file)) continue;
// Hard stop on a missing file — the whole dump would be inconsistent otherwise.
$fp = fopen($file, 'r') or die("$file не съществува.\n");
$authorLine = $this->clearHeadline( $this->getFirstNonEmptyFileLine($fp) );
// Trailing asterisks are presentation markup, not part of the title.
$titleLine = rtrim($this->clearHeadline( fgets($fp) ), '*');
$textContent = ltrim($this->getFileContentTillEnd($fp), "\n");
$headlevel = strpos($textContent, '>>') === false ? 1 : 2;
fclose($fp);
$queries = array_merge($queries, $this->insertCurrentText(array(
'author' => $authorLine,
'title' => $titleLine,
'textContent' => $textContent,
'headlevel' => $headlevel,
)));
}
return $queries;
}
/**
 * Imports several texts from a single file. Texts are separated by header
 * lines (">\t…"); a ">\t$id=N" header links an existing text into the
 * current book instead of creating a new one. When a book is configured,
 * a companion book file ("{text:ID}" references) is accumulated and
 * appended to the output as an SQL comment.
 *
 * @param string $file Input file name
 * @return array SQL query strings
 */
function insertManyFromOneFile($file)
{
$fp = fopen($file, 'r') or die("$file не съществува.\n");
$queries = array();
$vars = array('textContent' => ''); // for current text
$bookFile = '';
while ( !feof($fp) ) {
$line = fgets($fp);
if ( $this->book && $this->isBookHeaderLine($line) ) {
// "+" book-structure lines become ">" lines in the book file
$bookFile .= str_replace('+', '>', $line);
} else if ( ! $this->isHeaderLine($line) ) {
// add this line to current text
$vars['textContent'] .= $this->moveHeadersUp($line);
} else {
if ( count($vars) > 1 ) { // we have read a text, save it
$queries = array_merge($queries, $this->insertCurrentText($vars));
}
$vars = array('textContent' => ''); // starting next text
if (strpos($line, '> $id=') === 0) {
// reference to an already existing text — only link it to the book
if ($this->book) {
$textId = str_replace('> $id=', '', rtrim($line));
$set = array('book_id' => $this->book, 'text_id' => $textId, 'share_info' => 0);
$queries[] = $this->db->insertQ(DBT_BOOK_TEXT, $set, true, false);
$bookFile .= ">\t{text:". $textId ."}\n\n";
}
continue;
}
if ($this->book) {
// FIXME this->curTextId is not initialized used anymore
$bookFile .= ">\t{text:{$this->curTextId}}\n\n";
}
$line = rtrim($this->clearHeadline($line), '*');
// "TITLE|SUBTITLE" headers carry an optional subtitle after the pipe
if ( strpos($line, '|') === false ) {
$vars['title'] = $line;
} else {
list($vars['title'], $vars['subtitle']) = explode('|', $line);
}
// check for an author line
$line2 = fgets($fp);
if ( $this->isHeaderLine($line2) ) {
$vars['author'] = $this->clearHeadline($line2);
} else {
$vars['textContent'] .= $this->moveHeadersUp($line2);
}
}
}
fclose($fp);
// last text
$queries = array_merge($queries, $this->insertCurrentText($vars));
if ($bookFile) {
$queries[] = "/*\n$bookFile\n*/";
}
return $queries;
}
/**
 * The big, messy processing unit: builds all SQL queries and content files
 * for one text. Has side effects — advances the edit/book-revision
 * counters, appends to $this->errors and may write files to disk.
 *
 * @param array $avars Variables for the current text.
 *        Must contain at least the following keys:
 *        - textContent
 *        - title
 * @return array SQL query strings
 */
private function insertCurrentText(array $avars)
{
extract($avars);
// In-text "$key=value" directives override the passed-in variables.
list($newCont, $vars) = $this->popvars($textContent);
$textContent = $newCont;
extract($vars);
// import non-existing stuff from the outer world
$fields = array('subtitle', 'lang', 'orig_lang', 'orig_title', 'orig_subtitle', 'trans_year', 'year', 'trans_year2', 'year2', 'type', 'series', 'sernr', 'license_orig', 'license_trans', 'author', 'translator', 'book', 'labels', 'users');
foreach ($fields as $field) {
if ( !isset($$field) ) {
$$field = $this->$field;
}
}
// An explicit $id directive means we update an existing text.
$isNew = ! isset($id);
if ( $isNew ) {
$textId = $this->getNextId('text');
$comment = $this->comment;
} else {
$textId = $id;
$comment = $this->comment_edit;
}
// Comma-separated directive values become arrays.
foreach ( array('author', 'translator', 'labels') as $var ) {
if ( is_string($$var) && strpos($$var, ',') !== false ) {
$$var = explode(',', $$var);
}
}
// Users given as "name1,percent1;name2,percent2"; percent defaults to 100.
if ( is_string($users) ) {
$uarr = explode(';', $users);
$users = array();
foreach ( $uarr as $user_perc ) {
$up = explode(',', $user_perc);
$users[ $up[0] ] = isset($up[1]) ? $up[1] : 100;
}
}
$qs = array();
// Text size in kilo-characters; zsize approximates the compressed size.
$l = strlen($textContent) / 1000;
$zl = $l / 3.5;
$textQuery = '';
if ($isNew) {
$set = array(
'slug' => String::slugify($title),
'title' => $title,
'subtitle' => $subtitle,
'lang' => $lang,
'orig_lang' => $orig_lang,
'orig_title' => $orig_title,
'orig_subtitle' => $orig_subtitle,
'trans_year' => $trans_year,
'year' => $year,
'type' => $type,
'sernr' => $sernr,
'orig_license_id' => $license_orig,
'trans_license_id' => $license_trans,
'created_at' => $this->entrydate,
'size' => $l,
'zsize' => $zl,
'id' => $textId,
'headlevel' => (isset($headlevel) ? $headlevel : 0),
);
if ($series) {
$set['series_id'] = is_numeric($series) ? $series : $this->getSeriesId($series);
}
$textQuery = $this->db->replaceQ(DBT_TEXT, $set);
} else {
// Existing text: only refresh title/subtitle/slug/sizes.
$set = array(
'slug' => String::slugify($title),
'title' => $title,
'subtitle' => $subtitle,
'size' => $l, 'zsize' => $zl,
'headlevel' => isset($headlevel) ? $headlevel : 0,
);
$textQuery = $this->db->updateQ(DBT_TEXT, $set, array('id' => $textId));
}
$qs[] = "\n\n\n/* Текст $textId */\n\n$textQuery";
// Record an edit-history revision and point the text at it.
$set = array(
'id' => $this->curEditId,
'text_id' => $textId,
'user_id' => 1,
'comment' => $comment,
'date' => $this->modifDate,
'first' => (int) $isNew,
);
$qs[] = $this->db->replaceQ(DBT_EDIT_HISTORY, $set);
$qs[] = $this->db->updateQ(DBT_TEXT, array('cur_rev_id' => $this->curEditId), array('id' => $textId));
$this->curEditId++;
if ( !empty($book) ) {
// Create the book (and its first revision) only once per run.
if ( ! in_array($book, $this->books)) {
$this->books[] = $book;
$book_title = isset($book_title) ? $book_title : $title;
$set = array(
'id' => $book,
'category_id' => 1,
'title' => $book_title,
'subtitle' => (empty($subtitle) ? '' : $subtitle),
'title_author' => (isset($book_author) ? $book_author : $author),
'slug' => String::slugify($book_title),
'lang' => $lang,
'orig_lang' => $orig_lang,
'year' => $year,
'type' => 'single',
'has_anno' => strpos($textContent, 'A>') !== false ? 1 : 0,
'has_cover' => 1,
'created_at' => $this->entrydate,
);
$qs[] = $this->db->insertQ(DBT_BOOK, $set, true);
$set = array(
'id' => $this->curBookRev++,
'book_id' => $book,
'comment' => 'Добавяне',
'date' => $this->modifDate,
);
$qs[] = $this->db->insertQ('book_revision', $set, true);
}
$set = array('book_id' => $book, 'text_id' => $textId, 'share_info' => 1);
$qs[] = $this->db->insertQ(DBT_BOOK_TEXT, $set, true);
}
// Author links (and optionally book-author links) for new texts only.
if ( $isNew ) {
$qs[] = $this->db->deleteQ(DBT_AUTHOR_OF, array('text_id' => $textId));
foreach ( (array) $author as $pos => $author1 ) {
$authorId = is_numeric($author1) ? $author1 : $this->getPersonId($author1);
if ( empty($authorId) ) { continue; }
$set = array('person_id' => $authorId, 'text_id' => $textId, 'pos' => $pos);
$qs[] = $this->db->insertQ(DBT_AUTHOR_OF, $set, false, false);
if ( $this->book_author ) {
$set = array('book_id' => $book, 'person_id' => $authorId);
$qs[] = $this->db->insertQ(DBT_BOOK_AUTHOR, $set, true, false);
}
}
}
if ( $isNew ) {
$qs[] = $this->db->deleteQ(DBT_TRANSLATOR_OF, array('text_id' => $textId));
foreach ( (array) $translator as $pos => $translator1 ) {
$translatorId = is_numeric($translator1) ? $translator1 : $this->getPersonId($translator1);
if ( empty($translatorId) ) { continue; }
$set = array('person_id' => $translatorId, 'text_id' => $textId, 'pos' => $pos);
$qs[] = $this->db->insertQ(DBT_TRANSLATOR_OF, $set, false, false);
}
}
foreach ( (array) $labels as $label ) {
if ( empty($label) ) continue;
$set = array('text_id' => $textId, 'label_id' => $label);
$qs[] = $this->db->insertQ(DBT_TEXT_LABEL, $set, true, false);
}
// Per-user contribution records; size is proportional to the percentage.
foreach ($users as $user => $userData) {
list($percent, $userComment, $humanDate) = $userData;
$userId = $this->getUserId($user);
if ( empty($userId) ) { continue; }
$size = $percent/100 * $l;
$set = array(
'user_id' => $userId,
'username' => $user,
'text_id' => $textId,
'size' => $size,
'percent' => $percent,
'date' => $this->modifDate,
'humandate' => $humanDate,
'comment' => $userComment,
);
$qs[] = $this->db->insertQ(DBT_USER_TEXT, $set, true, false);
}
$textContent = trim($textContent, "\n") . "\n";
$file = $this->contentDir . '/text/' . Legacy::makeContentFilePath($textId);
if ( !$this->overwrite && file_exists($file) ) {
$qs[] = "/* $textId СЪЩЕСТВУВА! */\n";
} else {
// Annotation/info sections are split out into their own files; they are
// attached to the book (book-anno/book-info) unless $anno/$info forces
// them onto the text itself.
$id = empty($book) || isset($anno) ? $textId : $book;
$dir = empty($book) || isset($anno) ? 'text-anno' : 'book-anno';
$textContent = $this->createAnnoFile($id, $textContent, $this->saveFiles, $dir);
$id = empty($book) || isset($info) ? $textId : $book;
$dir = empty($book) || isset($info) ? 'text-info' : 'book-info';
$textContent = $this->createInfoFile($id, $textContent, $this->saveFiles, $dir);
if ($this->saveFiles) {
// Poetry without an explicit P> block gets wrapped in one.
if ( ($type == 'poetry' || $type == 'poem' || $type == 'prosepoetry')
&& strpos($textContent, 'P>') === false ) {
if ( substr($textContent, 0, 3) == "\t[*" ) {
$textContent = preg_replace('/^\t\[([^]]+)\]\n/ms', "$0P>\n", $textContent, 1) . "P$\n";
} else {
$textContent = "P>\n{$textContent}P$\n";
}
}
File::myfile_put_contents($file, $textContent);
if ( isset($headlevel) ) {
$qs = array_merge($qs, $this->makeUpdateChunkQuery($file, $textId, $headlevel));
}
}
}
return $qs;
}
/**
 * Builds the queries that refresh the text_header chunk table for a text:
 * the file is parsed for headers up to $headlevel, the old rows are
 * deleted and the fresh ones inserted in a single multi-insert.
 *
 * @param string $file Path to the SFB content file
 * @param int $textId Id of the text the headers belong to
 * @param int $headlevel Maximal header level to index
 * @return array SQL query strings (delete + optional multi-insert)
 */
public function makeUpdateChunkQuery($file, $textId, $headlevel)
{
require_once __DIR__ . '/../Legacy/SfbParserSimple.php';
$data = array();
foreach (\Chitanka\LibBundle\Legacy\makeDbRows($file, $headlevel) as $row) {
// Row layout from the parser: nr, level, name, fpos, linecnt
$name = $row[2];
$name = strtr($name, array('_'=>''));
$name = $this->db->escape(String::my_replace($name));
$data[] = array($textId, $row[0], $row[1], $name, $row[3], $row[4]);
}
$qs = array();
$qs[] = $this->db->deleteQ('text_header', array('text_id' => $textId));
if ( !empty($data) ) {
$fields = array('text_id', 'nr', 'level', 'name', 'fpos', 'linecnt');
$qs[] = $this->db->multiinsertQ('text_header', $data, $fields);
}
return $qs;
}
/**
 * Splits the annotation section (ANNO_S…ANNO_E) out of $content into its
 * own content file and returns the content without that section.
 */
private function createAnnoFile($textId, $content, $saveFiles = false, $dir = 'text-anno')
{
$targetDir = "$this->contentDir/$dir/";
return $this->createExtraFile($textId, $content, $targetDir,
\Sfblib_SfbConverter::ANNO_S, \Sfblib_SfbConverter::ANNO_E, $saveFiles);
}
/**
 * Splits the info section (INFO_S…INFO_E) out of $content into its own
 * content file and returns the content without that section.
 */
private function createInfoFile($textId, $content, $saveFiles = false, $dir = 'text-info')
{
$targetDir = "$this->contentDir/$dir/";
return $this->createExtraFile($textId, $content, $targetDir,
\Sfblib_SfbConverter::INFO_S, \Sfblib_SfbConverter::INFO_E, $saveFiles);
}
/**
 * Cuts a tag-delimited section out of $content, optionally saving the
 * section body to a per-text file, and returns the remaining content
 * with the section (tags included) removed.
 * Plain strpos/substr are used because preg_* functions do not work
 * correctly with big strings.
 */
private function createExtraFile($textId, $content, $dir, $startTag, $endTag, $saveFiles = false)
{
$openAt = strpos($content, $startTag);
if ($openAt === false) {
return $content; // no such section
}
$closeAt = strpos($content, $endTag, $openAt);
if ($closeAt === false) {
return $content; // unbalanced tags — leave the content untouched
}
$spanLen = $closeAt - $openAt;
$body = substr($content, $openAt + strlen($startTag), $spanLen - strlen($startTag));
$body = ltrim($body, "\n");
if ($saveFiles) {
File::myfile_put_contents($dir . Legacy::makeContentFilePath($textId), $body);
}
$remainder = substr_replace($content, '', $openAt, $spanLen + strlen($endTag));
return trim($remainder, "\n") . "\n";
}
/**
 * Extracts "$key=value" directive lines from $text.
 * Later occurrences of the same key win.
 *
 * @param string $text
 * @return array array(text without the directives, extracted variables)
 */
private function popvars($text) {
$re = '/\t?\$(\w+)=(.*)\n/';
$vars = array();
if (preg_match_all($re, $text, $m)) {
foreach ($m[1] as $i => $key) {
$vars[$key] = $m[2][$i];
}
$text = trim(preg_replace($re, '', $text), "\n") . "\n";
}
return array($text, $vars);
}
// Last id handed out per table by getNextId(), seeded from the table's
// auto-increment value on first use.
private $_curIds = array();
// Optional pools of pre-reserved ids per table; getNextId() consumes these
// before falling back to the auto-increment counter.
private $_ids = array(
'text' => array(),
'book' => array(),
);
/**
 * Returns the next free primary key for $table.
 * Pre-reserved ids from $this->_ids are handed out first; afterwards a
 * counter seeded from the table's auto-increment value is incremented.
 */
private function getNextId($table)
{
$hasPool = isset($this->_ids[$table]) && count($this->_ids[$table]) > 0;
if ($hasPool) {
return array_shift($this->_ids[$table]);
}
if (isset($this->_curIds[$table])) {
$this->_curIds[$table]++;
} else {
$this->_curIds[$table] = $this->db->autoIncrementId($table);
}
return $this->_curIds[$table];
}
/**
 * Looks up a person id by slug. A failed lookup is recorded in
 * $this->errors and the (empty) lookup result is returned.
 */
private function getPersonId($personName)
{
$slug = trim($personName);
$id = $this->db->getFields(DBT_PERSON, array('slug' => $slug), 'id');
if ( empty($id) ) {
$this->errors[] = "Личността $personName не съществува";
}
return $id;
}
/**
 * Looks up a series id by slug. A failed lookup is recorded in
 * $this->errors and the (empty) lookup result is returned.
 */
private function getSeriesId($name)
{
$slug = trim($name);
$id = $this->db->getFields(DBT_SERIES, array('slug' => $slug), 'id');
if ( empty($id) ) {
$this->errors[] = "Серията $name не съществува";
}
return $id;
}
/**
 * Looks up a user id by username. A failed lookup is recorded in
 * $this->errors and the (empty) lookup result is returned.
 */
private function getUserId($userName)
{
$key = array('username' => $userName);
$id = $this->db->getFields(DBT_USER, $key, 'id');
if ( empty($id) ) {
$this->errors[] = "Потребителя $userName не съществува";
}
return $id;
}
/**
 * Strips whitespace and SFB header markup (">" and "|") from both ends
 * of a header line.
 */
private function clearHeadline($headline)
{
$junk = " \n\t>|";
return trim($headline, $junk);
}
/**
 * Tells whether an input line is an SFB text header line (">" followed by
 * a tab). The isset() guard avoids undefined string offset notices for
 * lines shorter than two characters; such lines are never headers, so the
 * result is unchanged.
 */
private function isHeaderLine($line)
{
return isset($line[1]) && $line[0] == '>' && $line[1] == "\t";
}
/**
 * Tells whether an input line is a book-structure header line (starts
 * with "+"). The isset() guard avoids an undefined string offset notice
 * on empty lines; an empty line is never a header, so the result is
 * unchanged.
 */
private function isBookHeaderLine($line)
{
return isset($line[0]) && $line[0] == '+';
}
/**
 * Moves SFB headers one level up by stripping a single leading ">" from
 * every line starting with two or more.
 */
private function moveHeadersUp($content)
{
return preg_replace('/^>(>+)/', '$1', $content);
}
/**
 * Reads lines from an open stream until a non-empty (after trimming) line
 * is found and returns it; returns an empty string when the stream holds
 * none. Initializing $line fixes an undefined-variable notice that
 * occurred when the stream was already at EOF on entry.
 */
private function getFirstNonEmptyFileLine($fp)
{
$line = '';
while ( ! feof($fp) ) {
$line = trim(fgets($fp));
if ( ! empty($line) ) {
break;
}
}
return $line;
}
/**
 * Reads the rest of an open stream (from its current position to EOF)
 * and returns it as one string.
 */
private function getFileContentTillEnd($fp)
{
$buffer = '';
while ( ! feof($fp) ) {
$buffer .= fgets($fp);
}
return $buffer;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\File;
use Chitanka\LibBundle\Entity\Text;
/**
 * Legacy page object rendering a single text (work) of the library.
 * Relies on properties populated by the parent Page class ($this->work,
 * $this->request, …).
 */
class TextPage extends Page {
/**
 * Returns a JSONP payload with the table of contents and the rendered
 * text content (plus the end-of-text navigation message). Returns an
 * empty string when the page data cannot be initialized.
 */
public function getTextPartContent()
{
if ( ! $this->initData() ) {
return '';
}
$data = json_encode(array(
'toc' => $this->makeToc(),
'text' => $this->makeTextContent() . $this->makeEndMessage()
));
// Wrap in the caller-supplied JSONP callback name.
return $this->request->value('jsoncallback') . "($data);";
}
/**
 * Builds the "next work from this series" navigation paragraph, or an
 * empty string when the work has no series successor.
 *
 * @param bool $separate Prepend an <hr /> separator
 */
protected function makeNextSeriesWorkLink($separate = false) {
$nextWork = $this->work->getNextFromSeries();
$o = '';
if ( is_object($nextWork) ) {
$sl = $this->makeSeriesLink($this->work->series);
$tl = $this->makeSimpleTextLink(
$nextWork->title, $nextWork->getId(), 1, ''/*, array('rel' => 'next')*/);
$type = Legacy::workTypeArticle($nextWork->type);
$sep = $separate ? '<hr />' : '';
$stype = Legacy::seriesTypeArticle($this->work->seriesType);
$o = "$sep<p>Към следващото произведение от $stype „{$sl}“ — $type „{$tl}“</p>";
}
return $o;
}
/**
 * Builds "next work" navigation paragraphs for every book containing
 * this work; empty string when there are no successors.
 *
 * @param bool $separate Prepend an <hr /> separator when non-empty
 */
protected function makeNextBookWorkLink($separate = false) {
$o = '';
foreach ($this->work->getNextFromBooks() as $book => $nextWork) {
if ( is_object($nextWork) ) {
$sl = $this->makeBookLink($book, $this->work->books[$book]['title']);
$tl = $this->makeSimpleTextLink(
$nextWork->title, $nextWork->getId(), 1, ''/*, array('rel' => 'next')*/);
$type = Legacy::workTypeArticle($nextWork->type);
// Pick the Bulgarian noun matching the container type.
switch ( $this->work->books[$book]['type'] ) {
case 'collection' : $bt = 'сборника'; break;
case 'poetry' : $bt = 'стихосбирката'; break;
case 'book' : default : $bt = 'книгата'; break;
}
$o .= "\n<p>Към следващото произведение от $bt „{$sl}“ — $type „{$tl}“</p>";
}
}
$sep = !empty($o) && $separate ? '<hr />' : '';
return $sep . $o;
}
/**
 * Renders a license name, hyperlinked when a URI is given.
 */
protected function makeLicenseView($name, $uri = '') {
if ( empty($uri) ) {
return "($name)";
}
return "(<a href='$uri' rel='license'>$name</a>)";
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Tests\Service;
use Chitanka\LibBundle\Tests\TestCase;
use Chitanka\LibBundle\Service\FileUpdater;
class FileUpdaterTest extends TestCase {
/**
 * The original test instantiated FileUpdater without asserting anything,
 * so PHPUnit would report it as a risky test with no assertions.
 * Assert that the service can actually be constructed.
 */
public function testRmdir()
{
$updater = new FileUpdater;
$this->assertInstanceOf('Chitanka\LibBundle\Service\FileUpdater', $updater);
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Chitanka\LibBundle\Service\FeedService;
use Chitanka\LibBundle\Legacy\Legacy;
/**
 * Controller aggregating external feeds (blog, forum, identi.ca) into
 * page fragments. All feed fetches go through a short-lived cache.
 */
class FeedController extends Controller
{
// Default cache lifetime (seconds) for the rendered responses
protected $responseAge = 1800;
/**
 * Shows the latest literary news from the planet feed.
 *
 * @param int $limit Maximal number of articles to show
 */
public function lastLiternewsAction($limit = 3)
{
$feedUrl = 'http://planet.chitanka.info/atom.xml';
$xsl = __DIR__.'/../Resources/transformers/forum-atom-compact.xsl';
$content = $this->fetchFeed($feedUrl, $xsl);
if ($content === false) {
return $this->displayText('<p class="error">Неуспех при вземането на последните литературни новини.</p>');
}
$content = $this->limitArticles($content, $limit);
$feedService = new FeedService();
$content = $feedService->cleanup($content);
return $this->displayText($content);
}
/**
 * Shows the latest inside news from the identi.ca timeline.
 *
 * @param int $limit Maximal number of articles to show
 */
public function lastInsideNewsAction($limit = 8)
{
$feedUrl = 'http://identi.ca/api/statuses/user_timeline/127745.atom';
$xsl = __DIR__.'/../Resources/transformers/forum-atom-compact.xsl';
$content = $this->fetchFeed($feedUrl, $xsl);
if ($content === false) {
return $this->displayText('<p class="error">Неуспех при вземането на последните съобщения от identi.ca.</p>');
}
$content = $this->limitArticles($content, $limit);
return $this->displayText($content);
}
/**
 * Shows the latest forum posts, with some markup fixes applied to the
 * transformed feed.
 *
 * @param int $limit Maximal number of posts to show
 */
public function lastForumPostsAction($limit = 5)
{
$feedUrl = 'http://forum.chitanka.info/feed.php?c=' . $limit;
$xsl = __DIR__.'/../Resources/transformers/forum-atom-compact.xsl';
$content = $this->fetchFeed($feedUrl, $xsl);
if ($content === false) {
return $this->displayText('<p class="error">Неуспех при вземането на последните форумни съобщения.</p>');
}
$content = strtr($content, array(
'&u=' => '&u=', // user link
'</span>' => '',
"<br />\n<li>" => '</li><li>',
"<br />\n</ul>" => '</li></ul>',
' target="_blank"' => '',
));
$content = preg_replace('|<span[^>]+>|', '', $content);
return $this->displayText($content);
}
/**
 * Shows one randomly selected book review.
 */
public function randomReviewAction()
{
$reviews = $this->getReviews(1, true);
if (empty($reviews)) {
return $this->displayText('No reviews found');
}
$this->view['book'] = $reviews[0];
return $this->display('FeaturedBook:book');
}
/**
 * Shows all recent book reviews.
 */
public function reviewsAction()
{
$reviews = $this->getReviews();
if (empty($reviews)) {
return $this->displayText('No reviews found');
}
$this->view = compact('reviews');
return $this->display('Review:index');
}
/**
 * Fetches and parses the review feed from the blog.
 * Reviews are recognized by an <img> tag whose title has the form
 * „TITLE“ от AUTHOR.
 *
 * @param int|null $limit Maximal number of reviews to return (null = all)
 * @param bool $random Shuffle the reviews before applying the limit
 * @return array Review descriptors (author, title, url, cover, description)
 */
public function getReviews($limit = null, $random = false)
{
$reviews = array();
$feedUrl = 'http://blog.chitanka.info/section/reviews/feed';
$feed = Legacy::getFromUrlOrCache($feedUrl, $days = 0.02);
if (empty($feed) || strpos($feed, '<atom') === false) {
return $reviews;
}
$feedTree = new \SimpleXMLElement($feed);
foreach ($feedTree->xpath('//item') as $item) {
$content = $item->children('content', true)->encoded;
if (preg_match('|<img src="(.+)" title="„(.+)“ от (.+)"|U', $content, $matches)) {
$reviews[] = array(
'id' => 0,
'author' => $matches[3],
'title' => $matches[2],
'url' => $item->link->__toString(),
'cover' => $matches[1],
'description' => $item->description,
);
}
}
if ($random) {
shuffle($reviews);
}
if ($limit) {
$reviews = array_slice($reviews, 0, $limit);
}
return $reviews;
}
/**
 * Fetches a feed (through the URL cache) and transforms it with the
 * given XSL stylesheet.
 *
 * @param string $xmlFile Feed URL
 * @param string $xslFile Path to the XSL transformer
 * @return string|false Transformed markup, or false on failure
 */
public function fetchFeed($xmlFile, $xslFile)
{
$proc = new \XSLTProcessor();
$xsl = new \DOMDocument();
if ($xsl->loadXML(file_get_contents($xslFile)) ) {
$proc->importStyleSheet($xsl);
}
$feed = new \DOMDocument();
$contents = Legacy::getFromUrlOrCache($xmlFile, $days = 0.02);
if (empty($contents)) {
return false;
}
if ( $feed->loadXML($contents) ) {
return $proc->transformToXML($feed);
}
return false;
}
/**
 * Keeps at most $limit <article> elements from the given markup.
 * Fixed: the old implementation indexed $matches[0][$i] unconditionally,
 * producing undefined-index notices (and nulls) when the feed contained
 * fewer than $limit articles; array_slice handles both cases safely.
 */
private function limitArticles($content, $limit)
{
preg_match_all('|<article.+</article>|Ums', $content, $matches);
return implode('', array_slice($matches[0], 0, $limit));
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Repository for the texts a user has marked as read.
 * All query logic is inherited unchanged from BookmarkRepository; this
 * subclass exists so Doctrine maps the read-list entity to its own
 * repository class.
 */
class UserTextReadRepository extends BookmarkRepository
{
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpKernel\Exception\HttpException;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Chitanka\LibBundle\Pagination\Pager;
/**
 * Controller for user profile pages: personal tools, contributions,
 * read list, bookmarks and settings.
 */
class UserController extends Controller
{
/**
 * Renders the personal tools bar for the current user (never cached).
 */
public function personalToolsAction() {
$this->responseAge = 0;
return $this->render('LibBundle:User:personal_tools.html.twig', array(
'_user' => $this->getUser()
));
}
/** Shows a user profile (legacy page). */
public function showAction($username)
{
$this->responseAge = 0;
$_REQUEST['username'] = $username;
return $this->legacyPage('User');
}
/** Shows the editable user page (legacy page). */
public function pageAction($username)
{
$this->responseAge = 0;
$_REQUEST['username'] = $username;
return $this->legacyPage('EditUserPage');
}
/** Shows the text ratings given by a user (legacy page). */
public function ratingsAction($username)
{
$_REQUEST['username'] = $username;
return $this->legacyPage('Textrating');
}
/** Shows the comments written by a user (legacy page). */
public function commentsAction($username, $page)
{
$_REQUEST['username'] = $username;
$_REQUEST['page'] = $page;
return $this->legacyPage('Comment');
}
/**
 * Shows a paginated list of a user's text contributions.
 */
public function contribsAction($username, $page)
{
$limit = 50;
$user = $this->getUserRepository()->findOneBy(array('username' => $username));
$repo = $this->getUserTextContribRepository();
$this->view = array(
'user' => $user,
'contribs' => $repo->getByUser($user, $page, $limit),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countByUser($user)
)),
'route' => 'user_contribs',
'route_params' => array('username' => $username),
);
return $this->display('contribs');
}
/**
 * Shows a paginated list of the texts a user has marked as read.
 * Accessible to the owner by username, or to anyone via the owner's
 * access token in place of the username.
 */
public function readListAction($username, $page)
{
$this->responseAge = 0;
list($user, $isOwner) = $this->resolveUserAndOwnership($username);
$limit = 50;
$repo = $this->getUserTextReadRepository();
$this->view = array(
'user' => $user,
'is_owner' => $isOwner,
'read_texts' => $repo->getByUser($user, $page, $limit),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countByUser($user)
)),
'route' => 'user_read_list',
'route_params' => array('username' => $username),
);
return $this->display('read_list');
}
/**
 * Shows a paginated list of a user's bookmarks.
 * Same access rules as readListAction().
 */
public function bookmarksAction($username, $page)
{
$this->responseAge = 0;
list($user, $isOwner) = $this->resolveUserAndOwnership($username);
$limit = 50;
$repo = $this->getBookmarkRepository();
$this->view = array(
'user' => $user,
'is_owner' => $isOwner,
'bookmarks' => $repo->getByUser($user, $page, $limit),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countByUser($user)
)),
'route' => 'user_bookmarks',
'route_params' => array('username' => $username),
);
return $this->display('bookmarks');
}
/**
 * Resolves the user whose private list is requested and whether the
 * current user owns it. When the requested name is not the current
 * user's, it is treated as an access token; an unknown token yields 401.
 * Extracted from the formerly duplicated logic in readListAction() and
 * bookmarksAction().
 *
 * @param string $username Username of the owner, or an access token
 * @return array array(user entity, bool is-owner)
 * @throws HttpException 401 when the token matches no user
 */
private function resolveUserAndOwnership($username)
{
if ($this->getUser()->getUsername() != $username) {
$user = $this->getUserRepository()->findOneBy(array('token' => $username));
if ( ! $user) {
throw new HttpException(401);
}
return array($user, false);
}
return array($this->getUserRepository()->findOneBy(array('username' => $username)), true);
}
/**
 * Tell if any of the requested texts are special for the current user
 * i.e. the user has bookmarked it or read it
 */
public function specialTextsAction(Request $request)
{
$this->responseAge = 0;
if ($this->getUser()->isAnonymous()) {
throw new HttpException(401);
}
$texts = $request->get('texts');
return $this->displayJson(array(
'read' => array_flip($this->getUserTextReadRepository()->getValidTextIds($this->getUser(), $texts)),
'favorities' => array_flip($this->getBookmarkRepository()->getValidTextIds($this->getUser(), $texts)),
));
}
/**
 * Shows the settings page for a user; only the owner may access it.
 */
public function editAction($username)
{
$this->responseAge = 0;
if ($this->getUser()->getUsername() != $username) {
throw new HttpException(401);
}
$styleUrl = '/bundles/lib/css/?skin=SKIN&menu=NAV';
$this->view['inline_js'] = <<<EOS
var nav = "", skin = "";
function changeStyleSheet() {
	setActiveStyleSheet("$styleUrl".replace(/SKIN/, skin).replace(/NAV/, nav));
}
EOS;
return $this->legacyPage('Settings');
}
/**
 * Renders the per-user stylesheet/javascript preferences (never cached).
 */
public function stylesheetAction()
{
$this->responseAge = 0;
return $this->render('LibBundle:User:stylesheet.html.twig', array(
'stylesheet' => $this->getStylesheet(),
'extra_stylesheets' => $this->getUser()->getExtraStylesheets(),
'extra_javascripts' => $this->getUser()->getExtraJavascripts(),
));
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Entity\User;
/**
 * Legacy page that e-mails a forgotten username to the address the user
 * registered with. The mail plumbing lives in the parent MailPage class.
 */
class SendUsernamePage extends MailPage {
protected
$action = 'sendUsername';
public function __construct($fields) {
parent::__construct($fields);
$this->title = 'Изпращане на потребителско име';
// Address to send the reminder to, taken from the submitted form
$this->email = $this->request->value('email');
}
/**
 * Looks up the username for the submitted e-mail and, when found,
 * prepares the reminder mail; delegates the actual sending to the
 * parent class. Re-renders the form with an error when no user matches.
 */
protected function processSubmission() {
$key = array('email' => $this->email);
$res = $this->db->select(DBT_USER, $key, 'username');
$data = $this->db->fetchAssoc($res);
if ( empty($data) ) {
$this->addMessage("Не съществува потребител с електронна поща <strong>$this->email</strong>.", true);
return $this->buildContent();
}
extract($data);
$this->username = $username;
$this->mailToName = $username;
$this->mailToEmail = $this->email;
$this->mailSubject = 'Напомняне за име от '.$this->sitename;
$sendpass = $this->controller->generateUrl('request_password');
$login = $this->controller->generateUrl('login');
$this->mailSuccessMessage = "На адреса <strong>$this->email</strong> беше
изпратено напомнящо писмо. Ако не се сещате и за паролата си,
ползвайте функцията „<a href=\"$sendpass\">Изпращане на нова парола</a>“.
Иначе можете спокойно да <a href=\"$login\">влезете</a>.";
$this->mailFailureMessage = 'Изпращането на напомняне не сполучи.';
return parent::processSubmission();
}
/**
 * Renders the reminder request form.
 */
protected function makeForm() {
$email = $this->out->textField('email', '', $this->email, 25, 255, 2);
$submit = $this->out->submitButton('Изпращане на потребителското име', '', 3);
return <<<EOS
<p>Е, на всекиго може да се случи да си забрави името. ;-) Няма страшно!
Ако в потребителските си данни сте посочили валидна електронна поща, сега
можете да поискате напомняне за името, с което сте се регистрирали в
<em>$this->sitename</em>.</p>
<p><br /></p>
<form action="" method="post">
<fieldset>
<legend>Напомняне за име</legend>
<label for="email">Електронна поща:</label>
$email
$submit
</fieldset>
</form>
EOS;
}
/**
 * Builds the plain-text body of the reminder mail.
 */
protected function makeMailMessage() {
$passlink = $this->controller->generateUrl('request_password', array(), true);
return <<<EOS
Здравейте!
Някой (най-вероятно вие) поиска да ви изпратим потребителското име, с което сте
се регистрирали в $this->sitename (http://chitanka.info).
Ако все пак не сте били вие, можете да не обръщате внимание на това писмо.
Потребителското ви име е {$this->username}
Ако не се сещате и за паролата си, ползвайте функцията
„Изпращане на нова парола“ ($passlink).
$this->sitename
EOS;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
/**
 * Controller rendering the site-wide statistics page: entity counts,
 * author counts per country and text counts per type.
 */
class StatisticsController extends Controller
{
/**
 * Collects all counters and renders the statistics index.
 */
public function indexAction()
{
$counts = array(
'authors' => $this->getPersonRepository()->asAuthor()->getCount(),
'translators' => $this->getPersonRepository()->asTranslator()->getCount(),
'texts' => $this->getTextRepository()->getCount(),
'series' => $this->getSeriesRepository()->getCount(),
'labels' => $this->getLabelRepository()->getCount(),
'books' => $this->getBookRepository()->getCount(),
'books_wo_cover'=> $this->getBookRepository()->getCountWithMissingCover(),
'sequences' => $this->getSequenceRepository()->getCount(),
'categories' => $this->getCategoryRepository()->getCount(),
'text_comments' => $this->getTextCommentRepository()->getCount('e.is_shown = 1'),
'users' => $this->getUserRepository()->getCount(),
);
$this->view = array(
'count' => $counts,
'author_countries' => $this->getAuthorCountries(),
'text_types' => $this->getTextTypes(),
);
return $this->display('index');
}
/** Author counts per country, most represented countries first. */
private function getAuthorCountries()
{
$byCountry = $this->getPersonRepository()->asAuthor()->getCountsByCountry();
arsort($byCountry);
return $byCountry;
}
/** Text counts per work type, most numerous types first. */
private function getTextTypes()
{
$byType = $this->getTextRepository()->getCountsByType();
arsort($byType);
return $byType;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Chitanka\LibBundle\Pagination\Pager;
use Chitanka\LibBundle\Util\Datetime;
/**
 * Controller for the "new books / new texts" history pages: latest
 * additions, per-month listings and the corresponding RSS/OPDS feeds.
 */
class HistoryController extends Controller
{
// Page sizes for the book and text listings
public $booksPerPage = 30;
public $textsPerPage = 30;
/**
 * Combined overview of the latest book and text revisions.
 */
public function indexAction()
{
$this->view = array(
'book_revisions_by_date' => $this->getBookRevisionRepository()->getLatest($this->booksPerPage),
'text_revisions_by_date' => $this->getTextRevisionRepository()->getLatest($this->textsPerPage),
);
return $this->display('index');
}
/**
 * Lists the latest book revisions in HTML, RSS or OPDS format.
 *
 * @param int $page
 * @param string $_format One of html, rss, opds
 */
public function listBooksAction($page, $_format)
{
$maxPerPage = $this->booksPerPage;
$repo = $this->getBookRevisionRepository();
switch ($_format) {
case 'html':
case 'rss':
$revisions = $repo->getLatest($maxPerPage, $page);
// Revisions are grouped by date; the first group holds the newest ones.
$lastOnes = current($revisions);
$this->view = array(
'dates' => $this->getDateOptions($repo),
'book_revisions_by_date' => $revisions,
'last_date' => $lastOnes[0]['date'],
);
break;
case 'opds':
$this->view = array(
'book_revisions' => $repo->getByDate(null, $page, $maxPerPage, false),
'pager' => new Pager(array(
'page' => $page,
'limit' => $maxPerPage,
// NOTE(review): total is a hard-coded estimate (50 pages), not a real count
'total' => $maxPerPage * 50
)),
);
break;
}
return $this->display("list_books.$_format");
}
/**
 * Lists the book revisions of a given month, paginated.
 */
public function listBooksByMonthAction($year, $month, $page)
{
// Date interval covering the whole month
$dates = array("$year-$month-01", Datetime::endOfMonth("$year-$month"));
$limit = $this->booksPerPage;
$repo = $this->getBookRevisionRepository();
$this->view = array(
'dates' => $this->getDateOptions($repo),
'month' => ltrim($month, '0'),
'year' => $year,
'book_revisions_by_date' => $repo->getByDate($dates, $page, $limit),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countByDate($dates)
)),
'route_params' => compact('year', 'month'),
);
return $this->display("list_books_by_month");
}
/**
 * Lists the latest text revisions in HTML, RSS or OPDS format.
 *
 * @param int $page
 * @param string $_format One of html, rss, opds
 */
public function listTextsAction($page, $_format)
{
$maxPerPage = $this->textsPerPage;
$repo = $this->getTextRevisionRepository();
switch ($_format) {
case 'html':
case 'rss':
$revisions = $repo->getLatest($maxPerPage, $page);
$lastOnes = current($revisions);
$this->view = array(
'dates' => $this->getDateOptions($repo),
'text_revisions_by_date' => $revisions,
'last_date' => $lastOnes[0]['date'],
);
break;
case 'opds':
$this->view = array(
'text_revisions' => $repo->getByDate(null, $page, $maxPerPage, false),
'pager' => new Pager(array(
'page' => $page,
'limit' => $maxPerPage,
// NOTE(review): total is a hard-coded estimate (50 pages), not a real count
'total' => $maxPerPage * 50
)),
);
break;
}
return $this->display("list_texts.$_format");
}
/**
 * Lists the text revisions of a given month, paginated.
 */
public function listTextsByMonthAction($year, $month, $page)
{
$dates = array("$year-$month-01", Datetime::endOfMonth("$year-$month"));
$limit = $this->textsPerPage;
$repo = $this->getTextRevisionRepository();
$revisions = $repo->getByDate($dates, $page, $limit);
$this->view = array(
'dates' => $this->getDateOptions($repo),
'month' => ltrim($month, '0'),
'year' => $year,
'text_revisions_by_date' => $revisions,
'texts_by_id' => $this->extractTextsFromRevisionsByDate($revisions),
'pager' => new Pager(array(
'page' => $page,
'limit' => $limit,
'total' => $repo->countByDate($dates)
)),
'route' => 'new_texts_by_month',
'route_params' => compact('year', 'month'),
);
return $this->display('list_texts_by_month');
}
/**
 * Builds the month selector options ("YYYY-MM" => data), newest first.
 */
private function getDateOptions($repository)
{
$dates = array();
foreach ($repository->getMonths() as $data) {
$ym = $data['month'];
list($y, $m) = explode('-', $ym);
$data['year'] = $y;
$data['month'] = ltrim($m, '0');
$dates[$ym] = $data;
}
krsort($dates);
return $dates;
}
/**
 * Collects the texts referenced by a date-grouped revision list, keyed
 * by text id (duplicates collapse onto one entry).
 */
private function extractTextsFromRevisionsByDate($revisionsByDate)
{
$texts = array();
foreach ($revisionsByDate as $revisions) {
foreach ($revisions as $revision) {
$texts[$revision['text']['id']] = $revision['text'];
}
}
return $texts;
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
use Chitanka\LibBundle\Util\String;
use Chitanka\LibBundle\Util\Number;
use Chitanka\LibBundle\Util\Char;
use Chitanka\LibBundle\Util\File;
use Chitanka\LibBundle\Entity\User;
use Chitanka\LibBundle\Entity\WorkEntry;
use Chitanka\LibBundle\Entity\WorkEntryRepository;
use Chitanka\LibBundle\Pagination\Pager;
class WorkPage extends Page {
// --- Constants ---------------------------------------------------------
const
// default value for the external temp-file field (none)
DEF_TMPFILE = '',
// main work-entry table and the per-contributor table for team entries
DB_TABLE = DBT_WORK,
DB_TABLE2 = DBT_WORK_MULTI,
// highest status that still counts as the scanning phase
MAX_SCAN_STATUS = 2,
// workflow stages; labels are in $statuses below
STATUS_0 = 0,
STATUS_1 = 1,
STATUS_2 = 2,
STATUS_3 = 3,
STATUS_4 = 4,
STATUS_5 = 5,
STATUS_6 = 6,
STATUS_7 = 7;
// --- Request field names -----------------------------------------------
private
$FF_COMMENT = 'comment',
$FF_EDIT_COMMENT = 'editComment',
$FF_VIEW_LIST = 'vl',
$FF_SUBACTION = 'status',
$FF_LQUERY = 'wq';
// page action identifier and listing defaults (see parent Page)
protected $action = 'work';
protected $defViewList = 'work';
protected $defListLimit = 50;
protected $maxListLimit = 500;
// --- UI labels and mappings (Bulgarian user-facing strings) ------------
private
// tab captions for single-user vs. team preparation, with icons/alt text
$tabs = array('Самостоятелна подготовка', 'Работа в екип'),
$tabImgs = array('fa fa-user singleuser', 'fa fa-users multiuser'),
$tabImgAlts = array('сам', 'екип'),
// human-readable label per workflow status code
$statuses = array(
self::STATUS_0 => 'Планира се',
self::STATUS_1 => 'Сканира се',
self::STATUS_2 => 'За корекция',
self::STATUS_3 => 'Коригира се',
self::STATUS_4 => 'Иска се SFB',
self::STATUS_5 => 'Чака проверка',
self::STATUS_6 => 'Проверен',
self::STATUS_7 => 'За добавяне',
),
// available list views (key => description used in messages)
$viewLists = array(
'work' => 'списъка на подготвяните произведения',
'contrib' => 'списъка на помощниците',
'listonly' => '',
),
// subaction filters offered in the view dropdown
$viewTypes = array(
'all' => 'Всички',
'my' => 'Мое участие',
'waiting' => 'Търси се коректор',
),
// CSS icon classes per status code and per view filter
$statusClasses = array(
self::STATUS_0 => 'fa fa-square-o status-plan',
self::STATUS_1 => 'fa fa-square status-scan',
self::STATUS_2 => 'fa fa-circle-o status-waiting',
self::STATUS_3 => 'fa fa-dot-circle-o status-edit',
self::STATUS_4 => 'fa fa-code status-format',
self::STATUS_5 => 'fa fa-question-circle status-forcheck',
self::STATUS_6 => 'fa fa-check-circle status-checked',
self::STATUS_7 => 'fa fa-circle status-done',
'all' => 'fa fa-tasks',
'my' => 'fa fa-user',
'waiting' => 'fa fa-search-plus status-waiting',
),
// allowed extensions for uploaded work files
$fileWhiteList = array(
'sfb', 'fb2', 'txt',
'odt', 'rtf', 'djvu', 'pdf', 'epub',
'zip', '7z', 'gz', 'tar', 'tgz', 'bz2',
);
/**
 * Initializes the workroom page from the current request: reads all
 * form fields, normalizes them, and prepares pagination and titles.
 * Note: fields are read in a fixed order — makeUploadedFileName()
 * depends on $this->entryId being set first.
 */
public function __construct($fields) {
parent::__construct($fields);
$this->title = 'Работно ателие';
// directory for uploaded work files, relative to the web root
$this->tmpDir = 'todo/';
$this->absTmpDir = $this->container->getParameter('kernel.root_dir') . '/../web/'.$this->tmpDir;
$this->subaction = $this->request->value( $this->FF_SUBACTION, '', 1 );
$this->entryId = (int) $this->request->value('id');
// 0 = single user, 1 = team (clamped to [0, 3] by the request helper)
$this->workType = (int) $this->request->value('workType', 0, 3);
$this->btitle = $this->request->value('title');
$this->author = $this->request->value('author');
$this->publisher = $this->request->value('publisher');
$this->pubYear = $this->request->value('pubYear');
$this->status = (int) $this->request->value('entry_status');
// progress percentage clamped to [0, 100]
$this->progress = Number::normInt($this->request->value('progress'), 100, 0);
$this->is_frozen = $this->request->checkbox('is_frozen');
$this->delete = $this->request->checkbox('delete');
// responsible user; defaults to the current user
$this->scanuser = (int) $this->request->value('user', $this->user->getId());
$this->scanuser_view = $this->request->value('user');
$this->comment = $this->request->value($this->FF_COMMENT);
// normalize CRLF line endings in the comment
$this->comment = strtr($this->comment, array("\r"=>''));
$this->tmpfiles = $this->request->value('tmpfiles', self::DEF_TMPFILE);
$this->tfsize = $this->request->value('tfsize');
$this->editComment = $this->request->value($this->FF_EDIT_COMMENT);
$this->uplfile = $this->makeUploadedFileName();
$this->searchQuery = $this->request->value($this->FF_LQUERY);
$this->form = $this->request->value('form');
// set when the user re-submits despite a duplicate-entry warning
$this->bypassExisting = (int) $this->request->value('bypass', 0);
$this->date = date('Y-m-d H:i:s');
$this->rowclass = null;
$this->showProgressbar = true;
$this->viewList = $this->request->value($this->FF_VIEW_LIST,
$this->defViewList, null, $this->viewLists);
// extend the page title with the active filter or the viewed user
if ( !empty($this->subaction) && !empty($this->viewTypes[$this->subaction]) ) {
$this->title .= ' — ' . $this->viewTypes[$this->subaction];
} else if ( ! empty( $this->scanuser_view ) ) {
$this->setScanUserView($this->scanuser_view);
$this->title .= ' — ' . $this->data_scanuser_view->getUsername();
}
$this->multidata = array();
$this->initPaginationFields();
}
/**
 * Remembers which user's entries should be listed and resolves the
 * given id/username to a User entity for display purposes.
 */
public function setScanUserView($user)
{
	$this->data_scanuser_view = $this->findUser($user);
	$this->scanuser_view = $user;
}
/**
 * Looks up a user either by numeric id or by username.
 */
private function findUser($user)
{
	$repository = $this->controller->getRepository('User');
	if (is_numeric($user)) {
		return $repository->find($user);
	}
	return $repository->findByUsername($user);
}
/**
 * Validates the submitted form (edit permission, upload extension)
 * and dispatches to the single-user or team update handler.
 */
protected function processSubmission() {
	$mayEdit = empty($this->entryId)
		|| $this->thisUserCanEditEntry($this->entryId, $this->workType);
	if (!$mayEdit) {
		$this->addMessage('Нямате права да редактирате този запис.', true);
		return $this->makeLists();
	}
	if ($this->uplfile && !File::hasValidExtension($this->uplfile, $this->fileWhiteList)) {
		$allowed = implode(', ', $this->fileWhiteList);
		$this->addMessage('Файлът не е в един от разрешените формати: ' . $allowed, true);
		return $this->makeLists();
	}
	// dispatch by work type; other values fall through (no-op), as before
	if ($this->workType == 0) {
		return $this->updateMainUserData();
	}
	if ($this->workType == 1) {
		return $this->updateMultiUserData();
	}
}
/**
 * Saves the main work-entry record: validates the title, warns about
 * duplicates (existing library texts or other workroom entries),
 * handles deletion by admins, stores an uploaded file, and finally
 * inserts or updates the row in the work table.
 * Returns rendered HTML (form or lists) depending on the outcome.
 */
protected function updateMainUserData() {
if ( empty($this->btitle) ) {
$this->addMessage('Не сте посочили заглавие на произведението.', true);
return $this->makeForm();
}
$this->btitle = String::my_replace($this->btitle);
if ($this->entryId == 0) { // check if this text exists in the library
$this->scanuser_view = 0;
if ( ! $this->bypassExisting) {
// TODO does not work if there are more than one titles with the same name
$texts = $this->controller->getRepository('Text')->findBy(array('title' => $this->btitle));
foreach ($texts as $text) {
if ($text->getAuthorNames() == $this->author) {
$wl = $this->makeSimpleTextLink($text->getTitle(), $text->getId());
$this->addMessage('В библиотеката вече съществува произведение'.
$this->makeFromAuthorSuffix($text) .
" със същото заглавие: <div class='standalone'>$wl.</div>", true);
$this->addMessage('Повторното съхраняване ще добави вашия запис въпреки горното предупреждение.');
$this->bypassExisting = 1;
return $this->makeForm();
}
}
// also warn if another workroom entry has the same title
$key = array('title' => $this->btitle, 'deleted_at IS NULL');
if ($this->db->exists(self::DB_TABLE, $key)) {
$this->addMessage('Вече се подготвя произведение със същото заглавие', true);
$this->addMessage('Повторното съхраняване ще добави вашия запис въпреки горното предупреждение.');
$this->bypassExisting = 1;
return $this->makeWorkList(0, 0, null, false, $key) . $this->makeForm();
}
}
}
// allocate an id for new entries; rewrite the upload-file name prefix
if ( $this->entryId == 0 ) {
$id = $this->controller->getRepository('NextId')->findNextId('LibBundle:WorkEntry')->getValue();
$this->uplfile = preg_replace('/^0-/', "$id-", $this->uplfile);
} else {
$id = $this->entryId;
}
$set = array(
'id' => $id,
// an SFB-requested entry is always stored as a team entry
'type' => in_array($this->status, array(self::STATUS_4)) ? 1 : $this->workType,
'title'=>$this->btitle,
'author'=> strtr($this->author, array(';'=>',')),
'publisher' => $this->publisher,
'pub_year' => $this->pubYear,
'user_id'=>$this->scanuser,
'comment' => $this->pretifyComment($this->comment),
'date'=>$this->date,
'is_frozen' => $this->is_frozen,
'status'=>$this->status,
'progress' => $this->progress,
'tmpfiles' => self::rawurlencode($this->tmpfiles), #strpos($this->tmpfiles, '%') === false ? $this->tmpfiles : rawurldecode($this->tmpfiles),
'tfsize' => $this->tfsize
);
if ($this->userIsAdmin()) {
$set += array(
'admin_status' => $this->request->value('admin_status'),
'admin_comment' => $this->request->value('admin_comment'),
);
}
$key = array('id' => $this->entryId);
// soft-delete (admins only): marks the entry and its contributor rows
if ($this->delete && $this->userIsAdmin()) {
$set += array('deleted_at' => new \DateTime, 'is_frozen' => 0);
$this->db->update(self::DB_TABLE, $set, $key);
if ( $this->isMultiUser($this->workType) ) {
$this->db->update(self::DB_TABLE2, array('deleted_at' => new \DateTime), array('entry_id' => $this->entryId));
}
$this->addMessage("Произведението „{$this->btitle}“ беше махнато от списъка.");
$this->deleteEntryFiles($this->entryId);
$this->scanuser_view = null;
return $this->makeLists();
}
// a fresh upload overrides the manually entered temp-file fields
if ( $this->handleUpload() && !empty($this->uplfile) ) {
$set['uplfile'] = $this->uplfile;
//if ( $this->isMultiUser() ) {
$set['tmpfiles'] = $this->makeTmpFilePath(self::rawurlencode($this->uplfile));
$set['tfsize'] = Legacy::int_b2m(filesize($this->absTmpDir . $this->uplfile));
//}
}
$this->db->update(self::DB_TABLE, $set, $this->entryId);
$msg = $this->entryId == 0
? 'Произведението беше добавено в списъка с подготвяните.'
: 'Данните за произведението бяха обновени.';
$this->scanuser_view = 0;
$this->addMessage($msg);
return $this->makeLists();
}
/**
 * For team entries: the entry owner saving the main form updates the
 * main record; everybody else updates their own contribution record.
 */
protected function updateMultiUserData() {
	$ownerSavesMainForm = $this->thisUserCanDeleteEntry() && $this->form != 'edit';
	return $ownerSavesMainForm
		? $this->updateMainUserData()
		: $this->updateMultiUserDataForEdit();
}
/**
 * Saves the current user's contribution to a team entry (comment,
 * progress, optional upload), creating the contribution row on first
 * participation, then bumps the main entry's date and status.
 */
protected function updateMultiUserDataForEdit() {
$pkey = array('id' => $this->entryId);
$key = array('entry_id' => $this->entryId, 'user_id' => $this->user->getId());
if ( empty($this->editComment) ) {
$this->addMessage('Въвеждането на коментар е задължително.', true);
return $this->buildContent();
}
$this->editComment = $this->pretifyComment($this->editComment);
$set = array(
'entry_id' => $this->entryId,
'user_id' => $this->user->getId(),
'comment' => $this->editComment,
'date' => $this->date,
'progress' => $this->progress,
'is_frozen' => $this->is_frozen,
// raw SQL fragment: re-activates a previously soft-deleted contribution
'deleted_at = null',
);
if ($this->request->value('uplfile') != '') {
$set['uplfile'] = $this->request->value('uplfile');
$set['filesize'] = $this->request->value('filesize');
}
// a fresh upload takes precedence over the posted file name
if ( $this->handleUpload() && !empty($this->uplfile) ) {
$set['uplfile'] = $this->uplfile;
}
if ($this->db->exists(self::DB_TABLE2, $key)) {
$this->db->update(self::DB_TABLE2, $set, $key);
$msg = 'Данните бяха обновени.';
} else {
$set['id'] = $this->controller->getRepository('NextId')->findNextId('LibBundle:WorkContrib')->getValue();
$this->db->insert(self::DB_TABLE2, $set);
$msg = 'Току-що се включихте в подготовката на произведението.';
// notify the entry owner about the new contributor
$this->informScanUser($this->entryId);
}
$this->addMessage($msg);
// update main entry
$set = array(
'date' => $this->date,
'status' => $this->isEditDone()
? ( $this->isReady() ? self::STATUS_6 : self::STATUS_5 )
: self::STATUS_3
);
$this->db->update(self::DB_TABLE, $set, $pkey);
return $this->makeLists();
}
/**
 * Moves the uploaded file (request field "file") into the workroom
 * temp directory and mirrors it to a remote host when configured.
 * A pre-existing file with the same name is kept as a backup with the
 * current unix timestamp appended to its name.
 *
 * @return bool true if a file was uploaded and stored successfully
 */
protected function handleUpload() {
	$tmpfile = $this->request->fileTempName('file');
	if ( !is_uploaded_file($tmpfile) ) {
		return false;
	}
	$dest = $this->absTmpDir . $this->uplfile;
	if ( file_exists($dest) ) {
		// keep the previous upload as a timestamped backup
		rename($dest, $dest .'-'. time());
	}
	if ( !move_uploaded_file($tmpfile, $dest) ) {
		$this->addMessage("Файлът не успя да бъде качен. Опитайте пак!", true);
		return false;
	}
	// copy local file if there is a remote workroom
	if ( $remote = Setup::setting('workroom_remote') ) {
		// escapeshellarg guards against shell metacharacters in the
		// user-derived file name; $remote is trusted admin configuration
		$com = sprintf('scp %s %s', escapeshellarg($dest), $remote);
		shell_exec($com);
	}
	$this->addMessage("Файлът беше качен. Благодарим ви за положения труд!");
	return true;
}
/**
 * Derives a unique storage name for the uploaded file:
 * "<entryId>-<Ymd-His>-<username>-<cleaned original name>".
 * Returns an empty string when nothing was uploaded.
 */
protected function makeUploadedFileName() {
	$original = $this->request->fileName('file');
	if (empty($original)) {
		return '';
	}
	// transliterate Cyrillic and replace spaces before cleaning
	$normalized = strtr(Char::cyr2lat($original), array(' ' => '_'));
	$parts = array(
		$this->entryId,
		date('Ymd-His'),
		$this->user->getUsername(),
		File::cleanFileName($normalized, false),
	);
	return implode('-', $parts);
}
/**
 * Renders the page body: a bare list for the "listonly" view, the
 * edit form for the "edit" subaction, or the default lists with an
 * RSS link otherwise.
 */
protected function buildContent() {
	if ($this->viewList == 'listonly') {
		// bare list, without chrome
		return $this->makeWorkList();
	}
	$content = $this->makeUserGuideLink();
	if ($this->subaction != 'edit') {
		$this->addRssLink();
		return $content . $this->getInlineRssLink('workroom_rss') . $this->makeLists();
	}
	// edit view: preload entry data when an existing entry is opened
	if ($this->entryId) {
		$this->initData();
	}
	return $content . $this->makeForm();
}
/**
 * Returns a static link to the workroom manual in the wiki.
 */
protected function makeUserGuideLink() {
	$guideLink = '<div class="float-right"><a href="http://wiki.chitanka.info/Workroom" title="Наръчник за работното ателие"><span class="fa fa-info-circle"></span> Наръчник за работното ателие</a></div>';
	return $guideLink;
}
/**
 * Assembles the default page content: help text, search form, the
 * "new entry" button, and either the work list or the contributors
 * list, depending on the active view.
 */
protected function makeLists() {
	$output = $this->makePageHelp();
	$output .= $this->makeSearchForm();
	$output .= '<div class="standalone">' . $this->makeNewEntryLink() . '</div>';
	$output .= $this->viewList == 'work'
		? $this->makeWorkList($this->llimit, $this->loffset)
		: $this->makeContribList();
	return $output;
}
/**
 * Renders the inline search form (GET to the workroom route) together
 * with the view-filter dropdown produced by makeViewWorksLinks().
 */
protected function makeSearchForm()
{
$id = $this->FF_LQUERY;
$action = $this->controller->generateUrl('workroom');
return <<<EOS
<form action="$action" method="get" class="form-inline standalone" role="form">
{$this->makeViewWorksLinks()}
<div class="form-group">
<label for="$id" class="sr-only">Търсене на: </label>
<div class="input-group">
<input type="text" class="form-control" title="Търсене из подготвяните произведения" maxlength="100" size="50" id="$id" name="$id">
<span class="input-group-btn">
<button class="btn btn-default" type="submit"><span class="fa fa-search"></span><span class="sr-only">Търсене</span></button>
</span>
</div>
</div>
</form>
EOS;
}
/**
 * Renders the table of work entries.
 *
 * @param int   $limit          page size (0 = unlimited)
 * @param int   $offset         row offset for pagination
 * @param mixed $order          unused here; kept for interface compatibility
 * @param bool  $showPageLinks  whether to render a pager below the table
 * @param array $where          extra WHERE conditions for the query
 * @return string HTML table, or a notice when the list is empty
 */
public function makeWorkList(
$limit = 0,
$offset = 0,
$order = null,
$showPageLinks = true,
$where = array())
{
$q = $this->makeSqlQuery($limit, $offset, $order, $where);
// each row is rendered through makeWorkListItem()
$l = $this->db->iterateOverResult($q, 'makeWorkListItem', $this, true);
if ( empty($l) ) {
return '<p class="standalone emptylist"><strong>Няма подготвящи се произведения.</strong></p>';
}
if ($showPageLinks) {
// carry the active filter/search/user over into the pager links
$params = array(
$this->FF_SUBACTION => $this->subaction
);
if ($this->searchQuery) $params[$this->FF_LQUERY] = $this->searchQuery;
if ($this->scanuser_view) $params['user'] = $this->scanuser_view;
$pagelinks = $showPageLinks ? $this->controller->renderView('LibBundle::pager.html.twig', array(
'pager' => new Pager(array(
'page' => $this->lpage,
'limit' => $this->llimit,
'total' => $this->db->getCount(self::DB_TABLE, $this->makeSqlWhere('', $where)),
)),
'current_route' => 'workroom',
'route_params' => $params,
)) : '';
} else {
$pagelinks = '';
}
// the admin-status column is only rendered for administrators
$adminStatus = $this->userIsAdmin() ? '<th title="Администраторски статус"></th>' : '';
return <<<EOS
<table class="table table-striped table-condensed table-bordered">
<thead>
<tr>
<th>Дата</th>
$adminStatus
<th title="Тип на записа"></th>
<th title="Информация"></th>
<th title="Коментари към записа"></th>
<th title="Файл"></th>
<th style="width: 25%">Заглавие</th>
<th>Автор</th>
<th>Етап на работата</th>
<th>Потребител</th>
</tr>
</thead>
<tbody>
$l
</tbody>
</table>
$pagelinks
EOS;
}
/**
 * Builds the work-entry listing query: entries joined with their owner
 * and comment-thread counters, newest first.
 * Note: $order is accepted for interface compatibility but the sort
 * order is fixed (date DESC, id DESC), as before.
 */
public function makeSqlQuery(
	$limit = 0, $offset = 0, $order = null, $where = array() )
{
	$query = array(
		'SELECT' => 'w.*, DATE(date) ddate, u.username, u.email, u.allowemail, num_comments',
		'FROM' => self::DB_TABLE. ' w',
	);
	$query['LEFT JOIN'] = array(
		DBT_USER .' u' => 'w.user_id = u.id',
		'thread ct' => 'w.comment_thread_id = ct.id',
	);
	$query['WHERE'] = $this->makeSqlWhere('w', $where);
	$query['ORDER BY'] = 'date DESC, w.id DESC';
	$query['LIMIT'] = array($offset, $limit);
	return $this->db->extselectQ($query);
}
/**
 * Builds the WHERE conditions for the work-entry listing, based on the
 * active subaction ('my', 'waiting', 'st-N' status filter), the viewed
 * user, or the search query. Soft-deleted rows are always excluded.
 *
 * @param string $pref table alias prefix for column names (e.g. 'w')
 * @param array  $base extra conditions merged into the result
 */
public function makeSqlWhere($pref = '', $base = array()) {
$w = (array) $base;
if ( !empty($pref) ) $pref .= '.';
$showuser = 0;
if ($this->subaction == 'my') {
$showuser = $this->user->getId();
} else if ( ! empty($this->scanuser_view) ) {
$showuser = $user ? $user->getId() : null;
$user = $this->findUser($this->scanuser_view);
$showuser = $user ? $user->getId() : null;
}
if ( ! empty($showuser) ) {
// entries owned by the user OR with a contribution row by the user
$entry_idQ = $this->db->selectQ(self::DB_TABLE2, array('user_id' => $showuser, 'deleted_at IS NULL'), 'entry_id');
$ors = array(
$pref.'user_id' => $showuser,
$pref.'id IN ('. $entry_idQ .')');
$w = array_merge($w, array($ors));
} else if ($this->subaction == 'waiting') {
// team entries that finished scanning and await proofreaders
$w = array('type' => 1, 'status' => self::MAX_SCAN_STATUS);
} else if ( strpos($this->subaction, 'st-') !== false ) {
// NOTE(review): this matches 'st-' anywhere in the subaction, not only
// as a prefix — presumably a prefix check is intended; confirm
$w = array('status' => str_replace('st-', '', $this->subaction));
} else if ( ! empty($this->searchQuery) ) {
$w[] = array(
$pref.'title' => array('LIKE', "%$this->searchQuery%"),
$pref.'author' => array('LIKE', "%$this->searchQuery%"),
);
}
$w[] = $pref.'deleted_at IS NULL';
return $w;
}
/**
 * Renders one work entry either as a table row ($astable) or as a
 * paragraph (RSS/compact view).
 * NOTE: extract($dbrow) imports the row's columns (and optional flags
 * such as $expandinfo, $showeditors, $showtime, $showtitle) as locals.
 */
public function makeWorkListItem($dbrow, $astable = true) {
extract($dbrow);
$author = strtr($author, array(', '=>','));
$author = $this->makeAuthorLink($author);
$userlink = $this->makeUserLinkWithEmail($username, $email, $allowemail);
$info = $this->makeWorkEntryInfo($dbrow, isset($expandinfo) && $expandinfo);
$title = "<i>$title</i>";
// prefer the external temp file link over the uploaded file link
$file = '';
if ( ! empty($tmpfiles) ) {
$file = $this->makeFileLink($tmpfiles);
} else if ( ! empty($uplfile) ) {
$file = $this->makeFileLink($uplfile);
}
$entryLink = $this->controller->generateUrl('workroom_entry_edit', array('id' => $id));
$commentsLink = $num_comments ? sprintf('<a href="%s#fos_comment_thread" title="Коментари"><span class="fa fa-comments-o"></span>%s</a>', $entryLink, $num_comments) : '';
$title = sprintf('<a href="%s" title="Към страницата за редактиране">%s</a>', $entryLink, $title);
$this->rowclass = $this->out->nextRowClass($this->rowclass);
// show a progress bar once there is measurable progress, else the status label
$st = $progress > 0
? $this->makeProgressBar($progress)
: $this->makeStatus($status);
// highlight rows the current user participates in
$extraclass = $this->user->getId() == $user_id ? ' hilite' : '';
if ($is_frozen) {
$sis_frozen = '<span title="Подготовката е замразена">(замразена)</span>';
$extraclass .= ' is_frozen';
} else {
$sis_frozen = '';
}
if ( $this->isMultiUser($type) ) {
// team entry: append every contributor with their comment/file
$mdata = $this->getMultiEditData($id);
$musers = '';
foreach ($mdata as $muser => $data) {
$uinfo = $this->makeExtraInfo("$data[comment] ($data[progress]%)");
$ufile = empty( $data['uplfile'] )
? ''
: $this->makeFileLink($data['uplfile'], $data['username']);
if ($muser == $user_id) {
// the owner's own contribution is merged into the owner link
$userlink = "$userlink $uinfo $ufile";
continue;
}
$ulink = $this->makeUserLinkWithEmail($data['username'],
$data['email'], $data['allowemail']);
if ($data['is_frozen']) {
$ulink = "<span class='is_frozen'>$ulink</span>";
}
$musers .= "\n\t<li>$ulink $uinfo $ufile</li>";
$extraclass .= $this->user->getId() == $muser ? ' hilite' : '';
}
if ( !empty($mdata) ) {
$userlink = "<ul class='simplelist'>\n\t<li>$userlink</li>$musers</ul>";
if ( isset($showeditors) && $showeditors ) {
$userlink .= $this->makeEditorList($mdata);
}
} else if ( $status == self::MAX_SCAN_STATUS ) {
$userlink .= ' (<strong>очакват се коректори</strong>)';
}
}
$umarker = $this->_getUserTypeMarker($type);
$adminFields = $this->userIsAdmin() ? $this->makeAdminFieldsForTable($dbrow) : '';
if ($astable) {
return <<<EOS
<tr class="$this->rowclass$extraclass" id="e$id">
<td class="date" title="$date">$ddate</td>
$adminFields
<td>$umarker</td>
<td>$info</td>
<td>$commentsLink</td>
<td>$file</td>
<td>$title</td>
<td>$author</td>
<td style="min-width: 10em">$st $sis_frozen</td>
<td>$userlink</td>
</tr>
EOS;
}
// compact (non-table) rendering; $showtime/$showtitle may disable parts
$time = !isset($showtime) || $showtime ? "Дата: $date<br>" : '';
$titlev = !isset($showtitle) || $showtitle ? $title : '';
return <<<EOS
<p>$time
$info $titlev<br>
<strong>Автор:</strong> $author<br>
<strong>Етап:</strong> $st $sis_frozen<br>
Подготвя се от $userlink
</p>
EOS;
}
/**
 * Renders the admin-status table cell: a popover trigger carrying the
 * admin comment, or an empty cell when there is no comment.
 * NOTE(review): htmlspecialchars(nl2br(...)) also escapes the inserted
 * <br /> tags — presumably nl2br(htmlspecialchars(...)) was intended;
 * confirm against the popover JS before changing.
 */
private function makeAdminFieldsForTable($dbrow)
{
if (empty($dbrow['admin_comment'])) {
return '<td></td>';
}
$comment = htmlspecialchars(nl2br($dbrow['admin_comment']));
// status doubles as a CSS class suffix (spaces become dashes)
$class = htmlspecialchars(str_replace(' ', '-', $dbrow['admin_status']));
return <<<HTML
<td>
<span class="popover-trigger workroom-$class" data-content="$comment">
<span>$dbrow[admin_status]</span>
</span>
</td>
HTML;
}
/**
 * Icon marker for an entry type (single-user vs. team), with a
 * screen-reader-only textual alternative.
 */
protected function _getUserTypeMarker($type)
{
	return sprintf(
		'<span class="%s"><span class="sr-only">%s</span></span>',
		$this->tabImgs[$type],
		$this->tabImgAlts[$type]
	);
}
/**
 * Status icon plus its Bulgarian label for the given status code.
 */
public function makeStatus($code) {
	$icon = sprintf("<span class='%s'></span>", $this->statusClasses[$code]);
	return $icon . ' ' . $this->statuses[$code];
}
/**
 * Combines publisher, publication year and the entry comment into one
 * info snippet (popover or expanded, per $expand).
 */
private function makeWorkEntryInfo($dbrow, $expand = false) {
	$parts = array();
	if ($dbrow['publisher']) {
		$parts[] = '<b>Издател:</b> ' . $dbrow['publisher'];
	}
	if ($dbrow['pub_year']) {
		$parts[] = '<b>Година:</b> ' . $dbrow['pub_year'];
	}
	// the free-text comment always comes last
	$parts[] = $dbrow['comment'];
	$combined = implode("\n", $parts);
	return $this->makeExtraInfo($combined, $expand);
}
/**
 * Normalizes a free-text note (CR dropped, LF rendered as <br>) and
 * wraps it in a popover trigger, unless $expand is set or the note is
 * empty — then the normalized text is returned as-is.
 */
public function makeExtraInfo($info, $expand = false) {
	$normalized = strtr(trim($info), array(
		"\n" => '<br>',
		"\r" => '',
	));
	if ($expand || empty($normalized)) {
		return $normalized;
	}
	$escaped = String::myhtmlspecialchars($normalized);
	return '<span class="popover-trigger" data-content="' . $escaped
		. '"><span class="fa fa-info-circle"></span><span class="sr-only">Инфо</span></span>';
}
/**
 * Renders a Bootstrap progress bar for the given percentage, or just
 * the "NN%" text when progress bars are disabled for this page.
 */
public function makeProgressBar($progressInPerc) {
$perc = $progressInPerc .'%';
if ( !$this->showProgressbar ) {
return $perc;
}
return <<<HTML
<div class="progress">
<div class="progress-bar" role="progressbar" aria-valuenow="$progressInPerc" aria-valuemin="0" aria-valuemax="100" style="width: $progressInPerc%;">
<span>$progressInPerc%</span>
</div>
</div>
HTML;
}
/**
 * "Add a new entry" button, shown only to users allowed to add entries.
 */
protected function makeNewEntryLink() {
	if (!$this->userCanAddEntry()) {
		return '';
	}
	$url = $this->controller->generateUrl('workroom_entry_new');
	return '<a href="' . $url . '" class="btn btn-primary"><span class="fa fa-plus"></span> Добавяне на нов запис</a>';
}
/**
 * Builds the "Преглед" dropdown: one link per view filter, one per
 * status code (as "st-N" subactions), and a link to the contributors
 * list. The currently active filter gets the "selected" class.
 */
protected function makeViewWorksLinks() {
$links = array();
foreach ($this->viewTypes as $type => $title) {
$class = $this->subaction == $type ? 'selected' : '';
$links[] = sprintf('<li><a href="%s" class="%s" title="Преглед на произведенията по критерий „%s“">%s %s</a></li>',
$this->controller->generateUrl('workroom', array(
$this->FF_SUBACTION => $type
)),
$class, $title, "<span class='{$this->statusClasses[$type]}'></span>", $title);
}
$links[] = '<li role="presentation" class="divider"></li>';
// one filter link per workflow status
foreach ($this->statuses as $code => $statusTitle) {
$type = "st-$code";
$class = $this->subaction == $type ? 'selected' : '';
$links[] = sprintf('<li><a href="%s" class="%s" title="Преглед на произведенията по критерий „%s“">%s %s</a></li>',
$this->controller->generateUrl('workroom', array(
$this->FF_SUBACTION => $type
)),
$class, $statusTitle, "<span class='{$this->statusClasses[$code]}'></span>", $statusTitle);
}
$links[] = '<li role="presentation" class="divider"></li>';
$links[] = sprintf('<li><a href="%s">Списък на помощниците</a></li>', $this->controller->generateUrl('workroom_contrib'));
return '<div class="btn-group">
<button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown">Преглед <span class="caret"></span></button>
<ul class="dropdown-menu" role="menu">'. implode("\n", $links) .'</ul>
</div>';
}
/**
 * Renders the add/edit form for a work entry: type tabs, common fields
 * (responsible user, title, author, publisher, year, comment), the
 * type-specific edit fields, admin-only fields, the corrections diff
 * view and the comment thread. Read-only variants of the fields are
 * shown to users without delete rights on the entry.
 */
protected function makeForm() {
$this->title .= ' — '.(empty($this->entryId) ? 'Добавяне' : 'Редактиране');
$helpTop = empty($this->entryId) ? $this->makeAddEntryHelp() : '';
// build the single-user / team tabs; only the owner may switch types
$tabs = '';
foreach ($this->tabs as $type => $text) {
$text = "<span class='{$this->tabImgs[$type]}'></span> $text";
$class = '';
$url = '#';
if ($this->workType == $type) {
$class = 'active';
} else if ($this->thisUserCanDeleteEntry()) {
$route = 'workroom_entry_new';
$params = array('workType' => $type);
if ($this->entryId) {
$params['id'] = $this->entryId;
$route = 'workroom_entry_edit';
}
$url = $this->controller->generateUrl($route, $params);
}
$tabs .= "<li class='$class'><a href='$url'>$text</a></li>";
}
// type-specific field sets
if ( $this->isSingleUser($this->workType) ) {
$editFields = $this->makeSingleUserEditFields();
$extra = '';
} else {
$editFields = $this->makeMultiUserEditFields();
#$extra = $this->isScanDone() ? $this->makeMultiEditInput() : '';
$extra = $this->makeMultiEditInput();
}
// editable fields for the owner/admin, plain values for everyone else
if ( $this->thisUserCanDeleteEntry() ) {
$title = $this->out->textField('title', '', $this->btitle, 50, 255, null, '', array('class' => 'form-control'));
$author = $this->out->textField('author', '', $this->author, 50, 255,
0, '<NAME> са няколко, ги разделете със запетаи', array('class' => 'form-control'));
$publisher = $this->out->textField('publisher', '', $this->publisher, 50, 255, 0, null, array('class' => 'form-control'));
$pubYear = $this->out->textField('pubYear', '', $this->pubYear, 50, 255, 0, null, array('class' => 'form-control'));
$comment = $this->out->textarea($this->FF_COMMENT, '', $this->comment, 10, 80, null, array('class' => 'form-control'));
$delete = empty($this->entryId) || !$this->userIsAdmin() ? ''
: '<div class="error" style="margin-bottom:1em">'.
$this->out->checkbox('delete', '', false, 'Изтриване на записа') .
' (напр., ако произведението вече е добавено в библиотеката)</div>';
$button = $this->makeSubmitButton();
// entries already marked "for adding" are locked for regular users
if ($this->status == self::STATUS_7 && !$this->userCanSetStatus(self::STATUS_7)) {
$button = $delete = '';
}
} else {
$title = $this->btitle;
$author = $this->author;
$publisher = $this->publisher;
$pubYear = $this->pubYear;
$comment = $this->comment;
$button = $delete = '';
}
$alertIfDeleted = isset($this->entry) && $this->entry->isDeleted() ? '<div class="alert alert-danger">Този запис е изтрит.</div>' : '';
$helpBot = $this->isSingleUser($this->workType) ? $this->makeSingleUserHelp() : '';
$scanuser = $this->out->hiddenField('user', $this->scanuser);
$entry = $this->out->hiddenField('id', $this->entryId);
$workType = $this->out->hiddenField('workType', $this->workType);
$bypass = $this->out->hiddenField('bypass', $this->bypassExisting);
$action = $this->controller->generateUrl('workroom');
$this->addJs($this->createCommentsJavascript($this->entryId));
$corrections = $this->createCorrectionsView();
$adminFields = $this->userIsAdmin() ? $this->makeAdminOnlyFields() : '';
$user = $this->controller->getRepository('User')->find($this->scanuser);
$ulink = $this->makeUserLinkWithEmail($user->getUsername(),
$user->getEmail(), $user->getAllowemail());
return <<<EOS
$alertIfDeleted
$helpTop
<div style="clear:both"></div>
<ul class="nav nav-tabs">
$tabs
</ul>
<div class="tab-content">
<div class="tab-pane active">
<form action="$action" method="post" enctype="multipart/form-data" class="form-horizontal" role="form">
$scanuser
$entry
$workType
$bypass
<div class="form-group">
<label class="col-sm-2 control-label">Отговорник:</label>
<div class="col-sm-10">
<div class="form-control">
$ulink
</div>
</div>
</div>
<div class="form-group">
<label for="title" class="col-sm-2 control-label">Заглавие:</label>
<div class="col-sm-10">
$title
</div>
</div>
<div class="form-group">
<label for="author" class="col-sm-2 control-label">Автор:</label>
<div class="col-sm-10">
$author
</div>
</div>
<div class="form-group">
<label for="publisher" class="col-sm-2 control-label">Издател:</label>
<div class="col-sm-10">
$publisher
</div>
</div>
<div class="form-group">
<label for="pubYear" class="col-sm-2 control-label">Година на издаване:</label>
<div class="col-sm-10">
$pubYear
</div>
</div>
<div class="form-group">
<label for="$this->FF_COMMENT" class="col-sm-2 control-label">Коментар:</label>
<div class="col-sm-10">
$comment
</div>
</div>
$editFields
$adminFields
$delete
<div class="form-submit">$button</div>
</form>
$extra
</div>
</div>
$corrections
<div id="fos_comment_thread"></div>
<div id="helpBottom">
$helpBot
</div>
EOS;
}
/**
 * Renders the "Корекции" panel: client-side JS fetches the uploaded
 * file, locates the current library source referenced inside it, and
 * shows a line-based diff via diff_match_patch.
 * Fix: the embedded JS declared `var inIns = inDel = false;`, which
 * made `inDel` an implicit global — both flags are now local.
 */
private function createCorrectionsView() {
if (!$this->canShowCorrections()) {
return '';
}
// same domain as main site - for ajax
$newFile = str_replace('http://static.chitanka.info', '', $this->tmpfiles);
$dmpPath = $this->container->getParameter('assets_base_urls') . '/js/diff_match_patch.js';
return <<<CORRECTIONS
<fieldset>
<legend>Корекции</legend>
<button onclick="jQuery(this).hide(); showWorkroomDiff('#corrections')">Показване</button>
<pre id="corrections" style="display: none; white-space: pre-wrap; /* css-3 */ white-space: -moz-pre-wrap !important; /* Mozilla, since 1999 */ white-space: -pre-wrap; /* Opera 4-6 */ white-space: -o-pre-wrap; /* Opera 7 */ word-wrap: break-word; /* Internet Explorer 5.5+ */">
Зареждане...
</pre>
</fieldset>
<script src="$dmpPath"></script>
<script>
function showWorkroomDiff(target) {
function doDiff(currentContent, newContent) {
var dmp = new diff_match_patch();
var d = dmp.diff_main(currentContent, newContent);
dmp.diff_cleanupSemantic(d);
var ds = dmp.diff_prettyHtml(d);
var out = '';
var sl = ds.split('<br>');
var inIns = false, inDel = false;
var prevLine = 1;
for ( var i = 0, len = sl.length; i < len; i++ ) {
if ( sl[i].indexOf('<ins') != -1 ) inIns = true;
if ( sl[i].indexOf('<del') != -1 ) inDel = true;
if ( inIns || inDel ) {
var line = i+1;
if (prevLine < line-1) {
out += ' <span style="opacity: .1">[…]</span><br>';
}
out += '<span style="color: blue">' + line + ':</span> ' + sl[i] +'<br>';
prevLine = line;
}
if ( sl[i].indexOf('</ins>') != -1 ) inIns = false;
if ( sl[i].indexOf('</del>') != -1 ) inDel = false;
}
out = out.replace(/¶/g, '<span style="opacity:.1">¶</span>');
$(target).html(out);
}
$(target).show();
$.get('$newFile', function(newContent) {
// TODO find a better way to find the current text source
var m = newContent.match(/(http:\/\/chitanka.info\/(book|text)\/\d+)/);
if (m) {
var curContentUrl = m[1]+'.sfb';
$.get(curContentUrl, function(curContent){
doDiff(curContent, newContent);
});
} else {
$(target).text('Съдържанието на източника не беше открито.');
}
});
}
</script>
CORRECTIONS;
}
/**
 * Produces the inline JavaScript that asynchronously loads the
 * FOSComment thread for this work entry and pre-fills the "cc" field
 * with the responsible user's name. Returns '' for new entries.
 */
private function createCommentsJavascript($entry)
{
if (empty($entry)) {
return '';
}
$user = $this->controller->getRepository('User')->find($this->scanuser);
$threadUrl = $this->controller->generateUrl('fos_comment_post_threads');
$commentJs = $this->container->getParameter('assets_base_urls') . '/bundles/lib/js/comments.js';
return <<<JS
var fos_comment_thread_id = 'WorkEntry:$entry';
// api base url to use for initial requests
var fos_comment_thread_api_base_url = '$threadUrl';
// Snippet for asynchronously loading the comments
(function() {
var fos_comment_script = document.createElement('script');
fos_comment_script.async = true;
fos_comment_script.src = '$commentJs';
fos_comment_script.type = 'text/javascript';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(fos_comment_script);
})();
$(document)
.on('fos_comment_before_load_thread', '#fos_comment_thread', function (event, data) {
setTimeout(function(){
$("#fos_comment_comment_cc").val("{$user->getUsername()}");
}, 2000);
})
.on('fos_comment_show_form', '#fos_comment_thread', function (data) {
var button = $(data.target);
button.next().find('input[name="fos_comment_comment[cc]"]').val(button.data("name"));
})
.on('fos_comment_submitting_form', '#fos_comment_thread', function (event, data) {
var form = $(event.target);
if (form.is(".loading")) {
event.preventDefault();
return;
}
form.addClass("loading").find(":submit").attr("disabled", true);
})
.on('fos_comment_submitted_form', '#fos_comment_thread', function (event, data) {
var form = $(event.target);
form.removeClass("loading").find(":submit").removeAttr("disabled");
})
;
JS;
}
/**
 * The corrections diff is available only for entries titled as a
 * correction, whose temp file points at chitanka.info and is an SFB
 * file locally present in the workroom directory.
 */
private function canShowCorrections()
{
	if (strpos($this->btitle, '(корекция)') === false) {
		return false;
	}
	if (strpos($this->tmpfiles, 'chitanka.info') === false) {
		return false;
	}
	return File::isSFB($this->absTmpDir . basename($this->tmpfiles));
}
/**
 * Save button plus a cancel link pointing back to the main list.
 */
protected function makeSubmitButton() {
	$cancel = sprintf('<a href="%s" title="Към основния списък">Отказ</a>', $this->controller->generateUrl('workroom'));
	$submit = $this->out->submitButton('Запис', '', null, true, array('class' => 'btn btn-primary'));
	return $submit .'   '. $cancel;
}
/**
 * Edit fields specific to single-user entries: status select or
 * progress percentage, the frozen flag, and the file section (direct
 * upload or an external temp-file URL with its size).
 */
protected function makeSingleUserEditFields() {
$status = $this->getStatusSelectField($this->status);
$progress = $this->out->textField('progress', '', $this->progress, 2, 3);
$is_frozen = $this->out->checkbox('is_frozen', '', $this->is_frozen,
'Подготовката е спряна за известно време');
$file = $this->out->fileField('file', '');
$maxFileSize = $this->out->makeMaxFileSizeField();
$maxUploadSizeInMiB = Legacy::getMaxUploadSizeInMiB();
$tmpfiles = $this->out->textField('tmpfiles', '', rawurldecode($this->tmpfiles), 50, 255)
. '   '.$this->out->label('Размер: ', 'tfsize') .
$this->out->textField('tfsize', '', $this->tfsize, 2, 4) .
'<abbr title="Мебибайта">MiB</abbr>';
// link to the currently referenced external file, when set
$flink = $this->tmpfiles == self::DEF_TMPFILE ? ''
: $this->out->link( $this->makeTmpFilePath($this->tmpfiles), String::limitLength($this->tmpfiles)) .
($this->tfsize > 0 ? " ($this->tfsize MiB)" : '');
return <<<EOS
<div class="form-group">
<label for="entry_status" class="col-sm-2 control-label">Етап:</label>
<div class="col-sm-10">
<select name="entry_status" id="entry_status">$status</select>
  или  
$progress<label for="progress">%</label><br>
$is_frozen
</div>
</div>
<div class="form-group">
<label for="file" class="col-sm-2 control-label">Файл:</label>
<div class="col-sm-10">
<div>
$maxFileSize
$file (макс. $maxUploadSizeInMiB MiB)
</div>
<p>или</p>
<div>
$tmpfiles
<div>$flink</div>
</div>
</div>
</div>
EOS;
}
/**
 * Administrator-only form fields (admin status and admin comment);
 * empty for new entries, since there is no entity to read from yet.
 */
private function makeAdminOnlyFields()
{
if (empty($this->entry)) {
return '';
}
$status = $this->out->textField('admin_status', '', $this->entry->getAdminStatus(), 30, 255, null, '', array('class' => 'form-control'));
$comment = $this->out->textarea('admin_comment', '', $this->entry->getAdminComment(), 3, 80, null, array('class' => 'form-control'));
return <<<FIELDS
<div class="form-group">
<label for="admin_status" class="col-sm-2 control-label">Админ. статус:</label>
<div class="col-sm-10">
$status
</div>
</div>
<div class="form-group">
<label for="admin_comment" class="col-sm-2 control-label">Админ. коментар:</label>
<div class="col-sm-10">
$comment
</div>
</div>
FIELDS;
}
	/**
	 * Build the <option> list for the work-entry status <select>.
	 *
	 * Statuses 6 and 7 are removed when the current user is not allowed to
	 * set them (see userCanSetStatus()). Iteration stops once a status code
	 * exceeds $max — this relies on $this->statuses being ordered by code.
	 *
	 * @param int      $selected Status code to mark as selected
	 * @param int|null $max      Highest status code to offer, or null for all
	 * @return string HTML <option> elements (without the <select> wrapper)
	 */
	protected function getStatusSelectField($selected, $max = null)
	{
		$statuses = $this->statuses;
		foreach (array(self::STATUS_6, self::STATUS_7) as $status) {
			if ( ! $this->userCanSetStatus($status) ) {
				unset( $statuses[$status] );
			}
		}
		$field = '';
		foreach ($statuses as $code => $text) {
			if ( !is_null($max) && $code > $max) break;
			$sel = $selected == $code ? ' selected="selected"' : '';
			$field .= "<option value='$code'$sel>$text</option>";
		}
		return $field;
	}
protected function makeMultiUserEditFields() {
return $this->makeMultiScanInput();
}
	/**
	 * Build the scan-related form fields for a multi-user entry:
	 * status selector (or read-only status text), "frozen" checkbox,
	 * file upload and remote temporary-file fields.
	 *
	 * Editable widgets are only rendered for users who own the entry or
	 * supervise the workroom (thisUserCanDeleteEntry()); everyone else
	 * gets a read-only rendering. The status shown to non-supervisors is
	 * capped at MAX_SCAN_STATUS.
	 *
	 * @return string HTML fragment (.form-group rows)
	 */
	protected function makeMultiScanInput() {
		$is_frozenLabel = 'Подготовката е спряна за известно време';
		// Cap the displayed status at the last scan-related status.
		$cstatus = $this->status > self::MAX_SCAN_STATUS
			? self::MAX_SCAN_STATUS
			: $this->status;
		if ( $this->thisUserCanDeleteEntry() ) {
			if ( empty($this->multidata) || $this->userIsSupervisor() ) {
				// Supervisors may pick any status; owners only scan statuses.
				$status = $this->userIsSupervisor()
					? $this->getStatusSelectField($this->status)
					: $this->getStatusSelectField($cstatus, self::MAX_SCAN_STATUS);
				$status = "<select name='entry_status' id='entry_status'>$status</select>";
				$is_frozen = $this->out->checkbox('is_frozen', '', $this->is_frozen, $is_frozenLabel);
			} else {
				// Editors have joined: status is fixed, submitted via hidden field.
				$status = $this->statuses[$cstatus]
					. $this->out->hiddenField('entry_status', $this->status);
				$is_frozen = '';
			}
			$tmpfiles = $this->out->textField('tmpfiles', '', rawurldecode($this->tmpfiles), 50, 255);
			$tmpfiles .= '   '.$this->out->label('Размер: ', 'tfsize') .
				$this->out->textField('tfsize', '', $this->tfsize, 2, 4) .
				'<abbr title="Мебибайта">MiB</abbr>';
		} else {
			$status = $this->statuses[$cstatus];
			$is_frozen = $this->is_frozen ? "($is_frozenLabel)" : '';
			$tmpfiles = '';
		}
		// Link to the already-uploaded temporary file, if any.
		$flink = $this->tmpfiles == self::DEF_TMPFILE ? ''
			: $this->out->link( $this->makeTmpFilePath($this->tmpfiles), String::limitLength($this->tmpfiles)) .
				($this->tfsize > 0 ? " ($this->tfsize MiB)" : '');
		$file = $this->out->fileField('file', '');
		$maxFileSize = $this->out->makeMaxFileSizeField();
		$maxUploadSizeInMiB = Legacy::getMaxUploadSizeInMiB();
		return <<<EOS
<div class="form-group">
	<label for="entry_status" class="col-sm-2 control-label">Етап:</label>
	<div class="col-sm-10">
		$status
		$is_frozen
	</div>
</div>
<div class="form-group">
	<label for="file" class="col-sm-2 control-label">Файл:</label>
	<div class="col-sm-10">
		<div>
			$maxFileSize
			$file (макс. $maxUploadSizeInMiB MiB)
		</div>
		<p>или</p>
		<div>
			$tmpfiles
			<div>$flink</div>
		</div>
	</div>
</div>
EOS;
	}
	/**
	 * Build the "proofreading" section for a multi-user entry: the list
	 * of current editors plus (when permitted) the current user's own
	 * contribution form.
	 *
	 * @return string HTML fragment
	 */
	protected function makeMultiEditInput() {
		$editorList = $this->makeEditorList();
		$myContrib = $this->isMyContribAllowed() ? $this->makeMultiEditMyInput() : '';
		return <<<EOS
<h3>Коригиране</h3>
$editorList
$myContrib
EOS;
	}
protected function isMyContribAllowed() {
if ($this->userIsSupervisor()) {
return true;
}
if (in_array($this->status, array(self::STATUS_5, self::STATUS_6, self::STATUS_7))) {
return false;
}
if ($this->user->isAnonymous()) {
return false;
}
return true;
}
	/**
	 * Build the "my contribution" form for the current user on a
	 * multi-user entry: comment, progress, frozen flag, file upload,
	 * remote file, and (for admins) the "ready to add" checkbox.
	 *
	 * When the user already has a contribution row, its fields are pulled
	 * into local variables via extract() ($comment, $progress, $uplfile,
	 * $filesize, $is_frozen).
	 *
	 * NOTE(review): the is_frozen checkbox below is rendered from
	 * $this->is_frozen (the entry's flag), not from the user's own
	 * extracted $is_frozen — looks like it may be unintentional; verify
	 * against the intended behavior before changing.
	 *
	 * @return string HTML <form>
	 */
	protected function makeMultiEditMyInput() {
		$msg = '';
		if ( empty($this->multidata[$this->user->getId()]) ) {
			// First-time contributor: start with empty fields and a hint.
			$comment = $progress = $uplfile = $filesize = '';
			$is_frozen = false;
			$msg = '<p>Вие също може да се включите в подготовката на текста.</p>';
		} else {
			// Prefill from the user's existing contribution row.
			extract( $this->multidata[$this->user->getId()] );
		}
		$ulink = $this->makeUserLink($this->user->getUsername());
		$button = $this->makeSubmitButton();
		$scanuser = $this->out->hiddenField('user', $this->scanuser);
		$entry = $this->out->hiddenField('id', $this->entryId);
		$workType = $this->out->hiddenField('workType', $this->workType);
		$form = $this->out->hiddenField('form', 'edit');
		$subaction = $this->out->hiddenField($this->FF_SUBACTION, $this->subaction);
		$comment = $this->out->textarea($this->FF_EDIT_COMMENT, '', $comment, 10, 80, null, array('class' => 'form-control'));
		$progress = $this->out->textField('progress', '', $progress, 2, 3, null, '', array('class' => 'form-control'));
		$is_frozen = $this->out->checkbox('is_frozen', 'is_frozen_e', $this->is_frozen,
			'Корекцията е спряна за известно време');
		$file = $this->out->fileField('file', 'file2');
		$readytogo = $this->userCanMarkAsReady()
			? $this->out->checkbox('ready', 'ready', false, 'Готово е за добавяне')
			: '';
		$action = $this->controller->generateUrl('workroom');
		$remoteFile = $this->out->textField('uplfile', 'uplfile2', rawurldecode($uplfile), 50, 255)
			. '   '.$this->out->label('Размер: ', 'filesize2') .
			$this->out->textField('filesize', 'filesize2', $filesize, 2, 4) .
			'<abbr title="Мебибайта">MiB</abbr>';
		return <<<EOS
<form action="$action" method="post" enctype="multipart/form-data" class="form-horizontal" role="form">
<fieldset>
	<legend>Моят принос ($ulink)</legend>
	$msg
	$scanuser
	$entry
	$workType
	$form
	$subaction
	<div class="form-group">
		<label for="$this->FF_EDIT_COMMENT" class="col-sm-2 control-label">Коментар:</label>
		<div class="col-sm-10">
			$comment
		</div>
	</div>
	<div class="form-group">
		<label for="progress" class="col-sm-2 control-label">Напредък:</label>
		<div class="col-sm-10">
			<div class="input-group">
				$progress
				<span class="input-group-addon">%</span>
			</div>
			$is_frozen
		</div>
	</div>
	<div class="form-group">
		<label for="file2" class="col-sm-2 control-label">Файл:</label>
		<div class="col-sm-10">
			$file
		</div>
	</div>
	<div class="form-group">
		<label for="uplfile2" class="col-sm-2 control-label">Външен файл:</label>
		<div class="col-sm-10">
			$remoteFile
		</div>
	</div>
	<div class="form-group">
		<div class="col-sm-2"> </div>
		<div class="col-sm-10">
			$readytogo
		</div>
	</div>
	<div class="form-submit">$button</div>
</fieldset>
</form>
EOS;
	}
	/**
	 * Render the table of users currently proofreading the entry.
	 *
	 * Each row of $mdata is expanded via extract() and is expected to
	 * provide: id, date, username, email, allowemail, comment, uplfile,
	 * filesize, progress, is_frozen (schema of getMultiEditData()).
	 *
	 * @param array|null $mdata Contribution rows; defaults to $this->multidata
	 * @return string HTML table, or an informational paragraph when empty
	 */
	protected function makeEditorList($mdata = null) {
		Legacy::fillOnEmpty($mdata, $this->multidata);
		if ( empty($mdata) ) {
			return '<p>Все още никой не се е включил в корекцията на текста.</p>';
		}
		$l = $class = '';
		foreach ($mdata as $edata) {
			extract($edata);
			$class = $this->out->nextRowClass($class);
			$ulink = $this->makeUserLinkWithEmail($username, $email, $allowemail);
			// Preserve line breaks from the plain-text comment.
			$comment = strtr($comment, array("\n" => "<br>\n"));
			if ( !empty($uplfile) ) {
				$comment .= ' ' . $this->makeFileLink($uplfile, $username, $filesize);
			}
			$progressbar = $this->makeProgressBar($progress);
			if ($is_frozen) {
				$class .= ' is_frozen';
				$progressbar .= ' (замразена)';
			}
			$deleteForm = $this->controller->renderView('LibBundle:Workroom:contrib_delete_form.html.twig', array('contrib' => array('id' => $edata['id'])));
			$l .= <<<EOS

	<tr class="$class deletable">
		<td>$date</td>
		<td>$ulink $deleteForm</td>
		<td>$comment</td>
		<td>$progressbar</td>
	</tr>
EOS;
		}
		return <<<EOS
<table class="content">
<caption>Следните потребители обработват текста:</caption>
<thead>
	<tr>
		<th>Дата</th>
		<th>Потребител</th>
		<th>Коментар</th>
		<th>Напредък</th>
	</tr>
</thead>
<tbody>$l
</tbody>
</table>
EOS;
	}
	/**
	 * Help text shown on the main workroom page. For anonymous users a
	 * registration hint is spliced into the second paragraph.
	 *
	 * @return string HTML fragment
	 */
	protected function makePageHelp() {
		$regUrl = $this->controller->generateUrl('register');
		$ext = $this->user->isAnonymous() ? "е необходимо първо да се <a href=\"$regUrl\">регистрирате</a> (не се притеснявайте, ще ви отнеме най-много 10–20 секунди, колкото и бавно да пишете). След това се върнете на тази страница и" : '';
		$umarker = $this->_getUserTypeMarker(1);
		return <<<EOS
<p>Тук може да разгледате списък на произведенията, които се подготвят за добавяне в библиотеката.</p>
<p>За да започнете подготовката на нов текст, $ext последвайте връзката „Добавяне на нов запис“. В случай че нямате възможност сами да сканирате текстове, може да се присъедините към коригирането на заглавията, отбелязани ето така: $umarker.</p>
<p>Бързината на добавянето на нови текстове в библиотеката зависи както от броя на грешките, останали след сканирането и разпознаването, така и от форма̀та на текста. Най-бързо ще бъдат добавяни отлично коригирани текстове, правилно преобразувани във <a href="http://wiki.chitanka.info/SFB">формат SFB</a>.</p>
<div class="alert alert-danger error newbooks-notice media" style="margin:1em 0">
	<div class="pull-left">
		<span class="fa fa-warning"></span>
	</div>
	<div class="media-body">
		Разрешено е да се добавят само книги, издадени на български преди 2012 г. Изключение се прави за онези текстове, които са пратени от авторите си, както и за фен-преводи.
	</div>
</div>
EOS;
	}
	/**
	 * Help text shown above the "add new entry" form.
	 *
	 * @return string HTML fragment
	 */
	protected function makeAddEntryHelp() {
		$mainlink = $this->controller->generateUrl('workroom');
		return <<<EOS
<p>Чрез долния формуляр може да добавите ново произведение към <a href="$mainlink">списъка с подготвяните</a>.</p>
<p>Имате възможност за избор между „{$this->tabs[0]}“ (сами ще обработите целия текст) или „{$this->tabs[1]}“ (вие ще сканирате текста, а други потребители ще имат възможността да се включат в коригирането му).</p>
<p>Въведете заглавието и автора и накрая посочете на какъв етап се намира подготовката. Ако още не сте започнали сканирането, изберете „{$this->statuses[self::STATUS_0]}“.</p>
<p>През следващите дни винаги може да промените етапа, на който се намира подготовката на произведението. За тази цел, в основния списък, заглавието ще представлява връзка към страницата за редактиране.</p>
EOS;
	}
	/**
	 * Help text shown on the edit page of a single-user entry.
	 *
	 * @return string HTML fragment
	 */
	protected function makeSingleUserHelp() {
		return <<<EOS
<p>На тази страница може да променяте данните за произведението.
Най-често ще се налага да обновявате етапа, на който се намира подготовката. Възможно е да посочите напредъка на подготовката и чрез процент, в случай че операциите сканиране, разпознаване и коригиране се извършват едновременно.</p>
<p>Ако подготовката на произведението е замразена, това може да се посочи, като се отметне полето „Подготовката е спряна за известно време“.</p>
EOS;
	}
	/**
	 * Render the all-time contributor table: one row per user with the
	 * number and total size of the texts they have scanned/proofread,
	 * ordered by size descending.
	 *
	 * Uses $this->rownr / $this->rowclass as iteration state for the
	 * per-row callback makeContribListItem().
	 *
	 * @return string HTML table, or '' when there are no contributions
	 */
	protected function makeContribList() {
		$this->rownr = 0;
		$this->rowclass = '';
		$qa = array(
			'SELECT' => 'ut.user_id, u.username, COUNT(ut.user_id) count, SUM(ut.size) size',
			'FROM' => DBT_USER_TEXT .' ut',
			'LEFT JOIN' => array(DBT_USER .' u' => 'ut.user_id = u.id'),
			'GROUP BY' => 'ut.user_id',
			'ORDER BY' => 'size DESC',
		);
		$q = $this->db->extselectQ($qa);
		$list = $this->db->iterateOverResult($q, 'makeContribListItem', $this);
		if ( empty($list) ) {
			return '';
		}
		return <<<EOS
<table class="table table-striped table-condensed table-bordered" style="margin: 0 auto; max-width: 30em">
<caption>Следните потребители са сканирали или коригирали текстове за библиотеката:</caption>
<thead>
	<tr>
		<th>№</th>
		<th>Потребител</th>
		<th class="text-right" title="Размер на обработените произведения в мебибайта">Размер (в <abbr title="Кибибайта">KiB</abbr>)</th>
		<th class="text-right" title="Брой на обработените произведения">Брой</th>
	</tr>
</thead>
<tbody>$list
</tbody>
</table>
EOS;
	}
	/**
	 * Per-row callback for makeContribList(): renders one contributor row.
	 * Rows with user_id = 0 show the plain username without a profile link.
	 *
	 * @param array $dbrow Result row with keys user_id, username, count, size
	 * @return string One <tr> of the contributor table
	 */
	public function makeContribListItem($dbrow) {
		$this->rowclass = $this->out->nextRowClass($this->rowclass);
		$ulink = $dbrow['user_id'] ? $this->makeUserLink($dbrow['username']) : $dbrow['username'];
		$s = Number::formatNumber($dbrow['size'], 0);
		$this->rownr += 1;
		return <<<HTML

	<tr class="$this->rowclass">
		<td>$this->rownr</td>
		<td>$ulink</td>
		<td class="text-right">$s</td>
		<td class="text-right">$dbrow[count]</td>
	</tr>
HTML;
	}
	/**
	 * Load the work entry identified by $this->entryId and copy its
	 * fields into the page state.
	 *
	 * @throws NotFoundHttpException when the entry does not exist, or is
	 *         deleted and the current user is not a workroom admin
	 */
	protected function initData() {
		$entry = $this->repo()->find($this->entryId);
		if ($entry == null) {
			throw new NotFoundHttpException("Няма запис с номер $this->entryId.");
		}
		if ($entry->isDeleted() && !$this->userIsAdmin()) {
			throw new NotFoundHttpException("Изтрит запис.");
		}
		$this->btitle = $entry->getTitle();
		$this->author = $entry->getAuthor();
		$this->publisher = $entry->getPublisher();
		$this->pubYear = $entry->getPubYear();
		$this->scanuser = $entry->getUser()->getId();
		$this->comment = $entry->getComment();
		$this->date = $entry->getDate()->format('Y-m-d');
		$this->status = $entry->getStatus();
		$this->progress = $entry->getProgress();
		$this->is_frozen = $entry->getIsFrozen();
		$this->tmpfiles = $entry->getTmpfiles();
		$this->tfsize = $entry->getTfsize();
		// Only entry owners/supervisors may override the work type via the
		// request; everyone else gets the stored type.
		if ( !$this->thisUserCanDeleteEntry() || $this->request->value('workType', null, 3) === null ) {
			$this->workType = $entry->getType();
		}
		$this->multidata = $this->getMultiEditData($entry->getId());
		$this->entry = $entry;
	}
	/**
	 * Fetch all non-deleted proofreading contributions for an entry,
	 * newest first, joined with the contributor's user record.
	 *
	 * @param int $mainId Work entry id
	 * @return array Contribution rows keyed by user_id
	 *         (filled by the addMultiEditData() callback)
	 */
	public function getMultiEditData($mainId) {
		$qa = array(
			'SELECT' => 'm.*, DATE(m.date) date, u.username, u.email, u.allowemail',
			'FROM' => self::DB_TABLE2 .' m',
			'LEFT JOIN' => array(
				DBT_USER .' u' => 'm.user_id = u.id',
			),
			'WHERE' => array('entry_id' => $mainId, 'deleted_at IS NULL'),
			'ORDER BY' => 'm.date DESC',
		);
		$q = $this->db->extselectQ($qa);
		$this->_medata = array();
		$this->db->iterateOverResult($q, 'addMultiEditData', $this);
		return $this->_medata;
	}
public function addMultiEditData($dbrow) {
$this->_medata[$dbrow['user_id']] = $dbrow;
}
protected function isScanDone() {
return $this->status >= self::MAX_SCAN_STATUS;
}
	/**
	 * Is the proofreading phase finished? True when no active (not
	 * frozen, not deleted) contribution for this entry has a progress
	 * below 100%.
	 *
	 * @return bool
	 */
	protected function isEditDone() {
		$key = array(
			'entry_id' => $this->entryId,
			'is_frozen' => false,
			'progress < 100',
			'deleted_at IS NULL',
		);
		return ! $this->db->exists(self::DB_TABLE2, $key);
	}
public function isSingleUser($type = null) {
if ($type === null) $type = $this->workType;
return $type == 0;
}
public function isMultiUser($type = null) {
if ($type === null) $type = $this->workType;
return $type == 1;
}
	/**
	 * May the currently logged-in user edit the given entry?
	 * Supervisors and multi-user entries (type 1): yes. Otherwise only
	 * if the current user owns the entry record.
	 *
	 * @param int $entry Work entry id
	 * @param int $type  Work type (0 = single-user, 1 = multi-user)
	 * @return bool
	 */
	public function thisUserCanEditEntry($entry, $type) {
		if ($this->user->isAnonymous()) {
			return false;
		}
		if ($this->userIsSupervisor() || $type == 1) return true;
		$key = array('id' => $entry, 'user_id' => $this->user->getId());
		return $this->db->exists(self::DB_TABLE, $key);
	}
	/**
	 * May the given user edit an entry of the given type?
	 *
	 * NOTE(review): $user is treated inconsistently — isAnonymous() is
	 * called on it as if it were a user object, but it is then loosely
	 * compared to $this->user->getId() as if it were an id. Verify what
	 * the call sites actually pass before relying on this method.
	 *
	 * @param mixed $user User (object or id — see note above)
	 * @param int   $type Work type (0 = single-user, 1 = multi-user)
	 * @return bool
	 */
	public function userCanEditEntry($user, $type = 0) {
		if ($user->isAnonymous()) {
			return false;
		}
		return $this->userIsSupervisor()
			|| $user == $this->user->getId()
			|| ($type == 1 && $this->userCanAddEntry());
	}
	/**
	 * May the current user delete (and fully manage) the current entry?
	 * Supervisors always may; so may anyone when no entry is loaded yet.
	 * Otherwise the entry must belong to the current user.
	 * The DB lookup result is memoized in $this->_tucde.
	 *
	 * @return bool
	 */
	public function thisUserCanDeleteEntry() {
		if ($this->userIsSupervisor() || empty($this->entryId)) return true;
		if ( isset($this->_tucde) ) return $this->_tucde;
		$key = array('id' => $this->entryId, 'user_id' => $this->user->getId());
		return $this->_tucde = $this->db->exists(self::DB_TABLE, $key);
	}
	/**
	 * May the given user delete the current entry? Workroom admins and
	 * supervisors may; otherwise only the user who scanned the entry.
	 *
	 * @param int $user User id (loosely compared to $this->scanuser)
	 * @return bool
	 */
	public function userCanDeleteEntry($user) {
		return $this->user->inGroup('workroom-admin', 'workroom-supervisor') || $user == $this->scanuser;
	}
public function userCanAddEntry() {
return $this->user->isAuthenticated() && $this->user->allowsEmail();
}
public function userCanMarkAsReady()
{
return $this->userIsAdmin();
}
public function isReady()
{
return $this->userCanMarkAsReady() && $this->request->checkbox('ready');
}
private function userIsAdmin()
{
return $this->user->inGroup('workroom-admin');
}
private function userIsSupervisor()
{
return $this->user->inGroup(array('workroom-admin', 'workroom-supervisor'));
}
private function userCanSetStatus($status) {
switch ($status) {
case self::STATUS_7:
return $this->user->inGroup('workroom-admin');
case self::STATUS_6:
return $this->user->inGroup(array('workroom-admin', 'workroom-supervisor'));
default:
return $this->user->isAuthenticated();
}
}
	/**
	 * E-mail the user who scanned the given entry that a new proofreader
	 * has joined. Silently succeeds when that user has no e-mail address.
	 *
	 * Note: extract() pulls $title, $author, $user_id from the entry row
	 * and $realname, $email from the user row.
	 *
	 * @param int $entry Work entry id
	 * @return bool Result of the mail page execution, or true if skipped
	 */
	protected function informScanUser($entry) {
		$res = $this->db->select(self::DB_TABLE, array('id' => $entry));
		extract( $this->db->fetchAssoc($res) );
		$sel = array('realname', 'email');
		$res = $this->db->select(DBT_USER, array('id' => $user_id), $sel);
		extract( $this->db->fetchAssoc($res) );
		if ( empty($email) ) {
			return true;
		}
		$editLink = $this->controller->generateUrl('workroom_entry_edit', array('id' => $entry));
		$mailpage = Setup::getPage('Mail', $this->controller, $this->container, false);
		$msg = <<<EOS
Нов потребител се присъедини към подготовката на „{$title}“ от $author.
$editLink
Моята библиотека
EOS;
		$fields = array(
			'mailToName' => $realname,
			'mailToEmail' => $email,
			'mailSubject' => "$this->sitename: Нов коректор на ваш текст",
			'mailMessage' => $msg);
		$mailpage->setFields($fields);
		return $mailpage->execute();
	}
	/**
	 * Neutralize potentially dangerous upload names: append ".txt" when
	 * the extension is not in the whitelist, then strip leading dots so
	 * the stored name cannot be a hidden (dot) file.
	 *
	 * @param string $filename
	 * @return string Safe file name
	 */
	protected function escapeBlackListedExt($filename) {
		if ( ! File::hasValidExtension($filename, $this->fileWhiteList)) {
			$filename .= '.txt';
		}
		// remove leading dots
		$filename = ltrim($filename, '.');
		return $filename;
	}
protected function makeTmpFilePath($file = '') {
if (preg_match('|https?://|', $file)) {
return $file;
}
return Setup::setting('workroom_root').'/'.$this->tmpDir . $file;
}
	/**
	 * Render a small save-icon link to an uploaded temporary file.
	 *
	 * @param string   $file     File name (resolved via makeTmpFilePath())
	 * @param string   $username Uploader, used in the tooltip when given
	 * @param int|null $filesize Size in MiB, appended to the tooltip
	 * @return string HTML anchor
	 */
	protected function makeFileLink($file, $username = '', $filesize = null)
	{
		$title = empty($username)
			? $file
			: "Качен файл от $username — $file";
		if ($filesize) {
			$title .= " ($filesize MiB)";
		}
		return $this->out->link_raw(
			$this->makeTmpFilePath($file),
			'<span class="fa fa-save"></span><span class="sr-only">Файл</span>',
			$title);
	}
static public function rawurlencode($file)
{
return strtr(rawurlencode($file), array(
'%2F' => '/',
'%3A' => ':',
));
}
	/**
	 * Move all temporary files of an entry into the "deleted" directory.
	 *
	 * NOTE(review): the shell command interpolates $entry and the
	 * configured paths directly. $entry is presumably a numeric id — if
	 * it can ever contain user-controlled text this is shell-injectable;
	 * verify at the call sites (escapeshellarg() would be safer).
	 *
	 * @param int $entry Work entry id
	 */
	protected function deleteEntryFiles($entry)
	{
		$files = $this->absTmpDir . "$entry-*";
		$delDir = $this->absTmpDir . 'deleted';
		`mv $files $delDir`;
	}
	/**
	 * Typographically pretty-print a user comment via the project's
	 * String::my_replace() helper (semantics defined there).
	 *
	 * @param string $text
	 * @return string
	 */
	public function pretifyComment($text)
	{
		return String::my_replace($text);
	}
	/**
	 * Shortcut to the WorkEntry Doctrine repository.
	 *
	 * @return WorkEntryRepository
	 */
	private function repo() {
		return $this->controller->getRepository('WorkEntry');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
/**
 * Thin query-builder/wrapper around the legacy mysql_* client API.
 * Supports an optional read-only slave (all non-write queries are routed
 * to it) and an optional master mirror (all write queries are replayed
 * there). Write queries and errors are appended to daily log files.
 *
 * NOTE: the mysql_* extension was removed in PHP 7; this class only runs
 * on PHP 5.x.
 */
class mlDatabase {
	/** Database server name */
	protected $server;
	/** Database user name */
	protected $user;
	/** Database user password */
	protected $pass;
	/** Database name */
	protected $dbName;
	/** Optional table-name prefix (see setPrefix()) */
	protected $prefix = '';
	/** Connection character set */
	protected $charset = 'utf8';
	/** Connection collation */
	protected $collationConn = 'utf8_general_ci';
	/**
	Connection to the database
	@var resource
	*/
	protected $conn = NULL;
	/** Whether queries/errors are appended to the log files */
	protected $doLog = true;
	/** Last mysql error number (set on query failure) */
	protected $errno;
	/** Optional read-only replica; receives all non-write queries */
	protected $slave = null;
	/** Optional write mirror; receives a copy of all write queries */
	protected $master = null;

	/**
	 * @param string $server Database host
	 * @param string $user   Database user
	 * @param string $pass   Database password
	 * @param string $dbName Database name
	 */
	public function __construct($server, $user, $pass, $dbName) {
		$this->server = $server;
		$this->user = $user;
		// BUGFIX: assign the constructor argument; the previous revision
		// contained an invalid placeholder token here.
		$this->pass = $pass;
		$this->dbName = $dbName;
		// Log files rotate daily by embedding the date in the name.
		$date = date('Y-m-d');
		$this->logFile = dirname(__FILE__)."/../../../../app/logs/db-$date.sql";
		$this->errLogFile = dirname(__FILE__)."/../../../../app/logs/db-error-$date";
	}

	/** Configure a read-only slave; non-write queries will go to it. */
	public function setSlave($server, $user, $pass, $name) {
		$this->slave = new mlDatabase($server, $user, $pass, $name);
	}

	/** Configure a write mirror; write queries are replayed on it (unlogged). */
	public function setMaster($server, $user, $pass, $name) {
		$this->master = new mlDatabase($server, $user, $pass, $name);
		$this->master->disableLogging();
	}

	/** Does at least one row matching $keys exist in $table? */
	public function exists($table, $keys = array()) {
		return $this->getCount($table, $keys) > 0;
	}

	/**
	 * Fetch rows as associative arrays, keyed by the $kfield column
	 * (defaults to 'id').
	 */
	public function getObjects($table, $dbkey = array(), $kfield = null) {
		fillOnNull($kfield, 'id');
		$res = $this->select($table, $dbkey);
		$objs = array();
		while ( $row = mysql_fetch_assoc($res) ) {
			$objs[ $row[$kfield] ] = $row;
		}
		return $objs;
	}

	/**
	 * Fetch a key => name map, ordered by the name column.
	 * Defaults: key column 'id', name column 'name'.
	 */
	public function getNames($table, $dbkey = array(), $nfield = null, $kfield = null) {
		fillOnNull($nfield, 'name');
		fillOnNull($kfield, 'id');
		$sel = array($kfield, $nfield);
		$res = $this->select($table, $dbkey, $sel, $nfield);
		$objs = array();
		while ( $row = mysql_fetch_row($res) ) {
			$objs[ $row[0] ] = $row[1];
		}
		return $objs;
	}

	/**
	 * Fetch the given fields of the first matching row.
	 * @return mixed Scalar when one field was requested, numeric array
	 *         when several, null when no row matches
	 */
	public function getFields($table, $dbkey, $fields) {
		$res = $this->select($table, $dbkey, $fields);
		if ( $this->numRows($res) == 0 ) {
			return null;
		}
		$row = $this->fetchRow($res);
		return count($row) > 1 ? $row : $row[0];
	}

	/** Like getFields(), but for all matching rows. */
	public function getFieldsMulti($table, $dbkey, $fields) {
		$res = $this->select($table, $dbkey, $fields);
		$data = array();
		while ( $row = $this->fetchRow($res) ) {
			$data[] = count($row) > 1 ? $row : $row[0];
		}
		return $data;
	}

	/**
	 * Pick a random row by probing random ids between MIN(id) and MAX(id)
	 * until a hit (loops forever on an empty table — caller beware).
	 */
	public function getRandomRow($table) {
		$res = $this->select($table, array(), array('MIN(id)', 'MAX(id)'));
		list($min, $max) = $this->fetchRow($res);
		do {
			$res = $this->select($table, array('id' => rand($min, $max)));
			$row = $this->fetchAssoc($res);
			if ( !empty($row) ) return $row;
		} while (true);
	}

	/** COUNT(*) of the rows matching $keys. */
	public function getCount($table, $keys = array()) {
		$res = $this->select($table, $keys, 'COUNT(*)');
		list($count) = mysql_fetch_row($res);
		return (int) $count;
	}

	/**
	 * Run $query and call $func (a function name, or a method of $obj)
	 * for every result row, concatenating the return values.
	 * @return string Concatenated callback output
	 */
	public function iterateOverResult($query, $func, $obj = null,
			$buffered = false) {
		$result = $this->query($query, $buffered);
		$out = '';
		if ($result) {
			while ( $row = mysql_fetch_assoc($result) ) {
				$out .= is_null($obj) ? $func($row) : $obj->$func($row);
			}
			$this->freeResult($result);
		}
		return $out;
	}

	/** Build and execute a SELECT (see selectQ() for parameters). */
	public function select($table, $keys = array(), $fields = array(),
			$orderby = '', $offset = 0, $limit = 0, $groupby = '') {
		$q = $this->selectQ($table, $keys, $fields, $orderby, $offset, $limit);
		return $this->query($q);
	}

	/**
	 * Build a SELECT statement.
	 * Note: $groupby is accepted but not forwarded by select() above.
	 */
	public function selectQ($table, $keys = array(), $fields = array(),
			$orderby = '', $offset = 0, $limit = 0, $groupby = '') {
		settype($fields, 'array');
		$sel = empty($fields) ? '*' : implode(', ', $fields);
		$sorder = empty($orderby) ? '' : ' ORDER BY '.$orderby;
		$sgroup = empty($groupby) ? '' : ' GROUP BY '.$groupby;
		$slimit = $limit > 0 ? " LIMIT $offset, $limit" : '';
		return "SELECT $sel FROM $table".$this->makeWhereClause($keys).
			$sgroup . $sorder . $slimit;
	}

	/** Build (extselectQ) and execute an extended SELECT. */
	public function extselect($qparts) {
		return $this->query( $this->extselectQ($qparts) );
	}

	/**
	Build an SQL SELECT statement with LEFT JOIN clause(s) from an array
	(Idea from phpBB).
	@param $qparts Associative array with following possible keys:
		SELECT, FROM, LEFT JOIN, WHERE, GROUP BY, ORDER BY, LIMIT
	@param $distinct Add the DISTINCT keyword
	*/
	public function extselectQ($qparts, $distinct = false) {
		$qd = $distinct ? ' DISTINCT' : '';
		$q = "SELECT$qd $qparts[SELECT] FROM $qparts[FROM]";
		if ( isset($qparts['LEFT JOIN']) ) {
			foreach ($qparts['LEFT JOIN'] as $table => $onrule) {
				$q .= " LEFT JOIN $table ON ($onrule)";
			}
		}
		if ( isset($qparts['WHERE']) ) {
			$q .= $this->makeWhereClause($qparts['WHERE']);
		}
		foreach ( array('GROUP BY', 'ORDER BY') as $key ) {
			if ( isset($qparts[$key]) ) {
				$q .= " $key $qparts[$key]";
			}
		}
		if ( isset($qparts['LIMIT']) ) {
			// LIMIT may be a scalar count or an (offset, count) pair.
			if ( is_array($qparts['LIMIT']) ) {
				list($offset, $limit) = $qparts['LIMIT'];
			} else {
				$offset = 0;
				$limit = (int) $qparts['LIMIT'];
			}
			$q .= $limit > 0 ? " LIMIT $offset, $limit" : '';
		}
		return $q;
	}

	/** Build (insertQ) and execute an INSERT. */
	public function insert($table, $data, $ignore = false, $putId = true) {
		return $this->query($this->insertQ($table, $data, $ignore, $putId));
	}

	/**
	 * Build an INSERT statement. When $putId is true and $data carries no
	 * 'id', the table's next auto-increment value is fetched and used.
	 */
	public function insertQ($table, $data, $ignore = false, $putId = true) {
		if ( empty($data) ) {
			return '';
		}
		if ($putId && ! array_key_exists('id', $data) && ($id = $this->autoIncrementId($table)) ) {
			$data['id'] = $id;
		}
		$signore = $ignore ? ' IGNORE' : '';
		return "INSERT$signore INTO $table". $this->makeSetClause($data);
	}

	/** Build (multiinsertQ) and execute a multi-row INSERT. */
	public function multiinsert($table, $data, $fields, $ignore = false) {
		return $this->query($this->multiinsertQ($table, $data, $fields, $ignore));
	}

	/**
	 * Build a multi-row INSERT: $fields names the columns, $data holds
	 * numerically-indexed rows. Missing row values default to ''.
	 */
	public function multiinsertQ($table, $data, $fields, $ignore = false) {
		if ( empty($data) || empty($fields) ) {
			return '';
		}
		$vals = ' (`'. implode('`, `', $fields) .'`) VALUES';
		$fcnt = count($fields);
		foreach ($data as $rdata) {
			$vals .= ' (';
			for ($i=0; $i < $fcnt; $i++) {
				$val = isset($rdata[$i]) ? $this->normalizeValue($rdata[$i]) : "''";
				$vals .= $val .', ';
			}
			$vals = rtrim($vals, ' ,') .'),';
		}
		$signore = $ignore ? ' IGNORE' : '';
		return "INSERT$signore INTO $table". rtrim($vals, ',');
	}

	/** Build (updateQ) and execute an UPDATE. */
	public function update($table, $data, $keys) {
		return $this->query( $this->updateQ($table, $data, $keys) );
	}

	/**
	 * Build an UPDATE statement. An empty $keys degrades to INSERT IGNORE;
	 * a scalar $keys is treated as an id.
	 */
	public function updateQ($table, $data, $keys) {
		if ( empty($data) ) { return ''; }
		if ( empty($keys) ) { return $this->insertQ($table, $data, true); }
		if ( !is_array($keys) ) {
			$keys = array('id' => $keys);
		}
		return 'UPDATE '. $table . $this->makeSetClause($data) .
			$this->makeWhereClause($keys);
	}

	/** Build (replaceQ) and execute a REPLACE. */
	public function replace($table, $data) {
		return $this->query( $this->replaceQ($table, $data) );
	}

	/** Build a REPLACE statement. */
	public function replaceQ($table, $data) {
		if ( empty($data) ) { return ''; }
		return 'REPLACE '.$table.$this->makeSetClause($data);
	}

	/** Build (deleteQ) and execute a DELETE. */
	public function delete($table, $keys, $limit = 0) {
		return $this->query( $this->deleteQ($table, $keys, $limit) );
	}

	/**
	 * Build a DELETE statement. Empty $keys yields no query (never a
	 * full-table delete); a scalar $keys is treated as an id.
	 */
	public function deleteQ($table, $keys, $limit = 0) {
		if ( empty($keys) ) { return ''; }
		if ( !is_array($keys) ) $keys = array('id' => $keys);
		$q = 'DELETE FROM '. $table . $this->makeWhereClause($keys);
		if ( !empty($limit) ) $q .= " LIMIT $limit";
		return $q;
	}

	/**
	 * Build a SET clause from $data. Null values are skipped; entries
	 * with numeric keys are taken verbatim (raw SQL).
	 */
	public function makeSetClause($data, $putKeyword = true) {
		if ( empty($data) ) { return ''; }
		$keyword = $putKeyword ? ' SET ' : '';
		$cl = array();
		foreach ($data as $field => $value) {
			if ($value === null) {
				continue;
			}
			if ( is_numeric($field) ) { // take the value as is
				$cl[] = $value;
			} else {
				$cl[] = "`$field` = ". $this->normalizeValue($value);
			}
		}
		return $keyword . implode(', ', $cl);
	}

	/**
	@param $keys Array with mixed keys (associative and numeric).
	By numeric key take the value as is if the value is a string, or send it
	recursive to makeWhereClause() with OR-joining if the value is an array.
	By string key use “=” for compare relation if the value is string;
	if the value is an array, use the first element as a relation and the
	second as comparison value.
	An example follows:
		$keys = array(
			'k1 <> 1',                   // numeric key, string value
			array('k2' => 2, 'k3' => 3), // numeric key, array value
			'k4' => 4,                   // string key, scalar value
			'k5' => array('>=', 5),      // string key, array value (rel, val)
		)
	@param $join How to join the elements from $keys
	@param $putKeyword Should the keyword “WHERE” precede the clause
	*/
	public function makeWhereClause($keys, $join = 'AND', $putKeyword = true) {
		if ( empty($keys) ) {
			// "WHERE 1" keeps concatenated SQL valid when no keys are given.
			return $putKeyword ? ' WHERE 1' : '';
		}
		$cl = $putKeyword ? ' WHERE ' : '';
		$whs = array();
		foreach ($keys as $field => $rawval) {
			if ( is_numeric($field) ) { // take the value as is
				$field = $rel = '';
				if ( is_array($rawval) ) {
					// Nested arrays recurse with the opposite join operator.
					$njoin = $join == 'AND' ? 'OR' : 'AND';
					$val = '('.$this->makeWhereClause($rawval, $njoin, false).')';
				} else {
					$val = $rawval;
				}
			} else {
				if ( is_array($rawval) ) {
					list($rel, $val) = $rawval;
					if (($rel == 'IN' || $rel == 'NOT IN') && is_array($val)) {
						// set relation — build an SQL set
						$cb = array($this, 'normalizeValue');
						$val = '('. implode(', ', array_map($cb, $val)) .')';
					} else {
						$val = $this->normalizeValue($val);
					}
				} else {
					$rel = '='; // default relation
					$val = $this->normalizeValue($rawval);
				}
			}
			$whs[] = "$field $rel $val";
		}
		$cl .= '('. implode(") $join (", $whs) . ')';
		return $cl;
	}

	/**
	 * Quote a PHP value for SQL: booleans become '1'/'0', DateTime is
	 * formatted, everything else is escaped and single-quoted.
	 */
	public function normalizeValue($value) {
		/*if ( is_null($value) ) {
			return 'NULL';
		} else */
		if ( is_bool($value) ) {
			$value = $value ? 1 : 0;
		} else if ($value instanceof \DateTime) {
			$value = $value->format('Y-m-d H:i:s');
		} else {
			$value = $this->escape($value);
		}
		return '\''. $value .'\'';
	}

	/** Set the table-name prefix. */
	public function setPrefix($prefix) { $this->prefix = $prefix; }

	/**
	 * Escape a string for use in SQL; delegates to the slave when one is
	 * configured (connecting lazily as needed).
	 */
	public function escape($string) {
		if ($this->slave) {
			return $this->slave->escape($string);
		}
		if ( !isset($this->conn) ) { $this->connect(); }
		return mysql_real_escape_string($string, $this->conn);
	}

	/**
	Send a query to the database.
	Non-write queries go to the slave (if any); write queries are logged
	and mirrored to the master (if any).
	@param string $query
	@param bool $useBuffer Use buffered or unbuffered query
	@return resource, or false by failure
	*/
	public function query($query, $useBuffer = true) {
		if ( empty($query) ) {
			return true;
		}
		if ( $this->slave && ! self::isWriteQuery($query) ) {
			return $this->slave->query($query, $useBuffer);
		}
		if ( !isset($this->conn) ) { $this->connect(); }
		$res = $useBuffer
			? mysql_query($query, $this->conn)
			: mysql_unbuffered_query($query, $this->conn);
		if ( !$res ) {
			$this->errno = mysql_errno();
			$this->error = mysql_error();
			$this->log("Error $this->errno: $this->error\nQuery: $query\n"
				/*."Backtrace\n". print_r(debug_backtrace(), true)*/, true);
			return false;
		}
		if ( self::isWriteQuery($query) ) {
			// Record the acting user id in the write log for auditing.
			$u = isset($GLOBALS['user']) ? $GLOBALS['user']->id : 0;
			$this->log("/*U=$u*/ $query;", false);
			if ($this->master) {
				$this->master->query($query);
			}
		}
		return $res;
	}

	/**
	 * Run queries inside a transaction.
	 * NOTE(review): a failing query returns early WITHOUT rollback or
	 * commit, leaving the transaction open on this connection.
	 * @return array|false Results, or false on first failure
	 */
	public function transaction($queries) {
		$res = array();
		$this->query('START TRANSACTION');
		foreach ( (array) $queries as $query) {
			$lres = $this->query($query);
			if ($lres === false) {
				return false;
			}
			$res[] = $lres;
		}
		$this->query('COMMIT');
		return $res;
	}

	/** Heuristic: does the query modify data (keyword match, case-sensitive)? */
	static public function isWriteQuery($query)
	{
		return preg_match('/UPDATE|INSERT|REPLACE|DELETE|START|COMMIT|ALTER/', $query);
	}

	/** @return array Associative array */
	public function fetchAssoc($result) {
		return mysql_fetch_assoc($result);
	}

	/** @return array */
	public function fetchRow($result) {
		return mysql_fetch_row($result);
	}

	/** @return integer */
	public function numRows($result) {
		return mysql_num_rows($result);
	}

	/** @return integer */
	public function affectedRows() {
		return mysql_affected_rows($this->conn);
	}

	/** Free a result resource. */
	public function freeResult($result) {
		return mysql_free_result($result);
	}

	/**
	Return next auto increment for a table
	@param string $tableName
	@return integer
	*/
	public function autoIncrementId($tableName) {
		$res = $this->query('SHOW TABLE STATUS LIKE "'.$tableName.'"');
		$row = mysql_fetch_assoc($res);
		return $row['Auto_increment'];
	}

	/** Open the connection, select the DB and set the connection charset. */
	protected function connect() {
		$this->conn = mysql_connect($this->server, $this->user, $this->pass, true)
			or $this->mydie("Проблем: Няма връзка с базата. Изчакайте пет минути и опитайте отново да заредите страницата.");
		mysql_select_db($this->dbName, $this->conn)
			or $this->mydie("Could not select database $this->dbName.");
		mysql_query("SET NAMES '$this->charset' COLLATE '$this->collationConn'", $this->conn)
			or $this->mydie("Could not set names to '$this->charset':");
	}

	/** Emit a 503 with a plain-text message and terminate. */
	protected function mydie($msg) {
		header('Content-Type: text/plain; charset=UTF-8');
		header('HTTP/1.1 503 Service Temporarily Unavailable');
		die($msg .' '. mysql_error());
	}

	public function enableLogging()
	{
		$this->doLog = true;
	}

	public function disableLogging()
	{
		$this->doLog = false;
	}

	/** Append a timestamped message to the error or write-query log. */
	protected function log($msg, $isError = true)
	{
		if ($this->doLog) {
			file_put_contents($isError ? $this->errLogFile : $this->logFile,
				'/*'.date('Y-m-d H:i:s').'*/ '. $msg."\n", FILE_APPEND);
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Service;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\User;
use Chitanka\LibBundle\Legacy\mlDatabase as LegacyDb;
class TextService {
	/** @var LegacyDb Legacy query-builder used for the raw SQL below */
	private $legacyDb;

	public function __construct(LegacyDb $db) {
		$this->legacyDb = $db;
	}
	/**
	 * Get similar texts based on reader counts: texts read by the same
	 * people who read $text, most shared readers first.
	 * Ties at the cutoff are broken by shared labels
	 * (filterSimilarByLabel()).
	 * @param Text $text
	 * @param int $limit Return up to this limit number of texts
	 * @param User $reader Do not return texts marked as read by this reader
	 * @return array Text ids
	 */
	public function findTextAlikes(Text $text, $limit = 10, User $reader = null) {
		$alikes = array();
		$qa = array(
			'SELECT' => 'text_id, count(*) readers',
			'FROM' => DBT_READER_OF .' r',
			'WHERE' => array(
				'r.text_id' => array('<>', $text->getId()),
				'r.user_id IN ('
					. $this->legacyDb->selectQ(DBT_READER_OF, array('text_id' => $text->getId()), 'user_id')
					. ')',
			),
			'GROUP BY' => 'r.text_id',
			'ORDER BY' => 'readers DESC',
		);
		if ( is_object($reader) ) {
			$qa['WHERE'][] = 'text_id NOT IN ('
				. $this->legacyDb->selectQ(DBT_READER_OF, array('user_id' => $reader->getId()), 'text_id')
				. ')';
		}
		$res = $this->legacyDb->extselect($qa);
		// Collect whole groups of equally-ranked texts so that a tie at
		// the limit boundary is not cut arbitrarily.
		$alikes = $textsInQueue = array();
		$lastReaders = 0;
		$count = 0;
		while ( $row = $this->legacyDb->fetchAssoc($res) ) {
			$count++;
			if ( $lastReaders > $row['readers'] ) {
				// Reader count dropped: flush the finished group.
				if ( $count > $limit ) {
					break;
				}
				$alikes = array_merge($alikes, $textsInQueue);
				$textsInQueue = array();
			}
			$textsInQueue[] = $row['text_id'];
			$lastReaders = $row['readers'];
		}
		if ( $count > $limit ) {
			// The last group straddles the limit: rank it by shared labels.
			$alikes = array_merge($alikes, $this->filterSimilarByLabel($text, $textsInQueue, $limit - count($alikes)));
		}
// 		if ( empty($texts) ) {
// 			$texts = $this->getSimilarByLabel($text, $limit, $reader);
// 		}
		return $alikes;
	}
	/**
	 * Get similar texts based on shared labels (most labels in common
	 * first). Currently unused — kept as a fallback (see the commented
	 * call in findTextAlikes()).
	 * @param Text $text
	 * @param int $limit Return up to this limit number of texts
	 * @param User $reader Do not return texts marked as read by this reader
	 * @return array Text ids
	 */
	private function getSimilarByLabel(Text $text, $limit = 10, User $reader = null) {
		$qa = array(
			'SELECT' => 'text_id',
			'FROM' => DBT_TEXT_LABEL,
			'WHERE' => array(
				'text_id' => array('<>', $text->getId()),
				'label_id IN ('
					. $this->legacyDb->selectQ(DBT_TEXT_LABEL, array('text_id' => $text->getId()), 'label_id')
					. ')',
			),
			'GROUP BY' => 'text_id',
			'ORDER BY' => 'COUNT(text_id) DESC',
			'LIMIT' => $limit,
		);
		if ( $reader ) {
			$qa['WHERE'][] = 'text_id NOT IN ('
				. $this->legacyDb->selectQ(DBT_READER_OF, array('user_id' => $reader->getId()), 'text_id')
				. ')';
		}
		$res = $this->legacyDb->extselect($qa);
		$texts = array();
		while ($row = $this->legacyDb->fetchRow($res)) {
			$texts[] = $row[0];
		}
		return $texts;
	}
	/**
	 * Rank a candidate set of text ids by the number of labels they
	 * share with $text and return the top $limit of them.
	 * @param Text  $text
	 * @param array $texts Candidate text ids
	 * @param int   $limit
	 * @return array Text ids
	 */
	private function filterSimilarByLabel(Text $text, $texts, $limit) {
		$qa = array(
			'SELECT' => 'text_id',
			'FROM' => DBT_TEXT_LABEL,
			'WHERE' => array(
				'text_id' => array('IN', $texts),
				'label_id IN ('
					. $this->legacyDb->selectQ(DBT_TEXT_LABEL, array('text_id' => $text->getId()), 'label_id')
					. ')',
			),
			'GROUP BY' => 'text_id',
			'ORDER BY' => 'COUNT(text_id) DESC',
			'LIMIT' => $limit,
		);
		$res = $this->legacyDb->extselect($qa);
		$texts = array();
		while ($row = $this->legacyDb->fetchRow($res)) {
			$texts[] = $row[0];
		}
		return $texts;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
use Chitanka\LibBundle\Util\Ary;
/**
 * Wrapper around the PHP superglobals for the legacy request handling.
 *
 * Provides typed access to request parameters, cookie helpers,
 * server/URL information and user-agent (bot) detection.
 *
 * NOTE(review): $this->params and $this->action are read here but are
 * expected to be defined by subclasses — confirm against the concrete
 * page classes before relying on them.
 */
class Request {

	const
		STANDARD_PORT = 80,
		PARAM_SEPARATOR = '=',
		ONEDAYSECS = 86400; // 60*60*24 - number of seconds in a day

	protected
		$cookiePath = '/',
		// substrings identifying crawler/bot user agents (matched against the lowercased UA)
		$bots = array('bot', 'search', 'crawl', 'spider', 'fetch', 'reader',
			'subscriber', 'google', 'rss'),
		// hash of the request
		$hash;

	/** @var int Expiration timestamp used for cookies set by this request */
	protected $cookieExp;

	/** @var string Lowercased user-agent string */
	protected $ua;

	public function __construct() {
		/** Lifetime for cookies */
		$this->cookieExp = time() + self::ONEDAYSECS * 30; // 30 days
		$this->ua = strtolower(@$_SERVER['HTTP_USER_AGENT']);
	}

	/** Current action name (set by subclasses). */
	public function action() {
		return $this->action;
	}

	/**
	 * Fetch a field value from the request.
	 * Return the default value if $name is not set in the request, or if
	 * $allowed is an array and does not contain the value as a key.
	 * @param string $name Parameter name
	 * @param mixed $default Value returned when the parameter is missing
	 * @param int|null $paramno Positional fallback index into $this->params
	 * @param array|null $allowed Associative array of allowed values
	 * @return mixed
	 */
	public function value($name, $default = null, $paramno = null, $allowed = null) {
		if ( isset($_REQUEST[$name]) ) {
			$val = $_REQUEST[$name];
		} else if ( is_null($paramno) ) {
			return $default;
		} else if ( isset($this->params[$paramno]) ) {
			// positional parameter found — also mirror it into the superglobals
			$val = $_REQUEST[$name] = $_GET[$name] = $this->params[$paramno];
		} else {
			return $default;
		}
		return is_array($allowed) ? Ary::normKey($val, $allowed, $default) : $val;
	}

	/** Force a request parameter to a given value (mirrored into $_GET). */
	public function setValue($name, $value) {
		$_REQUEST[$name] = $_GET[$name] = $value;
	}

	/**
	 * Read a checkbox value, optionally nested under the given array dimensions.
	 * @return bool True when the (possibly nested) value equals "on"
	 */
	public function checkbox($name, $dims = null) {
		if ( !isset($_REQUEST[$name]) ) {
			return false;
		}
		$val = $_REQUEST[$name];
		if ( is_array($dims) && !empty($dims) ) {
			foreach ($dims as $dim) { $val = $val[$dim]; }
		}
		return $val == 'on';
	}

	/** @return bool Whether the request used the POST method */
	public function wasPosted() {
		return @$_SERVER['REQUEST_METHOD'] == 'POST';
	}

	/** @return bool Whether the user agent looks like a crawler/bot */
	public function isBotRequest() {
		foreach ($this->bots as $bot) {
			if ( strpos($this->ua, $bot) !== false ) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Tests whether a given set of parameters corresponds to the GET request.
	 * @param array $reqData Associative array
	 * @return bool
	 */
	public function isCurrentRequest($reqData) {
		if ( !is_array($reqData) ||
				count(array_diff_assoc($_GET, $reqData)) > 0 ||
				count(array_diff_assoc($reqData, $_GET)) > 0 ) {
			return false;
		}
		foreach ($_GET as $param => $val) {
			if ($reqData[$param] != $val) {
				return false;
			}
		}
		return true;
	}

	/** @return string The HTTP referer, or an empty string when absent */
	public function referer() {
		return isset($_SERVER['HTTP_REFERER']) ? $_SERVER['HTTP_REFERER'] : '';
	}

	/** @return string Bare server name, without scheme or port */
	public function serverPlain() {
		return $_SERVER['SERVER_NAME'];
	}

	/**
	 * Full server URL: scheme, host and (if non-standard) port.
	 * @return string e.g. "https://example.com:8080"
	 */
	public function server() {
		// Bug fix: the scheme ternary was inverted — an active HTTPS
		// connection was reported as "http" and a plain one as "https".
		$s = (isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] != 'off') ? 'https' : 'http';
		$s .= '://' . $_SERVER['SERVER_NAME'];
		if ( $_SERVER['SERVER_PORT'] != self::STANDARD_PORT ) {
			$s .= ':' . $_SERVER['SERVER_PORT'];
		}
		return $s;
	}

	/**
	 * The request URI, optionally prefixed with scheme and host.
	 * @param bool $absolute
	 * @return string
	 */
	public function requestUri( $absolute = false ) {
		$uri = $absolute ? $this->server() : '';
		$uri .= $_SERVER['REQUEST_URI'];
		return $uri;
	}

	/**
	 * Original client-side name of an uploaded file.
	 * @return string|null Null when no such upload field exists
	 */
	public function fileName($name) {
		if( !isset( $_FILES[$name] ) ) { return null; }
		return $_FILES[$name]['name'];
	}

	/**
	 * Temporary server-side path of an uploaded file.
	 * @return string|null|false Null when the field is missing, false on upload error
	 */
	public function fileTempName($name) {
		if ( !isset($_FILES[$name]) ) { return null; }
		if ( $_FILES[$name]['error'] !== 0 ) { return false; }
		return $_FILES[$name]['tmp_name'];
	}

	/** Positional URL parameters (populated by subclasses). */
	public function getParams()
	{
		return $this->params;
	}

	/** Cached MD5 hash over the GET and positional parameters. */
	public function hash() {
		if ( empty($this->hash) ) {
			ksort($_GET);
			$this->hash = md5( serialize($_GET + $this->params) );
		}
		return $this->hash;
	}

	/**
	 * Set a cookie with the default expiration.
	 * NOTE(review): when $multiDomain is false no cookie is set at all —
	 * the single-domain setcookie() call is commented out. Confirm whether
	 * this is intentional.
	 */
	public function setCookie($name, $value, $expire = null, $multiDomain = true) {
		if (is_null($expire)) $expire = $this->cookieExp;
		//setcookie($name, $value, $expire, $this->cookiePath);
		if ($multiDomain) {
			// HttpOnly cookie, valid for all subdomains
			setcookie($name, $value, $expire, $this->cookiePath, '.'.$this->serverPlain(), false, true);
		}
	}

	/** Expire a cookie immediately (path-only and, optionally, domain-wide). */
	public function deleteCookie($name, $multiDomain = true) {
		setcookie($name, '', time() - 86400, $this->cookiePath);
		if ($multiDomain) {
			setcookie($name, '', time() - 86400, $this->cookiePath, '.'.$this->serverPlain());
		}
	}

	/**
	 * Render the current GET parameters as hidden form inputs,
	 * skipping numeric names and the given exclusions.
	 */
	public function makeInputFieldsForGetVars($exclude = array()) {
		$c = '';
		foreach ($_GET as $name => $value) {
			if ( in_array($name, $exclude) || is_numeric($name) ) {
				continue;
			}
			$c .= "<input type='hidden' name='$name' value='$value' />\n";
		}
		return $c;
	}

	/** @return bool Whether the request carries the XMLHttpRequest header */
	static public function isXhr()
	{
		return isset($_SERVER['HTTP_X_REQUESTED_WITH'])
			&& $_SERVER['HTTP_X_REQUESTED_WITH'] == 'XMLHttpRequest';
	}

	/** Alias of isXhr(). */
	static public function isAjax()
	{
		return self::isXhr();
	}

	/** @return bool Whether the user agent is Internet Explorer */
	public function isMSIE() {
		return strpos($this->ua, 'msie') !== false;
	}

	/** @return bool Whether the form was submitted via its submit button */
	public function isCompleteSubmission() {
		return $this->value('submitButton') !== null;
	}
}
<file_sep><?php
/**
 * Imports an SQL dump file into a database through PDO.
 */
class SqlImporter {
	/** @var PDO */
	private $db;

	public function __construct($dsn, $dbuser, $dbpassword) {
		$this->db = new PDO($dsn, $dbuser, $dbpassword);
	}

	/**
	 * Execute every query found in the given SQL dump file.
	 * Foreign key checks are disabled for the duration of the import.
	 * Failing queries are logged and do not abort the import.
	 */
	public function importFile($sqlFile) {
		$connection = $this->db;
		$connection->exec('SET FOREIGN_KEY_CHECKS=0');
		$connection->exec('SET NAMES utf8');
		$processor = new SqlFileProcessor($sqlFile);
		$processor->walkThruQueries(function($query) use ($connection) {
			// progress output: the first 80 characters of each query
			echo substr($query, 0, 80), "\n";
			if ($connection->exec($query) === false) {
				error_log("Error by $query");
				error_log(print_r($connection->errorInfo(), true));
			}
		});
		$connection->exec('SET FOREIGN_KEY_CHECKS=1');
	}
}
/**
 * Splits an SQL dump file into individual queries.
 */
class SqlFileProcessor {
	/** @var string Path to the SQL dump (optionally gzipped) */
	private $filename;

	public function __construct($filename) {
		$this->filename = $filename;
	}

	/**
	 * Walk through all queries in the file.
	 *
	 * Lines that are empty, SQL comments ("-- ...") or internal MySQL
	 * directives (lines starting with "/*") are skipped. Remaining lines
	 * are concatenated until a trailing semicolon terminates the query.
	 *
	 * @param callable|null $callback Invoked with every complete query.
	 *        When null, the queries are collected and returned instead.
	 * @return array The collected queries (empty when a callback was given)
	 */
	public function walkThruQueries($callback = null) {
		$reader = new FileLineReader($this->filename);
		$queries = array();
		if ($callback === null) {
			// Bug fix: the closure previously captured $queries by value,
			// so the collected queries were lost and an empty array was
			// always returned. Capture by reference instead.
			$callback = function($query) use (&$queries) {
				$queries[] = $query;
			};
		}
		$queryBuf = '';
		while ($reader->hasMore()) {
			$line = $reader->readLine();
			if (empty($line) || $this->isComment($line) || $this->isInternMysqlQuery($line)) {
				continue;
			}
			$queryBuf .= $line;
			if (substr($queryBuf, -1, 1) == ';') {
				$callback($queryBuf);
				$queryBuf = '';
			}
		}
		// flush a trailing query that lacks a terminating semicolon
		if ($queryBuf) {
			$callback($queryBuf);
		}
		$reader->close();
		return $queries;
	}

	/** An SQL line comment starts with "--". */
	private function isComment($line) {
		return strpos($line, '--') === 0;
	}

	/** MySQL internal/conditional statements start with "/*". */
	private function isInternMysqlQuery($line) {
		return strpos($line, '/*') === 0;
	}
}
/**
 * Line-by-line reader for plain or gzip-compressed text files.
 * Files ending in ".gz" are read through the zlib stream functions.
 */
class FileLineReader {
	/** @var bool Whether the file is gzip-compressed (by extension) */
	private $gzipped;
	/** @var string Path of the file being read */
	private $path;
	/** @var resource|null Open file handle, null once closed */
	private $stream;

	/**
	 * @param string $filename
	 * @throws \Exception When the file is missing or unreadable
	 */
	public function __construct($filename) {
		if (!file_exists($filename)) {
			throw new \Exception("File '$filename' does not exist.");
		}
		if (!is_readable($filename)) {
			throw new \Exception("File '$filename' is not readable.");
		}
		$this->gzipped = preg_match('/\.gz$/', $filename);
		$this->path = $filename;
		$this->stream = $this->open();
	}

	public function __destruct() {
		if ($this->stream) {
			$this->close();
		}
	}

	/**
	 * (Re)open the file for reading.
	 * @return resource
	 * @throws \Exception When the file cannot be opened
	 */
	public function open() {
		if ($this->gzipped) {
			$handle = gzopen($this->path, 'r');
		} else {
			$handle = fopen($this->path, 'r');
		}
		if (!$handle) {
			throw new \Exception("File '$this->path' could not be opened for reading.");
		}
		$this->stream = $handle;
		return $handle;
	}

	/** @return bool Whether the end of the file has been reached */
	public function eof() {
		if ($this->gzipped) {
			return gzeof($this->stream);
		}
		return feof($this->stream);
	}

	/** @return bool Whether more data can be read */
	public function hasMore() {
		return !$this->eof();
	}

	/**
	 * Read the next line.
	 * @param bool $trim Strip trailing whitespace (incl. the newline)
	 * @return string
	 */
	public function readLine($trim = true) {
		$line = $this->gets();
		if ($trim) {
			return rtrim($line);
		}
		return $line;
	}

	/** Raw line read, newline included. */
	public function gets() {
		if ($this->gzipped) {
			return gzgets($this->stream);
		}
		return fgets($this->stream);
	}

	/** Close the underlying handle. */
	public function close() {
		if ($this->gzipped) {
			gzclose($this->stream);
		} else {
			fclose($this->stream);
		}
		$this->stream = null;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Doctrine;
use Doctrine\ORM\EntityManager;
use Doctrine\ORM\Id\AbstractIdGenerator;
use Doctrine\ORM\Mapping\ClassMetadata;
use Chitanka\LibBundle\Entity\NextIdRepository;
/**
 * Doctrine identifier generator that allocates ids from the application's
 * own "next id" table instead of relying on database auto-increment.
 */
class CustomIdGenerator extends AbstractIdGenerator
{
	/**
	 * Generate the next identifier for the given entity.
	 *
	 * @param EntityManager $em
	 * @param object $entity The entity an id is being generated for
	 * @return mixed The next free id as provided by NextIdRepository
	 */
	public function generate(EntityManager $em, $entity)
	{
		return $this->createNextIdRepository($em)->selectNextId($entity);
	}

	/**
	 * Factory for the repository backing the id sequence table.
	 * Overridable in tests/subclasses.
	 */
	protected function createNextIdRepository(EntityManager $em)
	{
		return new NextIdRepository($em, new ClassMetadata('Chitanka\LibBundle\Entity\NextId'));
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Listener;
use Symfony\Component\HttpKernel\HttpKernelInterface;
use Symfony\Component\HttpKernel\Event\GetResponseEvent;
/**
 * Kernel request listener that registers additional response formats
 * so controllers can render them through the _format mechanism.
 */
class RequestListener
{
	/**
	 * Register the "osd" (OpenSearch description) and "suggest"
	 * (OpenSearch suggestions) formats on the incoming request.
	 */
	public function onKernelRequest(GetResponseEvent $event)
	{
		$request = $event->getRequest();
		$request->setFormat('osd', 'application/opensearchdescription+xml');
		$request->setFormat('suggest', 'application/x-suggestions+json');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Security\Core\SecurityContext;
use Symfony\Component\Security\Encoder\MessageDigestPasswordEncoder;
/**
 * Handles login, registration, logout and credential-recovery pages,
 * mostly by delegating to the legacy page implementations.
 */
class SecurityController extends Controller
{
	// security pages must never be cached
	protected $responseAge = 0;

	/**
	 * Show the login page (legacy implementation).
	 *
	 * Note: a partial Symfony-native login implementation used to follow
	 * the return statement below; it was unreachable dead code and has
	 * been removed.
	 */
	public function loginAction()
	{
		return $this->legacyPage('Login');
	}

	/** Show the registration page (legacy implementation). */
	public function registerAction()
	{
		return $this->legacyPage('Register');
	}

	/**
	 * Log the current user out, invalidate the session and redirect
	 * to the homepage with a confirmation flash message.
	 */
	public function logoutAction(Request $request)
	{
		$user = $this->getUser();
		if ($user) {
			$user->eraseCredentials();
			$user->logout();
		}
		$session = $request->getSession();
		$session->invalidate();
		$session->getFlashBag()->set('notice', 'Излязохте от Моята библиотека.');
		return $this->redirect('homepage');
	}

	/** Page for requesting a forgotten username (legacy implementation). */
	public function requestUsernameAction()
	{
		return $this->legacyPage('SendUsername');
	}

	/** Page for requesting a new password (legacy implementation). */
	public function requestPasswordAction()
	{
		return $this->legacyPage('SendNewPassword');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
 * Repository for label change log records, loaded together with their
 * related text, label and user.
 */
class TextLabelLogRepository extends EntityRepository {
	/**
	 * All log records, newest first, paginated.
	 */
	public function getAll($page = 1, $limit = 30) {
		$query = $this->createLogQueryBuilder()
			->addOrderBy('log.date', 'desc')
			->getQuery();
		$this->setPagination($query, $page, $limit);
		return $query->getArrayResult();
	}

	/**
	 * All log records for a given text, oldest first.
	 */
	public function getForText(Text $text) {
		$query = $this->createLogQueryBuilder()
			->where('log.text = ?1')->setParameter(1, $text)
			->addOrderBy('log.date', 'asc')
			->getQuery();
		return $query->getArrayResult();
	}

	/** Common select with the text, label and user associations joined in. */
	private function createLogQueryBuilder() {
		return $this->createQueryBuilder('log')
			->select('log', 'text', 'label', 'user')
			->leftJoin('log.text', 'text')
			->leftJoin('log.label', 'label')
			->leftJoin('log.user', 'user');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Symfony\Component\Validator\Constraints as Assert;
use Symfony\Bridge\Doctrine\Validator\Constraints\UniqueEntity;
use Chitanka\LibBundle\Util\String;
/**
 * A label (tag) that can be attached to texts, optionally arranged in a
 * parent/child hierarchy.
 *
 * @ORM\Entity(repositoryClass="Chitanka\LibBundle\Entity\LabelRepository")
 * @ORM\Table(name="label")
 * @UniqueEntity(fields="slug", message="This slug is already in use.")
 * @UniqueEntity(fields="name")
 */
class Label extends Entity
{
	/**
	 * Primary key, allocated by the application-level id generator.
	 *
	 * @ORM\Column(type="integer")
	 * @ORM\Id
	 * @ORM\GeneratedValue(strategy="CUSTOM")
	 * @ORM\CustomIdGenerator(class="Chitanka\LibBundle\Doctrine\CustomIdGenerator")
	 */
	private $id;

	/**
	 * URL-friendly identifier, normalized through String::slugify().
	 *
	 * @var string $slug
	 * @ORM\Column(type="string", length=80, unique=true)
	 * @Assert\NotBlank
	 */
	private $slug = '';

	/**
	 * Human-readable label name.
	 *
	 * @var string $name
	 * @ORM\Column(type="string", length=80, unique=true)
	 * @Assert\NotBlank
	 */
	private $name = '';

	/**
	 * Parent label, or null for a root label.
	 *
	 * @var integer $parent
	 * @ORM\ManyToOne(targetEntity="Label", inversedBy="children")
	 */
	private $parent;

	/**
	 * Number of texts having this label
	 * @var integer $nr_of_texts
	 * @ORM\Column(type="integer")
	 */
	private $nr_of_texts = 0;

	/**
	 * The children of this label
	 * @var array
	 * @ORM\OneToMany(targetEntity="Label", mappedBy="parent")
	 */
	private $children;

	/**
	 * Texts carrying this label, ordered by title.
	 *
	 * @var array
	 * @ORM\ManyToMany(targetEntity="Text", mappedBy="labels")
	 * @ORM\OrderBy({"title" = "ASC"})
	 */
	private $texts;

	// --- simple accessors ---

	public function getId() { return $this->id; }

	// slug is always normalized on write
	public function setSlug($slug) { $this->slug = String::slugify($slug); }
	public function getSlug() { return $this->slug; }

	public function setName($name) { $this->name = $name; }
	public function getName() { return $this->name; }

	public function setParent($parent) { $this->parent = $parent; }
	public function getParent() { return $this->parent; }

	public function setNrOfTexts($nr_of_texts) { $this->nr_of_texts = $nr_of_texts; }
	public function getNrOfTexts() { return $this->nr_of_texts; }

	// increment (or, with a negative value, decrement) the cached text count
	public function incNrOfTexts($value = 1)
	{
		$this->nr_of_texts += $value;
	}

	public function setChildren($children) { $this->children = $children; }
	public function getChildren() { return $this->children; }

	public function setTexts($texts) { $this->texts = $texts; }
	public function getTexts() { return $this->texts; }

	public function __toString()
	{
		return $this->name;
	}

	/**
	 * Add child label
	 */
	public function addChild($label)
	{
		$this->children[] = $label;
	}

	/**
	 * Get all ancestors
	 *
	 * Walks the parent chain up to the root; nearest ancestor first.
	 *
	 * @return array
	 */
	public function getAncestors()
	{
		$ancestors = array();
		$label = $this;
		while (null !== ($parent = $label->getParent())) {
			$ancestors[] = $parent;
			$label = $parent;
		}
		return $ancestors;
	}

	/** Ids of this label and all of its descendants (own id first). */
	public function getDescendantIdsAndSelf()
	{
		return array_merge(array($this->getId()), $this->getDescendantIds());
	}

	/**
	 * Get all descendants
	 *
	 * Depth-first recursive collection of all child label ids.
	 *
	 * @return array
	 */
	public function getDescendantIds()
	{
		$ids = array();
		foreach ($this->getChildren() as $label) {
			$ids[] = $label->getId();
			$ids = array_merge($ids, $label->getDescendantIds());
		}
		return $ids;
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller;
use Symfony\Component\HttpKernel\Exception\HttpException;
use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
/**
 * Controller for the workroom (texts in preparation).
 *
 * Most actions bridge to the legacy "Work" page by injecting values into
 * $_REQUEST before delegating — the legacy page reads its parameters from
 * the superglobals.
 */
class WorkroomController extends Controller
{
	protected $repository = 'WorkEntry';
	// workroom pages must never be cached
	protected $responseAge = 0;

	/** Entry list, filtered by status, paginated. */
	public function indexAction($status, $page)
	{
		$_REQUEST['status'] = $status;
		$_REQUEST['page'] = $page;
		return $this->legacyPage('Work');
	}

	/** Bare entry list (no page chrome), in the requested format. */
	public function listAction($_format)
	{
		$_REQUEST['vl'] = 'listonly';
		$this->responseFormat = $_format;
		return $this->legacyPage('Work');
	}

	/** List of workroom contributors. */
	public function listContributorsAction()
	{
		$_REQUEST['vl'] = 'contrib';
		return $this->legacyPage('Work');
	}

	/** Show a single workroom entry. */
	public function showAction($id)
	{
		$_REQUEST['id'] = $id;
		return $this->legacyPage('Work');
	}

	/**
	 * Form for a new workroom entry; anonymous users are rejected.
	 * id=0 with status=edit signals "new entry" to the legacy page.
	 */
	public function newAction()
	{
		if ($this->getUser()->isAnonymous()) {
			throw new HttpException(401, 'Нямате достатъчни права за това действие.');
		}
		$_REQUEST['id'] = 0;
		$_REQUEST['status'] = 'edit';
		return $this->legacyPage('Work');
	}

	public function createAction()
	{
		return $this->legacyPage('Work');
	}

	/** Edit form for an existing entry. */
	public function editAction($id)
	{
		$_REQUEST['id'] = $id;
		$_REQUEST['status'] = 'edit';
		return $this->legacyPage('Work');
	}

	public function updateAction()
	{
		return $this->legacyPage('Work');
	}

	public function deleteAction()
	{
		return $this->legacyPage('Work');
	}

	/**
	 * Delete a single contribution record (workroom admins only).
	 * Responds with JSON for AJAX requests, otherwise redirects back to
	 * the entry's edit page.
	 */
	public function deleteContribAction($id)
	{
		$this->responseAge = 0;
		if ( ! $this->getUser()->inGroup('workroom-admin')) {
			throw new HttpException(401, 'Нямате достатъчни права за това действие.');
		}
		$contrib = $this->getWorkContribRepository()->find($id);
		if ($contrib === null) {
			throw new NotFoundHttpException();
		}
		$entry = $contrib->getEntry();
		$contrib->delete();
		$em = $this->getEntityManager();
		$em->persist($contrib);
		$em->flush();
		if ($this->get('request')->isXmlHttpRequest()) {
			return $this->displayJson($contrib);
		}
		return $this->urlRedirect($this->generateUrl('workroom_entry_edit', array('id' => $entry->getId())));
	}

	/** RSS feed with workroom updates (legacy feed page). */
	public function rssAction()
	{
		$_REQUEST['type'] = 'work';
		return $this->legacyPage('Feed');
	}

	/** The latest workroom entries, rendered through the WorkEntry list view. */
	public function latestAction($limit = 10)
	{
		$this->view = array(
			'entries' => $this->getWorkEntryRepository()->getLatest($limit),
		);
		return $this->display('WorkEntry:list');
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Controller\Admin;
/**
 * Admin CRUD controller for workroom entries.
 * All behavior is inherited from CRUDController.
 */
class WorkEntryController extends CRUDController {
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Entity\Person;
use Chitanka\LibBundle\Util\HttpAgent;
/**
 * Console command that checks the wiki info links stored by persons and
 * clears the ones that point to non-existing pages.
 */
class VerifyPersonInfoLinksDbCommand extends CommonDbCommand
{
	// delay between HTTP checks, to avoid hammering the wiki sites
	private $secsBetweenRequests = 5;

	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:verify-person-info-links')
			->setDescription('Verify the person wiki info links')
			->addOption('dump-sql', null, InputOption::VALUE_NONE, 'Output SQL queries instead of executing them')
			->setHelp(<<<EOT
The <info>db:verify-person-info-links</info> command verifies the existance of the person wiki info links and removes the non-existing ones.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->output = $output;
		$dumpSql = $input->getOption('dump-sql') === true;
		$this->verifyWikiInfoLinks($dumpSql);
		$output->writeln('/*Done.*/');
	}

	/** Find persons with dead info links and clear their info field. */
	private function verifyWikiInfoLinks($dumpSql)
	{
		$personIds = $this->getIdsForPersonsWithInvalidInfoLinks();
		$this->removeInvalidInfoLinksByPersons($personIds, $dumpSql);
	}

	/**
	 * Check every person whose info field has the form "prefix:name"
	 * against the corresponding wiki site.
	 *
	 * @return array Ids of persons whose info link is broken
	 */
	private function getIdsForPersonsWithInvalidInfoLinks()
	{
		$iterableResult = $this->em->createQuery('SELECT p FROM LibBundle:Person p WHERE p.info LIKE \'%:%\'')->iterate();
		$siteRepo = $this->em->getRepository('LibBundle:WikiSite');
		$httpAgent = new HttpAgent;
		$ids = array();
		foreach ($iterableResult AS $i => $row) {
			$person = $row[0];
			list($prefix, $name) = explode(':', $person->getInfo(), 2);
			$site = $siteRepo->findOneBy(array('code' => $prefix));
			// Robustness fix: an unknown prefix used to cause a fatal
			// error on $site->getUrl(); skip such records instead.
			if ($site === null) {
				$this->output->writeln("/* ({$person->getId()}) Unknown wiki prefix '$prefix' — skipping */");
				continue;
			}
			$url = $site->getUrl($name);
			$this->output->writeln("/* ({$person->getId()}) Checking $url */");
			if ( ! $httpAgent->urlExists($url)) {
				$ids[] = $person->getId();
				$this->output->writeln("/* {$person->getName()}: $url is a broken link */");
			}
			sleep($this->secsBetweenRequests);
		}
		return $ids;
	}

	/**
	 * Clear the info field for the given persons, either executing the
	 * UPDATE directly or printing it (--dump-sql).
	 */
	private function removeInvalidInfoLinksByPersons($personIds, $dumpSql)
	{
		$queries = array();
		if (count($personIds)) {
			$queries[] = sprintf('UPDATE person SET info = NULL WHERE id IN ('.implode(',', $personIds).')');
			if ($dumpSql) {
				$this->printQueries($queries);
			} else {
				$this->executeUpdates($queries, $this->em->getConnection());
			}
		}
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Component\Console\Input\InputDefinition;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Finder\Finder;
use Doctrine\ORM\Query\ResultSetMapping;
use Chitanka\LibBundle\Legacy\Legacy;
use Chitanka\LibBundle\Util\String;
/**
 * Console command that migrates the legacy "mylib" database into the new
 * schema: copies the tables, then performs a series of in-place data
 * conversions (boolean encoding, sizes, slugs, options, relations).
 *
 * NOTE(review): the conversion steps in migrateDb() appear order-dependent
 * (e.g. slug filling relies on the copied tables) — keep the sequence.
 */
class MigrateDbCommand extends CommonDbCommand
{
	protected function configure()
	{
		parent::configure();
		$this
			->setName('db:migrate')
			->setDescription('Migrate old database')
			->addOption('old-db', '', InputOption::VALUE_REQUIRED, 'Old database')
			->setHelp(<<<EOT
The <info>db:migrate</info> command migrates the old database from mylib to the new schema.
EOT
		);
	}

	/**
	 * Executes the current command.
	 *
	 * @param InputInterface $input An InputInterface instance
	 * @param OutputInterface $output An OutputInterface instance
	 *
	 * @return integer 0 if everything went fine, or an error code
	 *
	 * @throws \LogicException When this abstract class is not implemented
	 */
	protected function execute(InputInterface $input, OutputInterface $output)
	{
		$options = $input->getOptions();
		$em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
		$this->migrateDb($output, $em, $options['old-db']);
		$output->writeln('Done.');
	}

	// Run all migration steps in sequence against the old database $olddb.
	protected function migrateDb(OutputInterface $output, $em, $olddb)
	{
		$this->prepareOldDatabase($output, $em, $olddb);
		$this->copyTables($output, $em, $olddb);
		$this->convertBooleanColumns($output, $em);
		$this->convertTextSize($output, $em);
		$this->updateTextCountByLabels($output, $em);
		$this->updateCommentCountByTexts($output, $em);
		$this->updateBookCountByCategories($output, $em);
		$this->fillSlugFields($output, $em);
		$this->convertPersonInfoField($output, $em);
		$this->convertUserOptions($output, $em, $olddb);
		$this->fillHasCoverByBooks($output, $em);
		$this->insertBookPersonRelations($output, $em);
		$this->fillUserTextContribDates($output, $em);
	}

	/**
	 * Copy all tables from the old database into the new schema.
	 * @RawSql
	 */
	protected function copyTables(OutputInterface $output, $em, $olddb)
	{
		$output->writeln('Copying database tables');
		$conn = $em->getConnection();
		foreach ($this->getRawCopyQueries() as $query) {
			// %olddb% is a placeholder for the old database name
			$query = strtr($query, array('%olddb%' => $olddb));
			$output->writeln($query);
			$conn->executeUpdate($query);
		}
	}

	/**
	 * Convert legacy boolean encoding (1=false, 2=true) to 0/1.
	 * @RawSql
	 */
	protected function convertBooleanColumns(OutputInterface $output, $em)
	{
		$output->writeln('Converting boolean columns');
		$data = array(
			'text' => array('has_anno'),
			'text_comment' => array('is_shown'),
			'book_text' => array('share_info'),
			'license' => array('free', 'copyright'),
			'work_entry' => array('is_frozen'),
			'work_contrib' => array('is_frozen'),
			'user' => array('allowemail', 'news'),
		);
		// NOTE(review): $queries is unused in this method
		$queries = array();
		$conn = $em->getConnection();
		foreach ($data as $model => $fields) {
			foreach ($fields as $field) {
				$conn->executeUpdate(sprintf('UPDATE %s SET %s = 0 WHERE %s = 1', $model, $field, $field));
				$conn->executeUpdate(sprintf('UPDATE %s SET %s = 1 WHERE %s = 2', $model, $field, $field));
			}
		}
	}

	/**
	 * Convert text sizes from bytes to kibibytes.
	 * @RawSql
	 */
	protected function convertTextSize(OutputInterface $output, $em)
	{
		$output->writeln('Converting text size to kibibytes');
		$queries = array();
		$conn = $em->getConnection();
		$sql = 'SELECT id, size, zsize FROM text';
		foreach ($conn->fetchAll($sql) as $text) {
			$queries[] = sprintf('UPDATE text SET size = %d, zsize = %d WHERE id = %d',
				Legacy::int_b2k($text['size']),
				Legacy::int_b2k($text['zsize']),
				$text['id']);
		}
		$this->executeUpdates($queries, $conn);
	}

	/**
	 * Generate slugs for books, persons, labels, texts, series, sequences.
	 * Non-title slugs get a numeric suffix to stay unique.
	 * @RawSql
	 */
	protected function fillSlugFields(OutputInterface $output, $em)
	{
		$output->writeln('Filling slug fields');
		$queries = array();
		$conn = $em->getConnection();
		$tables = array(
			'book' => 'title',
			'person' => 'orig_name, name',
			'label' => 'name',
			'text' => 'title',
			'series' => 'name',
			'sequence' => 'name',
		);
		foreach ($tables as $table => $field) {
			$slugs = array();
			$sql = sprintf('SELECT id, %s AS name FROM %s', $field, $table);
			foreach ($conn->fetchAll($sql) as $row) {
				$name = $row['name'];
				// prefer the latin original name for persons when present
				if (isset($row['orig_name']) && preg_match('/[a-z]/', $row['orig_name'])) {
					$name = $row['orig_name'];
				}
				$slug = String::slugify($name);
				if ($field != 'title') {
					// deduplicate by appending a counter
					if (isset($slugs[$slug])) {
						$slugs[$slug]++;
						$slug .= $slugs[$slug];
					} else {
						$slugs[$slug] = 1;
					}
				}
				$queries[] = sprintf('UPDATE %s SET slug = "%s" WHERE id = %d', $table, $slug, $row['id']);
			}
		}
		$this->executeUpdates($queries, $conn);
	}

	/**
	 * Mark books that have a cover image file on disk.
	 * @RawSql
	 */
	protected function fillHasCoverByBooks(OutputInterface $output, $em)
	{
		$output->writeln('Setting the "has_cover" field by books');
		$coverDir = $this->getContainer()->getParameter('kernel.root_dir').'/../web/content/book-cover';
		$finder = new Finder();
		$finder->files()->name('*.jpg');
		$ids = array();
		foreach ($finder->in($coverDir) as $file) {
			// cover files are named <book id>.jpg
			if (preg_match('/(\d+)\.jpg/', $file->getFilename(), $m)) {
				$ids[] = $m[1];
			}
		}
		$query = 'UPDATE book SET has_cover = 1 WHERE id IN ('.implode(',', $ids).')';
		$em->getConnection()->executeUpdate($query);
	}

	/**
	 * Create book-author relations from the comma-separated title_author field.
	 * @RawSql
	 */
	protected function insertBookPersonRelations(OutputInterface $output, $em)
	{
		$output->writeln('Initializing missing book-author relations');
		$queries = array();
		$conn = $em->getConnection();
		$sql = 'SELECT id, title_author FROM book WHERE title_author <> ""';
		foreach ($conn->fetchAll($sql) as $book) {
			foreach (explode(',', $book['title_author']) as $name) {
				$sql = sprintf('SELECT id FROM person WHERE name = "%s"', trim($name));
				$person = $conn->fetchArray($sql);
				$queries[] = sprintf('INSERT IGNORE book_author SET book_id = %d, person_id = %d', $book['id'], $person[0]);
			}
		}
		$this->executeUpdates($queries, $conn);
	}

	/**
	 * Turn the person info field into "prefix:name" form.
	 * @RawSql
	 */
	protected function convertPersonInfoField(OutputInterface $output, $em)
	{
		$output->writeln('Converting info field by persons');
		$query = 'UPDATE person SET info = CONCAT(info, ":", name) WHERE info <> ""';
		$em->getConnection()->executeUpdate($query);
	}

	/**
	 * Decompress user option blobs and map legacy group codes to the new
	 * serialized group arrays.
	 * @RawSql
	 */
	protected function convertUserOptions(OutputInterface $output, $em, $olddb)
	{
		$output->writeln('Converting user options');
		$queries = array();
		$conn = $em->getConnection();
		$sql = "SELECT id, opts FROM $olddb.user";
		foreach ($conn->fetchAll($sql) as $user) {
			if ( ! empty($user['opts']) ) {
				$opts = @gzinflate($user['opts']);
				// keep only values that look like serialized arrays ("a:...")
				if ($opts[0] != 'a') {
					$opts = '';
				}
				$queries[] = sprintf('UPDATE user SET opts = \'%s\' WHERE id = %d', $opts, $user['id']);
			}
		}
		$queries[] = 'UPDATE user SET opts = "a:0:{}" WHERE opts = ""';
		// legacy group code => new group list
		$groups = array(
			'a' => array('user', 'workroom-admin', 'admin'),
			'wa' => array('user', 'workroom-admin'),
			'nu' => array('user'),
		);
		foreach ($groups as $oldGroup => $newGroups) {
			$queries[] = sprintf('UPDATE user SET groups = \'%s\' WHERE groups = \'%s\'', serialize($newGroups), $oldGroup);
		}
		$this->executeUpdates($queries, $conn);
	}

	/**
	 * Copy the text creation dates into the contribution records.
	 * @RawSql
	 */
	protected function fillUserTextContribDates(OutputInterface $output, $em)
	{
		$output->writeln('Filling dates by user_text_contrib');
		$queries = array();
		$conn = $em->getConnection();
		$sql = 'SELECT id, created_at FROM text';
		foreach ($conn->fetchAll($sql) as $data) {
			$queries[] = sprintf('UPDATE user_text_contrib SET date = \'%s\' WHERE text_id = %d', $data['created_at'], $data['id']);
		}
		$this->executeUpdates($queries, $conn);
	}

	// Normalize the old database before copying (nullable columns etc.).
	protected function prepareOldDatabase(OutputInterface $output, $em, $olddb)
	{
		$output->writeln('Preparing old database');
		$this->nullifyColumnsIfNeeded($output, $em, $olddb);
	}

	/**
	 * Make legacy columns nullable and replace 0-values with NULL so the
	 * new schema's foreign keys can be satisfied.
	 * @RawSql
	 */
	protected function nullifyColumnsIfNeeded(OutputInterface $output, $em, $olddb)
	{
		$queries = array(
			'ALTER TABLE `%olddb%`.`text`
				CHANGE `series` `series` SMALLINT(5) UNSIGNED NULL DEFAULT NULL,
				CHANGE `license_trans` `license_trans` SMALLINT(5) UNSIGNED NULL DEFAULT NULL,
				CHANGE `year` `year` SMALLINT(4) NULL DEFAULT NULL,
				CHANGE `year2` `year2` SMALLINT(4) NULL DEFAULT NULL,
				CHANGE `trans_year` `trans_year` SMALLINT(4) NULL DEFAULT NULL,
				CHANGE `trans_year2` `trans_year2` SMALLINT(4) NULL DEFAULT NULL',
			'UPDATE `%olddb%`.`text` SET series = NULL WHERE series = 0',
			'UPDATE `%olddb%`.`text` SET license_trans = NULL WHERE license_trans = 0',
			'UPDATE `%olddb%`.`text` SET year = NULL WHERE year = 0',
			'UPDATE `%olddb%`.`text` SET year2 = NULL WHERE year2 = 0',
			'UPDATE `%olddb%`.`text` SET trans_year = NULL WHERE trans_year = 0',
			'UPDATE `%olddb%`.`text` SET trans_year2 = NULL WHERE trans_year2 = 0',
			'ALTER TABLE `%olddb%`.`comment`
				CHANGE `user` `user` INT(11) UNSIGNED NULL DEFAULT NULL,
				CHANGE `replyto` `replyto` INT(11) UNSIGNED NULL DEFAULT NULL',
			'UPDATE `%olddb%`.`comment` SET replyto = NULL WHERE replyto = 0',
			'UPDATE `%olddb%`.`comment` SET user = NULL WHERE user = 0',
			'ALTER TABLE `%olddb%`.`liternews`
				CHANGE `user` `user` INT(11) UNSIGNED NULL DEFAULT NULL',
			'UPDATE `%olddb%`.`liternews` SET user = NULL WHERE user = 0',
		);
		$conn = $em->getConnection();
		foreach ($queries as $query) {
			$query = strtr($query, array('%olddb%' => $olddb));
			$output->writeln($query);
			$conn->executeUpdate($query);
		}
	}

	/**
	 * The raw INSERT ... SELECT statements copying the old tables.
	 * Foreign keys on text/text_comment are temporarily dropped to allow
	 * out-of-order insertion.
	 * @RawSql
	 */
	protected function getRawCopyQueries()
	{
		return array(
			'INSERT INTO `user` (id, username, realname, <PASSWORD>, <PASSWORD>, email, allowemail, groups, news, opts, login_tries, registration, touched) SELECT id, username, realname, password, <PASSWORD>, email, allowemail, `group`, news, opts, login_tries, registration, touched FROM `%olddb%`.`user`',
			'INSERT INTO `category` (`id`, `slug`, `name`) VALUES
				(1, "uncategorized", "Некатегоризирани"),
				(2, "razkazi_v_kartinki", "Разкази в картинки")',
			'INSERT INTO `book` (id, slug, title_author, title, subtitle, orig_title, lang, year, type, mode, category_id) SELECT id, id, title_author, title, subtitle, orig_title, lang, year, type, mode, 1 FROM `%olddb%`.`book`',
			'INSERT INTO `sequence` (id, slug, name) SELECT id, name, name FROM `%olddb%`.`pic_series`',
			'INSERT INTO `book` (slug, title, sequence_id, seqnr, year, trans_year, lang, orig_lang, created_at, type, has_cover, category_id) SELECT id, name, series, sernr, year, trans_year, lang, orig_lang, created_at, "pic", 1, 2 FROM `%olddb%`.`pic`',
			'INSERT INTO `series` (id, slug, name, orig_name, type) SELECT id, name, name, orig_name, type FROM `%olddb%`.`series`',
			'INSERT INTO `license` SELECT * FROM `%olddb%`.`license`',
			'ALTER TABLE text DROP FOREIGN KEY text_ibfk_4',
			'INSERT INTO `text` (id, title, subtitle, lang, trans_year, trans_year2, orig_title, orig_subtitle, orig_lang, year, year2, orig_license_id, trans_license_id, type, series_id, sernr, sernr2, headlevel, size, zsize, created_at, cur_rev_id, dl_count, read_count, comment_count, rating, votes, has_anno, mode) SELECT id, title, subtitle, lang, trans_year, trans_year2, orig_title, orig_subtitle, orig_lang, year, year2, license_orig, license_trans, type, series, sernr, floor((sernr - floor(sernr) ) * 10), headlevel, size, zsize, entrydate, lastedit, dl_count, read_count, comment_count, rating, votes, has_anno, mode FROM `%olddb%`.`text`',
			'INSERT INTO `text_revision` SELECT * FROM `%olddb%`.`edit_history`',
			'ALTER TABLE text ADD FOREIGN KEY (cur_rev_id) REFERENCES text_revision(id)',
			'INSERT INTO `person` (id, slug, name, orig_name, real_name, oreal_name, last_name, country, role, info) SELECT id, name, name, orig_name, real_name, oreal_name, last_name, country, role, info FROM `%olddb%`.`person`',
			'INSERT INTO `person` (slug, person_id, name, last_name, orig_name, type) SELECT CONCAT(name, id), person, name, last_name, orig_name, type FROM `%olddb%`.`person_alt`',
			'INSERT INTO `book_author` (book_id, person_id) SELECT book, author FROM `%olddb%`.`book_author`',
			'INSERT INTO `book_text` (book_id, text_id, pos, share_info) SELECT * FROM `%olddb%`.`book_text`',
			'ALTER TABLE text_comment DROP FOREIGN KEY text_comment_ibfk_3',
			'INSERT INTO `text_comment` (id, text_id, rname, user_id, content, contenthash, time, ip, replyto_id, is_shown) SELECT * FROM `%olddb%`.`comment`',
			'ALTER TABLE text_comment ADD FOREIGN KEY (replyto_id) REFERENCES text_comment(id)',
			'INSERT INTO `text_header` (text_id, nr, level, name, fpos, linecnt) SELECT * FROM `%olddb%`.`header`',
			'INSERT INTO `label` (id, slug, name) SELECT id, name, name FROM `%olddb%`.`label`',
			'INSERT INTO `label_log` SELECT * FROM `%olddb%`.`label_log`',
			'INSERT INTO `question` SELECT * FROM `%olddb%`.`question`',
			'INSERT INTO `series_author` (person_id, series_id) SELECT person, series FROM `%olddb%`.`ser_author_of`',
			'INSERT INTO `text_author` (person_id, text_id, pos, year) SELECT * FROM `%olddb%`.`author_of`',
			'INSERT INTO `text_label` (text_id, label_id) SELECT text, label FROM `%olddb%`.`text_label`',
			'INSERT INTO `text_rating` (text_id, user_id, rating, date) SELECT * FROM `%olddb%`.`text_rating`',
			'INSERT INTO `user_text_read` (user_id, text_id, created_at) SELECT * FROM `%olddb%`.`reader_of`',
			'INSERT INTO `text_translator` (person_id, text_id, pos, year) SELECT * FROM `%olddb%`.`translator_of`',
			'INSERT INTO `work_entry` (id, type, title, author, user_id, comment, date, status, progress, is_frozen, tmpfiles, tfsize, uplfile) SELECT * FROM `%olddb%`.`work`',
			'INSERT INTO `work_contrib` (id, entry_id, user_id, comment, progress, is_frozen, date, uplfile) SELECT * FROM `%olddb%`.`work_multi`',
			'INSERT INTO `user_text_contrib` (user_id, text_id, size, percent) SELECT * FROM `%olddb%`.`user_text`',
			'INSERT INTO wiki_site (code, name, url, intro) VALUES
				("w", "Уикипедия", "http://bg.wikipedia.org/wiki/$1", "По-долу е показана статията за $1 от свободната енциклопедия <a href=\"http://bg.wikipedia.org/\">Уикипедия</a>, която може да се допълва и подобрява от своите читатели. <small>Текстовото й съдържание се разпространява при условията на лиценза „<a href=\"http://creativecommons.org/licenses/by-sa/3.0/\">Криейтив Комънс Признание — Споделяне на споделеното 3.0</a>“</small>."),
				("f", "БГ-Фантастика", "http://bgf.zavinagi.org/index.php/$1", "По-долу е показана статията за $1 от свободната енциклопедия <a href=\"http://bgf.zavinagi.org/\">БГ-Фантастика</a>, която може да се допълва и подобрява от своите читатели. <small>Текстовото й съдържание се разпространява при условията на <a href=\"http://www.gnu.org/copyleft/fdl.html\">GNU Free Documentation License 1.2</a></small>."),
				("m", "Моята библиотека", "http://wiki.chitanka.info/Личност:$1", "")',
			'INSERT INTO book_site (name, url) VALUES
				("SFBG", "http://sfbg.us/book/BOOKID"),
				("ПУК!", "http://biblio.stage.bg/index.php?newsid=BOOKID")',
		);
	}
}
<file_sep><?php
namespace Chitanka\LibBundle\Entity;
/**
*
*/
class LabelRepository extends EntityRepository
{
public function findBySlug($slug)
{
return $this->findOneBy(array('slug' => $slug));
}
public function getAll()
{
return $this->getQueryBuilder()
->orderBy('e.name', 'asc')
->getQuery()
->getArrayResult();
}
/**
* RAW_SQL
*/
public function getAllAsTree()
{
$labels = $this->_em->getConnection()->fetchAll('SELECT * FROM label ORDER BY name');
$labels = $this->convertArrayToTree($labels);
return $labels;
}
protected function convertArrayToTree($labels)
{
$labelsById = array();
foreach ($labels as $i => $label) {
$labelsById[ $label['id'] ] =& $labels[$i];
}
foreach ($labels as $i => $label) {
if ($label['parent_id']) {
$labelsById[$label['parent_id']]['children'][] =& $labels[$i];
}
}
return $labels;
}
public function getNames()
{
return $this->_em->createQueryBuilder()
->from($this->getEntityName(), 'l')->select('l.id, l.name')
->getQuery()->getResult('key_value');
}
public function getByNames($name)
{
return $this->getQueryBuilder()
->where('e.name LIKE ?1')
->setParameter(1, $this->stringForLikeClause($name))
->getQuery()
->getArrayResult();
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Tests\Controller;
class StatisticsControllerTest extends WebTestCase
{
/**
* @group html
*/
public function testIndex()
{
$page = $this->request('statistics');
$this->assertHtmlPageIs($page, 'statistics');
$this->assertCountGe(3, $page->filter('table.content'));
}
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
class ListPage extends Page {
const
PARAM_OBJ = 'o'
;
protected
$action = 'list',
$outFormats = array(
'html',
'csv',
),
$objects = array(
'title' => 'Опростен списък на заглавията',
'author' => 'Опростен списък на авторите',
'translator' => 'Опростен списък на преводачите',
),
$_objRawSep = '.'
;
public function __construct()
{
parent::__construct();
$this->title = 'Опростени списъци';
$this->objectRaw = $this->request->value(self::PARAM_OBJ, '', 1);
}
protected function buildContent()
{
if ( empty( $this->objectRaw ) ) {
return $this->getObjectsList();
}
$this->_initObjectVars( $this->objectRaw );
$listerClass = $this->object . 'List';
$lister = new $listerClass($this->db, $this);
$format = 'format' . ucfirst($this->outFormat);
return $this->$format( $lister->getList() );
}
protected function _initObjectVars($raw)
{
$parts = explode($this->_objRawSep, $raw);
$this->object = Legacy::normVal(@$parts[0], array_keys($this->objects));
$this->outFormat = Legacy::normVal(@$parts[1], $this->outFormats);
$this->title = $this->objects[ $this->object ];
}
public function getObjectsList()
{
$items = array();
foreach ( $this->objects as $object => $title ) {
$links = array();
foreach ( $this->outFormats as $format ) {
$params = array(
self::FF_ACTION => $this->action,
self::PARAM_OBJ => $object . $this->_objRawSep . $format,
);
$links[] = $this->out->internLink($format, $params, 2);
}
$items[] = "$title: " . implode(', ', $links);
}
return $this->out->ulist($items);
}
protected function formatHtml($data)
{
foreach ( $data[0] as $k => $v ) {
$data[0][$k] = array(
array('type' => 'header'),
$v
);
}
$this->addStyle('#navigation {display:none}');
return $this->out->simpleTable($this->title, $data);
}
protected function formatCsv($data)
{
$this->contentType = 'text/csv';
$this->fullContent = $this->getAsCsv($data);
return '';
}
/** Format an array as CSV */
public function getAsCsv($data, $valSep = ',', $valDelim = '"')
{
$o = '';
foreach ($data as $row) {
$o .= $valDelim
. implode($valDelim . $valSep . $valDelim, $row)
. $valDelim . "\n";
}
return $o;
}
}
abstract class ObjectList {
protected
// used also for ordering
$cols = array(),
$dbcols = array(),
$_sqlQuery = '',
// this will contain the columns which should be translated somehow
$_translate = array(),
// will contain the generated list data
$data = array()
;
public function __construct($db, $page)
{
$this->_db = $db;
$this->_page = $page;
$this->_init();
}
public function getList()
{
$this->data[] = array_values( $this->cols );
$this->_populateList();
return $this->data;
}
protected function _init() {}
protected function _getSqlQuery()
{
$sel = '';
foreach ( $this->cols as $col => $_ ) {
$sel .= ',' . (isset($this->dbcols[$col]) ? $this->dbcols[$col] : $col);
}
$this->_sqlQuery['SELECT'] = ltrim($sel, ',');
return $this->_db->extselectQ( $this->_sqlQuery );
}
protected function _populateList()
{
$this->_initTranslateList();
$this->_db->iterateOverResult($this->_getSqlQuery(), 'getListItem', $this);
}
public function getListItem($dbrow)
{
$this->data[] = $this->_translateDbRow($dbrow);
return '';
}
protected function _translateDbRow($dbrow)
{
foreach ( $this->_translate as $key => $tmethod ) {
$dbrow[$key] = $this->$tmethod($dbrow[$key], $dbrow);
}
return $dbrow;
}
protected function _initTranslateList()
{
if ( ! empty( $this->_translate ) ) {
return;
}
foreach ($this->cols as $col => $_) {
$method = $col;
if ( method_exists($this, $method) ) {
$this->_translate[$col] = $method;
}
}
}
}
class TitleList extends ObjectList {
protected
// used also for ordering
$cols = array(
'id' => '№',
'author' => 'Автор',
'title' => 'Заглавие',
//'sernr' => '',
'series' => 'Серия',
'translator' => 'Преводач',
'year' => 'Година',
'orig_title' => 'Оригинално заглавие',
'trans_year' => 'Година на превод',
'type' => 'Форма',
'labels' => 'Етикети',
'lastedit' => 'Посл. редакция',
),
$dbcols = array(
'id' => 't.id',
'author' => 'GROUP_CONCAT( DISTINCT a.name ORDER BY aof.pos SEPARATOR ", " )',
'title' => 't.title',
//'sernr' => 't.sernr',
'series' => 's.name',
'translator' => 'GROUP_CONCAT( DISTINCT tr.name ORDER BY tof.pos SEPARATOR ", " )',
'year' => 't.year',
'orig_title' => 't.orig_title',
'trans_year' => 't.trans_year',
'type' => 't.type',
'labels' => 'GROUP_CONCAT( DISTINCT l.name SEPARATOR ", " )',
'lastedit' => 'h.date',
)
;
protected function _init()
{
$this->_sqlQuery = array(
'FROM' => DBT_TEXT . ' t',
'LEFT JOIN' => array(
DBT_AUTHOR_OF . ' aof' => 't.id = aof.text',
DBT_PERSON . ' a' => 'aof.person = a.id',
DBT_SERIES . ' s' => 't.series = s.id',
DBT_TRANSLATOR_OF . ' tof' => 't.id = tof.text',
DBT_PERSON . ' tr' => 'tof.person = tr.id',
DBT_TEXT_LABEL . ' tl' => 't.id = tl.text',
DBT_LABEL . ' l' => 'tl.label = l.id',
DBT_EDIT_HISTORY . ' h' => 't.lastedit = h.id',
),
'GROUP BY' => 't.id',
'ORDER BY' => 'a.last_name, t.title, s.name, t.sernr'
);
}
public function type($dbval, $dbrow)
{
return work_type($dbval);
}
public function year($dbval, $dbrow)
{
return $this->_page->makeYearView($dbval);
}
public function trans_year($dbval, $dbrow)
{
return $this->_page->makeYearView($dbval);
}
}
abstract class PersonList extends ObjectList {
protected
// used also for ordering
$cols = array(
'name' => 'Име',
'orig_name' => 'Оригинално изписване',
'country' => 'Държава',
'alt_names' => 'Псевдоними',
),
$dbcols = array(
'name' => 'p.name',
'orig_name' => 'p.orig_name',
'country' => 'p.country',
'alt_names' => 'GROUP_CONCAT( DISTINCT alt.name SEPARATOR "; " )',
),
$_dbRoleBit = 0
;
protected function _init()
{
$this->_sqlQuery = array(
'FROM' => DBT_PERSON . ' p',
'WHERE' => array("role & $this->_dbRoleBit"),
'LEFT JOIN' => array(
DBT_PERSON_ALT . ' alt' => 'alt.person = p.id',
),
'GROUP BY' => 'p.id',
'ORDER BY' => 'p.last_name, p.name'
);
}
public function country($dbval, $dbrow)
{
return country_name($dbval);
}
}
class AuthorList extends PersonList {
protected $_dbRoleBit = 1;
}
class TranslatorList extends PersonList {
protected $_dbRoleBit = 2;
}
<file_sep><?php
namespace Chitanka\LibBundle\Legacy;
class CacheHolder {
private
$_bin = array();
public function exists($key)
{
return array_key_exists($key, $this->_bin);
}
public function get($key)
{
return array_key_exists($key, $this->_bin)
? $this->_bin[$key]
: false;
}
public function set($key, $value)
{
return $this->_bin[$key] = $value;
}
public function clear()
{
$this->_bin = array();
}
}<file_sep><?php
namespace Chitanka\LibBundle\Command;
use Symfony\Bundle\FrameworkBundle\Command\ContainerAwareCommand;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Chitanka\LibBundle\Util\Fb2Validator;
use Doctrine\ORM\EntityManager;
use Chitanka\LibBundle\Entity\Text;
use Chitanka\LibBundle\Entity\Book;
use Chitanka\LibBundle\Legacy\Setup;
class ValidateFb2Command extends ContainerAwareCommand
{
/** @var EntityManager */
private $em;
protected function configure()
{
parent::configure();
$command = 'lib:validate-fb2';
$this
->setName($command)
->addArgument('id', InputArgument::IS_ARRAY, 'A text or a book ID or an ID range')
->setDescription('Validate FB2 archives of texts and books')
->setHelp(<<<EOT
The <info>$command</info> allows validation of text and book archives.
Example calls:
<info>$command</info> text:1
<info>$command</info> 1
Validate the text with an ID 1
<info>$command</info> text:1-10
<info>$command</info> 1-10
Validates texts with IDs between 1 and 10
<info>$command</info> book:1-10
Validates books with IDs between 1 and 10
<info>$command</info> text:1-5 book:1-10
Validates texts with IDs between 1 and 5, and books with IDs between 1 and 10
EOT
);
}
/**
* Executes the current command.
*
* @param InputInterface $input An InputInterface instance
* @param OutputInterface $output An OutputInterface instance
*
* @return integer 0 if everything went fine, or an error code
*
* @throws \LogicException When this abstract class is not implemented
*/
protected function execute(InputInterface $input, OutputInterface $output)
{
$this->em = $this->getContainer()->get('doctrine.orm.default_entity_manager');
$this->output = $output;
Setup::doSetup($this->getContainer());
list($textIds, $bookIds) = $this->parseInputIds($input->getArgument('id'));
$this->validator = new Fb2Validator();
$this->validateTexts($textIds);
$this->validateBooks($bookIds);
}
private function parseInputIds($inputIds)
{
$ids = array(
'text' => array(),
'book' => array(),
);
foreach ($inputIds as $inputId) {
if (strpos($inputId, ':') === false) {
$inputId = 'text:'.$inputId;
}
list($type, $idRange) = explode(':', $inputId);
if (strpos($idRange, '-') !== false) {
list($firstId, $lastId) = explode('-', $idRange);
$ids[$type] = array_merge($ids[$type], range($firstId, $lastId));
} else {
$ids[$type][] = (int) $idRange;
}
}
foreach ($ids as $type => $typeIds) {
$ids[$type] = array_unique($typeIds);
}
return array_values($ids);
}
private function validateTexts($textIds)
{
$this->validateWorks($textIds, 'Text');
}
private function validateBooks($bookIds)
{
$this->validateWorks($bookIds, 'Book');
}
private function validateWorks($workIds, $entity)
{
foreach ($workIds as $workId) {
$work = $this->em->getRepository("LibBundle:$entity")->find($workId);
if (!$work) {
continue;
}
$this->output->writeln("Validating $entity $workId");
$fb2 = $work->getContentAsFb2();
if (!$fb2) {
continue;
}
if (!$this->validator->isValid($fb2)) {
$this->saveFileInTmpDir($entity.'-'.$work->getId().'.fb2', $fb2);
throw new \Exception($this->validator->getErrors());
}
}
}
private function saveFileInTmpDir($filename, $contents)
{
file_put_contents(sys_get_temp_dir().'/'.$filename, $contents);
}
}
| 864b3be372ccdccc88b80bf7410a3832a23d4f98 | [
"Markdown",
"PHP"
] | 136 | PHP | ivko/chitanka | 43f8fc989fc3acfbb6aa944fb210310320742dc1 | 1633ec10b0b719c316def31355e1d4c03c9be194 |
refs/heads/main | <repo_name>Aksh-Bansal-dev/findrs<file_sep>/src/utils/mod.rs
use regex::Regex;
pub mod help;
pub fn is_present(pattern:&str, s: &str, case_sensitive: bool, is_regex:bool)-> bool{
let arr: Vec<&str> = s.split(' ').collect();
if is_regex{
let re = Regex::new(pattern).expect("Invalid regex");
for str in arr{
if re.is_match(str){
return true;
}
}
}
else {
for str in arr{
if str==pattern{
return true;
}
else if case_sensitive && str.to_lowercase() == pattern.to_lowercase() {
return true;
}
}
}
false
}
<file_sep>/src/utils/help.rs
pub fn print_help(){
print!(
"\n\
findrs: search for patterns in files\n\n\
Usage: findr <PATTERN> <FILE_PATH> [OPTIONS]\n\n\
Options: \n\
\t{: <20}\t {: <40}\n\
\t{: <20}\t {: <40}\n\
\t{: <20}\t {: <40}\n\
",
"-i, --ignore-case" ,
"Case insensitive search",
"-h, --help",
"display help for findrs",
"-d, --dir",
"search all the files in the directory"
);
}
<file_sep>/src/main.rs
use colored::*;
mod utils;
struct Arg{
pattern: String,
path: String,
ignore_case: bool,
is_dir: bool,
regex: bool
}
fn main() {
let len = std::env::args().len();
let mut arg: Arg = Arg{
pattern: std::env::args().nth(len-2).expect("no pattern given"),
path: std::env::args().nth(len-1).expect("no path given"),
ignore_case: false,
is_dir: false,
regex: false
};
for i in 1..len{
let cur = std::env::args().nth(i).unwrap();
match cur.as_str() {
"-i" | "--ignore-case" => arg.ignore_case = true,
"-d" | "--dir" => arg.is_dir = true,
"-r" | "--regex" => arg.regex = true,
"-h" | "--help" => { utils::help::print_help(); return},
_ => ()
}
}
// Execution Time start
let time_start = std::time::SystemTime::now();
if arg.is_dir{
visit_dirs(&arg.path, &arg).expect("Invalid path");
}
else{
search_file(&arg.path, &arg);
}
print!("\nExecuted in {}ms\n", time_start.elapsed().unwrap().as_millis());
}
fn visit_dirs(dir: &str, arg:&Arg) -> std::io::Result<()> {
for entry in std::fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
visit_dirs(&path.to_str().unwrap(), arg)?;
} else {
search_file(&path.to_str().unwrap(), arg);
}
}
Ok(())
}
fn search_file(path:&str, arg:&Arg){
let contents = match std::fs::read_to_string(path){
Ok(v)=>v,
_ => return
};
let arr: Vec<&str> = contents.split('\n').collect();
let mut res = String::new();
let mut check = false;
for i in 0..arr.len(){
if utils::is_present(&arg.pattern, arr[i], arg.ignore_case, arg.regex) {
check = true;
res.push_str(&format!("{} {}\n", (i+1).to_string().yellow(), arr[i]));
}
}
if check {
print!("\n[{}]\n{}",get_filepath(path).blue(), res) ;
}
}
fn get_filepath(path:&str)->&str{
path
}<file_sep>/README.md
# findrs
Search for patterns in files
## How to install
```
cargo install --locked --all-features \
--git https://github.com/Aksh-Bansal-dev/findrs
```
## How to use
```
Usage: findrs <PATTERN> <PATH> [OPTIONS]
Options:
-i, --ignore-case Case insensitive search
-h, --help display help for findrs
-d, --dir search all the files in the directory
```
#### Example
```
findrs -d -r "^then" ./demo
```
This will search for 'then' at the beginning of line in the demo folder recursively.
> Note: if you want to try without installing(not recommended) `cargo run -- -d -r "^then" ./demo`
| 0fe7684b63b80d29b8ad2c8c90daebc7c139134b | [
"Markdown",
"Rust"
] | 4 | Rust | Aksh-Bansal-dev/findrs | 3c6c17ee6c0e3d0564e75c914f3ca8bf2a7db026 | 7a631731c2f13adf9067ad56e564e4e9ebde16fe |
refs/heads/master | <repo_name>seco/Lua-RTOS-ESP32<file_sep>/components/lua_rtos/Lua/platform/adc.c
#include "luartos.h"
#if LUA_USE_ADC
#include "Lua/modules/adc.h"
#include <drivers/adc/adc.h>
// Check whether an ADC unit id is valid on this platform.
// Note: id is unsigned, so the previous "(id >= 0)" half of the test was a
// tautology (always true); only the upper bound against NADC is meaningful.
// Behavior is unchanged.
int platform_adc_exists( unsigned id ) {
    return (id <= NADC);
}
/*
u32 platform_adc_get_maxval( unsigned id )
{
return pow( 2, ADC_BIT_RESOLUTION ) - 1;
}
u32 platform_adc_set_smoothing( unsigned id, u32 length )
{
return adc_update_smoothing( id, ( u8 )intlog2( ( unsigned ) length ) );
}
void platform_adc_set_blocking( unsigned id, u32 mode )
{
adc_get_ch_state( id )->blocking = mode;
}
void platform_adc_set_freerunning( unsigned id, u32 mode )
{
adc_get_ch_state( id )->freerunning = mode;
}
u32 platform_adc_is_done( unsigned id )
{
return adc_get_ch_state( id )->op_pending == 0;
}
void platform_adc_set_timer( unsigned id, u32 timer )
{
elua_adc_dev_state *d = adc_get_dev_state( 0 );
if ( d->timer_id != timer )
d->running = 0;
platform_adc_stop( id );
d->timer_id = timer;
}
*/
#endif
<file_sep>/components/lua_rtos/luartos.h
#ifndef LUA_RTOS_LUARTOS_H_
#define LUA_RTOS_LUARTOS_H_
#include "sdkconfig.h"
// Central build-time configuration for Lua RTOS: maps Kconfig (sdkconfig)
// options to internal macros, supplying hard-coded defaults whenever the
// corresponding CONFIG_* option is absent.
/*
 *
 * UART
 *
 */
// Use console?
#ifdef CONFIG_USE_CONSOLE
#define USE_CONSOLE CONFIG_USE_CONSOLE
#else
#define USE_CONSOLE 1
#endif
// Get the UART assigned to the console
// NOTE: console UART ids are 1-based here (hardware UART0 -> 1, etc.)
#if CONFIG_LUA_RTOS_CONSOLE_UART0
#define CONSOLE_UART 1
#endif
#if CONFIG_LUA_RTOS_CONSOLE_UART1
#define CONSOLE_UART 2
#endif
#if CONFIG_LUA_RTOS_CONSOLE_UART2
#define CONSOLE_UART 3
#endif
// Get the console baud rate
#if CONFIG_LUA_RTOS_CONSOLE_BR_57600
#define CONSOLE_BR 57600
#endif
#if CONFIG_LUA_RTOS_CONSOLE_BR_115200
#define CONSOLE_BR 115200
#endif
// Get the console buffer length
#ifdef CONFIG_LUA_RTOS_CONSOLE_BUFFER_LEN
#define CONSOLE_BUFFER_LEN CONFIG_LUA_RTOS_CONSOLE_BUFFER_LEN
#else
#define CONSOLE_BUFFER_LEN 1024
#endif
// Fallback defaults when no console UART / baud rate option was selected
#ifndef CONSOLE_UART
#define CONSOLE_UART 1
#endif
#ifndef CONSOLE_BR
#define CONSOLE_BR 115200
#endif
// SPIFFS?
#define SPIFFS_ERASE_SIZE 4096
#define SPIFFS_LOG_PAGE_SIZE 256
#if CONFIG_LUA_RTOS_USE_SPIFFS
#define USE_SPIFFS CONFIG_LUA_RTOS_USE_SPIFFS
#else
#define USE_SPIFFS 0
#endif
#ifdef CONFIG_LUA_RTOS_SPIFFS_LOG_BLOCK_SIZE
#define SPIFFS_LOG_BLOCK_SIZE CONFIG_LUA_RTOS_SPIFFS_LOG_BLOCK_SIZE
#else
#define SPIFFS_LOG_BLOCK_SIZE 8192
#endif
#ifdef CONFIG_LUA_RTOS_SPIFFS_BASE_ADDR
#define SPIFFS_BASE_ADDR CONFIG_LUA_RTOS_SPIFFS_BASE_ADDR
#else
#define SPIFFS_BASE_ADDR 180000
#endif
#ifdef CONFIG_LUA_RTOS_SPIFFS_SIZE
#define SPIFFS_SIZE CONFIG_LUA_RTOS_SPIFFS_SIZE
#else
#define SPIFFS_SIZE 524288
#endif
// LoRa WAN
// Radio and band selection default to SX1276 / EU868 if nothing is chosen.
#if CONFIG_LUA_RTOS_USE_LMIC
#define USE_LMIC 1
#else
#define USE_LMIC 0
#endif
#if CONFIG_LUA_RTOS_LORAWAN_RADIO_SX1276
#define CFG_sx1276_radio 1
#else
#if CONFIG_LUA_RTOS_LORAWAN_RADIO_SX1272
#define CFG_sx1272_radio 1
#else
#define CFG_sx1276_radio 1
#endif
#endif
#if CONFIG_LUA_RTOS_LORAWAN_BAND_EU868
#define CFG_eu868 1
#else
#if CONFIG_LUA_RTOS_LORAWAN_BAND_US915
#define CFG_us915 1
#else
#define CFG_eu868 1
#endif
#endif
// LMIC pin mapping (SPI unit, reset, chip select, DIO lines)
#ifdef CONFIG_LUA_RTOS_LMIC_SPI
#define LMIC_SPI CONFIG_LUA_RTOS_LMIC_SPI
#else
#define LMIC_SPI 3
#endif
#ifdef CONFIG_LUA_RTOS_LMIC_RST
#define LMIC_RST CONFIG_LUA_RTOS_LMIC_RST
#else
#define LMIC_RST 27
#endif
#ifdef CONFIG_LUA_RTOS_LMIC_CS
#define LMIC_CS CONFIG_LUA_RTOS_LMIC_CS
#else
#define LMIC_CS 5
#endif
#ifdef CONFIG_LUA_RTOS_LMIC_DIO0
#define LMIC_DIO0 CONFIG_LUA_RTOS_LMIC_DIO0
#else
#define LMIC_DIO0 26
#endif
#ifdef CONFIG_LUA_RTOS_LMIC_DIO1
#define LMIC_DIO1 CONFIG_LUA_RTOS_LMIC_DIO1
#else
#define LMIC_DIO1 25
#endif
#ifdef CONFIG_LUA_RTOS_LMIC_DIO2
#define LMIC_DIO2 CONFIG_LUA_RTOS_LMIC_DIO2
#else
#define LMIC_DIO2 33
#endif
// SD Card / FAT
// Enabling FAT implies enabling the SD card driver.
#if CONFIG_LUA_RTOS_USE_FAT
#define USE_FAT 1
#define USE_SD 1
#else
#define USE_FAT 0
#define USE_SD 0
#endif
#ifdef CONFIG_LUA_RTOS_SD_SPI
#define SD_SPI CONFIG_LUA_RTOS_SD_SPI
#else
#define SD_SPI 2
#endif
#ifdef CONFIG_LUA_RTOS_SD_CS
#define SD_CS CONFIG_LUA_RTOS_SD_CS
#else
#define SD_CS 15
#endif
// FreeRTOS thread-local-storage slot used by Lua RTOS
#define THREAD_LOCAL_STORAGE_POINTER_ID 0
#endif
<file_sep>/components/lua_rtos/sys/driver.h
/*
* Lua RTOS, driver basics
*
* Copyright (C) 2015 - 2016
* IBEROXARXA SERVICIOS INTEGRALES, S.L. & CSS IBÉRICA, S.L.
*
* Author: <NAME> (<EMAIL> / <EMAIL>)
*
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software
* and its documentation for any purpose and without fee is hereby
* granted, provided that the above copyright notice appear in all
* copies and that both that the copyright notice and this
* permission notice and warranty disclaimer appear in supporting
* documentation, and that the name of the author not be used in
* advertising or publicity pertaining to distribution of the
* software without specific, written prior permission.
*
* The author disclaim all warranties with regard to this
* software, including all implied warranties of merchantability
* and fitness. In no event shall the author be liable for any
* special, indirect or consequential damages or any damages
* whatsoever resulting from loss of use, data or profits, whether
* in an action of contract, negligence or other tortious action,
* arising out of or in connection with the use or performance of
* this software.
*/
#ifndef DRIVER_H
#define DRIVER_H

#include "luartos.h"

#include <sys/list.h>
#include <sys/resource.h>

#define LORA_DRIVER_ID 1

// Build the exception base for a driver id: the driver id is encoded in the
// top byte of an exception code. The argument is now parenthesized so the
// macro also expands correctly for non-trivial expressions
// (e.g. DRIVER_EXCEPTION_BASE(a + b)). Existing call sites pass simple
// constants, so behavior is unchanged.
#define DRIVER_EXCEPTION_BASE(n) ((n) << 24)

// Static description of a driver: its name, the base value of its exception
// codes, and a pointer to its table of error-message strings.
typedef struct {
	const char *name;
	const int exception_base;
	const void *error;
} driver_t;

typedef enum {
	LOCK,     // Someone needs a resource which is locked
	SETUP,    // Something fails during setup
	OPERATION // Something fails during normal operation
} driver_error_type;

// A concrete error raised by a driver. For LOCK errors the resource/owner
// fields are filled in; for SETUP/OPERATION errors the driver, exception
// code and message are filled in.
typedef struct {
	driver_error_type type;
	resource_type_t resource;
	int resource_unit;
	resource_owner_t owner;
	int owner_unit;
	int id;
	const char *msg;
	int exception;
	const driver_t *driver;
} driver_error_t;

// Look up a registered driver by name (NULL if not found).
const driver_t *driver_get(const char *name);
// Resolve the human-readable message for a driver error.
const char *driver_get_err_msg(driver_error_t *error);
// Name of the driver that produced an error.
const char *driver_get_name(driver_error_t *error);
// Error constructors. Each allocates a driver_error_t on the heap;
// driver_lock_error additionally consumes (frees) the lock descriptor.
driver_error_t *driver_lock_error(resource_lock_t *lock);
driver_error_t *driver_setup_error(const driver_t *driver, unsigned int code, const char *msg);
driver_error_t *driver_operation_error(const driver_t *driver, unsigned int code, const char *msg);

#endif
<file_sep>/components/lua_rtos/drivers/gpio.c
/*
* Lua RTOS, gpio driver
*
* Copyright (C) 2015 - 2016
* IBEROXARXA SERVICIOS INTEGRALES, S.L. & CSS IBÉRICA, S.L.
*
* Author: <NAME> (<EMAIL> / <EMAIL>)
*
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software
* and its documentation for any purpose and without fee is hereby
* granted, provided that the above copyright notice appear in all
* copies and that both that the copyright notice and this
* permission notice and warranty disclaimer appear in supporting
* documentation, and that the name of the author not be used in
* advertising or publicity pertaining to distribution of the
* software without specific, written prior permission.
*
* The author disclaim all warranties with regard to this
* software, including all implied warranties of merchantability
* and fitness. In no event shall the author be liable for any
* special, indirect or consequential damages or any damages
* whatsoever resulting from loss of use, data or profits, whether
* in an action of contract, negligence or other tortious action,
* arising out of or in connection with the use or performance of
* this software.
*/
#include <drivers/gpio.h>
#include <drivers/cpu.h>
// Configure as input every gpio whose bit is set to 1 in pinmask.
void gpio_pin_input_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_input(pin);
		}
	}
}
// Configure all gpio's port as input
// Thin wrapper: applies gpio_pin_input_mask() to every usable pin (GPIO_ALL).
void gpio_port_input(unsigned int port) {
	gpio_pin_input_mask(port, GPIO_ALL);
}
// Configure as output every gpio whose bit is set to 1 in pinmask.
void gpio_pin_output_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_output(pin);
		}
	}
}
// Configure all gpio's port as output
// Thin wrapper: applies gpio_pin_output_mask() to every usable pin (GPIO_ALL).
void gpio_port_output(unsigned int port) {
	gpio_pin_output_mask(port, GPIO_ALL);
}
// Enable the internal pull-up on every gpio whose bit is set in pinmask.
void gpio_pin_pullup_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_pullup(pin);
		}
	}
}
// Enable the internal pull-down on every gpio whose bit is set in pinmask.
void gpio_pin_pulldwn_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_pulldwn(pin);
		}
	}
}
// Disable both pull-up and pull-down on every gpio whose bit is set
// in pinmask.
void gpio_pin_nopull_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_nopull(pin);
		}
	}
}
// Drive high every gpio whose bit is set to 1 in pinmask.
void gpio_pin_set_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_set(pin);
		}
	}
}
// Put port gpio's on the high state
// If bit n on mask is set to 1 the gpio is put on the high state.
// This previously duplicated the loop of gpio_pin_set_mask() verbatim;
// delegating keeps a single implementation (mirroring how gpio_port_input /
// gpio_port_output delegate to their *_mask variants) and avoids divergence.
void gpio_port_set(unsigned int port, gpio_port_mask_t pinmask) {
	gpio_pin_set_mask(port, pinmask);
}
// Drive low every gpio whose bit is set to 1 in pinmask.
void gpio_pin_clr_mask(unsigned int port, gpio_port_mask_t pinmask) {
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			gpio_pin_clr(pin);
		}
	}
}
// Get gpio values using a mask: the returned mask has bit n set to the
// current level of gpio n, for every n whose bit is set in pinmask.
gpio_port_mask_t gpio_pin_get_mask(unsigned int port, gpio_port_mask_t pinmask) {
	gpio_port_mask_t result = 0;
	int pin;

	for (pin = 0; pin < GPIO_PER_PORT; pin++) {
		if (pinmask & (((gpio_port_mask_t)1) << pin)) {
			// Widen before shifting: the original shifted the (presumably
			// int-typed) result of gpio_pin_get() directly, and shifting a
			// 32-bit int by >= 32 (pins 32..38 exist on ESP32) is undefined
			// behavior. Casting to the 64-bit mask type first makes the
			// shift well-defined for all pins.
			result |= ((gpio_port_mask_t)gpio_pin_get(pin)) << pin;
		}
	}

	return result;
}
// Get port gpio values
// Samples the whole port: every pin in GPIO_ALL is read.
gpio_port_mask_t gpio_port_get(unsigned int port) {
	return gpio_pin_get_mask(port, GPIO_ALL);
}
// Get all port gpio values
// NOTE(review): currently identical to gpio_port_get() — it ignores any
// caller-specific mask and always reads GPIO_ALL. Confirm whether this
// variant was meant to take a pinmask parameter.
gpio_port_mask_t gpio_port_get_mask(unsigned int port) {
	return gpio_pin_get_mask(port, GPIO_ALL);
}
// ESP32 exposes a single GPIO port, so every pin maps to port '0'.
char gpio_portname(int pin) {
	return '0';
}
// With a single flat port, the pin number is the GPIO number itself.
int gpio_pinno(int pin) {
	return pin;
}
// Disable the analog function of a pin.
// Intentional no-op on this port: the empty body is kept only so the
// platform API stays uniform across Lua RTOS targets.
void gpio_disable_analog(int pin) {
}
<file_sep>/components/lua_rtos/Lua/modules/spi.h
#ifndef LSPI_H
#define LSPI_H
#include "drivers/spi/spi.h"
// Per-instance state stored in the Lua userdata for an SPI object.
typedef struct {
    unsigned char spi;   // SPI unit number
    unsigned char cs;    // chip-select pin
    unsigned int speed;  // bus speed — presumably Hz, confirm against driver
    unsigned int mode;   // SPI mode (clock polarity/phase)
} spi_userdata;
// Width of a single SPI data word as exchanged with the driver layer.
typedef u32 spi_data_type;
#endif
<file_sep>/components/lua_rtos/sys/driver.c
/*
* Lua RTOS, driver basics
*
* Copyright (C) 2015 - 2016
* IBEROXARXA SERVICIOS INTEGRALES, S.L. & CSS IBÉRICA, S.L.
*
* Author: <NAME> (<EMAIL> / <EMAIL>)
*
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software
* and its documentation for any purpose and without fee is hereby
* granted, provided that the above copyright notice appear in all
* copies and that both that the copyright notice and this
* permission notice and warranty disclaimer appear in supporting
* documentation, and that the name of the author not be used in
* advertising or publicity pertaining to distribution of the
* software without specific, written prior permission.
*
* The author disclaim all warranties with regard to this
* software, including all implied warranties of merchantability
* and fitness. In no event shall the author be liable for any
* special, indirect or consequential damages or any damages
* whatsoever resulting from loss of use, data or profits, whether
* in an action of contract, negligence or other tortious action,
* arising out of or in connection with the use or performance of
* this software.
*/
#include "luartos.h"
#include <stdlib.h>
#include <string.h>
#include <sys/driver.h>
extern const char *lora_lmic_errors[11];
// Registry of available drivers. Terminated by a {NULL, 0, NULL} sentinel,
// which driver_get() relies on to stop scanning.
const driver_t drivers[] = {
#if USE_LMIC
	{"lora", DRIVER_EXCEPTION_BASE(LORA_DRIVER_ID), (void *)lora_lmic_errors},
#endif
	{NULL, 0, NULL}
};
// Get driver info by its name.
// Returns a pointer into the static drivers[] table, or NULL if no driver
// with that name is registered.
const driver_t *driver_get(const char *name) {
	const driver_t *cdriver;

	cdriver = drivers;
	while (cdriver->name) {
		if (strcmp(name, cdriver->name) == 0) {
			return cdriver;
		}

		// Advance to the next table entry. The original loop never
		// incremented the cursor, so any name that did not match the first
		// entry made the loop spin forever (and NULL was never returned).
		cdriver++;
	}

	return NULL;
}
// Get error message string from a driver error.
// The driver's "error" field points at an array of message-string pointers.
// The index is the exception number with the driver-specific base stripped
// off (~exception_base & exception); the expression advances the base
// pointer by index * sizeof(void *) and dereferences the slot.
// NOTE(review): the slot is read through an (unsigned int *), which matches
// pointer size only on 32-bit targets such as the ESP32 — confirm this is
// intentional before reusing on a 64-bit platform. Also note the pointer
// arithmetic on a void* relies on a GCC extension.
const char *driver_get_err_msg(driver_error_t *error) {
	return (const char *)(*(unsigned int *)(error->driver->error + sizeof(void *) * (~error->driver->exception_base & error->exception)));
}
// Get the registered name of the driver that produced this error.
const char *driver_get_name(driver_error_t *error) {
	const driver_t *owner = error->driver;

	return owner->name;
}
// Build a LOCK-type driver error from a lock descriptor.
// The descriptor is consumed: it is freed in all cases, including when the
// allocation of the error structure fails (in which case NULL is returned).
driver_error_t *driver_lock_error(resource_lock_t *lock) {
	driver_error_t *err = (driver_error_t *)malloc(sizeof(driver_error_t));

	if (err) {
		err->type = LOCK;
		err->resource = lock->type;
		err->resource_unit = lock->unit;
		err->owner = lock->owner;
		err->owner_unit = lock->owner_unit;
	}

	free(lock);

	return err;
}
// Build a SETUP-type driver error (returns NULL if allocation fails).
driver_error_t *driver_setup_error(const driver_t *driver, unsigned int exception, const char *msg) {
	driver_error_t *err = (driver_error_t *)malloc(sizeof(driver_error_t));

	if (!err) {
		return NULL;
	}

	err->type = SETUP;
	err->driver = driver;
	err->exception = exception;
	err->msg = msg;

	return err;
}
// Build an OPERATION-type driver error (returns NULL if allocation fails).
driver_error_t *driver_operation_error(const driver_t *driver, unsigned int exception, const char *msg) {
	driver_error_t *err = (driver_error_t *)malloc(sizeof(driver_error_t));

	if (!err) {
		return NULL;
	}

	err->type = OPERATION;
	err->driver = driver;
	err->exception = exception;
	err->msg = msg;

	return err;
}
<file_sep>/components/lua_rtos/drivers/cpu.h
/*
* Lua RTOS, cpu driver
*
* Copyright (C) 2015 - 2016
* IBEROXARXA SERVICIOS INTEGRALES, S.L. & CSS IBÉRICA, S.L.
*
* Author: <NAME> (<EMAIL> / <EMAIL>)
*
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software
* and its documentation for any purpose and without fee is hereby
* granted, provided that the above copyright notice appear in all
* copies and that both that the copyright notice and this
* permission notice and warranty disclaimer appear in supporting
* documentation, and that the name of the author not be used in
* advertising or publicity pertaining to distribution of the
* software without specific, written prior permission.
*
* The author disclaim all warranties with regard to this
* software, including all implied warranties of merchantability
* and fitness. In no event shall the author be liable for any
* special, indirect or consequential damages or any damages
* whatsoever resulting from loss of use, data or profits, whether
* in an action of contract, negligence or other tortious action,
* arising out of or in connection with the use or performance of
* this software.
*/
#ifndef CPU_H
#define CPU_H
#include <stdint.h>
/*
 * ----------------------------------------------------------------
 * GPIO
 * ----------------------------------------------------------------
 */
// ESP32 pin constants
// (physical package pin number for each GPIO)
#define PIN_GPIO36 5
#define PIN_GPIO37 6
#define PIN_GPIO38 7
#define PIN_GPIO39 8
#define PIN_GPIO34 10
#define PIN_GPIO35 11
#define PIN_GPIO32 12
#define PIN_GPIO33 13
#define PIN_GPIO25 14
#define PIN_GPIO26 15
#define PIN_GPIO27 16
#define PIN_GPIO14 17
#define PIN_GPIO12 18
#define PIN_GPIO13 20
#define PIN_GPIO15 21
#define PIN_GPIO2 22
#define PIN_GPIO0 23
#define PIN_GPIO4 24
#define PIN_GPIO16 25
#define PIN_GPIO17 27
#define PIN_GPIO9 28
#define PIN_GPIO10 29
#define PIN_GPIO11 30
#define PIN_GPIO6 31
#define PIN_GPIO7 32
#define PIN_GPIO8 33
#define PIN_GPIO5 34
#define PIN_GPIO18 35
#define PIN_GPIO23 36
#define PIN_GPIO19 38
#define PIN_GPIO22 39
#define PIN_GPIO3 40
#define PIN_GPIO1 41
#define PIN_GPIO21 42
// ESP32 available GPIO pins
#define GPIO0 0
#define GPIO1 1
#define GPIO2 2
#define GPIO3 3
#define GPIO4 4
#define GPIO5 5
#define GPIO6 6
#define GPIO7 7
#define GPIO8 8
#define GPIO9 9
#define GPIO10 10
#define GPIO11 11
#define GPIO12 12
#define GPIO13 13
#define GPIO14 14
#define GPIO15 15
#define GPIO16 16
#define GPIO17 17
#define GPIO18 18
#define GPIO19 19
#define GPIO21 21
#define GPIO22 22
#define GPIO23 23
#define GPIO25 25
#define GPIO26 26
#define GPIO27 27
#define GPIO32 32
#define GPIO33 33
#define GPIO34 34
#define GPIO35 35
#define GPIO36 36
#define GPIO37 37
#define GPIO38 38
#define GPIO39 39
// ESP32 available pin names
#define GPIO0_NAME "GPIO0"
#define GPIO1_NAME "GPIO1"
#define GPIO2_NAME "GPIO2"
#define GPIO3_NAME "GPIO3"
#define GPIO4_NAME "GPIO4"
#define GPIO5_NAME "GPIO5"
#define GPIO6_NAME "GPIO6"
#define GPIO7_NAME "GPIO7"
#define GPIO8_NAME "GPIO8"
#define GPIO9_NAME "GPIO9"
#define GPIO10_NAME "GPIO10"
#define GPIO11_NAME "GPIO11"
#define GPIO12_NAME "GPIO12"
#define GPIO13_NAME "GPIO13"
#define GPIO14_NAME "GPIO14"
#define GPIO15_NAME "GPIO15"
#define GPIO16_NAME "GPIO16"
#define GPIO17_NAME "GPIO17"
#define GPIO18_NAME "GPIO18"
#define GPIO19_NAME "GPIO19"
#define GPIO21_NAME "GPIO21"
#define GPIO22_NAME "GPIO22"
#define GPIO23_NAME "GPIO23"
#define GPIO25_NAME "GPIO25"
#define GPIO26_NAME "GPIO26"
#define GPIO27_NAME "GPIO27"
#define GPIO32_NAME "GPIO32"
#define GPIO33_NAME "GPIO33"
#define GPIO34_NAME "GPIO34"
#define GPIO35_NAME "GPIO35"
#define GPIO36_NAME "GPIO36"
#define GPIO37_NAME "GPIO37"
#define GPIO38_NAME "GPIO38"
#define GPIO39_NAME "GPIO39"
// ESP32 has only 1 GPIO port
#define GPIO_PORTS 1
// ESP32 has 39 GPIO per port (the old comment claiming 16 was stale).
// NOTE(review): GPIO39 is defined above but a value of 39 here means the
// per-pin loops stop at pin 38, and GPIO_ALL below has only 39 bits —
// confirm whether GPIO39 is deliberately excluded.
#define GPIO_PER_PORT 39
// ESP32 needs 64 bits for port mask
typedef uint64_t gpio_port_mask_t;
#define GPIO_ALL 0b111111110000111011101111111111111111111UL
/*
 * ----------------------------------------------------------------
 * I2C (banner previously misspelled "IC2")
 * ----------------------------------------------------------------
 */
// Number of I2C units (hardware / software)
#define NI2CHW 0
#define NI2CBB 1
// ESP32 available bit bang i2c ids
#define I2CBB1 1
// ESP32 available bit bang i2c names
#define I2CBB1_NAME "I2CBB1"
#endif
void _cpu_init();
int cpu_revission();
void cpu_model(char *buffer);
void cpu_reset();
void cpu_show_info();
unsigned int cpu_pins();
void cpu_assign_pin(unsigned int pin, unsigned int by);
void cpu_release_pin(unsigned int pin);
unsigned int cpu_pin_assigned(unsigned int pin);
unsigned int cpu_pin_number(unsigned int pin);
unsigned int cpu_port_number(unsigned int pin);
unsigned int cpu_port_io_pin_mask(unsigned int port);
unsigned int cpu_port_adc_pin_mask(unsigned int port);
void cpu_idle(int seconds);
const char *cpu_pin_name(unsigned int pin);
const char *cpu_port_name(int pin);
unsigned int cpu_has_gpio(unsigned int port, unsigned int pin);
unsigned int cpu_has_port(unsigned int port);
void cpu_sleep(int seconds);
int cpu_reset_reason();<file_sep>/components/lua_rtos/drivers/i2chw.h
#if LUA_USE_I2C
#ifdef PLATFORM_ESP8266
#endif
#ifdef PLATFORM_PIC32MZ
#include <sys/drivers/platform/pic32mz/i2chw.h>
#endif
#endif | a311b3953e22e02f3793050f37bfdabb8379fd73 | [
"C"
] | 8 | C | seco/Lua-RTOS-ESP32 | a59e4e0773fe3e1ab34eab3027e2ea55f2fea7ec | 31431083acaf09cb31c8b9a6808421f256d334ce |
refs/heads/master | <file_sep>package testlog;
import org.apache.log4j.Logger;
import org.junit.Test;
// Smoke test for the log4j configuration: emits a single WARN-level message.
public class TestLog {
	@Test
	public void debug() {
		Logger log = Logger.getLogger(TestLog.class);
		// The commented-out calls below exercise the remaining log4j levels
		// (debug/info/error/fatal). The Chinese strings all read
		// "this is a <level> message".
//		log.debug("这是一个调试信息");
//		log.info("这是一个info信息");
		log.warn("这是一个警告信息"); // "this is a warning message"
//		log.error("这是一个错误信息");
//		log.fatal("这是一个灾难信息");
	}
}
| 75bef1acd8024d73cfc8c5a905d26e6fd2ec3a0c | [
"Java"
] | 1 | Java | wangdadou8129/ssm_shiro | 28c87b8a6860ea13f59831c5b5c032ef7b591926 | cab4e091a87a7673b8ebd7a59f465b87d2c40cb4 |
refs/heads/master | <file_sep>//Variable definitions
const myform = document.querySelector("#my-form");    // search form
const movie = document.querySelector("#movie");       // movie title input
const year = document.querySelector("#year");         // release year input
const table = document.querySelector(".movie-list");  // result table
const info = document.querySelector('#info');         // status/error message area
const movies = [];                                    // initial (empty) result set
//Define a class MovieHandler that will handle everything related to movies
//The class is a representation for handling quotes for a movie
//Handles fetching movie quotes from the backend and rendering them
//into the result table.
class MovieHandler {
  //movies: current result set; ths: column header labels for the table
  constructor(movies, ths) {
    this.movies = movies; //Represents the result for movies
    this.ths = ths; //Represents the th for the thead
  }

  //Create the thead with all its children
  createTableHead() {
    var thead = document.createElement("thead");
    var tr = document.createElement("tr");
    //Loop through the th list and add to thead and tr
    for (let key of this.ths) {
      var th = document.createElement("th");
      th.classList.add(key);
      th.innerHTML = key;
      tr.appendChild(th);
      thead.appendChild(tr);
    }
    table.appendChild(thead);
  }

  //Create a tbody with all its children
  createTable() {
    if (this.movies.length > 0) {
      let tbody = document.createElement("tbody");
      //Loop through the array of movies
      for (let element of this.movies) {
        let tr = table.insertRow();
        //Loop through the fields of the object; "type" is never shown
        for (let key in element) {
          if (key === "type")
            continue;
          let td = tr.insertCell();
          td.innerHTML = element[key];
          tr.appendChild(td);
        }
        tbody.appendChild(tr);
      }
      table.appendChild(tbody);
    }
    else {
      info.style.display = "block";
      // FIX: corrected the user-facing typo "The was no match"
      document.querySelector('#info').innerHTML = "There was no match for this search condition";
    }
  }

  //Delete all rows for thead and tbody
  //NOTE(review): these selectors target "#myTable", but the table is looked
  //up via ".movie-list" above — verify the table actually has id="myTable".
  deleteRows() {
    $("#myTable thead tr").remove();
    $("#myTable tbody tr").remove();
  }

  //Replace the stored result set
  addMovies(movies) {
    this.movies = movies;
  }

  //Is called when you click the submit button.
  //Arguments: movie and year are the raw input values from the form fields.
  getData = async (movie, year) => {
    let res; // (removed an unused `response` variable)
    this.movies = [];
    //Both movie and year are given
    if (movie !== "" && year !== "") {
      res = await fetch(`/movies/search/movie/${movie}/year/${year}`);
      displayResult(await res.json());
    }
    //Only movie is given
    else if (movie !== "" && year === "") {
      res = await fetch(`/movies/search/movie/${movie}`);
      displayResult(await res.json());
    }
    //Only year is given
    else if (movie === "" && year !== "") {
      res = await fetch(`/movies/search/year/${year}`);
      displayResult(await res.json());
    }
    //Invalid search: give error message
    else {
      alert("You must at least fill in one field");
    }
  };
} // end MovieHandler
//Instantiate the movie handler.
//FIX: declared with `const` — the original assignment created an implicit
//global (a ReferenceError in strict mode / ES modules).
const movieEngine = new MovieHandler(movies, ["quote", "movie", "year"]);
info.style.display = "none";

//Event handler that is called when clicking submit
myform.addEventListener("submit", (event) => {
  event.preventDefault();
  info.style.display = "none";
  movieEngine.deleteRows();
  info.innerHTML = "";
  //Start the request against the backend
  movieEngine.getData(movie.value, year.value);
});
//Create the tbody with all its children
//Create the tbody with all its children.
//NOTE(review): this duplicates MovieHandler.createTable and appears unused —
//consider removing it after confirming there are no callers.
const createTable = (table, movies) => {
  let tbody = document.createElement("tbody");
  //Loop through the array of movies
  for (let element of movies) {
    let tr = table.insertRow(); //Create a tr
    //FIX: `const key` — the original `for (key in element)` leaked an
    //implicit global (a ReferenceError in strict mode).
    for (const key in element) {
      if (key === "type") continue;
      let td = tr.insertCell(); //Create a td
      td.innerHTML = element[key];
      tr.appendChild(td);
    }
    tbody.appendChild(tr);
  }
  table.appendChild(tbody);
};
//Is called when we receive a result from backend
displayResult = (movies) => {
if (movies.length !== 0)
movieEngine.addMovies(movies);
movieEngine.createTableHead();
movieEngine.createTable();
};<file_sep>const express = require('express');
const app = express();
const PORT = process.env.PORT || 3000;
const movieRoutes = require('./routes/movies');
const morgan = require('morgan');

app.use(express.json());
// FIX: morgan('dev') only *creates* the logging middleware; it must be
// mounted with app.use() for request logging to happen. The original
// called morgan("dev") and discarded the result.
app.use(morgan('dev'));
app.use('/movies/search', movieRoutes);
app.use(express.static('public'));

app.listen(PORT, () => {
  console.log(`Lets get some famous quotes from movie ${PORT}`);
});
| 85b79d99053c7676e9844f08acd3c9d464edbfc3 | [
"JavaScript"
] | 2 | JavaScript | tonytojo/front-end-utb-express-movie-quote | 70e1452ea10e861807908d6ec523a44f0cb5d4e3 | 523ff2902aef6b7649528c0896b5e83f828f310a |
refs/heads/master | <file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Central helper for applying the persisted sound preference.
public class SoundControl {

	// Reads the "soundOn" flag (default: on) and mutes/unmutes all audio
	// by pausing the global AudioListener accordingly.
	public static void updateSound(){
		bool soundEnabled = PlayerPrefs.GetInt ("soundOn", 1) != 0;
		AudioListener.pause = !soundEnabled;
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// High-level state of a running game session.
public enum GameState {
	NONE,        // not initialised yet
	NORMAL,      // regular gameplay at normal speed
	STARTING,    // intro animation is playing (see EnableGameMode)
	SUPER_SPEED, // player holds the screen: time runs faster
	PAUSE,       // game is frozen via a near-zero time scale
	DYING        // ground destroyed, transitioning to the end scene
}
// Central game controller: tracks score, game state, time scale and the
// transition to the end-game scene. Singleton via `instance`.
public class PlayerScript : MonoBehaviour {
	private int points = 0;                  // current score
	public GameObject scoreboard;            // UI element showing the score
	public GeneratorScript gen;              // obstacle generator to stop on loss
	public float sceneTransTime = 3;         // delay before loading the end scene
	private bool isSceneTrans = false;       // true once lose() has been called
	private float sceneTransTimePast = 0.0f;
	public float superSpeed = 2f;            // time scale while in SUPER_SPEED
	public float touchDurationForSuperSpeed = 1.5f; // hold time to trigger SUPER_SPEED
	private GameState gameState = GameState.NONE;
	public static PlayerScript instance;
	void Awake(){
		instance = this;
	}
	void Start(){
		SoundControl.updateSound ();
		gameState = GameState.NORMAL;
	}
	void Update(){
		// After losing, wait sceneTransTime seconds, then switch scenes.
		if (isSceneTrans) {
			sceneTransTimePast += Time.deltaTime;
			if (sceneTransTimePast >= sceneTransTime) {
				SceneManager.LoadScene("EndGameScene", LoadSceneMode.Single);
				gameObject.SetActive (false);
			}
		}
		// Holding the screen long enough switches to fast-forward mode.
		if (isGameRunning ()) {
			if (ClickManager.instance.getLongestPress () >= touchDurationForSuperSpeed) {
				updateGameState (GameState.SUPER_SPEED);
			} else {
				updateGameState (GameState.NORMAL);
			}
		}
	}
	// Adds `amount` to the score and plays the scoreboard pop animation.
	public void addPoints(int amount){
		points += amount;
		scoreboard.GetComponent<Animator> ().Play ("PointAnim");
		scoreboard.GetComponent<Text>().text = points.ToString ();
	}
	// Called when the ground dies: stops generation, persists the score and
	// starts the delayed transition to the end scene.
	public void lose(){
		Debug.Log ("You lost");
		gen.stopGenerating ();
		PlayerPrefs.SetInt ("points", points);
		isSceneTrans = true;
		updateGameState(GameState.DYING);
	}
	// Switches the game state and adjusts Time.timeScale accordingly.
	// Returns false if the state was already active.
	public bool updateGameState(GameState state){
		if (gameState == state)
			return false;
		switch (state) {
		case GameState.DYING:
			Time.timeScale = 1f;
			break;
		case GameState.PAUSE:
			// Near-zero (not exactly zero) so time-based coroutines keep working.
			Time.timeScale = 0.00000001f;
			break;
		case GameState.NORMAL:
			Time.timeScale = 1f;
			break;
		case GameState.SUPER_SPEED:
			Time.timeScale = superSpeed;
			break;
		}
		gameState = state;
		return true;
	}
	public GameState getGameState(){
		return gameState;
	}
	void OnDestroy(){
		instance = null;
	}
	// True while normal gameplay (incl. fast-forward) is active.
	public bool isGameRunning(){
		return gameState == GameState.NORMAL || gameState == GameState.SUPER_SPEED;
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Base behaviour for falling obstacles: straight movement plus damage to
// the ground on contact. Subclasses override init()/move() for variants.
public class ObScript : MonoBehaviour {
	public Vector3 movement;             // velocity in units/second
	public float groundHitDamage = 1;    // damage dealt to the ground on impact
	public Destroyable destroyable;      // handles this obstacle's death/effects
	protected float xStart;              // spawn x, used by sine movers
	// Use this for initialization
	void Start () {
		xStart = transform.position.x;
		// Derive the z position from the scale so bigger obstacles render behind.
		float z = transform.localScale.x + transform.localScale.y;
		transform.position = new Vector3 (xStart, transform.position.y, z);
		if (groundHitDamage > 1)
			destroyable.enableRedParticles ();
		init ();
	}
	// Hook for subclasses; called once at the end of Start().
	protected virtual void init(){}
	// Update is called once per frame
	void Update () {
		if (destroyable.isNowDead ())
			return;
		move ();
	}
	// Default movement: constant velocity.
	protected virtual void move(){
		transform.Translate (movement * Time.deltaTime);
	}
	void OnTriggerEnter2D(Collider2D c){
		// FIX: the original checked !c.gameObject.activeSelf twice in a row;
		// the duplicate has been removed.
		if (c == null || !c.gameObject.activeSelf)
			return;
		if (c.gameObject.CompareTag ("Ground")) {
			GameObject.Find ("Ground").GetComponent<GroundScript> ().hit (groundHitDamage, transform.GetChild(0).gameObject.GetComponent<SpriteRenderer> ().color);
			destroyable.groundHit ();
		}
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Destroyable that bursts into `childCount` child obstacles when hit
// (unless the hit came from the ground or a destroy-all sweep).
public class SpawnDestroy : Destroyable {
	public GameObject child;          // prefab spawned on destruction
	public int childCount = 2;        // number of children to spawn
	public float range = 0.5f;        // horizontal half-spread of the children
	public float velocityRange = 2f;  // max horizontal speed given to a child
	override public void hit(int strength){
		if(!destroyingAll && strength != GROUND_HIT)
			for(int i = 0; i < childCount; i++) {
				// FIX: guard against division by zero when childCount == 1
				// (a single child is centered; multiple children spread evenly).
				float t = childCount > 1 ? (float)i / (float)(childCount - 1) : 0.5f;
				float velFactor = (t - 0.5f) * 2f; // -1 .. +1 across the spread
				float x = range * 2f * t - range + transform.position.x;
				GameObject o = Instantiate (child, new Vector3 (x, transform.position.y - 0.3f, 1f), Quaternion.identity);
				ChildOb cOb = o.GetComponent<ChildOb> ();
				if (cOb != null) {
					cOb.movement.x = velFactor * velocityRange;
					cOb.movement.y *= Random.Range (0.8f, 1.3f);
				}
			}
		destroy (strength >= 0);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Animator state hook: keeps the game in STARTING while the intro
// animation state plays, then switches to NORMAL when it exits.
public class EnableGameMode : StateMachineBehaviour {
	override public void OnStateEnter(Animator animator, AnimatorStateInfo stateInfo, int layerIndex) {
		PlayerScript.instance.updateGameState (GameState.STARTING);
	}
	override public void OnStateExit(Animator animator, AnimatorStateInfo stateInfo, int layerIndex) {
		PlayerScript.instance.updateGameState (GameState.NORMAL);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Keeps a detached particle system aligned with this UI element by mapping
// the element's canvas position into world coordinates every frame.
public class EventScript : MonoBehaviour {
	//private GameObject text;
	private GameObject ps;          // particle system (child 1), detached in Start
	private Vector2 screenSize;     // camera half-extents in world units
	private Vector2 canvasSize;     // canvas reference resolution
	void Start () {
		//text = transform.GetChild (0).gameObject;
		ps = transform.GetChild (1).gameObject;
		// Detach so the particle system lives in world space, not UI space.
		ps.transform.parent = null;
		screenSize = new Vector2 (Camera.main.orthographicSize * Screen.width / Screen.height, Camera.main.orthographicSize);
		canvasSize = transform.root.gameObject.GetComponent<CanvasScaler> ().referenceResolution;
		// NOTE(review): debug output left in — consider removing for release.
		Debug.Log (GetComponent<RectTransform>().anchoredPosition.x / canvasSize.x);
		setPsPos ();
	}
	// Maps the anchored canvas position proportionally into world space.
	private void setPsPos(){
		ps.transform.position = new Vector3 (GetComponent<RectTransform>().anchoredPosition.x / canvasSize.x * screenSize.x, GetComponent<RectTransform>().anchoredPosition.y / canvasSize.y * screenSize.y, 0);
	}
	// Update is called once per frame
	void Update () {
		setPsPos ();
	}
	void OnDestroy(){
		// The particle system was detached, so it must be destroyed manually.
		Destroy (ps);
	}
}
<file_sep>using UnityEngine.UI;
using UnityEngine;
using System.Collections;
// Debug overlay: shows the frame rate (updated ~2.5x/second) while the
// "debugMode" player pref is enabled.
public class FPSShower : MonoBehaviour {
	private Text text;
	private int frameCounter = 0;     // frames rendered since the last sample
	private float lastFrameTime = 0f; // unscaled time of the last sample
	// Use this for initialization
	void Start () {
		text = GetComponent<Text> ();
		if (PlayerPrefs.GetInt ("debugMode", 0) != 1)
			gameObject.SetActive (false);
		else
			StartCoroutine ("displayFPS");
	}
	// Samples the frame counter on a fixed real-time interval.
	IEnumerator displayFPS(){
		yield return new WaitForSecondsRealtime (0.4f);
		float deltaTime = Time.unscaledTime - lastFrameTime;
		float fps = (float)frameCounter / deltaTime;
		lastFrameTime = Time.unscaledTime;
		frameCounter = 0;
		text.text = fps.ToString ("0.00") + " FPS";
		StartCoroutine ("displayFPS");
	}
	// Update is called once per frame
	void Update () {
		frameCounter++;
	}
	// Shows/hides the counter after the debug setting changed.
	public void updateVisibility(){
		if (PlayerPrefs.GetInt ("debugMode", 0) != 1) {
			StopCoroutine ("displayFPS");
			gameObject.SetActive (false);
		} else {
			if (!gameObject.activeSelf) {
				gameObject.SetActive (true);
				frameCounter = 0;
				// FIX: use unscaled time — displayFPS measures against
				// Time.unscaledTime; the original reset with Time.time,
				// which skews the first reading when timeScale != 1.
				lastFrameTime = Time.unscaledTime;
				StartCoroutine ("displayFPS");
			}
		}
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// Builds the challenge selection menu by instantiating one button per
// challenge. (Class name typo "Chellenges" kept — referenced externally.)
public class ChellengesMenuOrga : MonoBehaviour {
	public GameObject chButtonPrefab;  // button prefab for one challenge
	public GameObject chButtonsPanel;  // parent panel for the buttons
	private int chCount = 20;          // number of challenges to list
	// Use this for initialization
	void Start () {
		for (int i = 1; i <= chCount; i++) {
			GameObject o = Instantiate (chButtonPrefab, chButtonsPanel.transform);
			o.GetComponent<Text> ().text = i.ToString ();
			o.gameObject.GetComponent<ChButtonScript> ().setIndex (i);
		}
	}
	// Placeholder: challenge gameplay is not implemented yet.
	public void clickOnChellenge(int i){
		Debug.Log ("Start chellenge " + i);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Destroyable that takes several hits before dying.
public class MultiLiveDestroy : Destroyable {
	public int health = 2;  // remaining hit points
	override public void hit(int strength){
		// NOTE(review): a negative strength (e.g. GROUND_HIT) would *increase*
		// health here — confirm callers never pass negative values.
		health -= strength;
		if (health <= 0)
			destroy (true);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Global input handler: forwards taps/clicks to obstacles and tracks how
// long the longest current press has been held (for super-speed mode).
public class ClickManager : MonoBehaviour {
	public static ClickManager instance;
	public void Awake(){
		instance = this;
	}
	// Duration (seconds) of the longest press currently held.
	private float longestPress = 0.0f;
	// Press start times indexed by finger id.
	private float[] pressStarts = new float[40];
	void Update(){
		if(PlayerScript.instance == null || PlayerScript.instance.isGameRunning() || PlayerScript.instance.getGameState() == GameState.STARTING){
			if (SystemInfo.deviceType == DeviceType.Desktop) {
				// Desktop: single mouse button; accumulate hold time.
				if (Input.GetMouseButton (0)) {
					longestPress += Time.deltaTime;
					if(Input.GetMouseButtonDown(0))
						castClick (Input.mousePosition);
				} else
					longestPress = 0.0f;
			} else {
				// Touch: track every finger; the longest-held one wins.
				longestPress = 0.0f;
				foreach (Touch t in Input.touches) {
					// Robustness: ignore finger ids beyond our buffer.
					if (t.fingerId >= pressStarts.Length)
						continue;
					if (t.phase == TouchPhase.Began) {
						pressStarts [t.fingerId] = Time.time;
					} else {
						longestPress = Mathf.Max (longestPress, Time.time - pressStarts [t.fingerId]);
					}
					if (t.phase != TouchPhase.Began)
						continue;
					castClick (t.position);
				}
			}
		}
	}
	// Raycasts at the given screen position and notifies a hit obstacle.
	private void castClick(Vector2 pos){
		RaycastHit2D hit = Physics2D.Raycast (Camera.main.ScreenToWorldPoint(pos), Vector2.zero);
		// FIX: RaycastHit2D is a struct; testing its collider is the correct
		// hit check (the original also compared the struct against null).
		if (hit.collider != null) {
			GameObject o = hit.collider.gameObject;
			if (o.CompareTag ("Obstacle")) {
				o.GetComponent<Destroyable> ().click ();
			}
		}
	}
	public float getLongestPress(){
		return longestPress;
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// End-game screen controller: shows the final score, updates the persisted
// highscore and plays celebration particles on a new record.
public class PointOrga : MonoBehaviour {
	public static PointOrga instance;
	void Awake(){instance = this;}
	public Text scoreOut;               // final score label
	public Text highscoreOut;           // highscore / congratulation label
	private bool newHighscore = false;  // true when this run set a record
	public float timeOffset = 1;        // input lock-out before tap-to-continue
	private float timeOffsetPast = 0;
	public List<ParticleSystem> highscorePSs; // celebration particle systems
	public float psPlayOffset = 0.2f;   // delay between consecutive systems
	private float psTimePast = 0.0f;
	private int psPlayCount = 0;        // how many systems already started
	// Use this for initialization
	void Start () {
		int score = PlayerPrefs.GetInt("points");
		int highscore = PlayerPrefs.GetInt ("highscore", 0);
		scoreOut.text = score.ToString();
		if (score > highscore) {
			highscoreOut.text = "New Highscore!";
			PlayerPrefs.SetInt ("highscore", score);
			newHighscore = true;
		} else if (score == highscore && score > 0) {
			highscoreOut.text = "Soo Close!";
		} else {
			highscoreOut.text = "Highscore\n" + highscore.ToString ();
		}
		PlayerPrefs.Save ();
	}
	void Update() {
		timeOffsetPast += Time.deltaTime;
		if(timeOffsetPast >= timeOffset){
			// Any click returns to the menu once the lock-out has passed.
			if(Input.GetMouseButtonDown(0))
				SceneManager.LoadScene("MenuScene", LoadSceneMode.Single);
			// Stagger the celebration particle systems one after another.
			if (newHighscore && psPlayCount < highscorePSs.Count) {
				psTimePast += Time.deltaTime;
				if (psTimePast >= psPlayCount * psPlayOffset) {
					highscorePSs [psPlayCount].Play ();
					psPlayCount++;
				}
			}
		}
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Base class for collectible items: falls down the screen, activates on
// click, and can be made to vanish (individually or all at once).
public abstract class Item : MonoBehaviour {
	private GameObject root;            // parent object that is moved/destroyed
	private Animator animator;
	private bool vanishing = false;     // true while the vanish animation plays
	private float animTime = 0.0f;      // time spent in the vanish animation
	public float fallSpeed = 2;         // fall speed in units/second
	protected bool reactOnClick = true; // subclasses may disable click handling
	// Registry of all live items so they can be vanished together.
	private static List<Item> items = new List<Item>();
	// Use this for initialization
	void Start () {
		root = transform.parent.gameObject;
		animator = GetComponent<Animator> ();
		init ();
	}
	void OnEnable(){
		items.Add(this);
	}
	void OnDisable(){
		items.Remove(this);
	}
	// Subclass hook, called at the end of Start().
	protected abstract void init ();
	// Update is called once per frame
	void Update () {
		if (vanishing) {
			// Destroy once the vanish animation has had time to finish.
			animTime += Time.deltaTime;
			if (animTime > 0.6f) {
				Destroy (root);
			}
		} else {
			root.transform.Translate (0, -fallSpeed * Time.deltaTime, 0);
		}
		// Clean up items that fell past the bottom of the screen.
		if (root.transform.position.y < -Camera.main.orthographicSize - 3)
			Destroy (root);
	}
	void OnMouseDown(){
		if (reactOnClick)
			activate ();
	}
	// Subclass hook: the item's effect when the player taps it.
	protected abstract void activate ();
	// Starts the vanish animation (idempotent).
	protected void doVanishing(){
		if (!vanishing) {
			animator.Play ("ItemVanish");
			vanishing = true;
		}
	}
	// Vanishes every live item (e.g. when the game ends).
	public static void doVanishingAll(){
		for (int i = items.Count - 1; i >= 0; i--)
			items [i].doVanishing ();
		items.Clear ();
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// One difficulty level: when it starts (seconds since game start) and the
// per-obstacle spawn delays (index = obstacle type; <= 0 disables a type).
public struct Level {
	public float start;
	public float[] delays;
	public Level(float s, float[] d){
		start = s;
		delays = d;
	}
}
// Spawns obstacles and items above the screen according to a hand-tuned
// table of difficulty levels; each level enables obstacle types and sets
// their spawn intervals.
public class GeneratorScript : MonoBehaviour {
	private float genHeight;              // camera half-height (world units)
	private float genRange;               // camera half-width (world units)
	public float genSaveDistance = 0.5f;  // margin kept from screen edges
	public float itemReloadTime = 1f;     // interval between item rolls
	public float itemChance = 1f;         // probability of an item per roll
	private float itemReloadTimePast = 0f;
	private float startTime;              // game-time origin for level starts
	public List<GameObject> obstacles;    // obstacle prefabs, index = type id below
	public List<GameObject> items;        // item prefabs
	private float[] times;                // per-type time since last spawn
	private int crntLevel = 0;            // index into `levels`
	/*
	0: basic
	1: fat and bloody
	2: speedy
	3: sin mover
	4: zigzag fat
	5: zigzag speedy
	6: more speed
	7: blow up
	8: more lifes
	*/
	// Hand-tuned progression: start second and spawn delays per type
	// (-1 disables that obstacle type for the level).
	private Level[] levels = new Level[]{
		new Level( // 0 Intro #0
			0.0f,
			new float[]{1.1f}),
		new Level( // 1 Intro #1
			9.0f,
			new float[]{1.53f, 2.1f}),
		new Level( // 2 Intro #2
			22.0f,
			new float[]{3f, 1.8f, 2.03f}),
		new Level( // 3 Showoff #2
			33.0f,
			new float[]{2f, 2.2f, 1.8f}),
		new Level( // 4 Intro #3
			40.0f,
			new float[]{1.2f, 2.5f, 3.49f, 3.3f}),
		new Level( // 5 Intro #4
			55.0f,
			new float[]{3.02f, 2.76f, -1f, 1.77f, 2.03f}),
		new Level( // 6 Intro #5
			64.2f,
			new float[]{2.83f, 3.87f, 3.49f, 3.5f, 3.37f, 4.03f}),
		new Level( // 7 Intro #7, #8
			82.0f,
			new float[]{2.83f, 3.91f, -1f, -1f, 7.3f, -1f, -1f, 5.32f, 7.66f}),
		new Level( // 8 Showoff #7
			105.0f,
			new float[]{4.76f, -1f, -1f, 5.32f, 6.03f, 3.42f, -1f, 5.32f}),
		new Level( // 9 Intro #6
			115.0f,
			new float[]{-1f, 5.01f, -1f, -1f, -1f, -1f, 4.37f, 7.23f, 4.02f}),
		new Level( // 10 abfuck
			135.0f,
			new float[]{6.78f, 7.45f, 4.38f, 5.67f, 6.53f, 6.35f, 9.71f, 5.12f, 8.3f})
	};
	private bool generating = true;  // cleared when the game ends
	// Use this for initialization
	void Start () {
		genHeight = Camera.main.orthographicSize;
		genRange = genHeight * Screen.width / Screen.height;
		times = new float[obstacles.Count];
	}
	void OnEnable(){
		// Re-anchor the timeline so the current level's progress is preserved.
		startTime = Time.time - (crntLevel == 0 ? 0f : levels[crntLevel-1].start);
	}
	// Update is called once per frame
	void Update () {
		if (generating) {
			// Advance to the next level when its start time is reached.
			if (crntLevel < levels.Length - 1 && Time.time - startTime > levels [crntLevel + 1].start) {
				Debug.Log ("Level Up " + (crntLevel+1));
				times = new float[obstacles.Count];
				crntLevel++;
				// Randomize phase so all types don't spawn simultaneously.
				for (int i = 0; i < levels[crntLevel].delays.Length; i++) {
					float delay = levels [crntLevel].delays [i];
					if (delay <= 0f)
						continue;
					times [i] = Random.Range (-delay*0.5f, delay*0.9f);
				}
			}
			// Spawn each enabled obstacle type on its own interval.
			Level level = levels [crntLevel];
			for (int i = 0; i < times.Length; i++) {
				if (i >= level.delays.Length)
					break; // Level doesn't support these objects
				if (level.delays [i] <= 0)
					continue;
				float time = times [i];
				time += Time.deltaTime;
				if (time >= level.delays [i]) {
					instantiateObstacle (obstacles [i], Random.Range (-1.0f, 1.0f));
					time -= level.delays [i];
				}
				times [i] = time;
			}
			// Items
			itemReloadTimePast += Time.deltaTime;
			if (itemReloadTimePast >= itemReloadTime) {
				itemReloadTimePast -= itemReloadTime;
				if (Random.value <= itemChance) {
					int itemIndex = Random.Range (0, items.Count);
					instantiateObstacle (items [itemIndex], Random.Range (-1.0f, 1.0f));
				}
			}
		}
	}
	// Spawns `prefab` above the screen; x in [-1, 1] maps to the usable
	// horizontal range (sprite width and save distance are respected).
	void instantiateObstacle(GameObject prefab, float x){
		SpriteRenderer sp = prefab.GetComponent<SpriteRenderer> ();
		if (sp == null)
			sp = prefab.GetComponentInChildren<SpriteRenderer> ();
		Vector3 size = sp.sprite.bounds.size;
		float height = size.y * prefab.transform.localScale.y;
		float width = size.x * prefab.transform.localScale.x;
		float range = genRange - width - genSaveDistance;
		Instantiate(prefab, new Vector3(Mathf.Lerp(0, range*2, (x+1)/2f)-range, genHeight + height + genSaveDistance, 1f), Quaternion.identity);
	}
	public void stopGenerating(){
		generating = false;
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Holds the challenge number a menu button stands for and reports activation.
public class ChButtonScript : MonoBehaviour {

	// One-based challenge number assigned by the menu builder.
	private int index = 1;

	// Assigns the challenge number this button represents.
	public void setIndex(int i){
		this.index = i;
	}

	// Button click handler; challenge gameplay is not implemented yet.
	public void call(){
		Debug.Log ("Start Chel " + index);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Stretches the ground sprite horizontally so it spans the camera's view.
public class GroundResize : MonoBehaviour {

	// Width (world units) of the unscaled ground sprite.
	private const float standartWidth = 2.929688f;

	void Start () {
		// orthographicSize is the half-height; multiplying by the aspect
		// ratio yields the camera's half-width in world units.
		float halfWidth = Camera.main.orthographicSize * Screen.width / Screen.height;
		float scaleX = halfWidth / standartWidth;
		gameObject.transform.localScale = new Vector3 (scaleX, 1f, 1f);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// The ground the player defends: takes damage from obstacles, blends its
// color toward the colors of whatever hits it, and triggers the game-over
// sequence when health reaches zero.
public class GroundScript : MonoBehaviour {
	public float startHealth = 10;
	private float health;
	public Color fullHealthColor;     // base color at full health
	public Color noHealthColor;       // (currently unused by the blend logic)
	private SpriteRenderer sp;
	private Animator animator;
	private PlayerScript player;
	public float colorTransTime = 5f;     // duration of one color fade
	public float colorTransAmount = 0.25f; // how strongly a hit tints the ground
	private float colorTransTimePast = 0.0f;
	private bool isColorTrans = false;    // a fade is in progress
	private Color[] transColors = new Color[2]; // [from, to] of the fade
	private Color rawColor;               // accumulated tint (pre-fade target)
	public ParticleSystem exterminatePS;  // explosion played on death
	public BarScript healthbar;
	// Use this for initialization
	void Start () {
		sp = GetComponent<SpriteRenderer> ();
		animator = GetComponent<Animator> ();
		player = GameObject.FindWithTag ("Orga").GetComponent<PlayerScript> ();
		health = startHealth;
		// Carry a quarter of the previous run's final ground color over.
		if (PlayerPrefs.HasKey ("groundColR")) {
			Color savedColor = new Color (PlayerPrefs.GetFloat ("groundColR", 1f), PlayerPrefs.GetFloat ("groundColG", 1f), PlayerPrefs.GetFloat ("groundColB", 1f), 1.0f);
			fullHealthColor = Color.Lerp (savedColor, fullHealthColor, 0.25f);
		}
		sp.color = fullHealthColor;
		// Black is used as a sentinel for "no tint accumulated yet".
		rawColor = new Color(0f, 0f, 0f);
	}
	void Update(){
		// Advance an in-progress color fade.
		if(isColorTrans){
			colorTransTimePast += Time.deltaTime;
			if (colorTransTimePast >= colorTransTime) {
				colorTransTimePast = 0.0f;
				sp.color = transColors [1];
				isColorTrans = false;
			} else {
				sp.color = Color.Lerp (transColors [0], transColors [1], colorTransTimePast / colorTransTime);
			}
		}
	}
	// Applies `strength` damage from an obstacle of the given color.
	public void hit(float strength, Color color){
		animator.Play ("Wabble");
		health -= strength;
		healthbar.applyChange (health / startHealth);
		if (health <= 0) {
			if(rawColor == new Color(0f, 0f ,0f))
				sp.color = color;
			die ();
		}
		else
			// Stronger hits tint the ground more.
			doColorTrans (color, 1f + 0.5f * (strength - 1f));
	}
	// Restores health (clamped to startHealth) and fades back toward the base color.
	public void heal(float amount){
		health += amount;
		health = Mathf.Min (health, startHealth);
		healthbar.applyChange (health / startHealth);
		doColorTrans (fullHealthColor);
	}
	// Blends `color` into the accumulated tint and starts a fade toward it.
	void doColorTrans(Color color, float weight = 1){
		if (rawColor == new Color (0f, 0f, 0f))
			rawColor = color;
		else if (color != fullHealthColor) {
			// Blend in HSV and re-normalize saturation/value so the ground
			// never gets too dark or washed out.
			float h, s, v;
			Color.RGBToHSV (Color.Lerp (rawColor, color, colorTransAmount * weight), out h, out s, out v);
			rawColor = Color.HSVToRGB (h, 0.7f, 0.9f);
		}
		// float rh = health / startHealth;
		colorTransTimePast = 0.0f;
		transColors [0] = sp.color;
		transColors [1] = rawColor;
		// transColors [1] = Color.Lerp (noHealthColor, fullHealthColor, rh);
		isColorTrans = true;
	}
	// Game over: explode in the current color, persist it for the next run,
	// clear the playfield and notify the player controller.
	void die(){
		ParticleSystem.MainModule newMain = exterminatePS.main;
		newMain.startColor = new ParticleSystem.MinMaxGradient(sp.color);
		PlayerPrefs.SetFloat ("groundColR", sp.color.r);
		PlayerPrefs.SetFloat ("groundColG", sp.color.g);
		PlayerPrefs.SetFloat ("groundColB", sp.color.b);
		exterminatePS.Play ();
		Destroyable.destroyAll ();
		Item.doVanishingAll ();
		player.lose ();
		gameObject.SetActive (false);
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
// Settings screen controller: sound toggle, debug toggle and data reset.
// (Class name typo "Settungs" kept — referenced by the scene.)
public class SettungsOrga : MonoBehaviour {
	public GameObject soundOnButton;
	public GameObject soundOffButton;
	public Text debugText;
	public FPSShower fpsShower;
	// Use this for initialization
	void Start () {
		// Dim whichever sound button is NOT active.
		bool soundOn = PlayerPrefs.GetInt ("soundOn", 1) == 1;
		soundOffButton.GetComponent<CanvasRenderer> ().SetAlpha (soundOn ? 0.3f : 1.0f);
		soundOnButton.GetComponent<CanvasRenderer> ().SetAlpha (soundOn ? 1.0f : 0.3f);
		callDebug (false);
	}
	void OnEnable(){
		// Refresh the button states every time the panel is shown.
		Start ();
	}
	public void callBack(){
		MangeManu.instance.startMainMenu ();
	}
	// Toggles the sound preference and updates the button highlighting.
	public void callSound(bool active){
		if (active) {
			soundOffButton.GetComponent<CanvasRenderer> ().SetAlpha (0.3f);
			soundOnButton.GetComponent<CanvasRenderer> ().SetAlpha (1.0f);
			PlayerPrefs.SetInt ("soundOn", 1);
		} else {
			soundOnButton.GetComponent<CanvasRenderer> ().SetAlpha (0.3f);
			soundOffButton.GetComponent<CanvasRenderer> ().SetAlpha (1.0f);
			PlayerPrefs.SetInt ("soundOn", 0);
		}
		SoundControl.updateSound ();
	}
	// Wipes all saved data, then restores the defaults (sound on, debug off).
	public void callReset(){
		PlayerPrefs.DeleteAll ();
		callSound (true);
		callDebug (false);
	}
	// change == true toggles the debug mode; change == false only syncs the
	// label/pref with the currently stored value.
	public void callDebug(bool change){
		if (PlayerPrefs.GetInt ("debugMode", 0) == 0 ^ !change) {
			debugText.text = "Debug: On";
			PlayerPrefs.SetInt ("debugMode", 1);
		} else {
			debugText.text = "Debug: Off";
			PlayerPrefs.SetInt ("debugMode", 0);
		}
		if (change)
			fpsShower.updateVisibility ();
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Obstacle fragment spawned by SpawnDestroy: keeps falling, its horizontal
// speed decays over time and bounces off the screen edges.
public class ChildOb : ObScript {
	public float horizontalGrip;  // horizontal damping factor per second
	private float bounds;         // horizontal limit before bouncing back
	override protected void init(){
		// 80% of the camera half-width, so bounces happen on screen.
		bounds = Camera.main.orthographicSize * Screen.width / Screen.height;
		bounds *= 0.8f;
	}
	override protected void move(){
		// Dampen sideways motion, reflect at the bounds, then fall normally.
		movement.x *= 1 - (horizontalGrip * Time.deltaTime);
		if (transform.position.x > bounds)
			movement.x = -Mathf.Abs (movement.x);
		else if (transform.position.x < -bounds)
			movement.x = Mathf.Abs (movement.x);
		base.move ();
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
// Main menu controller: highscore display, panel switching and delayed
// scene transitions with a button-text fade-out.
// (Class name typo "MangeManu" kept — referenced by other scripts.)
public class MangeManu : MonoBehaviour {
	public static MangeManu instance;
	void Awake(){
		instance = this;
	}
	public Text scoreboard;              // highscore label
	public List<GameObject> buttons;     // main menu buttons (child 0 = label)
	private List<Color> buttonTextsColors; // original label colors for fading
	public GameObject mainMenuRoot;
	public GameObject settingsRoot;
	public GameObject achRoot;
	private short callState = 0;         // pending delayed action (see consts)
	public float callWaitTime = 1.5f;    // delay before the action fires
	private float callTimePast = 0.0f;
	private const short PLAY = 1;
	private const short UPGRADES = 2;
	// Use this for initialization
	void Start () {
		updateHighscoreScreen();
		// Remember the original label colors so they can be faded out.
		buttonTextsColors = new List<Color> ();
		foreach(GameObject o in buttons){
			buttonTextsColors.Add(o.transform.GetChild (0).gameObject.GetComponent<Text> ().color);
		}
		SoundControl.updateSound ();
		startMainMenu ();
	}
	// Update is called once per frame
	void Update () {
		// While a delayed action is pending, fade out the button labels and
		// fire the action when the wait time is over.
		if (callState != 0) {
			callTimePast += Time.deltaTime;
			if (callTimePast >= callWaitTime) {
				switch (callState) {
				case PLAY:
					SceneManager.LoadScene ("GameScene", LoadSceneMode.Single);
					break;
				case UPGRADES:
					Debug.Log ("Upgrades");
					break;
				}
				callState = 0;
				callTimePast = 0.0f;
			}
			for(int i = 0; i < buttons.Count; i++){
				Color c = buttonTextsColors[i];
				if (callTimePast == 0)
					c.a = 0.0f;
				else
					c.a = 1 - callTimePast / callWaitTime;
				buttons[i].transform.GetChild (0).gameObject.GetComponent<Text> ().color = c;
			}
		}
	}
	private void updateHighscoreScreen(){
		scoreboard.text = PlayerPrefs.GetInt ("highscore", 0).ToString();
	}
	// Starts the delayed transition into the game scene.
	public void callPlay(){
		prepareCall ();
		callState = PLAY;
	}
	public void callChellenges(){
		mainMenuRoot.SetActive(false);
		settingsRoot.SetActive (false);
		achRoot.SetActive (true);
	}
	// Placeholder: achievements screen not implemented yet.
	public void callAchievemnts(){
	}
	public void callSettings(){
		mainMenuRoot.SetActive(false);
		achRoot.SetActive (false);
		settingsRoot.SetActive (true);
	}
	// Locks the menu buttons and clears the background obstacles before a
	// delayed transition starts.
	void prepareCall(){
		foreach (GameObject o in buttons) {
			o.GetComponent<Button> ().interactable = false;
		}
		Destroyable.destroyAll ();
		gameObject.GetComponent<SimpleGeneratorScript> ().stopGenerating ();
	}
	// Shows the main menu panel (and hides the sub-panels).
	public void startMainMenu(){
		settingsRoot.SetActive (false);
		achRoot.SetActive (false);
		mainMenuRoot.SetActive (true);
		updateHighscoreScreen();
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Destroyable that dies from any single hit.
public class SimpleDestroy : Destroyable {

	// Any hit destroys this object immediately. The flag forwarded to
	// destroy() is true for non-negative strengths (presumably "real" hits;
	// negative values such as ground hits pass false — confirm in Destroyable).
	override public void hit(int strength){
		bool positiveHit = strength >= 0;
		destroy (positiveHit);
	}
}
<file_sep>using UnityEngine;
using System.Collections;
// Obstacle that falls while zig-zagging: it drifts sideways, randomly
// reverses direction (checked every 0.1 s) and bounces at the bounds.
public class ObZickZackMover : ObScript {
	private bool moveRight;            // current horizontal direction
	private bool first = true;         // direction not yet randomized
	public float changeChance = 0.01f; // chance per 0.1 s tick to reverse
	public float bounds = 3;           // horizontal limit before turning
	protected override void init ()
	{
		StartCoroutine ("changeDir");
	}
	override protected void move(){
		// Pick a random initial direction on the first frame.
		if (first) {
			moveRight = Random.value >= 0.5;
			first = false;
		}
		if (transform.position.x > bounds)
			moveRight = false;
		else if (transform.position.x < -bounds)
			moveRight = true;
		transform.Translate (new Vector3(moveRight ? movement.x : -movement.x, movement.y, movement.z) * Time.deltaTime);
	}
	// Periodically rolls for a random direction change.
	IEnumerator changeDir(){
		yield return new WaitForSeconds (0.1f);
		if (Random.value <= changeChance)
			moveRight = !moveRight; // simplified from `moveRight ? false : true`
		StartCoroutine ("changeDir");
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Simple background generator (used on the menu): spawns a random obstacle
// prefab at a random x position on a fixed interval.
public class SimpleGeneratorScript : MonoBehaviour {
	private float genHeight;             // camera half-height (world units)
	private float genRange;              // camera half-width (world units)
	public float genSaveDistance = 0.5f; // margin kept from screen edges
	public float genTime;                // seconds between spawns
	private float genTimePast = 0.0f;
	public List<GameObject> obstacles;   // prefabs to pick from
	private bool generating = true;
	// Use this for initialization
	void Start () {
		genHeight = Camera.main.orthographicSize;
		genRange = genHeight * Screen.width / Screen.height;
	}
	// Update is called once per frame
	void Update () {
		if (generating) {
			genTimePast += Time.deltaTime;
			if (genTimePast >= genTime) {
				instantiateObstacle (obstacles [Random.Range (0, obstacles.Count)], Random.Range (-1f, 1f));
				genTimePast = 0;
			}
		}
	}
	// Spawns `prefab` above the screen; x in [-1, 1] maps to the usable
	// horizontal range (sprite width and save distance are respected).
	void instantiateObstacle(GameObject prefab, float x){
		SpriteRenderer sp = prefab.GetComponent<SpriteRenderer> ();
		if (sp == null)
			sp = prefab.GetComponentInChildren<SpriteRenderer> ();
		Vector3 size = sp.sprite.bounds.size;
		float height = size.y * prefab.transform.localScale.y;
		float width = size.x * prefab.transform.localScale.x;
		float range = genRange - width - genSaveDistance;
		Instantiate(prefab, new Vector3(Mathf.Lerp(0, range*2, (x+1)/2f)-range, genHeight + height + genSaveDistance, prefab.transform.position.z), Quaternion.identity);
	}
	public void stopGenerating(){
		generating = false;
	}
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// State-machine behaviour that destroys the animated GameObject (or its
// parent, when removeParent is set) as soon as the state is entered.
public class RemoveBehavior : StateMachineBehaviour {

    public bool removeParent = false;

    override public void OnStateEnter(Animator animator, AnimatorStateInfo stateInfo, int layerIndex) {
        GameObject doomed = animator.gameObject;
        if (removeParent)
            doomed = doomed.transform.parent.gameObject;
        Destroy (doomed);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// In-game UI controller for the pause screen and pause button.
public class IngameUiOrga : MonoBehaviour {

    public GameObject pauseScreen; // overlay shown while paused
    public Button pauseButton;     // disabled while paused or before the game runs

    // Use this for initialization
    void Start () {

    }

    void Update(){
        // Re-enable the pause button once the game is actually running.
        if (!pauseButton.enabled && PlayerScript.instance.isGameRunning ())
            pauseButton.enabled = true;
        // Any tap while paused resumes the game.
        if (PlayerScript.instance.getGameState () == GameState.PAUSE && Input.GetMouseButtonDown (0))
            cancelPause ();
    }

    // Hooked up to the pause button: pauses only while the game is running.
    public void callPause(){
        if (PlayerScript.instance.isGameRunning ()) {
            PlayerScript.instance.updateGameState (GameState.PAUSE);
            pauseScreen.SetActive (true);
            pauseButton.enabled = false;
        }
    }

    // Resumes the game and hides the pause overlay.
    private void cancelPause(){
        PlayerScript.instance.updateGameState (GameState.NORMAL);
        pauseScreen.SetActive (false);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Obstacle that falls while oscillating horizontally on a sine wave
// centered on its spawn x-position (xStart, provided by ObScript).
public class ObSinMover : ObScript {

    public float speed;
    private float timeOffset;

    protected override void init(){
        // Random phase so simultaneously spawned obstacles don't swing in sync.
        timeOffset = Random.value;
    }

    protected override void move(){
        float phase = (Time.time + timeOffset) * speed;
        Vector3 next = transform.position;
        next.x = xStart + Mathf.Sin (phase) * movement.x;
        next.y = next.y + movement.y * Time.deltaTime;
        transform.position = next;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Pickup item that restores health to the ground when activated.
public class AddHealthItem : Item {

    public int healAmount = 1;

    private GroundScript groundTarget;

    protected override void init() {
        // Cache the ground's script once; the tag lookup is done a single time.
        GameObject groundObject = GameObject.FindWithTag ("Ground");
        groundTarget = groundObject.GetComponent<GroundScript>();
    }

    protected override void activate ()
    {
        // Stop reacting to further clicks, heal, then play the vanish animation.
        reactOnClick = false;
        groundTarget.heal (healAmount);
        doVanishing ();
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Base class for all clickable/fallng obstacles. Manages the trail and
// explosion particle systems, point awarding, the global registry used by
// destroyAll(), and the death animation. Subclasses implement hit().
abstract public class Destroyable : MonoBehaviour {

    public GameObject psPrefab;                    // explosion particle prefab
    public Color particleColor = new Color(1, 1, 1, 1);
    private static Color richTrailPsColor = new Color (1f, 0.9f, 0.5f);   // trail tint for high-value obstacles
    private static Color deadlyTrailPsColor = new Color (1f, 0.65f, 0.6f); // trail tint for "deadly" obstacles
    public int worthyness = 1;                     // points awarded on destruction
    private PlayerScript player;

    // Subclass hook: react to a hit of the given strength.
    public abstract void hit (int strength);

    // Global registry of live destroyables, maintained via OnEnable/OnDisable.
    private static List<Destroyable> destrs = new List<Destroyable> ();

    public GameObject trailPS;      // trail particle prefab
    private GameObject realTrailPS; // instantiated trail particle system

    private bool clicked = false;   // debounce: at most one hit per frame
    private bool deadlyParticlesRequested = false; // red trail requested before Start ran

    protected static bool destroyingAll = false;

    // Sentinel strength value used when the obstacle reaches the ground.
    // NOTE(review): looks like this is intended as a constant — consider `const`/`readonly`.
    public static int GROUND_HIT = -3;

    private Animator animator;
    private bool isDead = false;

    void Start(){
        player = GameObject.FindWithTag ("Orga").GetComponent<PlayerScript> ();
        animator = transform.GetChild (0).gameObject.GetComponent<Animator> ();
        realTrailPS = Instantiate (trailPS);
        if (worthyness > 1) {
            // High-value obstacles get the golden trail.
            ParticleSystem.MainModule main = realTrailPS.GetComponent<ParticleSystem>().main;
            main.startColor = richTrailPsColor;
        }
        // Apply a red-trail request that arrived before the trail existed.
        if (deadlyParticlesRequested)
            enableRedParticles();
    }

    // Switches the trail to the "deadly" red tint; safe to call before Start.
    public void enableRedParticles(){
        if (realTrailPS == null) {
            deadlyParticlesRequested = true;
        } else {
            ParticleSystem.MainModule main = realTrailPS.GetComponent<ParticleSystem>().main;
            main.startColor = deadlyTrailPsColor;
        }
    }

    void OnEnable(){
        destrs.Add (this);
    }

    void OnDisable(){
        destrs.Remove (this);
    }

    void Update(){
        if (isDead)
            return;
        setPsPos ();
        clicked = false;
        // Clean up once the obstacle has fallen well below the camera view.
        if (transform.position.y < -Camera.main.orthographicSize - 2.0f) {
            realTrailPS.GetComponent<ParticleSystem> ().Stop ();
            Destroy (gameObject);
        }
    }

    // Plays the explosion, optionally awards points, stops the trail and
    // starts the death animation. The collider is disabled so the corpse
    // can no longer be clicked or collide.
    protected void destroy(bool givePoints) {
        ParticleSystem ps = Instantiate (psPrefab, transform.position, Quaternion.Euler (270, 0, 0)).GetComponent<ParticleSystem> ();
        ParticleSystem.MainModule newMain = ps.main;
        newMain.startColor = new ParticleSystem.MinMaxGradient(particleColor );
        ps.Play ();
        if(player != null && givePoints)
            player.addPoints (worthyness);
        if(realTrailPS != null)
            realTrailPS.GetComponent<ParticleSystem> ().Stop ();
        animator.Play ("ObDie");
        GetComponent<CircleCollider2D> ().enabled = false;
        isDead = true;
    }

    // Keeps the trail particle system glued to the obstacle, slightly behind it in z.
    void setPsPos(){
        realTrailPS.transform.position = new Vector3(transform.position.x, transform.position.y, transform.position.z + 1f);
    }

    // Called when the obstacle reaches the ground: destroy without points.
    public void groundHit(){
        destroy (false);
    }

    // Destroys every registered destroyable without awarding points
    // (iterates backwards because destroy() mutates state).
    public static void destroyAll(){
        destroyingAll = true;
        for (int i = destrs.Count - 1; i >= 0; i--)
            if(destrs[i].enabled)
                destrs [i].destroy (false);
        destrs.Clear ();
        destroyingAll = false;
    }

    // Entry point for player taps; debounced to one hit per frame.
    public void click(){
        if (!clicked) {
            clicked = true;
            animator.Play("ObHit");
            hit (1);
        }
    }

    public bool isNowDead(){
        return isDead;
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
// UI bar (e.g. health bar) that scales horizontally with a percentage and
// lerps its color between `empty` and `full`.
public class BarScript : MonoBehaviour {

    private Image image;
    public Color full;   // color at 100%
    public Color empty;  // color at 0%
    private float crntScale = 1.0f;
    private bool shouldSetScale = false;

    void Start(){
        image = GetComponent<Image> ();
        image.color = full;
    }

    void LateUpdate(){
        // NOTE(review): shouldSetScale is never reset, so the scale is
        // re-applied every frame once applyChange() has been called —
        // presumably to win over layout passes; confirm before "fixing".
        if(shouldSetScale)
            setSize (crntScale);
    }

    // Sets the bar to `percentage` in [0, 1]; the scale change itself is
    // deferred to LateUpdate, the color is applied immediately.
    public void applyChange(float percentage){
        shouldSetScale = true;
        crntScale = percentage;
        image.color = Color.Lerp (empty, full, percentage);
    }

    private void setSize(float f){
        image.rectTransform.localScale = new Vector3 (f, 1f, 1f);
    }
}
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.EventSystems;
// Obstacle whose vertical speed is multiplied by accFactor on every tap,
// making it fall faster the more the player clicks.
public class AcceleratorOb : ObScript {

    public float accFactor = 1.4f;

    override protected void move(){
        bool tapped = Input.GetMouseButtonDown (0);
        if (tapped)
            movement.y = movement.y * accFactor;
        base.move ();
    }
}
<file_sep># Protect
Protect is a simple mobile game made with Unity.
*The name of this project is not very creative and will probably be changed in further development.*
## Game
You have to "protect" the wobbly object at the bottom of the screen. To do so, tap or click on the circular obstacles falling from the top. They will then explode, so they cannot hurt the ground.
The main goal of the game is to bump up your highscore as much as you can.
## Work in progress
The work on this project is in progress. Here are some outlooks on coming features:
**Items** will give you helpful effects, such as healing.
With **Achievements**, you will be able to unlock special features to earn more points and beat your highscore.
Some of these Features could be:
* getting double points,
* decreasing the damage of obstacles
* or increasing the default health.
| 6ac417433963eef113a9a33c08350f17dfbb9e4c | [
"Markdown",
"C#"
] | 30 | C# | Mioriarty/Protect | 2c915e47eca01d2f576d75d25530dd66ed29ccae | 3e82dc3f03055bc09ddc6c4b5a9b30672cfdfedc |
refs/heads/master | <file_sep>'use strict';
const namespace = safari.extension.baseURI;
// Handle messages from the global extension script: toggle the monochrome
// filter class on <html> based on the '1'/'0' payload.
function message(event) {
  if (event.name !== namespace) {
    return;
  }
  const enabled = event.message === '1';
  document.documentElement.classList.toggle('monochrome', enabled);
}
// Forward the Cmd+Shift+M hotkey to the global extension script.
function keypress(event) {
  const isHotkey = event.metaKey && event.shiftKey && event.key === 'm';
  if (isHotkey) {
    safari.self.tab.dispatchMessage(namespace, 'hotkey');
  }
}
safari.self.addEventListener('message', message, true);
window.addEventListener('keypress', keypress, true);
<file_sep>#!/usr/bin/env bash
# Assemble the unsigned Safari extension bundle under dist/.
# Abort on the first failed command instead of copying into a broken bundle.
set -e

dist=dist/monochrome.safariextension

rm -rf "$dist"
mkdir -p "$dist"

cp assets/icon-16.png "$dist/Icon-16.png"
cp assets/icon-64.png "$dist/Icon.png"
cp *.css "$dist/"
cp *.js "$dist/"
cp *.html "$dist/"
cp Info.plist "$dist/"
cp Settings.plist "$dist/"

echo "Use Safari's Extension Builder to create the signed extension package"
<file_sep># monochrome
> Safari extension for monochrome web
## License
MIT
| 3c20d04e2ec7fc748218f99998b5b71346ac6418 | [
"JavaScript",
"Markdown",
"Shell"
] | 3 | JavaScript | andrepolischuk/monochrome | 3de8c29c115d386da37d64f1163f495ca057b541 | 24094b5d40eb470ac5fe593642d0fb298d51dce8 |
refs/heads/master | <repo_name>TreeinRandomForest/battleship<file_sep>/battleship.py
import numpy as np
#Can be clever and combine the horizontal/vertical parts but keeping for clarity
class Board:
    """Battleship board with random ship placement, manual probing, and a
    probability-guided autoplayer.

    Typical usage: ``b = Board(); b.place_ships(); b.play(); b.autoplay()``.
    """

    def __init__(self, N=10, N_limit=10):
        """
        N: side length of the square board.
        N_limit: maximum random placement attempts per ship before giving up.
        """
        self.N = N
        self.N_limit = N_limit
        self.board = np.zeros(shape=(N, N))

    def place_ships(self):
        """Randomly place ships of lengths 1..5 without overlap.

        Each ship gets at most ``self.N_limit`` attempts; a ship that cannot
        be placed within that budget is silently skipped, so the board may
        end up with fewer than five ships. Cells covered by a ship store the
        ship's length. Sets ``self.n_to_hit`` to the number of occupied cells.
        """
        N = self.N
        for ship_length in range(1, 6):
            placed = False
            n_iter = 0
            while (not placed) and n_iter < self.N_limit:
                n_iter += 1

                # Orientation: 0 = horizontal, 1 = vertical.
                hv = np.random.randint(2)
                if hv == 0:
                    row = np.random.randint(N)
                    col = np.random.randint(low=0, high=(N - ship_length) + 1)
                    if self.board[row, col:col + ship_length].sum() == 0:
                        placed = True
                        self.board[row, col:col + ship_length] = ship_length
                elif hv == 1:
                    col = np.random.randint(N)
                    row = np.random.randint(low=0, high=(N - ship_length) + 1)
                    if self.board[row:row + ship_length, col].sum() == 0:
                        placed = True
                        self.board[row:row + ship_length, col] = ship_length

        self.n_to_hit = (self.board > 0).sum()

    def play(self):
        """Reset the opponent-visible state: 0 unknown, 1 hit, -1 miss."""
        self.state = np.zeros(shape=(self.N, self.N))

    def hit(self, row, col, print_state=True):
        """Probe cell (row, col); return 1 on hit, -1 on miss.

        Raises ValueError for out-of-range coordinates or for probing the
        same cell twice. When ``print_state`` is True the visible state is
        printed after the probe. (Bug fix: the print previously sat after
        the return statements and was unreachable.)
        """
        if row < 0 or row >= self.N:
            raise ValueError(f"Please ensure row is between 0 and {self.N-1} [inclusive]")
        if col < 0 or col >= self.N:
            raise ValueError(f"Please ensure col is between 0 and {self.N-1} [inclusive]")
        if self.state[row, col] != 0:
            raise ValueError(f"Hitting cell ({row}, {col}) again")

        if self.board[row, col] > 0:
            print(f"Hit at ({row}, {col})")
            self.state[row, col] = 1
            result = 1
        else:
            print(f"Miss at ({row}, {col})")
            self.state[row, col] = -1
            result = -1

        if print_state:
            print(self.state)
        return result

    def autoplay(self):
        """Play automatically until every ship cell has been hit.

        Strategy: keep a weight per unprobed cell (initially uniform),
        sample a cell from the normalized weights, and after each hit bump
        the weights of the in-bounds, unprobed orthogonal neighbors.
        Requires ``place_ships()`` and ``play()`` to have been called first.
        """
        prob = np.ones_like(self.state)  # unnormalized sampling weights
        prob_norm = prob / prob.sum()

        # Map flat indices (for np.random.choice) back to (row, col).
        flattened_domain = np.arange(self.N * self.N)
        idx_mat = np.arange(self.N * self.N).reshape(self.N, -1)
        idx_to_loc = {}
        for row in range(self.N):
            for col in range(self.N):
                idx_to_loc[idx_mat[row, col]] = (row, col)

        n_current_hits = 0
        n_misses = 0
        while n_current_hits != self.n_to_hit:
            # Keep playing until all ship cells are hit. In real games the
            # loop might terminate early if the opponent wins first.
            row, col = idx_to_loc[np.random.choice(flattened_domain, p=prob_norm.flatten())]
            hit_or_miss = self.hit(row, col, print_state=False)
            prob[row, col] = 0  # never probe the same cell twice

            if hit_or_miss == 1:
                n_current_hits += 1
                # Boost the weight of each in-bounds, unprobed neighbor.
                for nrow, ncol in ((row + 1, col), (row - 1, col),
                                   (row, col + 1), (row, col - 1)):
                    if 0 <= nrow < self.N and 0 <= ncol < self.N \
                            and self.state[nrow, ncol] == 0:
                        prob[nrow, ncol] += 1

                print(self.board)
                print("-----------")
                print(self.state)
                print("-----------")
                print(prob)
                print("-----------")
            elif hit_or_miss == -1:
                n_misses += 1

            prob_norm = prob / prob.sum()  # renormalize after every probe

        print(f'Hits: {n_current_hits} Misses: {n_misses}')
| ebfd842b167d97a0b4ea1f61e66b9e3c005ac064 | [
"Python"
] | 1 | Python | TreeinRandomForest/battleship | fc87c2e41071118aa3a9ce0a9cc9ff4f9f07cc0b | 75be76b07b4a13f8f5acf875f846fd5c77951b10 |
refs/heads/master | <repo_name>lmm22/ProgrammingAssignment2<file_sep>/cachematrix.R
## The makeCacheMatrix and cacheSolve functions are used together to store a matrix
## inputted by the user, and then to solve for the inverse of the matrix and store the
## computed inverse in memory.
## The makeCacheMatrix function takes a supplied matrix as input and creates a list of
## 3 functions: get, setinv, and getinv, which will be used, respectively, to print
## the matrix from memory, store the inverse, and print the inverse from memory.
## Wraps a matrix together with a cache slot for its inverse. Returns a list
## of closures: get/set the matrix, and getinv/setinv for the cached inverse.
## Setting a new matrix invalidates the cache.
makeCacheMatrix <- function(x = matrix()) {
        cachedInverse <- NULL
        set <- function(y) {
                x <<- y
                cachedInverse <<- NULL
        }
        get <- function() x
        setinv <- function(inv) cachedInverse <<- inv
        getinv <- function() cachedInverse
        list(set = set,
             get = get,
             setinv = setinv,
             getinv = getinv)
}
## The cacheSolve function solves for the inverse of the matrix supplied to the makeCacheMatrix
## function. First, it checks the 'getinv' variable defined above to determine if the computed
## inverse is already stored in memory. If so, the inverse is printed from the cache. If the
## inverse has not been computed, cacheSolve will calculate the inverse of the matrix and supply
## the result to 'setinv', which stores the inverse in cache memory.
## Returns the inverse of the matrix held in `x` (a makeCacheMatrix wrapper).
## Serves the cached inverse when available; otherwise computes it with
## solve(), stores it via setinv, and returns it.
cacheSolve <- function(x, ...) {
        ## Return a matrix that is the inverse of 'x'
        cached <- x$getinv()
        if (!is.null(cached)) {
                message("getting cached data")
                return(cached)
        }
        mat <- x$get()
        inv <- solve(mat, ...)
        x$setinv(inv)
        inv
}
| 55fcb86b7e6a99757442245abfae2be35e06e53e | [
"R"
] | 1 | R | lmm22/ProgrammingAssignment2 | 1ddcd4b501d3fc5a166f40dee5822ba76147bff6 | a2cccf89e5e6fabb8319567848b5652bf5dc16bb |
refs/heads/master | <file_sep>export const API_KEY = "<KEY>";
export const baseURL = "https://api.nasa.gov/";
<file_sep># NASA-svelte
NASA data from NASA API built with Svelte
| 8ef6f9e6859e1b83b1d27ed00e0895ebaebe15b3 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | littlemousey/NASA-svelte | 459c349da1283a4131cb7bf1f6015949aea1d5dd | b9b3e8771c592fb9a016ecedb73e3266a33d8219 |
refs/heads/master | <file_sep>package com.sinovoice.reader.dao;
import java.util.Map;
import org.greenrobot.greendao.AbstractDao;
import org.greenrobot.greendao.AbstractDaoSession;
import org.greenrobot.greendao.database.Database;
import org.greenrobot.greendao.identityscope.IdentityScopeType;
import org.greenrobot.greendao.internal.DaoConfig;
import com.sinovoice.reader.bean.BookInfoBean;
import com.sinovoice.reader.bean.BookmarkBean;
import com.sinovoice.reader.bean.BookShelfBean;
import com.sinovoice.reader.bean.BookSourceBean;
import com.sinovoice.reader.bean.ChapterListBean;
import com.sinovoice.reader.bean.SearchBookBean;
import com.sinovoice.reader.dao.BookInfoBeanDao;
import com.sinovoice.reader.dao.BookmarkBeanDao;
import com.sinovoice.reader.dao.BookShelfBeanDao;
import com.sinovoice.reader.dao.BookSourceBeanDao;
import com.sinovoice.reader.dao.ChapterListBeanDao;
import com.sinovoice.reader.dao.SearchBookBeanDao;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* {@inheritDoc}
*
* @see org.greenrobot.greendao.AbstractDaoSession
*/
// NOTE: generated by greenDAO — do not hand-edit; regenerate via the
// greendao Gradle plugin (schema config lives in build.gradle) instead.
public class DaoSession extends AbstractDaoSession {

    // Per-entity DAO configuration (cloned per session so identity scopes
    // can be set independently).
    private final DaoConfig bookInfoBeanDaoConfig;
    private final DaoConfig bookmarkBeanDaoConfig;
    private final DaoConfig bookShelfBeanDaoConfig;
    private final DaoConfig bookSourceBeanDaoConfig;
    private final DaoConfig chapterListBeanDaoConfig;
    private final DaoConfig searchBookBeanDaoConfig;

    // One DAO instance per entity type, registered with the session below.
    private final BookInfoBeanDao bookInfoBeanDao;
    private final BookmarkBeanDao bookmarkBeanDao;
    private final BookShelfBeanDao bookShelfBeanDao;
    private final BookSourceBeanDao bookSourceBeanDao;
    private final ChapterListBeanDao chapterListBeanDao;
    private final SearchBookBeanDao searchBookBeanDao;

    public DaoSession(Database db, IdentityScopeType type, Map<Class<? extends AbstractDao<?, ?>>, DaoConfig>
            daoConfigMap) {
        super(db);

        bookInfoBeanDaoConfig = daoConfigMap.get(BookInfoBeanDao.class).clone();
        bookInfoBeanDaoConfig.initIdentityScope(type);

        bookmarkBeanDaoConfig = daoConfigMap.get(BookmarkBeanDao.class).clone();
        bookmarkBeanDaoConfig.initIdentityScope(type);

        bookShelfBeanDaoConfig = daoConfigMap.get(BookShelfBeanDao.class).clone();
        bookShelfBeanDaoConfig.initIdentityScope(type);

        bookSourceBeanDaoConfig = daoConfigMap.get(BookSourceBeanDao.class).clone();
        bookSourceBeanDaoConfig.initIdentityScope(type);

        chapterListBeanDaoConfig = daoConfigMap.get(ChapterListBeanDao.class).clone();
        chapterListBeanDaoConfig.initIdentityScope(type);

        searchBookBeanDaoConfig = daoConfigMap.get(SearchBookBeanDao.class).clone();
        searchBookBeanDaoConfig.initIdentityScope(type);

        bookInfoBeanDao = new BookInfoBeanDao(bookInfoBeanDaoConfig, this);
        bookmarkBeanDao = new BookmarkBeanDao(bookmarkBeanDaoConfig, this);
        bookShelfBeanDao = new BookShelfBeanDao(bookShelfBeanDaoConfig, this);
        bookSourceBeanDao = new BookSourceBeanDao(bookSourceBeanDaoConfig, this);
        chapterListBeanDao = new ChapterListBeanDao(chapterListBeanDaoConfig, this);
        searchBookBeanDao = new SearchBookBeanDao(searchBookBeanDaoConfig, this);

        registerDao(BookInfoBean.class, bookInfoBeanDao);
        registerDao(BookmarkBean.class, bookmarkBeanDao);
        registerDao(BookShelfBean.class, bookShelfBeanDao);
        registerDao(BookSourceBean.class, bookSourceBeanDao);
        registerDao(ChapterListBean.class, chapterListBeanDao);
        registerDao(SearchBookBean.class, searchBookBeanDao);
    }

    // Clears all identity scopes so cached entities are re-read from the DB.
    public void clear() {
        bookInfoBeanDaoConfig.clearIdentityScope();
        bookmarkBeanDaoConfig.clearIdentityScope();
        bookShelfBeanDaoConfig.clearIdentityScope();
        bookSourceBeanDaoConfig.clearIdentityScope();
        chapterListBeanDaoConfig.clearIdentityScope();
        searchBookBeanDaoConfig.clearIdentityScope();
    }

    public BookInfoBeanDao getBookInfoBeanDao() {
        return bookInfoBeanDao;
    }

    public BookmarkBeanDao getBookmarkBeanDao() {
        return bookmarkBeanDao;
    }

    public BookShelfBeanDao getBookShelfBeanDao() {
        return bookShelfBeanDao;
    }

    public BookSourceBeanDao getBookSourceBeanDao() {
        return bookSourceBeanDao;
    }

    public ChapterListBeanDao getChapterListBeanDao() {
        return chapterListBeanDao;
    }

    public SearchBookBeanDao getSearchBookBeanDao() {
        return searchBookBeanDao;
    }

}
<file_sep>package com.sinovoice.reader.widget;
import android.content.res.ColorStateList;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorInt;
import android.support.annotation.NonNull;
import android.support.design.internal.NavigationMenuPresenter;
import android.support.design.internal.NavigationMenuView;
import android.support.design.widget.NavigationView;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.widget.AppCompatImageView;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.style.ImageSpan;
import android.view.MenuItem;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.sinovoice.reader.R;
import com.sinovoice.reader.utils.ScreenUtils;
import java.lang.reflect.Field;
// Static UI helpers for restyling AppCompat/Design widgets: custom icons and
// tints for SearchView, separator styling inside NavigationView (via
// reflection), and drawable-tinting utilities.
public class AppCompat {

    // Replaces the SearchView's close/search icons, tints them, optionally
    // draws a tinted background plate, and installs the hint text.
    public static void useCustomIconForSearchView(SearchView searchView, String hint, boolean showSearchIcon, boolean showBg) {
        AppCompatImageView close = searchView.findViewById(R.id.search_close_btn);
        close.setImageResource(R.drawable.ic_close_black_24dp);
        setTint(close, searchView.getResources().getColor(R.color.menu_color_default));
        close.setPadding(0, ScreenUtils.dpToPx(2), 0, 0);
        AppCompatImageView search = searchView.findViewById(android.support.v7.appcompat.R.id.search_button);
        search.setImageResource(R.drawable.ic_search_black_24dp);
        setTint(search, searchView.getResources().getColor(R.color.menu_color_default));
        SearchView.SearchAutoComplete searchText = searchView.findViewById(R.id.search_src_text);
        LinearLayout plate = searchView.findViewById(R.id.search_plate);
        if (showBg) {
            Drawable bag = searchView.getResources().getDrawable(R.drawable.bg_textfield_search);
            setTintList(bag, createSearchPlateBagState(searchView.getResources().getColor(R.color.colorAccent),
                    searchText.getCurrentHintTextColor()));
            android.support.v4.view.ViewCompat.setBackground(plate, bag);
        } else {
            android.support.v4.view.ViewCompat.setBackground(plate, null);
        }
        setQueryHintForSearchText(searchText, hint, showSearchIcon);
    }

    // Convenience overload: search icon and background plate both shown.
    public static void useCustomIconForSearchView(SearchView searchView, String hint) {
        useCustomIconForSearchView(searchView, hint, true, true);
    }

    // Color state list for the search plate: accent color when focused or
    // activated, the hint color otherwise.
    private static ColorStateList createSearchPlateBagState(int activeColor, int normalColor) {
        int[] colors = new int[]{activeColor, activeColor, activeColor, normalColor, normalColor};
        int[][] states = new int[5][];
        states[0] = new int[]{android.R.attr.state_enabled, android.R.attr.state_focused};
        states[1] = new int[]{android.R.attr.state_enabled, android.R.attr.state_activated};
        states[2] = new int[]{android.R.attr.state_focused};
        states[3] = new int[]{android.R.attr.state_window_focused};
        states[4] = new int[]{};
        return new ColorStateList(states, colors);
    }

    public static void setQueryHintForSearchText(SearchView.SearchAutoComplete textView, String hintText) {
        setQueryHintForSearchText(textView, hintText, true);
    }

    // Installs the hint, optionally prefixed with a tinted inline search icon
    // sized relative to the text size.
    public static void setQueryHintForSearchText(SearchView.SearchAutoComplete textView, String hintText, boolean showIcon) {
        textView.setTextColor(textView.getResources().getColor(R.color.tv_text_default));
        if (showIcon) {
            final int textSize = (int) (textView.getTextSize() * 1.25);
            Drawable mSearchHintIcon = textView.getResources().getDrawable(R.drawable.ic_search_black_24dp);
            mSearchHintIcon.setBounds(0, 0, textSize, textSize);
            setTint(mSearchHintIcon, textView.getCurrentTextColor());
            // Placeholder space at index 1 carries the icon span.
            final SpannableStringBuilder ssb = new SpannableStringBuilder("   ");
            ssb.setSpan(new ImageSpan(mSearchHintIcon), 1, 2, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
            ssb.append(hintText);
            textView.setHint(ssb);
        } else {
            textView.setHint(hintText);
        }
    }

    // Restyles NavigationView separator lines. Reaches into private
    // presenter/menuView fields via reflection, so this is fragile across
    // support-library versions; failures are swallowed deliberately.
    public static void setNavigationViewLineStyle(NavigationView navigationView, @ColorInt final int color, final int height) {
        try {
            Field fieldByPressenter = navigationView.getClass().getDeclaredField("presenter");
            fieldByPressenter.setAccessible(true);
            NavigationMenuPresenter menuPresenter = (NavigationMenuPresenter) fieldByPressenter.get(navigationView);
            Field fieldByMenuView = menuPresenter.getClass().getDeclaredField("menuView");
            fieldByMenuView.setAccessible(true);
            final NavigationMenuView mMenuView = (NavigationMenuView) fieldByMenuView.get(menuPresenter);
            mMenuView.addOnChildAttachStateChangeListener(new RecyclerView.OnChildAttachStateChangeListener() {
                @Override
                public void onChildViewAttachedToWindow(View view) {
                    RecyclerView.ViewHolder viewHolder = mMenuView.getChildViewHolder(view);
                    if (viewHolder != null && "SeparatorViewHolder".equals(viewHolder.getClass().getSimpleName())) {
                        if (viewHolder.itemView instanceof FrameLayout) {
                            FrameLayout frameLayout = (FrameLayout) viewHolder.itemView;
                            View line = frameLayout.getChildAt(0);
                            line.setBackgroundColor(color);
                            line.getLayoutParams().height = height;
                            line.setLayoutParams(line.getLayoutParams());
                        }
                    }
                }

                @Override
                public void onChildViewDetachedFromWindow(View view) {

                }
            });
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    // Tints a drawable with a ColorStateList; wraps+mutates so shared
    // drawable state is not affected.
    public static void setTintList(Drawable drawable, ColorStateList tint, @NonNull PorterDuff.Mode tintMode) {
        if (drawable == null) return;
        final Drawable wrappedDrawable = DrawableCompat.wrap(drawable.mutate());
        DrawableCompat.setTintList(wrappedDrawable, tint);
        DrawableCompat.setTintMode(wrappedDrawable, tintMode);
    }

    public static void setTintList(Drawable drawable, ColorStateList tint) {
        setTintList(drawable, tint, PorterDuff.Mode.SRC_ATOP);
    }

    // Tints an ImageView's drawable, or all compound drawables of a TextView.
    public static void setTintList(View view, ColorStateList tint) {
        if (view instanceof ImageView) {
            Drawable drawable = ((ImageView) view).getDrawable();
            setTintList(drawable, tint);
        } else if (view instanceof TextView) {
            Drawable[] drawables = ((TextView) view).getCompoundDrawables();
            for (Drawable drawable : drawables) {
                setTintList(drawable, tint);
            }
        }
    }

    public static void setTint(Drawable drawable, @ColorInt int tint, @NonNull PorterDuff.Mode tintMode) {
        if (drawable == null) return;
        final Drawable wrappedDrawable = DrawableCompat.wrap(drawable.mutate());
        DrawableCompat.setTint(wrappedDrawable, tint);
        DrawableCompat.setTintMode(wrappedDrawable, tintMode);
    }

    public static void setTint(Drawable drawable, @ColorInt int tint) {
        setTint(drawable, tint, PorterDuff.Mode.SRC_ATOP);
    }

    // Single-color variant of setTintList(View, ...).
    public static void setTint(View view, int color) {
        if (view instanceof ImageView) {
            Drawable drawable = ((ImageView) view).getDrawable();
            setTint(drawable, color);
        } else if (view instanceof TextView) {
            Drawable[] drawables = ((TextView) view).getCompoundDrawables();
            for (Drawable drawable : drawables) {
                setTint(drawable, color);
            }
        }
    }

    public static void setTint(MenuItem item, int color) {
        if (item != null && item.getIcon() != null) {
            setTint(item.getIcon(), color);
        }
    }

    public static void setToolbarNavIconTint(Toolbar toolbar, int color) {
        if (toolbar != null && toolbar.getNavigationIcon() != null) {
            setTint(toolbar.getNavigationIcon(), color);
        }
    }
}
<file_sep>apply plugin: 'com.android.application'
apply plugin: 'org.greenrobot.greendao'
android {
    compileSdkVersion 28
    buildToolsVersion '28.0.3'

    defaultConfig {
        applicationId "com.sinovoice.reader"
        minSdkVersion 19
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
    }
    lintOptions {
        // Don't fail the build on lint errors.
        abortOnError false
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        // Enable Java 8 language features (lambdas, method references).
        targetCompatibility 1.8
        sourceCompatibility 1.8
    }
}
dependencies {
    api project(':basemvplib')
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'com.android.support:multidex:1.0.3'
    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
    implementation "com.android.support:design:$support_library_version"
    implementation "com.android.support:cardview-v7:$support_library_version"
    implementation "com.android.support:support-compat:$support_library_version"
    implementation "com.android.support:support-media-compat:$support_library_version"
    implementation "com.android.support:support-v4:$support_library_version"
    // Fix: gson and flexbox were each declared twice; duplicates removed.
    implementation 'com.google.code.gson:gson:2.8.2'
    implementation 'com.google.android:flexbox:1.0.0'
    implementation (group: 'com.google.guava', name: 'guava', version: '19.0')

    //android (androidx equivalents, kept for a future migration)
//    implementation 'androidx.multidex:multidex:2.0.1'
//    implementation 'androidx.constraintlayout:constraintlayout:2.0.0-alpha3'
//    implementation 'androidx.cardview:cardview:1.0.0'
//    implementation 'androidx.media:media:1.1.0-alpha01'
//    implementation 'androidx.legacy:legacy-support-v4:1.0.0'

    //GreenDao
    implementation 'org.greenrobot:greendao:3.2.2'
    implementation 'com.github.yuweiguocn:GreenDaoUpgradeHelper:v2.1.0'
    //codecMD5
    implementation 'commons-codec:commons-codec:1.11'
    //ProgressBar
    implementation 'com.zhangmonke:MProgressBar:1.0.1'
    //Glide
    implementation 'com.github.bumptech.glide:glide:4.8.0'
    annotationProcessor 'com.github.bumptech.glide:compiler:4.8.0'
    //AutoFitTextView
    implementation 'me.grantland:autofittextview:0.2.1'
    //CircleImageView
    implementation 'de.hdodenhof:circleimageview:2.2.0'
    //bind view
    implementation 'com.jakewharton:butterknife:8.8.1'
    annotationProcessor 'com.jakewharton:butterknife-compiler:8.8.1'
    //动画 (animations)
    implementation 'com.victor:lib:1.0.4'
    implementation 'tyrantgit:explosionfield:1.0.1'
    //简易权限获取 (runtime permission helper)
    implementation 'pub.devrel:easypermissions:2.0.0'
    //颜色选择 (color picker)
    implementation 'com.github.QuadFlask:colorpicker:0.0.13'
    //文件目录选择 (file/directory picker)
    implementation('com.github.gedoor.AndroidPicker:FilePicker:1.6.3') {
        exclude group: 'com.android.support'
    }
    //简繁转换 (simplified/traditional Chinese conversion)
    implementation 'com.luhuiguo:chinese-utils:1.0'
    //字符串比较 (string similarity)
    implementation 'net.ricecode:string-similarity:1.0.0'
    //MarkDown
    implementation 'ru.noties:markwon:2.0.0'
    //epub
    implementation('nl.siegmann.epublib:epublib-core:3.1') {
        exclude group: 'xmlpull'
    }
//
//    //RxAndroid
//    implementation 'io.reactivex.rxjava2:rxjava:2.2.4'
//    implementation 'io.reactivex.rxjava2:rxandroid:2.1.0'
//
//    //Retrofit
//    implementation 'com.squareup.retrofit2:retrofit:2.5.0'
//    implementation 'com.squareup.retrofit2:adapter-rxjava2:2.5.0'
//    implementation 'com.squareup.retrofit2:converter-scalars:2.5.0'
}

// greenDAO code-generation config. Fix: this block previously sat INSIDE
// dependencies {} and only worked through Groovy closure delegation; the
// greendao extension is meant to be configured at the top level.
greendao {
    schemaVersion 51
    daoPackage 'com.sinovoice.reader.dao'
    targetGenDir 'src/main/java'
}
<file_sep>include ':readpage',':basemvplib'
| 369aede4b99969888abc5dc01d3395f93e87ff76 | [
"Java",
"Gradle"
] | 4 | Java | linzisme/reded | 7c3579601879d0248609365e3f8bb85f542c0c09 | 7dae1584792d6ace5712c3e9b12c83713ca52a8c |
refs/heads/master | <repo_name>JackLu1/TeenTitanics<file_sep>/TeenTitanics.wsgi
#!/usr/bin/python3
import sys
sys.path.insert(0,"/var/www/TeenTitanics/")c
sys.path.insert(0,"/var/www/TeenTitanics/TeenTitanics/")
from TeenTitanics import app as application
| 8dafb5943a71e0e98fb44b0964d0ad50a0a88bf5 | [
"Python"
] | 1 | Python | JackLu1/TeenTitanics | 52033474a5644e24a5f53901083314438d748ef3 | 2c3e09f120ab872833fde688653af8e55f153984 |
refs/heads/master | <repo_name>Rebecca2527/rebecca-sc<file_sep>/rebecca-hystrix-demo/src/main/java/com/hystrix/demo/controller/TestController.java
package com.hystrix.demo.controller;
import com.hystrix.demo.service.ConsumerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint that delegates to {@link ConsumerService#consumer()}.
 * (Module name suggests the service call is Hystrix-wrapped — confirm in
 * ConsumerService.)
 *
 * @author Rebecca
 * @date 2020/8/26
 */
@RestController
public class TestController {
    @Autowired
    ConsumerService consumerService;

    // GET /consumer -> forwards to the consumer service call.
    // The `throws InterruptedException` only serves the commented-out sleep.
    @GetMapping("/consumer")
    public String test() throws InterruptedException {
        //Thread.sleep(5000L);
        //String services = "Services: " + discoveryClient.getServices();
        return consumerService.consumer();
    }
}
<file_sep>/rebecca-nacos-swagger/src/main/java/com/swagger/demo/SwaggerApplication.java
package com.swagger.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
 * Spring Boot entry point for the Swagger demo service.
 * Reference: http://blog.didispace.com/springbootswagger2/
 * NOTE(review): {@code EnableSwagger2} is imported but not applied here —
 * presumably configured elsewhere; confirm.
 *
 * @author Rebecca
 * @date 2020/8/21
 */
@SpringBootApplication
public class SwaggerApplication {

    public static void main(String[] args) {
        SpringApplication.run(SwaggerApplication.class, args);
    }
}
<file_sep>/rebecca-hystrix-simple/src/main/java/rebecca/hystrix/simple/client/TestClient.java
package rebecca.hystrix.simple.client;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
/**
 * Feign client for the {@code rebecca-nacos-provide} service, resolved
 * through service discovery.
 *
 * @author Rebecca
 * @date 2020/9/1
 */
@FeignClient("rebecca-nacos-provide")
public interface TestClient {

    // Proxies GET /helloNacos on the provider service.
    @GetMapping("helloNacos")
    public String helloNacos();
}
<file_sep>/rebecca-zuul-api/src/main/java/com/zuul/demo/config/DocumentationConfig.java
package com.zuul.demo.config;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Component;
import springfox.documentation.swagger.web.SwaggerResource;
import springfox.documentation.swagger.web.SwaggerResourcesProvider;
import java.util.ArrayList;
import java.util.List;
/**
* 2 * @Author: Rebecca
* 3 * @Date: 2020/8/21 11:01
* 4
*/
@Component
@Primary
public class DocumentationConfig implements SwaggerResourcesProvider {
public List<SwaggerResource> get() {
List resources = new ArrayList();
// application name/v2/api-docs
resources.add(swaggerResource("rebecca-swagger-a", "/rebecca-swagger-a/v2/api-docs", "2.0"));
resources.add(swaggerResource("rebecca-swagger-b", "/rebecca-swagger-b/v2/api-docs", "2.0"));
return resources;
}
private SwaggerResource swaggerResource(String name, String location, String version) {
SwaggerResource swaggerResource = new SwaggerResource();
swaggerResource.setName(name);
swaggerResource.setLocation(location);
swaggerResource.setSwaggerVersion(version);
return swaggerResource;
}
}
<file_sep>/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.example</groupId>
    <artifactId>rebecca-sc</artifactId>
    <!-- Aggregator POM: packaging must be "pom" for a multi-module build. -->
    <packaging>pom</packaging>
    <version>1.0-SNAPSHOT</version>
    <modules>
        <module>rebecca-nacos-provide</module>
        <module>rebecca-nacos-consumer</module>
        <module>rebecca-nacos-feign</module>
        <module>rebecca-nacos-config</module>
        <module>rebecca-nacos-config-share</module>
        <module>rebecca-sentinel-limit</module>
        <module>revecca-sentinel-save</module>
        <module>rebecca-sentinel-annotation</module>
        <module>rebecca-zuul-api</module>
        <module>rebecca-nacos-swagger</module>
        <module>rebecca-swagger-a</module>
        <module>rebecca-swagger-b</module>
        <module>rebecca-security-demo</module>
        <module>rebecca-hystrix-demo</module>
        <module>rebecca-spring-admin</module>
        <module>rebecca-boot-admin</module>
        <module>untitled</module>
        <module>rebecca-admin-client</module>
        <module>rebecca-nacos-admin</module>
        <module>rebecca-hystrix-simple</module>
    </modules>
    <properties>
        <java.version>1.8</java.version>
        <spring-boot.version>2.0.4.RELEASE</spring-boot.version>
        <spring-cloud.version>Finchley.RELEASE</spring-cloud.version>
        <nacos.version>0.2.2.RELEASE</nacos.version>
    </properties>
    <dependencies>
        <!-- NOTE(review): hard-coded 2.0.2.RELEASE disagrees with the managed
             spring-boot.version (2.0.4.RELEASE) above; dropping the explicit
             <version> here would let dependencyManagement pick it — confirm. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <version>2.0.2.RELEASE</version>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-dependencies</artifactId>
                <version>${spring-cloud.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-dependencies</artifactId>
                <version>${spring-boot.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
            <dependency>
                <groupId>org.springframework.cloud</groupId>
                <artifactId>spring-cloud-alibaba-dependencies</artifactId>
                <version>${nacos.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <repositories>
        <!-- Aliyun mirror; snapshots disabled, releases enabled. -->
        <repository>
            <id>nexus-aliyun</id>
            <name>Nexus aliyun</name>
            <layout>default</layout>
            <url>http://maven.aliyun.com/nexus/content/groups/public</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
            <releases>
                <enabled>true</enabled>
            </releases>
        </repository>
    </repositories>
</project><file_sep>/rebecca-hystrix-demo/src/main/java/com/hystrix/demo/service/ConsumerService.java
package com.hystrix.demo.service;
import com.hystrix.demo.client.TestClient;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
/**
 * Wraps the remote Feign call with a Hystrix fallback.
 *
 * @author Rebecca
 * @date 2020/8/26 15:12
 */
@Service
public class ConsumerService {

    // NOTE(review): injected but unused in this class — candidate for removal
    // once confirmed nothing else relies on this bean being created here.
    @Autowired
    RestTemplate restTemplate;

    @Autowired
    TestClient testClient;

    /** Remote call; on failure Hystrix invokes {@link #fallback()} instead. */
    @HystrixCommand(fallbackMethod = "fallback")
    public String consumer() {
        return testClient.getHello();
    }

    /** Degraded response returned when the remote call fails or times out. */
    public String fallback() {
        return "fallback";
    }
}
<file_sep>/rebecca-security-demo/src/main/java/com/security/demo/controller/HelloController.java
package com.security.demo.controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Minimal endpoint used to smoke-test the security demo.
 *
 * @author Rebecca
 * @date 2020/8/24 16:32
 */
@RestController
public class HelloController {

    /** GET /hello — returns a constant greeting. */
    @GetMapping("/hello")
    public String hello() {
        return "hello";
    }
}
<file_sep>/rebecca-sentinel-limit/src/main/java/com/sentinel/demo/SentinelApplication.java
package com.sentinel.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * First Sentinel rate-limiting demo.
 *
 * @author Rebecca
 * @date 2020/8/19 14:59
 */
@SpringBootApplication
public class SentinelApplication {
    public static void main(String[] args) {
        SpringApplication.run(SentinelApplication.class, args);
    }
}
<file_sep>/rebecca-hystrix-demo/src/main/java/com/hystrix/demo/HystrixApplication.java
package com.hystrix.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.SpringCloudApplication;
import org.springframework.cloud.client.circuitbreaker.EnableCircuitBreaker;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.client.loadbalancer.LoadBalanced;
import org.springframework.context.annotation.Bean;
import org.springframework.web.client.RestTemplate;
/**
 * Service fault-tolerance demo (Hystrix service degradation).
 *
 * @author Rebecca
 * @date 2020/8/26 14:57
 */
//@EnableDiscoveryClient
//@SpringBootApplication
//@EnableCircuitBreaker
// @SpringCloudApplication bundles the three annotations commented out above.
@SpringCloudApplication
public class HystrixApplication {
    public static void main(String[] args) {
        SpringApplication.run(HystrixApplication.class, args);
    }

    /** Load-balanced RestTemplate: resolves service names via the registry. */
    @Bean
    @LoadBalanced
    public RestTemplate restTemplate() {
        return new RestTemplate();
    }
}
<file_sep>/rebecca-nacos-feign/src/main/java/com/nacos/demo/NacosFeginApplication.java
package com.nacos.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
/**
 * Nacos + OpenFeign consumer demo.
 * NOTE(review): "Fegin" is a typo for "Feign"; the class name is kept
 * unchanged because renaming would break external references.
 *
 * @author Rebecca
 * @date 2020/8/18 16:13
 */
@SpringBootApplication
@EnableFeignClients
public class NacosFeginApplication {
    public static void main(String[] args) {
        SpringApplication.run(NacosFeginApplication.class, args);
    }
}
<file_sep>/rebecca-hystrix-demo/src/main/java/com/hystrix/demo/client/TestClient.java
package com.hystrix.demo.client;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
/**
 * Feign client pinned to a fixed URL (bypasses service discovery):
 * GET http://127.0.0.1:9527/helloNacos.
 *
 * @author Rebecca
 * @date 2020/8/27 16:13
 */
@FeignClient(name = "provide", url = "127.0.0.1:9527")
public interface TestClient {

    @GetMapping("/helloNacos")
    String getHello();
}
<file_sep>/rebecca-nacos-config/src/main/java/com/nacos/demo/NacosConfigApplication.java
package com.nacos.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
/**
 * Nacos configuration-center demo application.
 *
 * @author Rebecca
 * @date 2020/8/19 11:09
 */
@SpringBootApplication
@EnableDiscoveryClient
public class NacosConfigApplication {
    public static void main(String[] args) {
        SpringApplication.run(NacosConfigApplication.class, args);
    }
}
<file_sep>/rebecca-hystrix-simple/src/main/java/rebecca/hystrix/simple/HystrixSimpleApplication.java
package rebecca.hystrix.simple;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Entry point for the plain (non-Spring-Cloud) Hystrix samples.
 *
 * @author Rebecca
 * @date 2020/8/31 14:17
 */
@SpringBootApplication
public class HystrixSimpleApplication {
    public static void main(String[] args) {
        SpringApplication.run(HystrixSimpleApplication.class, args);
    }
}
<file_sep>/rebecca-nacos-feign/src/main/java/com/nacos/demo/feign/RemoteHystrix.java
package com.nacos.demo.feign;
import org.springframework.stereotype.Component;
/**
* 2 * @Author: Rebecca
* 3 * @Date: 2020/8/18 16:19
* 4
*/
@Component
public class RemoteHystrix implements RemoteClient {
public String helloNacos() {
return "fallback test";
}
}
<file_sep>/rebecca-hystrix-simple/src/main/java/rebecca/hystrix/simple/controller/TestController.java
package rebecca.hystrix.simple.controller;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import rebecca.hystrix.simple.config.MyHystrixMergeService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
 * Demonstrates Hystrix request collapsing via annotations.
 *
 * Fix vs. original: the {@link HystrixRequestContext} was initialized but
 * never shut down, leaking its thread-local state on every request; it is
 * now closed in a finally block.
 *
 * @author Rebecca
 * @date 2020/9/1 17:55
 */
@RestController
public class TestController {

    @Autowired
    private MyHystrixMergeService myHystrixMergeService;

    /** Fires three merge(id) calls inside one collapsing window and prints the results. */
    @RequestMapping("/merge")
    public void merge() throws ExecutionException, InterruptedException {
        // An active HystrixRequestContext is required for request collapsing.
        HystrixRequestContext context = HystrixRequestContext.initializeContext();
        try {
            Future<String> future1 = myHystrixMergeService.merge(1);
            Future<String> future2 = myHystrixMergeService.merge(2);
            Future<String> future3 = myHystrixMergeService.merge(3);
            System.out.println(future1.get());
            System.out.println(future2.get());
            System.out.println(future3.get());
        } finally {
            context.shutdown();
        }
    }
}
<file_sep>/rebecca-hystrix-simple/src/main/java/rebecca/hystrix/simple/config/MyHystrixCommand.java
package rebecca.hystrix.simple.config;
import com.netflix.hystrix.HystrixCommand;
import com.netflix.hystrix.HystrixCommandGroupKey;
import com.netflix.hystrix.HystrixCommandProperties;
import com.netflix.hystrix.HystrixThreadPoolProperties;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestContext;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
 * Hand-rolled HystrixCommand demo covering isolation strategies, fallback,
 * request caching, and sync/async execution from main().
 *
 * @author Rebecca
 * @date 2020/9/1 10:57
 */
public class MyHystrixCommand extends HystrixCommand<String> {

    private final String name;

    /*
      Set a command GroupKey via the constructor.
     */
    protected MyHystrixCommand(String name) {
        super(HystrixCommandGroupKey.Factory.asKey("MyBear"));
        // Semaphore-isolation variant (kept for reference):
        /*super(HystrixCommand.Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("MyGroup"))
        .andCommandPropertiesDefaults(HystrixCommandProperties.Setter()
        .withExecutionIsolationStrategy(HystrixCommandProperties.ExecutionIsolationStrategy.THREAD))
        .andThreadPoolPropertiesDefaults(HystrixThreadPoolProperties.Setter().withCoreSize(10)
        .withMaxQueueSize(100).withMaximumSize(100)));*/
        // Thread-pool isolation (the default):
        /* super(HystrixCommand.Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("MyGroup"))
        .andCommandPropertiesDefaults(HystrixCommandProperties.Setter()
        .withExecutionIsolationStrategy(HystrixCommandProperties.ExecutionIsolationStrategy.THREAD))
        .andThreadPoolPropertiesDefaults(HystrixThreadPoolProperties.Setter().withCoreSize(10)
        .withMaxQueueSize(100).withMaximumSize(100))); */
        this.name = name;
    }

    /*
      The actual command logic lives in run().
     */
    @Override
    protected String run() throws Exception {
        /*
        try {
            // Simulate a timeout:
            Thread.sleep(1000 * 10);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        return this.name + ": " + Thread.currentThread().getName();
        */
        System.err.println("get data");
        return this.name + ":" + Thread.currentThread().getName();
    }

    /** Degraded result returned on failure, timeout, or rejection. */
    @Override
    protected String getFallback() {
        return "faild . ";
    }

    /*
      Hystrix also provides method-level request caching: overriding
      getCacheKey makes calls with the same key (within one
      HystrixRequestContext) reuse the cached result. The key can be
      derived from the parameters so equal arguments hit the cache.
     */
    @Override
    protected String getCacheKey() {
        return String.valueOf(this.name);
    }

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        //String result = new MyHystrixCommand("Bear").execute();
        //System.out.println(result);
        /*
          java.lang.InterruptedException: sleep interrupted
          (the simulated sleep above is forcibly interrupted on timeout)
        */
        // Async execution:
        // Future<String> future = new MyHystrixCommand("Cat").queue();
        // System.out.println(future.get());
        // A HystrixRequestContext is required for the request cache:
        HystrixRequestContext context = HystrixRequestContext.initializeContext();
        String result = new MyHystrixCommand("Bear").execute();
        System.out.println(result);
        Future<String> future = new MyHystrixCommand("Bear").queue();
        System.out.println(future.get());
        context.shutdown();
    }
}
<file_sep>/rebecca-nacos-config/src/main/java/com/nacos/demo/config/ConfigDemo.java
package com.nacos.demo.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Reads a value from the Nacos configuration center.
 *
 * @author Rebecca
 * @date 2020/8/19 11:10
 */
@RestController
@RefreshScope // makes @Value fields in this class refresh dynamically on config changes
public class ConfigDemo {

    // Bound to the `nacos.config` property from the config center.
    @Value("${nacos.config}")
    private String string;

    /** GET /getValue — returns the current value of the property. */
    @GetMapping("getValue")
    public String getValue() {
        return string;
    }
}
<file_sep>/rebecca-zuul-api/src/main/java/com/zuul/demo/filter/AccessFilter.java
package com.zuul.demo.filter;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import com.netflix.zuul.exception.ZuulException;
import lombok.extern.slf4j.Slf4j;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
/**
 * Zuul pre-routing filter skeleton: extend ZuulFilter and register it as a
 * bean to intercept requests before they are routed. The token check in
 * {@link #run()} is currently commented out, so the filter is a no-op.
 *
 * Changes vs. original: {@code @Override} added to shouldFilter()/run() for
 * consistency with the other overrides, mangled comment-close formatting
 * fixed, and the Chinese comments translated.
 *
 * @author Rebecca
 * @date 2020/8/20 18:00
 */
@Slf4j
public class AccessFilter extends ZuulFilter {

    /*
      Filter type decides in which phase of the request lifecycle the filter
      runs; "pre" means before the request is routed.
     */
    @Override
    public String filterType() {
        return "pre";
    }

    /*
      Execution order when several filters exist in the same phase
      (lower values run first).
     */
    @Override
    public int filterOrder() {
        return 0;
    }

    /*
      Whether this filter applies. Returning true makes it apply to every
      request; narrow this to scope the filter in real deployments.
     */
    @Override
    public boolean shouldFilter() {
        return true;
    }

    /*
      Filter body. The commented-out logic rejects requests that lack a
      `token` parameter by calling ctx.setSendZuulResponse(false) (skip
      routing) and setting a 401 status; the response body could also be
      customised via ctx.setResponseBody(body). Re-enable and harden before
      production use.
     */
    @Override
    public Object run() throws ZuulException {
        /* RequestContext ctx = RequestContext.getCurrentContext();
        HttpServletRequest request = ctx.getRequest();
        log.info("send {} request to {}", request.getMethod(), request.getRequestURL().toString());
        Object token = request.getParameter("token");
        if (token == null) {
            log.warn("token is empty . ");
            ctx.setSendZuulResponse(false);
            ctx.setResponseStatusCode(401);
            return null;
        }
        log.info("token ok . ");
        */
        return null;
    }
}
<file_sep>/rebecca-boot-admin/src/main/java/com/admin/demo/BootAdminApplication.java
package com.admin.demo;
import de.codecentric.boot.admin.server.config.EnableAdminServer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot Admin monitoring server.
 *
 * @author Rebecca
 * @date 2020/8/27 13:24
 */
@SpringBootApplication
@EnableAdminServer // enables the Admin Server UI and endpoints
public class BootAdminApplication {
    public static void main(String[] args) {
        SpringApplication.run(BootAdminApplication.class, args);
    }
}
<file_sep>/rebecca-hystrix-simple/src/main/java/rebecca/hystrix/simple/config/MyHystrixMergeService.java
package rebecca.hystrix.simple.config;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCollapser;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixProperty;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import rebecca.hystrix.simple.client.TestClient;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Future;
/**
 * Request collapsing via Hystrix annotations: individual merge(id) calls
 * issued within a 200 ms window are batched into one getMerge(ids) call.
 *
 * @author Rebecca
 * @date 2020/9/1 17:41
 */
@Service
public class MyHystrixMergeService {

    @Autowired
    private RestTemplate restTemplate;

    // NOTE(review): injected but unused in this class — confirm before removing.
    @Autowired
    private TestClient testClient;

    /**
     * Collapsed entry point: requests arriving within 200 ms are merged and
     * dispatched to the batch method. The `return null` body is never
     * executed — the javanica aspect intercepts the call and returns a
     * Future fulfilled from getMerge's result.
     *
     * @param id single item id
     * @return future resolved from the batched remote call
     */
    @HystrixCollapser(batchMethod = "getMerge", collapserProperties = {@HystrixProperty(name = "timerDelayInMilliseconds", value = "200")})
    public Future<String> merge(Integer id) {
        return null;
    }

    /** Batch method: performs one remote call covering all collapsed ids. */
    @HystrixCommand(fallbackMethod = "fallback")
    public List<String> getMerge(List<Integer> ids) {
        System.out.println("合并的请求:" + ids.toString());
        String[] result = restTemplate.getForEntity("http://eureka-client/merge?id={1}", String[].class, StringUtils.join(ids, ",")).getBody();
        System.out.println("合并后的结果" + result);
        return Arrays.asList(result);
    }

    /**
     * Fallback for the batch method; its parameters, return type, and result
     * count must mirror getMerge so each collapsed caller gets an entry.
     *
     * @param ids the collapsed ids
     * @return placeholder entries for the failed batch
     */
    public List<String> fallback(List<Integer> ids) {
        List<String> list = new ArrayList<>();
        list.add("请求合并失败-1");
        list.add("请求合并失败-2");
        list.add("请求合并失败-3");
        return list;
    }
}
<file_sep>/revecca-sentinel-save/src/main/java/com/sentinel/demo/SentinelSaveApplication.java
package com.sentinel.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Persists Sentinel rules in Nacos (rule persistence/sync is configured
 * externally, not in this class).
 * References:
 *   blog.didispace.com/spring-cloud-alibaba-sentinel-2-4/ (rule sync)
 *   http://blog.didispace.com/spring-cloud-alibaba-sentinel-2-1/
 *
 * @author Rebecca
 * @date 2020/8/19 16:00
 */
@SpringBootApplication
public class SentinelSaveApplication {
    public static void main(String[] args) {
        SpringApplication.run(SentinelSaveApplication.class, args);
    }
}
| 87dd6cbef0c9aa892db4313d0cc31d245909cd15 | [
"Java",
"Maven POM"
] | 21 | Java | Rebecca2527/rebecca-sc | 944699e5f513dfc6170e37bd921bbfddfde4e311 | 611ed1af5f92dff6104ff2a925147369b17d6397 |
refs/heads/master | <file_sep>#require libraries
require 'sinatra'
require 'haml'
require 'less'
require 'mongo_mapper'
require 'yaml'
require 'socket'
require 'sinatra/asset_pipeline'
require 'sinatra_more/markup_plugin'
require 'digest/sha1'
require 'sinatra/flash'
require 'sinatra/prawn'
require "prawn"
require 'pdfkit'
class Billium < Sinatra::Base
  enable :sessions
  register SinatraMore::MarkupPlugin
  register Sinatra::Flash

  # Loads config/database.yaml for +environment+ and opens the MongoMapper
  # connection. Fix vs. original: the development and test branches
  # duplicated this code verbatim; production intentionally configures no
  # database here.
  # NOTE(review): the "logfile" key was read but never used — wire it to a
  # logger if database logging is wanted.
  def self.connect_database(environment)
    config   = YAML.load_file("config/database.yaml")
    settings = config[environment]
    MongoMapper.connection = Mongo::Connection.new(settings["host"], settings["port"])
    MongoMapper.database = settings["database"]
    MongoMapper.connection.connect
  end

  connect_database(ENV['RACK_ENV']) if %w[development test].include?(ENV['RACK_ENV'])

  set :prawn, { :page_layout => :landscape }
  set :assets_precompile, %w(*.js *.css *.png *.jpg *.svg *.eot *.ttf *.woff)
  # CSS minification
  set :assets_css_compressor, :less
  # Logical paths to your assets
  set :assets_prefix, %w(app/assets)
  register Sinatra::AssetPipeline
  set :public_folder, 'public'
  # Views live under app/views instead of Sinatra's default ./views.
  set :views, Proc.new { File.join(root, "/app/views") }
  # Needed to map PUT and DELETE HTTP verbs from _method form fields.
  use Rack::MethodOverride
  use PDFKit::Middleware, :print_media_type => true
end
<file_sep>Billium
======
<file_sep>class BilliumController < Billium
  # GET /billium/test — renders the invoice haml view and converts it to PDF.
  get '/billium/test' do
    @company = params[:company]
    @amount = params[:amount]
    content_type :pdf
    html = haml(:index)
    # NOTE(review): the stylesheet path below is machine-specific
    # (/Users/Matteo/...) — it should be derived from the app root.
    kit = PDFKit.new(html, page_size: 'legal',:margin_top=>'0.0in',:margin_right=>'0in',:margin_bottom =>'0.7in',:margin_left=>'0in')
    kit.stylesheets << '/Users/Matteo/projects/sinatra/billium/app/assets/css/invoice.css'
    kit.to_pdf
  end

  # GET /billium/form — renders the invoice input form.
  get '/billium/form' do
    haml :form
  end
end<file_sep>class BilliumController < Billium
#------------------------------ User Controller ------------------------------#

# NOTE(review): $currentuser is a process-wide global shared between all
# requests; authenticated state should live in session[:user] only.
$currentuser = nil

# Shows the currently logged-in user (nil when not authenticated).
get '/billium/user/logged?' do
  @user = session[:user]
  haml :user_logged
end

get '/billium/user/signup/?' do
  haml :signup
end

# Creates a new account from the signup form.
post '/billium/user/signup/?' do
  user = User.new
  user.username = params[:username]
  user.password = params[:password]
  user.pIva = params[:pIva]
  user.codeFiscal = params[:codeFiscal]
  user.address = params[:address]
  user.surname = params[:surname]
  if user.save
    redirect "/billium/user/login"
  else
    @error = user.errors.map { |k, v| "#{k}: #{v}" }.join("<br/> ")
    halt haml :signup
  end
end

get '/billium/user/login/?' do
  haml :login
end

# Authenticates the user. Fix vs. original: authenticate is now called once —
# the original called it twice and compared the results on a line whose value
# was discarded (dead code plus a redundant database query).
post '/billium/user/login/?' do
  name = params[:username]
  pass = params[:password]
  if name == "" || pass == ""
    @error = "Fields cannot be blank"
    halt haml :login
  elsif (user = User.authenticate(name, pass))
    session[:user] = user
    haml :form
  else
    haml :login
  end
end

# Clears the session and returns to the login page.
get '/billium/user/logout/?' do
  session[:user] = nil
  $currentuser = nil
  redirect '/billium/user/login'
end
end<file_sep>class User
include MongoMapper::Document

# Persisted attributes. MongoMapper generates #username/#username= for each
# key; the original additionally declared `attr_accessor :username`, which
# shadowed those generated accessors with a plain instance variable so the
# username was never written to the document. That attr_accessor is removed.
key :username, String
key :surname, String
key :pIva, String
key :codeFiscal, String
key :address, String
key :hashPass, String
key :salt, String

# Random lowercase alphanumeric string of +len+ characters, used as the
# per-user password salt.
def random_string(len)
  Array.new(len) { rand(36).to_s(36) }.join
end

# Hashes password + salt. NOTE(review): single-round SHA-1 is weak for
# password storage; prefer bcrypt if the stored hashes can be migrated.
def self.encrypt(pass, salt)
  Digest::SHA1.hexdigest(pass + salt)
end

# Virtual setter: stores only the salted hash, never the clear password.
def password=(pass)
  @password = pass
  self.salt = random_string(10)
  self.hashPass = User.encrypt(@password, self.salt)
end

# Returns the matching user when name/pass are valid, else nil.
def self.authenticate(name, pass)
  user = User.first(:username => name)
  return nil if user.nil?
  return user if User.encrypt(pass, user.salt) == user.hashPass
  nil
end
end<file_sep>source 'https://rubygems.org'
# Billium's gem manifest (web stack, MongoDB persistence, asset pipeline,
# PDF generation, and the test/coverage toolchain).
gem 'sinatra'
gem 'puma' # webserver
gem 'rake'
gem 'rack-test'
gem 'sinatra_more'
gem 'therubyracer'
gem 'sinatra-flash', '~> 0.3.0'
gem 'pdfkit', '~> 0.6.2'
gem 'wkhtmltopdf-binary', '~> 0.9.9.3'
gem "sbfaulkner-sinatra-prawn"
gem 'prawn', '~> 1.3.0'
# database
gem 'mongo' # mongodb database
gem 'mongo_mapper' # mongodb interface
gem 'bson_ext' # language for db documents
# assets
gem 'sinatra-asset-pipeline' #for create assets pipeline on document
gem 'haml'
gem 'font-awesome-sass'
gem 'sass'
gem 'less'
# test & coverage
gem 'simplecov', :require => false, :group => :test # coverage of code for show % code tested
gem 'rspec' # test the app with rspec+
gem "cucumber"
gem "cucumber-sinatra"
gem "capybara"
gem "database_cleaner"<file_sep>class User
include MongoMapper::Document

# NOTE(review): this file defines a second class named User, clashing with
# the auth User model (username/password); when both are loaded Ruby merges
# them into one class. Presumably this was meant to be a separate model
# (e.g. WorkProfile) — confirm before renaming, since that changes callers.
key :workType, String
key :taxes, String
end<file_sep>class Company
include MongoMapper::Document

# Company master data: name plus Italian tax identifiers
# (pIva = VAT number, codeFiscal = fiscal code).
key :name, String
key :pIva, String
key :codeFiscal, String
end | de82bff1367c56029674a25661c12bf72ed6da20 | [
"Markdown",
"Ruby"
] | 8 | Ruby | Cyangen/Billium | 46343d8a8fb069d64e12fbabeb5aed7f4a4f1d1e | 928f710b7cfe3ba5a67e00a5c034496477d4d831 |
refs/heads/master | <file_sep>//
// Friend+CoreDataClass.swift
// Facebook_Messanger_clone
//
// Created by Steven on 2019/9/3.
// Copyright © 2019 Steven. All rights reserved.
//
//
import Foundation
import CoreData
// Core Data entity for a chat friend. Attributes (e.g. name,
// profileImageName — see FriendsControllerHelper usage) live in the
// generated Friend+CoreDataProperties file.
@objc(Friend)
public class Friend: NSManagedObject {
}
<file_sep>//
// FriendsControllerHelper.swift
// Facebook_Messanger_clone
//
// Created by Steven on 2019/9/3.
// Copyright © 2019 Steven. All rights reserved.
//
import UIKit
import CoreData
extension FriendsController {

    /// Deletes every Friend and Message from the Core Data store, then saves.
    func clearData() {
        let delegate = UIApplication.shared.delegate as? AppDelegate

        if let context = delegate?.persistentContainer.viewContext {

            do {
                let fetchRequestForFriend: NSFetchRequest<Friend> = Friend.fetchRequest()
                let objectsForFriend = try context.fetch(fetchRequestForFriend)
                for object in objectsForFriend {
                    context.delete(object)
                }

                let fetchRequestForMessage: NSFetchRequest<Message> = Message.fetchRequest()
                let objectsForMessage = try context.fetch(fetchRequestForMessage)
                for object in objectsForMessage {
                    context.delete(object)
                }

                try (context.save())

            } catch let err {
                print(err)
            }
        }
    }

    /// Wipes the store, seeds two hard-coded friends (Mark, Steve) each with
    /// one message, saves, and reloads `messages` via loadData().
    func setupData() {
        clearData()

        let delegate = UIApplication.shared.delegate as? AppDelegate

        if let context = delegate?.persistentContainer.viewContext {
            let mark = NSEntityDescription.insertNewObject(forEntityName: "Friend", into: context) as! Friend
            mark.name = "<NAME>"
            mark.profileImageName = "zuckprofile"

            let message = NSEntityDescription.insertNewObject(forEntityName: "Message", into: context) as! Message
            message.friend = mark
            message.text = "Hello, my name is Mark. Nice to meet you..."
            message.date = NSDate()

            let steve = NSEntityDescription.insertNewObject(forEntityName: "Friend", into: context) as! Friend
            steve.name = "<NAME>"
            steve.profileImageName = "steveprofile"

            let messageSteve = NSEntityDescription.insertNewObject(forEntityName: "Message", into: context) as! Message
            messageSteve.friend = steve
            messageSteve.text = "Apple creates great iOS devices for the world..."
            messageSteve.date = NSDate()

            do {
                try (context.save())
            } catch let err {
                print(err)
            }
            //messages = [message, messageSteve]
        }

        loadData()
    }

    /// Fetches all Message objects into the controller's `messages` array.
    func loadData() {
        let delegate = UIApplication.shared.delegate as? AppDelegate

        if let context = delegate?.persistentContainer.viewContext {
            let fetchRequest: NSFetchRequest<Message> = Message.fetchRequest()
            do {
                messages = try context.fetch(fetchRequest)
            } catch let err {
                print(err)
            }
        }
    }
}
<file_sep>//
// Message+CoreDataClass.swift
// Facebook_Messanger_clone
//
// Created by Steven on 2019/9/3.
// Copyright © 2019 Steven. All rights reserved.
//
//
import Foundation
import CoreData
// Core Data entity for a chat message; its attributes (text, date, friend
// relation) live in the generated Message+CoreDataProperties file.
@objc(Message)
public class Message: NSManagedObject {
}
| b948df572727005439b0e100144d2805942bf43c | [
"Swift"
] | 3 | Swift | stevenlin1015/Facebook_Messanger_clone | 0601735c84722bdbe98b61da1b239ba47f4330ef | 183a61cdef003dde13b4cec4a753de2fdde9f129 |
refs/heads/master | <repo_name>prismyland/prismy-csrf<file_sep>/specs/index.spec.ts
import test from 'ava'
import { testServer } from 'prismy-test-server'
import got from 'got'
import createCSRFProtection, { CSRFStrategy } from '../src'
import { Context } from 'prismy'
// Deterministic strategy for tests: always issues the token "test" and
// accepts only requests whose CSRF-TOKEN header equals "test".
const testStrategy: CSRFStrategy = {
  issuer() {
    return 'test'
  },
  verifier(context: Context) {
    return context.req.headers['csrf-token'] === 'test'
  }
}

// The @CSRFToken() decorator injects a freshly issued token.
test('CSRFToken issues a token', async t => {
  const { CSRFToken } = createCSRFProtection(testStrategy)
  class MyHandler {
    handle(@CSRFToken() csrfToken: string) {
      return {
        csrfToken
      }
    }
  }
  await testServer(MyHandler, async url => {
    const response = await got(url, {
      json: true
    })
    t.deepEqual(response.body, {
      csrfToken: 'test'
    })
  })
})

// A POST carrying the valid token passes the middleware.
test('CSRFMiddleware validates a token', async t => {
  const { CSRFMiddleware } = createCSRFProtection(testStrategy)
  class MyHandler {
    handle() {
      return 'Hello, World!'
    }
  }
  await testServer([CSRFMiddleware, MyHandler], async url => {
    const response = await got.post(url, {
      headers: {
        'CSRF-TOKEN': 'test'
      }
    })
    t.deepEqual(response.body, 'Hello, World!')
  })
})

// NOTE(review): the '<PASSWORD>' fragments below look like artifacts of
// token sanitisation in this dump (originally e.g. 'wrong-token') — confirm
// against upstream. The behaviour tested is unchanged: any value other than
// 'test' must be rejected with 403.
test('CSRFMiddleware throws when invalid token is given', async t => {
  const { CSRFMiddleware } = createCSRFProtection(testStrategy)
  class MyHandler {
    handle() {
      /* istanbul ignore next */
      return 'Hello, World!'
    }
  }
  await testServer([CSRFMiddleware, MyHandler], async url => {
    const response = await got.post(url, {
      headers: {
        'CSRF-TOKEN': 'wrong-<PASSWORD>'
      },
      throwHttpErrors: false
    })
    t.is(response.statusCode, 403)
    t.is(response.body, 'Invalid CSRF token')
  })
})

// With POST listed in ignoreMethods, even an invalid token is accepted.
test('CSRFMiddleware ignores methods in `ignoreMethods` option', async t => {
  const { CSRFMiddleware } = createCSRFProtection(testStrategy, {
    ignoreMethods: ['GET', 'HEAD', 'OPTIONS', 'POST']
  })
  class MyHandler {
    handle() {
      return 'Hello, World!'
    }
  }
  await testServer([CSRFMiddleware, MyHandler], async url => {
    const response = await got.post(url, {
      headers: {
        'CSRF-TOKEN': '<PASSWORD>'
      }
    })
    t.is(response.statusCode, 200)
    t.is(response.body, 'Hello, World!')
  })
})
<file_sep>/readme.md
# `prismy-csrf`
:shield: CSRF Protection for prismy
[](https://travis-ci.com/prismyland/prismy-csrf)
[](https://codecov.io/gh/prismyland/prismy-csrf)
[](https://www.npmjs.com/package/prismy-csrf)
[](https://lgtm.com/projects/g/prismyland/prismy-csrf/context:javascript)
```
npm i prismy-csrf
```
## Example
```ts
import {
  prismy,
  Context,
  BaseHandler,
  createInjectDecorators,
  createUrlEncodedBodySelector
} from 'prismy'
import createCSRFProtection from 'prismy-csrf'
import JWTCSRFStrategy from 'prismy-csrf-strategy-jwt'
import querystring from 'querystring'
const { CSRFToken, CSRFMiddleware } = createCSRFProtection(
new JWTCSRFStrategy({
secret: 'RANDOM_HASH',
tokenSelector: (context: Context) => {
const body = createUrlEncodedBodySelector()(context)
return body._csrf
}
})
)
class MyHandler extends BaseHandler {
async handle(@CSRFToken() csrfToken: string) {
return [
'<!DOCTYPE html>',
'<body>',
'<form action="/" method="post">',
'<input name="message">',
`<input type="hidden" name="_csrf" value=${csrfToken}>`,
'<button type="submit">Send</button>',
'</form>',
'</body>'
].join('')
}
}
export default prismy([CSRFMiddleware, MyHandler])
```
<file_sep>/src/index.ts
import {
Context,
createInjectDecorators,
BaseHandler,
methodSelector,
createError
} from 'prismy'
export interface CSRFStrategy {
  /** Issues a new CSRF token for the current request context. */
  issuer(context: Context): Promise<string> | string
  /** Verifies the token carried by the current request. */
  verifier(context: Context): Promise<boolean> | boolean
}

export interface CSRFOptions {
  /** HTTP methods that skip verification. Default: GET, HEAD, OPTIONS. */
  ignoreMethods?: string[]
}

/**
 * Builds CSRF helpers around a strategy:
 * - `CSRFToken()` — parameter decorator injecting a freshly issued token
 * - `CSRFMiddleware` — handler throwing HTTP 403 when the token of an
 *   unsafe-method request does not verify
 *
 * Fixes vs. original:
 * - the verifier result is now awaited: `CSRFStrategy.verifier` may return
 *   `Promise<boolean>`, and `!promise` was always false, so async strategies
 *   could never reject a request (assumes prismy resolves promise-returning
 *   handlers, as the decorator-based API suggests — confirm);
 * - `Array#includes` replaces the equivalent `some(m => method === m)`.
 */
export function createCSRFProtection(
  strategy: CSRFStrategy,
  options: CSRFOptions = {}
) {
  const ignoreMethods = options.ignoreMethods || ['GET', 'HEAD', 'OPTIONS']
  const csrfTokenSelector = strategy.issuer.bind(strategy)
  const csrfTokenVerifier = strategy.verifier.bind(strategy)
  function CSRFToken() {
    return createInjectDecorators(csrfTokenSelector)
  }
  return {
    csrfTokenSelector,
    csrfTokenVerifier,
    CSRFToken,
    CSRFMiddleware: class extends BaseHandler {
      async handle() {
        const method = this.select(methodSelector)
        if (ignoreMethods.includes(method)) {
          return
        }
        if (!(await csrfTokenVerifier(this.context!))) {
          throw createError(403, 'Invalid CSRF token')
        }
      }
    }
  }
}

export default createCSRFProtection
| 5c5e6b8577e9fbd11c9d6732f4ed3d5d87189075 | [
"Markdown",
"TypeScript"
] | 3 | TypeScript | prismyland/prismy-csrf | 4c2d5edba5c347e4510c1ddd335c1b24b0626ed3 | d3e10e3520d292aa5915aeb8140b48a1b8a762d7 |
refs/heads/main | <file_sep>import React from 'react';
import imageInSrc from '../../src/imageInSrc.jpg';
function Jsx() {
return (
<div>
<div style={{border: 'solid 1px black', maxWidth: '100vw'}}>
<h1 className="title red">Hello Tunisia</h1>
<br />
<img src={imageInSrc} />
<br />
<img src="/imageInPublic.jpg" />
</div>
<h1 className="title red">Tunisia: Like you've never seen before</h1>
<div class="arrow-wrapper">
<div class="arrow-down"></div>
<div class="arrow-down"></div>
<div class="arrow-down"></div>
<div class="arrow-down"></div>
<div class="arrow-down"></div>
</div>
<video width={'70%'} height={'70%'} controls>
<source src="tunisia.mp4" type="video/mp4" />
</video>
</div>
)
}
export default Jsx
| 1583e3d55596b2218f41996475abe9e3cc8dc300 | [
"JavaScript"
] | 1 | JavaScript | fetenhb/checkp-jsx | 40a46ed93362cfa7fd97834c9b4df81c33844ec0 | 8ae9b2f0bc4ccb18ce207db054bf48d9135ea35e |
refs/heads/master | <repo_name>KingCreeperDJ/Sudoku<file_sep>/src/main/java/Sudoku/Main.java
package Sudoku;
import net.java.html.boot.BrowserBuilder;
/** Bootstrap and initialization. */
public final class Main {
private Main() {
}
/** Launches the browser */
public static void main(String... args) throws Exception {
BrowserBuilder.newBrowser().
loadPage("Sudoku/index.html").
loadClass(Main.class).
invoke("onPageLoad", args).
showAndWait();
System.exit(0);
Field test = new Field();
test.createField(9, 9);
test.insert(1, 1, 5, test);
test.fillField(test);
test.printArray(test);
Field test2 = new Field();
test2.createField(3, 3);
test.printArray(test2);
}
/** Called when page is ready */
public static void onPageLoad(String... args) throws Exception {
}
}
| 46bf2184cfd412c5a6d3e1d85a25e38cf7cb995a | [
"Java"
] | 1 | Java | KingCreeperDJ/Sudoku | 43e44a1f7bf5b6aeb2cf7094aae5160b2be6688f | c9fc71db739c50f3ed1ae6b417ac8392a6edfa69 |
refs/heads/master | <file_sep>package com.cherry.entity;
import com.alibaba.excel.annotation.ExcelProperty;
import lombok.Data;
/**
* @author <EMAIL>
* @version 1.0
* @create 2020/11/7 17:03
* @desc
*/
@Data
public class UserInfo {
@ExcelProperty("家庭(住所)成员")
private String member;
@ExcelProperty("公民身份证号码(18位)")
private String IDnum;
@ExcelProperty("外出成员现住地(仅外出成员填写)")
private String address;
@ExcelProperty("手机号码")
private String phoneNum;
@ExcelProperty("户主")
private String houseHold;
@ExcelProperty("是否外出")
private String isOut;
@ExcelProperty("外出人员详细地址")
private String detailAddress;
@ExcelProperty("备注")
private String remark;
}
<file_sep>package com.cherry.dao;
import com.cherry.entity.User;
import java.util.List;
/**
* @author <EMAIL>
* @create 2018/7/9 9:59
* @desc
*/
public interface IUserDao {
List<User> selectUser(Integer id);
}
<file_sep><?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!-- maven 模板版本号-->
<modelVersion>4.0.0</modelVersion>
<!--将来发布时的格式 java项目 默认为 jar web项目 一定是war-->
<packaging>war</packaging>
<name>common-framework</name>
<!-- 公司域名 org.apache.xxxx 标识-->
<groupId>com.cherry</groupId>
<!--当前项目名-->
<artifactId>common-framework</artifactId>
<!-- SNAPSHOT:快照 测试版 RELEASE:稳定版 FINAL:最终版-->
<version>1.0-SNAPSHOT</version>
<url>http://maven.apache.org</url>
<properties>
<slf4j.version>1.7.5</slf4j.version>
<junit.version>4.13.1</junit.version>
<java.version>1.8</java.version>
<jsp.version>2.2</jsp.version>
<servlet.version>2.5</servlet.version>
<jstl.version>1.2</jstl.version>
<spring-framework.version>5.2.11.RELEASE</spring-framework.version>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<hibernate.version>4.2.1.Final</hibernate.version>
<logback.version>1.0.13</logback.version>
</properties>
<dependencies>
<!--lombok-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.16</version>
<scope>provided</scope>
</dependency>
<!-- java ee -->
<dependency>
<groupId>javax</groupId>
<artifactId>javaee-api</artifactId>
<version>7.0</version>
</dependency>
<!-- 单元测试 -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<!-- 实现slf4j接口并整合 -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.2</version>
</dependency>
<!-- JSON -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.11.3</version>
</dependency>
<!-- 数据库 -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.22</version>
<scope>runtime</scope>
</dependency>
<!-- 数据库连接池 -->
<dependency>
<groupId>com.mchange</groupId>
<artifactId>c3p0</artifactId>
<version>0.9.5.5</version>
</dependency>
<!-- MyBatis -->
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
<version>3.4.5</version>
</dependency>
<!-- mybatis/spring整合包 -->
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-spring</artifactId>
<version>1.3.1</version>
</dependency>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring-framework.version}</version>
</dependency>
<!-- StringUtils -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.11</version>
</dependency>
<!--上面的dependency是SSM框架所需要的-->
<!-- JsonUtils -->
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>1.9.13</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<!-- fastjson -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.46</version>
</dependency>
<!--generator插件自动生成代码-->
<dependency>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-core</artifactId>
<version>1.3.5</version>
</dependency>
<!--Excel读写利器easyexcel-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>easyexcel</artifactId>
<version>2.0.5</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter</artifactId>
<version>RELEASE</version>
<scope>compile</scope>
</dependency>
</dependencies>
<build>
<!--解决Intellij构建项目时,target/classes目录下不存在mapper.xml文件-->
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.xml</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.mortbay.jetty</groupId>
<artifactId>maven-jetty-plugin</artifactId>
<version>6.1.7</version>
<configuration>
<connectors>
<connector implementation="org.mortbay.jetty.nio.SelectChannelConnector">
<port>8888</port>
<maxIdleTime>30000</maxIdleTime>
</connector>
</connectors>
<webAppSourceDirectory>${project.build.directory}/${pom.artifactId}-${pom.version}
</webAppSourceDirectory>
<contextPath>/</contextPath>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
<!-- mybatis generator 自动生成代码插件 -->
<plugin>
<groupId>org.mybatis.generator</groupId>
<artifactId>mybatis-generator-maven-plugin</artifactId>
<version>1.3.2</version>
<configuration>
<configurationFile>${basedir}/src/main/resources/generatorConfig.xml</configurationFile>
<overwrite>true</overwrite>
<verbose>true</verbose>
</configuration>
</plugin>
</plugins>
</build>
</project>
| 79226712817176a8fdf141e0bfb3e22d1b0d0050 | [
"Java",
"Maven POM"
] | 3 | Java | Kevin-Adam/common-framework | 7cef45d2ab44b7fc5c58e5c2b69a7a92b12c8073 | 17c812c25f8b74beb5edcebab1f82c2ee37b651f |
refs/heads/main | <file_sep>using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using my_books.Data.Services;
using my_books.Data.ViewModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace my_books.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class BooksController : ControllerBase
{
public BooksService _booksService;
public BooksController(BooksService booksService)
{
_booksService = booksService;
}
[HttpGet("get-allbooks")]
public IActionResult GetAllBooks()
{
var books = _booksService.GetAllBooks();
return Ok(books);
}
[HttpGet("get-bookbyid/{Id}")]
public IActionResult GetBookById(int Id)
{
var book = _booksService.GetBookById(Id);
if (book == null)
{
return NotFound("Did not found any book with given book id");
}
return Ok(book);
}
[HttpPost("add-book-with-Author-Publisher")]
public IActionResult AddBook(BookVM book)
{
_booksService.AddBook(book);
return Ok();
}
[HttpPut("update-bookbyid/{id}")]
public IActionResult UpdateBookById(int id,[FromBody] BookVM book)
{
var _book = _booksService.UpdateBookById(id, book);
return Ok(_book);
}
[HttpDelete("delete-bookbyid/{id}")]
public IActionResult DeleteBookById(int id)
{
var result = _booksService.DeleteBookById(id);
return Ok(result);
}
}
}
<file_sep>using my_books.Data.Models;
using my_books.Data.Paging;
using my_books.Data.ViewModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace my_books.Data.Services
{
public class PublishersService
{
private AppDbContext _context;
public PublishersService(AppDbContext context)
{
_context = context;
}
public Publisher AddPublisher(PublisherVM publisher)
{
var _publisher = new Publisher()
{
Name = publisher.Name
};
_context.Publishers.Add(_publisher);
_context.SaveChanges();
return _publisher;
}
public PublisherWithBooksAndAuthorsVM GetPublisherData(int id)
{
var _publisher = _context.Publishers.Where(n => n.Id == id).Select(n => new PublisherWithBooksAndAuthorsVM()
{
Name = n.Name,
BookAuthors = n.Books.Select( n=> new BookAuthorVM()
{
BookName = n.Title,
BookAuthors = n.Book_Authors.Select(n=>n.Author.FullName).ToList()
}).ToList()
}).FirstOrDefault();
return _publisher;
}
public void DeletePublisherById(int id)
{
var _publisher = _context.Publishers.FirstOrDefault(n=>n.Id == id);
if(_publisher != null)
{
_context.Publishers.Remove(_publisher);
_context.SaveChanges();
}
else
{
throw new Exception($"the publisher with given id:{id} does not exist");
}
}
public List<Publisher> GetAllPublishers(string sortBy, string searchString,int? pageNumber)
{
var model = _context.Publishers.ToList();
if(!string.IsNullOrEmpty(sortBy))
{
switch(sortBy)
{
case "id_desc":
model = model.OrderByDescending(n => n.Id).ToList();
break;
default:
break;
}
}
if (!string.IsNullOrEmpty(searchString))
{
model = model.Where(n => n.Name.Contains(searchString, StringComparison.CurrentCultureIgnoreCase)).ToList();
}
// Paging
int pageSize = 5;
model = PaginatedList<Publisher>.Create(model.AsQueryable(), pageNumber ?? 1, pageSize);
return model;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace my_books.Exceptions
{
public class ExceptionMiddlewareExtensions
{
}
}
<file_sep>using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using my_books.Data.Services;
using my_books.Data.ViewModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace my_books.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class PublishersController : ControllerBase
{
private PublishersService _publishersService;
public PublishersController(PublishersService publishersService)
{
_publishersService = publishersService;
}
[HttpGet("get-all-publishers")]
public IActionResult GetAllPublishers(string sortBy,string searchString, int pageNumber)
{
var response = _publishersService.GetAllPublishers(sortBy, searchString, pageNumber);
return Ok(response);
}
[HttpPost("add-publisher")]
public IActionResult AddPublisher([FromBody] PublisherVM publisher)
{
var response = _publishersService.AddPublisher(publisher);
return Created(nameof(AddPublisher), response);
}
[HttpGet("get-publisher-books-with-authors/{id}")]
public IActionResult GetPublisherData(int id)
{
var _response = _publishersService.GetPublisherData(id);
return Ok(_response);
}
[HttpDelete("delete - publisher - by - id /{id}")]
public IActionResult DeltePublisherById(int id)
{
try
{
_publishersService.DeletePublisherById(id);
return Ok();
}
catch(Exception ex)
{
return BadRequest(ex.Message);
}
}
}
}
| bf27ad2ad8c385f01ab44df88120a23ecd1e34d3 | [
"C#"
] | 4 | C# | sampreeth8/Asp.Net-Web-api | b8ec74fa5b81d5e8b3ffc8c7f019aaef810a2116 | d4ead464b3d7a37ef93753886632ae946eefdc04 |
refs/heads/master | <file_sep>namespace Full_GRASP_And_SOLID.Library
{
//Al usar esta interfaz para ambos tipos de impresión evitamos preguntar por el destino
//cumpliendo así con el patrón de Polimorfismo.
//Esta interfaz al igual que las dos clases que la implementan cumplen con el patrón de SRP
//porque sólo tienen la función de impresión.
//También se cumple con el principio de OCP ya que si se quiere agregar otro método de impresión
//sólo es necesario crear una clase que implemente la interfaz IPrinter, sin necesidad
//de modificar ésta o cualquier otra clase.
public interface IPrinter
{
void PrintRecipe(Recipe recipe);
}
} | bec49aeb07bdcf72b0537a74ccf7df0998aeb003 | [
"C#"
] | 1 | C# | LautaroDaRosa/Full_grasp_and_solid_3 | 58d3b97034d2f0e458a6a2447ecef80d1102e413 | 853647d68a5a9710e60f6ff2c356873f524afbad |
refs/heads/main | <repo_name>lebron-li/Visualization-of-risk-control-based-on-Enterprise-Association-Graph<file_sep>/control.py
# -*- coding: utf-8 -*-
import os
import json
import pandas as pd
import numpy as np
import networkx as nx
def getInitControlG(path):
    """
    Read the controller-relation csv into a DataFrame, build a directed
    graph from it and split the graph into connected subgraphs.

    Params:
        path: csv file (gb2312 encoded) holding the controller relations
    Returns:
        subG: list of directed subgraphs, one per connected component
    """
    control = pd.read_csv(path, encoding="gb2312")
    # Rename the column index to English identifiers.
    control.columns = ["relTag", "src", "destn", "relType", "rate"]
    # Clamp abnormal ratios (> 100) to 100, then render as "NN%" strings.
    # Bug fix: use .loc instead of chained indexing - the old
    # `control.rate[cond] = 100` writes through a temporary and may
    # silently fail to update the frame (SettingWithCopyWarning).
    control.loc[control["rate"] >= 100, "rate"] = 100
    control["rate"] = [str(x) + "%" for x in control["rate"]]
    # In a "Control" relation, relTag and src correspond one-to-one.
    G = nx.DiGraph()
    # Build the initial graph G.
    for _, row in control.iterrows():
        # Every node starts as non-root / non-cross; flags refined later.
        if row["src"] not in G.nodes():
            if row["relType"] == "Control":
                G.add_node(row["src"], isRoot=0, isCross=0, isControl=1)
            else:
                G.add_node(row["src"], isRoot=0, isCross=0, isControl=0)
        elif row["relType"] == "Control":
            G.nodes[row["src"]]["isControl"] = 1
        if row["destn"] not in G.nodes():
            G.add_node(row["destn"], isRoot=0, isCross=0, isControl=0)
        G.add_edge(
            row["src"],
            row["destn"],
            rate=row["rate"],
        )
    print("----------控制人表数据读取完成----------")
    # Split into connected components (computed on the undirected view).
    tmp = nx.to_undirected(G)
    subG = list()
    for c in nx.connected_components(tmp):
        subG.append(G.subgraph(c))
    print("----------控制人子图切分完成----------")
    return subG
def getRootOfControlG(subG):
    """
    Determine the actual controller (root) of every component and mark
    cross-shareholding nodes.

    Per the original author's empirical notes: each component has either
    no root or exactly one root, and rooted components contain no
    cross-shareholding; the code relies on that simplification.

    Params:
        subG: list of subgraphs produced by getInitControlG
    Returns:
        rootG: the same subgraphs, with isRoot / isCross node flags set
    """
    rootG = list()
    # Mark the root node of each component.
    for G in subG:
        flag = False  # whether a root was found
        for n in G.nodes:
            if G.in_degree(n) == 0:
                G.nodes[n]["isRoot"] = 1
                flag = True
                break  # at most one root, stop at the first hit
        # No root: reverse the graph and peel off in-degree-0 nodes
        # repeatedly (a topological peel); every node that survives
        # belongs to / feeds a shareholding cycle and is marked as
        # cross-shareholding (risk is shared among those companies).
        if not flag:
            tmpG = nx.reverse(G)
            flag = True
            while flag:
                flag = False
                s = list()
                for n in tmpG.nodes:
                    if tmpG.in_degree(n) == 0:
                        s.append(n)
                        flag = True
                tmpG.remove_nodes_from(s)
            # Nodes remaining after the peel form the cross-shareholding set.
            for n in tmpG.nodes:
                G.nodes[n]["isCross"] = 1
            rootG.append(G)
            continue
        # Exactly one root: it is the actual controller of all companies.
        # Run a forward topological peel to detect local cross-shareholding.
        tmpG = nx.DiGraph(G)
        flag = True
        while flag:
            flag = False
            s = list()
            for n in tmpG.nodes:
                if tmpG.in_degree(n) == 0:
                    s.append(n)
                    flag = True
            tmpG.remove_nodes_from(s)
        # If nodes remain, they would form cross-shareholding clusters.
        # NOTE(review): both branches below are identical and surviving
        # nodes are never flagged isCross here - consistent with the
        # author's finding that rooted components have no cross-holding,
        # but effectively dead code as written.
        if nx.number_of_nodes(tmpG):
            # a component may contain several cross-shareholding clusters
            rootG.append(G)
        else:
            # components without cross-holding peel down to the empty graph
            rootG.append(G)
    print("----------控制人关系识别完成----------")
    return rootG
def graphs2json(GList):
    """
    Export the controller graphs as json files for the front-end
    visualisation.

    Nodes are classified as control / cross / root / normal.  Components
    that contain neither a Control edge nor cross-shareholding are batched
    into "double_*" (two-node components) and "multi_*" (larger
    components) json files of bounded size.

    Params:
        GList: list of annotated subgraphs (see getRootOfControlG)
    Outputs:
        json files written under ./frontend/public/res/json/control/
    """
    controlList = {"nodes": [], "links": []}
    crossList = {"nodes": [], "links": []}
    doubleCurList = {"nodes": [], "links": []}
    multiCurList = {"nodes": [], "links": []}
    i, j = 0, 0          # running indices of the double_/multi_ output files
    Gid = 0              # subgraph id
    doubleCount, multiCount = 0, 0
    for item in GList:
        # Collect this component's nodes and links.
        tmp = {"nodes": [], "links": []}
        inControl, inCross, isIn = False, False, False
        for n in item.nodes:
            if item.nodes[n]["isControl"]:
                group, c, size = 0, "control", 5
                inControl, isIn = True, True
            elif item.nodes[n]["isCross"]:
                group, c, size = 1, "cross", 3
                inCross, isIn = True, True
            elif item.nodes[n]["isRoot"]:
                group, c, size = 2, "root", 3
            else:
                group, c, size = 3, "normal", 1
            tmp["nodes"].append({
                "group": group,
                "class": c,
                "size": size,
                "Gid": Gid,
                "id": n
            })
        for u, v in item.edges:
            tmp["links"].append({
                "source": u,
                "target": v,
                "rate": item[u][v]["rate"]
            })
        # Components with Control edges go to control.json.
        if inControl:
            controlList["nodes"] += tmp["nodes"]
            controlList["links"] += tmp["links"]
        # Components with cross-shareholding go to cross.json.
        if inCross:
            crossList["nodes"] += tmp["nodes"]
            crossList["links"] += tmp["links"]
        if not isIn:
            if len(tmp["nodes"]) == 2:
                doubleCount += 2
                # Flush the current batch once it holds ~3000 nodes.
                # (Bug fix: the old code dropped the component that
                # triggered the flush; now it starts the next batch.)
                if doubleCount >= 3000:
                    path = "./frontend/public/res/json/control/" + "double_" + str(i) + ".json"
                    with open(path, "w") as f:
                        json.dump(doubleCurList, f)
                    i += 1
                    doubleCount = 2
                    doubleCurList = {"nodes": [], "links": []}
                doubleCurList["nodes"] += tmp["nodes"]
                doubleCurList["links"] += tmp["links"]
            else:
                multiCount += len(tmp["nodes"])
                # Same flush scheme with a 2950-node threshold.
                if multiCount >= 2950:
                    path = "./frontend/public/res/json/control/" + "multi_" + str(j) + ".json"
                    with open(path, "w") as f:
                        json.dump(multiCurList, f)
                    j += 1
                    multiCount = len(tmp["nodes"])
                    multiCurList = {"nodes": [], "links": []}
                multiCurList["nodes"] += tmp["nodes"]
                multiCurList["links"] += tmp["links"]
        Gid += 1
    # Flush whatever is left in the last (possibly partial) batches.
    # (Bug fix: the old code unconditionally re-appended the very last
    # component here, duplicating it - or wrongly adding a control/cross
    # component to the "other" files.)
    if doubleCurList["nodes"]:
        path = "./frontend/public/res/json/control/" + "double_" + str(i) + ".json"
        with open(path, "w") as f:
            json.dump(doubleCurList, f)
    if multiCurList["nodes"]:
        path = "./frontend/public/res/json/control/" + "multi_" + str(j) + ".json"
        with open(path, "w") as f:
            json.dump(multiCurList, f)
    # Write the Control-relation and cross-shareholding files.
    with open(r"./frontend/public/res/json/control/control.json", "w") as f:
        json.dump(controlList, f)
    with open(r"./frontend/public/res/json/control/cross.json", "w") as f:
        json.dump(crossList, f)
    print("----------控制人json导出完成----------")
def ansJson(GList):
    """
    Export the controller relations as the answer json files.

    Every component is written into exactly one of three files:
    cross.json (components containing cross-shareholding), control.json
    (components containing Control edges) or normal.json (plain
    actual-controller relations).

    Params:
        GList: annotated subgraphs (see getRootOfControlG)
    Outputs:
        json files written under ./answers/control/
    """
    controlList = {"links": []}
    crossList = {"links": []}
    normalList = {"links": []}
    for item in GList:
        root = ""
        controlNodes = list()
        # Scan for root, cross-shareholding and Control nodes.
        # NOTE(review): the loop breaks at the first cross node, so
        # controlNodes/root may be only partially collected - harmless,
        # because such a component is then handled exclusively as "cross".
        inControl, inCross = False, False
        for n in item.nodes:
            if item.nodes[n]["isCross"]:
                inCross = True
                break
            elif item.nodes[n]["isControl"]:
                inControl = True
                controlNodes.append(n)
            elif item.nodes[n]["isRoot"]:
                root = n
            else:
                continue
        # Cross-shareholding component: no single controller can be named.
        if inCross:
            for n in item.nodes():
                crossList["links"].append({
                    "from": "null",
                    "to": n
                })
            continue
        # Component with Control edges: the Control nodes jointly control
        # every other node.
        if inControl:
            for n in item.nodes():
                if not n in controlNodes:
                    controlList["links"].append({
                        "from": controlNodes,
                        "to": n
                    })
                else:
                    controlList["links"].append({
                        "from": "null",
                        "to": n
                    })
            continue
        # Plain component: the root is the actual controller of all others.
        for n in item.nodes():
            if not n == root:
                normalList["links"].append({
                    "from": root,
                    "to": n
                })
            else:
                normalList["links"].append({
                    "from": "null",
                    "to": n
                })
    # Write the three answer files.
    with open(r"./answers/control/control.json", "w") as f:
        json.dump(controlList, f)
    with open(r"./answers/control/cross.json", "w") as f:
        json.dump(crossList, f)
    with open(r"./answers/control/normal.json", "w") as f:
        json.dump(normalList, f)
print("----------控制人表的答案json导出完成----------")<file_sep>/moneyCollection.py
import re
import json
import csv
import networkx as nx
import matplotlib.pyplot as plt
# Recognition thresholds for money-collection detection.
# txn: transaction (transfer), recip: reciprocal
txn = {
    "code": ['EK95','8002','8003','7743'],   # accepted transfer transaction codes
    "isLoan": 0,                              # loan-flag value meaning "transfer"
    "txnAmountLimit": 90000.0,                # minimum qualifying transfer amount
}
loan = {
    # Accepted loan transaction codes.
    # NOTE(review): '7641' appears twice; harmless for membership tests.
    "code": ['6101' ,'6102', '6104' , '61' , '6151' , '2202' , '6002' , '6003' ,
             '6005' , '6006' , '7641' , '7799', '7641' ,'7810', 'DK06' , 'DK05'],
    "txnAmountLimit": 100000.0,               # minimum qualifying loan amount
    "isLoan": 1,                              # loan-flag value meaning "loan"
    "status": 0,                              # required status code
    # Memo patterns that DISQUALIFY a record as a loan (repayments,
    # interest collection, provident-fund payouts, etc.); matched with
    # re.search in getInitmoneyCollectionG.
    "abstract": ['贷款还款', '委托贷款收回利息', '委托贷款收回本金',
                 '现金管理子账户占用上存金额补足本次扣款', '公积金放款', '贷款并账']
}
def getInitmoneyCollectionG(path):
    """
    Read the money-collection csv and split the transaction graph into
    connected subgraphs.

    Only loans and transfers satisfying the module-level `loan` / `txn`
    filter rules are kept.  The same pair of accounts may trade several
    times, hence a MultiDiGraph.

    Params:
        path: csv file with the raw transaction records
    Returns:
        GList: list of MultiDiGraph subgraphs, one per connected component
    """
    # Repeated transactions between the same accounts are kept as
    # parallel edges.
    G = nx.MultiDiGraph()
    codes = [[], []]  # codes[0]: accepted loan codes, codes[1]: accepted transfer codes
    # The source csv contains non-utf-8 bytes; undecodable characters are
    # ignored.  Bug fix: the file name was hard-coded before, silently
    # ignoring the `path` argument.
    with open(path, encoding='utf-8', errors='ignore') as f:
        originData = csv.reader(f)
        # Column positions of the fields we need.
        tag = {
            "myId": 0,         # own account
            "recipId": 29,     # counterparty account
            "txnDateTime": 1,  # transaction date
            "txnCode": 4,      # transaction code
            "txnAmount": 7,    # transaction amount
            "isLoan": 6,       # loan flag
            "status": 33,      # status code
            "abstract": 21     # abstract / memo text
        }
        i = 0
        for line in originData:
            # whether this record passes the filters
            canIn = False
            # Skip the header row and rows with an empty account id.
            if line[tag["myId"]] == '' or line[tag["recipId"]] == '' or i == 0:
                i += 1
                continue
            # Transfer filter.
            if (
                int(line[tag["isLoan"]]) == txn["isLoan"]
                and line[tag["txnCode"]] in txn["code"]
                and float(line[tag["txnAmount"]]) >= txn["txnAmountLimit"]
            ):
                codes[1].append(line[tag["txnCode"]])
                canIn = True
            # Loan filter.
            elif (
                not line[tag["status"]] == "R"
                and float(line[tag["txnAmount"]]) >= loan["txnAmountLimit"]
                and line[tag["txnCode"]] in loan["code"]
                and int(line[tag["isLoan"]]) == loan["isLoan"]
                and int(line[tag["status"]]) == loan["status"]
            ):
                # Drop records whose memo marks them as repayments etc.
                flag = False
                for item in loan["abstract"]:
                    if re.search(item, line[tag["abstract"]]):
                        flag = True
                        break
                if flag:
                    continue
                codes[0].append(line[tag["txnCode"]])
                canIn = True
            if canIn:
                # Add the qualifying nodes and edge to G; long account ids
                # (>= 15 chars) are normalised by zeroing the last two
                # characters.
                if not G.has_node(line[tag["myId"]]):
                    if len(line[tag["myId"]]) >= 15:
                        line[tag["myId"]] = line[tag["myId"]][:-2] + '00'
                    G.add_node(line[tag["myId"]], netIncome=0, std=0)
                if not G.has_node(line[tag["recipId"]]):
                    if len(line[tag["recipId"]]) >= 15:
                        line[tag["recipId"]] = line[tag["recipId"]][:-2] + '00'
                    G.add_node(line[tag["recipId"]], netIncome=0, std=0)
                G.add_edge(
                    line[tag["myId"]],
                    line[tag["recipId"]],
                    txnAmount=float(line[tag["txnAmount"]]),
                    txnDateTime=int(line[tag["txnDateTime"]]),
                    isLoan=int(line[tag["isLoan"]]),
                    txnCode=line[tag["txnCode"]],
                    width=float(line[tag["txnAmount"]])**0.5 / 1800
                )
            i += 1
    print("----------资金归集表数据读取完成----------")
    print("符合条件的贷款和转账关系总数:", G.size())
    print("含有贷款和转账的公司数量:", nx.number_of_nodes(G))
    codes = [list(set(codes[i])) for i in range(2)]
    print("符合条件的贷款交易码类型:", codes[0])
    print("符合条件的转账交易码类型:", codes[1])
    # Split into connected components (computed on the undirected view).
    tmp = nx.to_undirected(G)
    GList = list()
    for c in nx.connected_components(tmp):
        GList.append(G.subgraph(c))
    print("----------资金归集子图切分完成----------")
    return GList
def getNetIncome(Glist):
    '''
    Compute each company's net money inflow and a normalised display size.

    For every node: netIncome = sum of amounts on incoming edges minus
    sum of amounts on outgoing edges (all parallel edges counted).
    NOTE(review): no isLoan filter is applied here although the original
    inline comments speak of "loan inflow" / "transfer outflow" - confirm
    whether mixing edge types is intended.

    Params:
        Glist: list of transaction subgraphs
    Outputs:
        node attributes "netIncome" and "std" are written in place;
        "std" maps |netIncome| linearly into [5, 14] for visualisation
    '''
    for subG in Glist:
        for n in subG.nodes():
            children = list(subG.neighbors(n))
            father = list(subG.predecessors(n))
            netIncome = 0
            # inflow over all incoming (parallel) edges
            for f in father:
                for k1 in subG[f][n]:
                    netIncome += subG[f][n][k1]["txnAmount"]
            # outflow over all outgoing (parallel) edges
            for c in children:
                for k2 in subG[n][c]:
                    netIncome -= subG[n][c][k2]["txnAmount"]
            subG.nodes[n]["netIncome"] = netIncome
        # Normalise |netIncome| to the display size range [5, 14].
        d = [abs(x)
             for x in nx.get_node_attributes(subG, "netIncome").values()
             ]
        maxNetIncome, minNetIncome = max(d), min(d)
        if maxNetIncome == minNetIncome:
            # degenerate spread: every node gets the midpoint size
            for n in subG.nodes():
                subG.nodes[n]["std"] = 9
        else:
            k = 9/(maxNetIncome - minNetIncome)
            for n in subG.nodes():
                subG.nodes[n]["std"] = 5 + k * (
                    abs(subG.nodes[n]["netIncome"]) - minNetIncome
                )
    print("----------净资金流入计算完成----------")
def findShellEnterprise(GList):
    '''
    Detect shell enterprises via money-collection triples.

    A triple (f -> n -> c) is flagged when a loan from f to n is followed
    within 5 days by a transfer from n to c of 90%-100% of the loan
    amount; n is then a suspected pass-through (shell) company.

    Params:
        GList: list of transaction subgraphs
    Returns:
        se: MultiDiGraph holding the matched loan/transfer edge pairs
        seNodes: flat list of all companies appearing in a triple
                 (deduplicated per role).
                 NOTE(review): when nothing is matched, seNodes is
                 returned unflattened as [[], [], []].
    '''
    se = nx.MultiDiGraph()
    # seNodes[0]: lenders, seNodes[1]: middlemen, seNodes[2]: receivers
    seNodes = [[] for i in range(3)]
    codes = [[], []]  # codes[0]: matched loan codes, codes[1]: matched transfer codes
    for subG in GList:
        for n in subG.nodes():
            children = list(subG.neighbors(n))
            father = list(subG.predecessors(n))
            if not father or not children:
                continue
            # upstream companies
            for f in father:
                for k1 in subG[f][n]:
                    # skip incoming edges that are not loans
                    if subG[f][n][k1]["isLoan"] == txn["isLoan"]:
                        continue
                    # search for the best matching loan/transfer pair
                    bestMatchF, bestMatchRate, bestMatchC, bestMatchLoan, bestMatchTxn, bestMatchDate = "", 0.9, "", -1, -1, 0
                    bestMatchK2 = -1  # edge key of the matched transfer
                    # downstream companies
                    for c in children:
                        for k2 in subG[n][c]:
                            # skip outgoing edges that are not transfers
                            if subG[n][c][k2]["isLoan"] == loan["isLoan"]:
                                continue
                            # within 5 days and amount ratio in [0.9, 1.0]
                            rate = subG[n][c][k2]["txnAmount"] / subG[f][n][k1]["txnAmount"]
                            if (
                                subG[n][c][k2]["txnDateTime"] - subG[f][n][k1]["txnDateTime"] <= 5
                                and subG[n][c][k2]["txnDateTime"] >= subG[f][n][k1]["txnDateTime"]
                                and rate >= bestMatchRate
                                and rate <= 1
                            ):
                                bestMatchC, bestMatchRate, bestMatchF = c, rate, f
                                bestMatchK2 = k2
                                bestMatchLoan, bestMatchTxn = subG[f][n][k1]["txnAmount"], subG[n][c][k2]["txnAmount"]
                                bestMatchDate = (subG[f][n][k1]["txnDateTime"], subG[n][c][k2]["txnDateTime"])
                    # A matching pair was found: record the triple in se.
                    # Bug fix: the transfer edge is now addressed with the
                    # key captured at match time (bestMatchK2).  The old
                    # code reused the loop variable k2, which pointed at
                    # the last edge iterated and could even be an invalid
                    # key for (n, bestMatchC), raising KeyError.
                    if bestMatchC:
                        print(
                            "father: ", bestMatchF,
                            "node: ", n,
                            "child: ", bestMatchC, "\n"
                            "贷款交易码:", subG[bestMatchF][n][k1]["txnCode"],
                            "转账交易码:", subG[n][bestMatchC][bestMatchK2]["txnCode"]
                        )
                        print(
                            "rate: ", bestMatchRate,
                            "贷款金额: ", bestMatchLoan,
                            "转账金额: ", bestMatchTxn,
                            "贷款和转账日期: ", bestMatchDate
                        )
                        codes[0].append(subG[bestMatchF][n][k1]["txnCode"])
                        codes[1].append(subG[n][bestMatchC][bestMatchK2]["txnCode"])
                        # NOTE(review): the isLoan labels below are kept
                        # inverted relative to the txn/loan convention
                        # (loan edge gets 0, transfer gets 1), as in the
                        # original code; downstream only reads "width".
                        se.add_edge(
                            bestMatchF,
                            n,
                            txnAmount=bestMatchLoan,
                            isLoan=0,
                            txnDateTime=bestMatchDate[0],
                            txnCode=subG[bestMatchF][n][k1]["txnCode"],
                            width=subG[bestMatchF][n][k1]["width"]
                        )
                        se.add_edge(
                            n,
                            bestMatchC,
                            txnAmount=bestMatchTxn,
                            isLoan=1,
                            txnDateTime=bestMatchDate[1],
                            txnCode=subG[n][bestMatchC][bestMatchK2]["txnCode"],
                            width=subG[n][bestMatchC][bestMatchK2]["width"]
                        )
                        seNodes[0].append(bestMatchF)
                        seNodes[1].append(n)
                        seNodes[2].append(bestMatchC)
    if (nx.number_of_nodes(se)):
        print("资金归集三元组关系数量:", se.size() / 2)
        print("所有处于资金归集三元组中的企业总数", nx.number_of_nodes(se))
        seNodes = [list(set(seNodes[i])) for i in range(3)]
        codes = [list(set(codes[i])) for i in range(2)]
        print("筛选后贷款的交易码含有:", codes[0])
        print("筛选后转账的交易码含有:", codes[1])
        print("具有资金归集行为的提供贷款企业数量为:", len(seNodes[0]))
        print("具有资金归集行为的中间企业数量为:", len(seNodes[1]))
        print("具有资金归集行为的接收转账企业数量为:", len(seNodes[2]))
        seNodes = [seNodes[i][j] for i in range(3) for j in range(len(seNodes[i]))]
        print("资金归集的企业列表:", seNodes)
    return se, seNodes
def graphs2json(GList, se, seNodes):
    '''
    Export the money-collection detection result as front-end json files.

    All transaction components are batched into "all_*" json files of at
    most ~1500 nodes; companies involved in money-collection triples are
    written separately to moneyCollection.json.

    Params:
        GList: list of transaction subgraphs (with netIncome/std set)
        se: money-collection graph from findShellEnterprise
        seNodes: company lists per role, indexed seNodes[0..2]
                 NOTE(review): findShellEnterprise returns seNodes
                 flattened, while this function indexes seNodes[0] /
                 seNodes[1] as role lists ("n in seNodes[1]" degrades to
                 a substring test on a flat list) - confirm the intended
                 shape.
    '''
    collectionList = {"nodes": [], "links": []}
    allList = {"nodes": [], "links": []}
    tmp = {"nodes": [], "links": []}
    i = 0
    Gid = 0  # subgraph id
    allCount = 0
    for item in GList:
        tmp["nodes"], tmp["links"] = [], []
        # Collect this component's nodes (coloured by sign of netIncome)
        # and links.
        for n in item.nodes():
            if item.nodes[n]["netIncome"] >= 0:
                group, c = 3, "pos"
            else:
                group, c = 4, "neg"
            tmp["nodes"].append(
                {"group": group, "class": c, "size": item.nodes[n]["std"], "Gid": Gid, "id": n}
            )
        for u in item.nodes():
            for v in list(item.neighbors(u)):
                for k in item[u][v]:
                    # txnDateTime keeps only the day-of-month digits.
                    dateTmp = '2020-09-' + str(item[u][v][k]["txnDateTime"])[-2:]
                    tmp["links"].append(
                        {"source": u, "target": v, "date": dateTmp, "width": item[u][v][k]["width"]}
                    )
        allCount += len(tmp["nodes"])
        # Flush the current batch once it holds ~1500 nodes; the component
        # that triggered the flush starts the next batch.
        # NOTE(review): allCount is reset to 0 although tmp is carried
        # into the new batch, so batches can overshoot the limit slightly.
        if allCount >= 1500:
            print("第", i, "个json的节点数量:", len(allList["nodes"]))
            path = "./frontend/public/res/json/moneyCollection/" + "all_" + str(i) + ".json"
            with open(path, "w") as f:
                json.dump(allList, f)
            i += 1
            allCount = 0
            allList["nodes"], allList["links"] = [], []
            allList["nodes"] += tmp["nodes"]
            allList["links"] += tmp["links"]
        else:
            allList["nodes"] += tmp["nodes"]
            allList["links"] += tmp["links"]
        Gid += 1
    # Flush the remaining (partial) batch.
    if allList["nodes"]:
        print("第", i, "个json的节点数量:", len(allList["nodes"]))
        path = "./frontend/public/res/json/moneyCollection/" + "all_" + str(i) + ".json"
        with open(path, "w") as f:
            json.dump(allList, f)
    # Store the companies involved in money-collection triples,
    # classified by role (start = lender, mid = middleman, end = receiver).
    for n in se.nodes():
        group, c = 2, "end"
        if n in seNodes[1]:
            group, c = 1, "mid"
        elif n in seNodes[0]:
            group, c = 0, "start"
        collectionList["nodes"].append({"group": group, "class": c, "size": 9, "Gid": Gid, "id": n})
        Gid += 1
    for u in se.nodes():
        for v in list(se.neighbors(u)):
            for k in se[u][v]:
                collectionList["links"].append(
                    {"source": u, "target": v, "width": se[u][v][k]["width"]}
                )
    print("存储具有资金归集行为企业信息的json的节点数量:", len(collectionList["nodes"]))
    with open(r"./frontend/public/res/json/moneyCollection/moneyCollection.json", "w") as f:
        json.dump(collectionList, f)
    print("----------资金归集json数据导出完成----------")
def ansJson(seNodes):
    '''
    Export the money-collection detection result as the answer json,
    i.e. write {"list": seNodes} to
    ./answers/moneyCollection/moneyCollection.json.

    Params:
        seNodes: flat list of company ids flagged by findShellEnterprise
    NOTE(review): the target directory must already exist, otherwise
    open() raises FileNotFoundError.
    '''
    with open(r"./answers/moneyCollection/moneyCollection.json", "w") as f:
json.dump({"list": seNodes}, f)<file_sep>/main.py
import control
import guarantee
import moneyCollection
if __name__ == "__main__":
    # Controller-relation table: build graphs, mark roots / cross-holding,
    # export the front-end json and the answer json.
    controlG = control.getInitControlG("./backend/res/control.csv")
    controlRootG = control.getRootOfControlG(controlG)
    control.graphs2json(controlRootG)
    control.ansJson(controlRootG)
    # Guarantee-relation table: build graphs, label guarantee patterns,
    # quantify risk and export the json files.
    guaranteeG = guarantee.getInitGuaranteeG("./backend/res/guarantee.csv")
    guaranteeRiskG = guarantee.markRiskOfGuaranteeG(guaranteeG)
    guarantee.riskQuantification(guaranteeRiskG)
    guarantee.graphs2json(guaranteeRiskG)
    guarantee.ansJson(guaranteeRiskG)
    # Money-collection table: detect shell-enterprise triples and export.
    moneyCollectionCut = moneyCollection.getInitmoneyCollectionG("./backend/res/moneyCollection.csv")
    se, seNodes = moneyCollection.findShellEnterprise(moneyCollectionCut)
    moneyCollection.getNetIncome(moneyCollectionCut)
    # NOTE(review): findShellEnterprise returns seNodes flattened, while
    # moneyCollection.graphs2json indexes seNodes[0]/seNodes[1] as role
    # lists - confirm the intended shape.
    moneyCollection.graphs2json(moneyCollectionCut, se, seNodes)
    moneyCollection.ansJson(seNodes)
<file_sep>/guarantee.py
import os
import json
import numpy as np
import pandas as pd
import networkx as nx
import matplotlib.pyplot as plt
from collections import defaultdict
def getInitGuaranteeG(path):
    """
    Load the guarantee-relation csv into a directed graph and split it
    into connected subgraphs.

    Params:
        path: csv file (gb2312 encoded) with the guarantee records
    Returns:
        subG: list of directed subgraphs, one per connected component
    """
    df = pd.read_csv(path, encoding="gb2312")
    df.columns = ["src", "destn", "time", "guarType", "amount"]
    # A zero guarantee amount is treated as an invalid record and dropped
    # (removes ~870 edges from the original data).
    df = df[~df["amount"].isin([0])]
    # Build the full directed guarantee graph.
    G = nx.DiGraph()
    for _, record in df.iterrows():
        for node in (record["src"], record["destn"]):
            G.add_node(node, guarType=[], m=0.0, std=0.0)
        G.add_edge(
            record["src"],
            record["destn"],
            guarType=record["guarType"],
            amount=record["amount"],
            mij=0,
        )
    # One subgraph per weakly connected component.
    subG = [G.subgraph(c) for c in nx.connected_components(nx.to_undirected(G))]
    print("----------初始化子图信息完成----------")
    print("有效担保关系节点总数:", nx.number_of_nodes(G))
    print("有效担保关系边总数:", nx.number_of_edges(G))
    print("切分子图数量:", len(subG))
    return subG
def markRiskOfGuaranteeG(GList):
    """
    Label each node with the guarantee-risk patterns it participates in:
    Normal, Mutual (mutual guarantee), Cross (one-guarantees-many hub),
    Focus (many-guarantee-one), Circle (guarantee ring) and Chain.

    Params:
        GList: list of guarantee subgraphs
    Output:
        GList: the same subgraphs with node attribute "guarType" filled
    """
    for subG in GList:
        # Two-node component: can only be a normal or a mutual guarantee.
        if subG.number_of_nodes() == 2:
            # single edge -> normal guarantee
            if nx.is_tree(subG):
                for n in subG.nodes():
                    subG.nodes[n]["guarType"].append("Normal")
            # two reciprocal edges -> mutual guarantee
            else:
                for n in subG.nodes():
                    subG.nodes[n]["guarType"].append("Mutual")
        # Degree-based labels (applied to every component, incl. 2-node).
        for u, v in subG.edges:
            # Greedy heuristic: a source is a likely Cross hub, a sink a
            # likely Focus target.
            # One-guarantees-many (star pattern / guarantee company).
            if subG.out_degree(u) >= 3 and "Cross" not in subG.nodes[u]["guarType"]:
                subG.nodes[u]["guarType"].append("Cross")
            # Many-guarantee-one (joint guarantee).
            if subG.in_degree(v) >= 3 and "Focus" not in subG.nodes[v]["guarType"]:
                subG.nodes[v]["guarType"].append("Focus")
        # Guarantee rings: per the author's note the Circle edge labels in
        # the raw data are unreliable, so rings are re-derived here via a
        # forward and a reversed topological peel.
        tmpG = nx.DiGraph(subG)
        flag = True
        while flag:
            flag = False
            l = list()
            for n in tmpG.nodes():
                if tmpG.in_degree(n) == 0:
                    l.append(n)
                    flag = True
            tmpG.remove_nodes_from(l)
        # Reverse the remaining edges and peel again.
        tmpG = nx.reverse(tmpG)
        flag = True
        while flag:
            flag = False
            l = list()
            for n in tmpG.nodes():
                if tmpG.in_degree(n) == 0:
                    l.append(n)
                    flag = True
            tmpG.remove_nodes_from(l)
        # A non-empty remainder implies guarantee rings or mutual pairs.
        if nx.number_of_nodes(tmpG):
            # mutual guarantees: reciprocal edge pairs
            for u, v in tmpG.edges():
                if tmpG.has_edge(v, u):
                    if "Mutual" not in subG.nodes[u]["guarType"]:
                        subG.nodes[u]["guarType"].append("Mutual")
                    if "Mutual" not in subG.nodes[v]["guarType"]:
                        subG.nodes[v]["guarType"].append("Mutual")
            # guarantee-ring detection via backtracking DFS
            visited = list()
            trace = list()
            def dfs2FindCircle(node):
                """
                Backtracking DFS that marks all nodes on cycles of
                length > 2 with "Circle".

                NOTE(review): `visited`/`trace` are lists, so membership
                tests are O(n), and deep components may exceed Python's
                recursion limit.

                Params:
                    node: current DFS node
                """
                if node in visited:
                    if node in trace:
                        trace_index = trace.index(node)
                        # two-node mutual guarantees are not rings
                        if len(trace) - trace_index > 2:
                            for i in range(trace_index, len(trace)):
                                if "Circle" not in subG.nodes[trace[i]]["guarType"]:
                                    subG.nodes[trace[i]]["guarType"].append("Circle")
                    return
                visited.append(node)
                trace.append(node)
                for child in list(tmpG.neighbors(node)):
                    dfs2FindCircle(child)
                trace.pop()
            # NOTE(review): the DFS starts from a single arbitrary node;
            # cycles unreachable from that start are not scanned - confirm
            # the peeled remainder is always reachable from it.
            dfs2FindCircle(list(tmpG.nodes())[0])
        # Chain: nodes with no label so far sit on a guarantee chain.
        # (The second condition is redundant: an empty list cannot
        # contain "Chain".)
        for u, v in subG.edges():
            if not subG.nodes[u]["guarType"] and "Chain" not in subG.nodes[u]["guarType"]:
                subG.nodes[u]["guarType"].append("Chain")
            if not subG.nodes[v]["guarType"] and "Chain" not in subG.nodes[v]["guarType"]:
                subG.nodes[v]["guarType"].append("Chain")
    print("----------担保关系识别完成----------")
    return GList
def riskQuantification(subG):
    """Annotate every node of each subgraph with a risk weight.

    For each graph in ``subG`` the node attribute ``m`` is set to the node's
    share of the subgraph's total transaction volume (edge attribute
    ``amount``, summed on the undirected view of the graph), and ``std`` is
    ``m`` rescaled linearly into the range [5, 25] (a constant 15 when every
    node has the same ``m``) for use as a display size downstream.

    Params:
        subG: iterable of graphs; each graph is annotated in place.
    """
    for G in subG:
        undirected = nx.Graph(G)
        total = sum(nx.get_edge_attributes(undirected, "amount").values())
        weights = {}
        for node in undirected.nodes():
            # Share of total volume carried by edges incident to this node.
            share = sum(
                undirected[node][peer]["amount"] / total
                for peer in undirected.adj[node].keys()
            )
            weights[node] = share
            G.nodes[node]["m"] = share
        hi, lo = max(weights.values()), min(weights.values())
        if hi == lo:
            # Degenerate spread: give every node the midpoint size.
            for node in G.nodes():
                G.nodes[node]["std"] = 15
        else:
            scale = 20 / (hi - lo)
            for node in G.nodes():
                G.nodes[node]["std"] = 5 + scale * (G.nodes[node]["m"] - lo)
    print("----------m值计算完成----------")
def graphs2json(GList):
    """Export the annotated guarantee graphs as JSON for front-end display.

    Each subgraph is serialised as D3-style ``{"nodes": [...], "links": [...]}``
    data and appended to every bucket whose risk pattern it contains
    (circle / mutual / cross / focus). Subgraphs with only plain chain nodes
    go to the "normal" buckets; two-node subgraphs are sharded into
    ``doubleNormal_<i>.json`` files of at most ~2950 nodes each.

    Params:
        GList: graphs whose nodes carry "guarType", "std" and "m" attributes
            (set by the recognition and quantification steps).
    Outputs:
        Writes the JSON files under ./frontend/public/res/json/guarantee/.
    """
    circleList = {"links": [], "nodes": []}
    mutualList = {"links": [], "nodes": []}
    crossList = {"links": [], "nodes": []}
    focusList = {"links": [], "nodes": []}
    doubleNormalList = {"links": [], "nodes": []}
    multiNormalList = {"links": [], "nodes": []}
    Gid = 0  # sequential subgraph id
    doubleCount = 0  # node count in the current doubleNormal shard
    i = 0  # doubleNormal shard index
    c = ["doubleRisk", "tripleRisk", "quadraRisk"]
    offsetDict = {"Chain": 0, "Mutual": 1, "Focus": 2, "Cross": 3,"Circle": 4, "Normal": 5}
    for item in GList:
        # Build this subgraph's payload: nodes first, then edges.
        isMutual, isCircle, isCross, isFocus, isUnusual = False, False, False, False, False
        tmp = {"links": [], "nodes": []}
        for n in item.nodes:
            riskCount = len(item.nodes[n]["guarType"]) - 1
            if "Mutual" in item.nodes[n]["guarType"]:
                isMutual, isUnusual = True, True
            if "Focus" in item.nodes[n]["guarType"]:
                isFocus, isUnusual = True, True
            if "Cross" in item.nodes[n]["guarType"]:
                isCross, isUnusual = True, True
            if "Circle" in item.nodes[n]["guarType"]:
                isCircle, isUnusual = True, True
            if riskCount > 0:
                # Node carries several risk marks -> combined "<x>Risk" class.
                tmp["nodes"].append({
                    "group": riskCount + 5,
                    "class": c[riskCount-1],
                    "size": item.nodes[n]["std"],
                    "ctx": ', '.join(item.nodes[n]["guarType"]),
                    "Gid": Gid,
                    "id": n,
                    "m": item.nodes[n]["m"]
                })
            else:
                tmp["nodes"].append({
                    "group": offsetDict[item.nodes[n]["guarType"][0]],
                    "class": item.nodes[n]["guarType"][0],
                    "size": item.nodes[n]["std"],
                    "ctx": ', '.join(item.nodes[n]["guarType"]),
                    "Gid": Gid,
                    "id": n,
                    "m": item.nodes[n]["m"]
                })
        # Edges.
        for u, v in item.edges:
            tmp["links"].append({
                "source": u,
                "target": v,
                "amount": item[u][v]["amount"]
            })
        # Route the subgraph into every bucket whose pattern it contains.
        if isUnusual:
            if isCircle:
                circleList["nodes"] += (tmp["nodes"])
                circleList["links"] += (tmp["links"])
            if isMutual:
                mutualList["nodes"] += (tmp["nodes"])
                mutualList["links"] += (tmp["links"])
            if isCross:
                crossList["nodes"] += (tmp["nodes"])
                crossList["links"] += (tmp["links"])
            if isFocus:
                focusList["nodes"] += (tmp["nodes"])
                focusList["links"] += (tmp["links"])
        else:  # plain "Chain" subgraph
            if nx.number_of_nodes(item) == 2:
                doubleCount += 2
                if doubleCount < 2950:
                    doubleNormalList["nodes"] += (tmp["nodes"])
                    doubleNormalList["links"] += (tmp["links"])
                else:
                    # Shard full: flush it and start a new one with this pair.
                    print("doubleNormalList", len(doubleNormalList["nodes"]))
                    with open("./frontend/public/res/json/guarantee/doubleNormal_" + str(i) + ".json", "w") as f:
                        json.dump(doubleNormalList, f)
                    i += 1
                    doubleCount = 2
                    doubleNormalList = {"links": [], "nodes": []}
                    doubleNormalList["nodes"] += (tmp["nodes"])
                    doubleNormalList["links"] += (tmp["links"])
            else:
                multiNormalList["nodes"] += (tmp["nodes"])
                multiNormalList["links"] += (tmp["links"])
        Gid += 1
    # Flush the remaining two-node subgraphs into a final shard.
    if doubleNormalList["nodes"]:
        print("doubleNormalList", len(doubleNormalList["nodes"]))
        with open("./frontend/public/res/json/guarantee/doubleNormal_" + str(i) + ".json", "w") as f:
            json.dump(doubleNormalList, f)
    print("circleList", len(circleList["nodes"]))
    print("mutualList", len(mutualList["nodes"]))
    print("crossList", len(crossList["nodes"]))
    print("focusList", len(focusList["nodes"]))
    print("multiNormalList", len(multiNormalList["nodes"]))
    # Write the buckets to disk.
    with open(r"./frontend/public/res/json/guarantee/circle.json", "w") as f:
        json.dump(circleList, f)
    with open(r"./frontend/public/res/json/guarantee/mutual.json", "w") as f:
        json.dump(mutualList, f)
    # Fix: cross.json was written to "./frontend/republic/res/jsons/..." — a
    # typo for public/json — so the cross bucket never reached the front-end.
    with open(r"./frontend/public/res/json/guarantee/cross.json", "w") as f:
        json.dump(crossList, f)
    with open(r"./frontend/public/res/json/guarantee/focus.json", "w") as f:
        json.dump(focusList, f)
    with open(r"./frontend/public/res/json/guarantee/multiNormal.json", "w") as f:
        json.dump(multiNormalList, f)
    print("----------担保关系的json导出完成完成----------")
def ansJson(GList):
    """Export the annotated guarantee graphs as the "answer" JSON files.

    Same bucketing as graphs2json() (circle / mutual / cross / focus /
    doubleNormal / multiNormal), but without the D3 display fields
    ("group"/"size") and without sharding; output goes under
    ./answers/guarantee/.

    Params:
        GList: graphs whose nodes carry "guarType" and "m" attributes.
    Outputs:
        Writes one JSON file per bucket under ./answers/guarantee/.
    """
    circleList = {"links": [], "nodes": []}
    mutualList = {"links": [], "nodes": []}
    crossList = {"links": [], "nodes": []}
    focusList = {"links": [], "nodes": []}
    doubleNormalList = {"links": [], "nodes": []}
    multiNormalList = {"links": [], "nodes": []}
    Gid = 0  # sequential subgraph id
    c = ["doubleRisk", "tripleRisk", "quadraRisk"]
    for item in GList:
        # Build this subgraph's payload: nodes first, then edges.
        isMutual, isCircle, isCross, isFocus, isUnusual = False, False, False, False, False
        tmp = {"links": [], "nodes": []}
        for n in item.nodes:
            riskCount = len(item.nodes[n]["guarType"]) - 1
            # Back-fill the implicit "Chain" mark for multi-node subgraphs.
            if nx.number_of_nodes(item) > 2 and "Chain" not in item.nodes[n]["guarType"]:
                ctx = ', '.join(item.nodes[n]["guarType"]) + ', Chain'
            else:
                ctx = ', '.join(item.nodes[n]["guarType"])
            if "Mutual" in item.nodes[n]["guarType"]:
                isMutual, isUnusual = True, True
            if "Focus" in item.nodes[n]["guarType"]:
                isFocus, isUnusual = True, True
            if "Cross" in item.nodes[n]["guarType"]:
                isCross, isUnusual = True, True
            if "Circle" in item.nodes[n]["guarType"]:
                isCircle, isUnusual = True, True
            if riskCount > 0:
                # Node carries several risk marks -> combined "<x>Risk" class.
                tmp["nodes"].append({
                    "class": c[riskCount-1],
                    "ctx": ctx,
                    "Gid": Gid,
                    "id": n,
                    "m": item.nodes[n]["m"]
                })
            else:
                tmp["nodes"].append({
                    "class": item.nodes[n]["guarType"][0],
                    "ctx": ctx,
                    "Gid": Gid,
                    "id": n,
                    "m": item.nodes[n]["m"]
                })
        # Edges.
        for u, v in item.edges:
            tmp["links"].append({
                "source": u,
                "target": v,
                "amount": item[u][v]["amount"]
            })
        # Route the subgraph into every bucket whose pattern it contains.
        if isUnusual:
            if isCircle:
                circleList["nodes"] += (tmp["nodes"])
                circleList["links"] += (tmp["links"])
            if isMutual:
                mutualList["nodes"] += (tmp["nodes"])
                mutualList["links"] += (tmp["links"])
            if isCross:
                crossList["nodes"] += (tmp["nodes"])
                crossList["links"] += (tmp["links"])
            if isFocus:
                focusList["nodes"] += (tmp["nodes"])
                focusList["links"] += (tmp["links"])
        else:  # plain "Chain" subgraph
            if nx.number_of_nodes(item) == 2:
                doubleNormalList["nodes"] += (tmp["nodes"])
                doubleNormalList["links"] += (tmp["links"])
            else:
                multiNormalList["nodes"] += (tmp["nodes"])
                multiNormalList["links"] += (tmp["links"])
        Gid += 1
    print("circleList", len(circleList["nodes"]))
    print("mutualList", len(mutualList["nodes"]))
    print("crossList", len(crossList["nodes"]))
    print("focusList", len(focusList["nodes"]))
    print("multiNormalList", len(multiNormalList["nodes"]))
    # Write the buckets to disk.
    with open(r"./answers/guarantee/circle.json", "w") as f:
        json.dump(circleList, f)
    with open(r"./answers/guarantee/mutual.json", "w") as f:
        json.dump(mutualList, f)
    with open(r"./answers/guarantee/cross.json", "w") as f:
        json.dump(crossList, f)
    with open(r"./answers/guarantee/focus.json", "w") as f:
        json.dump(focusList, f)
    with open(r"./answers/guarantee/doubleNormal.json", "w") as f:
        json.dump(doubleNormalList, f)
    with open(r"./answers/guarantee/multiNormal.json", "w") as f:
        json.dump(multiNormalList, f)
print("----------担保关系的json导出完成完成----------")<file_sep>/README.md
# 项目背景
“随e融杯”金融大数据挑战赛一等奖
可视化主要使用的是D3.js
# 安装
全为HTML、CSS、JavaScript,无需安装
# 使用
由于保密需要,相关数据已做脱敏处理,请使用者根据不同风险类型的代码自定义数据。我们首先通过一系列数据分析找出了不同的风险类型,然后使用该代码进行风险可视化
| 3b62e5386ab02be20b5970112fc284010e0910b0 | [
"Markdown",
"Python"
] | 5 | Python | lebron-li/Visualization-of-risk-control-based-on-Enterprise-Association-Graph | 509b96acc661c84d200475d154ac5002fa9a78f9 | d6931fc1378fd4bfbb2bf6067c126f7cf68ef608 |
refs/heads/master | <repo_name>LintheGH/mini-program<file_sep>/pages/rank/rank.js
// pages/rank/rank.js
const request = require('../../utils/request')
Page({
/**
* 页面的初始数据
*/
data: {
albumList: []
},
getAlbumList () {
request({
url: 'https://www.missevan.com/mobileWeb/albumList',
success: (res) => {
this.setData({
albumList: res.data.info
})
}
})
},
toAlbumList (e) {
wx.navigateTo({
url: '../album/album?id=' + e.currentTarget.id
})
},
/**
* 生命周期函数--监听页面加载
*/
onLoad: function (options) {
},
/**
* 生命周期函数--监听页面初次渲染完成
*/
onReady: function () {
this.getAlbumList()
},
/**
* 生命周期函数--监听页面隐藏
*/
onHide: function () {
},
/**
* 生命周期函数--监听页面卸载
*/
onUnload: function () {
},
/**
* 页面相关事件处理函数--监听用户下拉动作
*/
onPullDownRefresh: function () {
},
/**
* 页面上拉触底事件的处理函数
*/
onReachBottom: function () {
}
})<file_sep>/pages/index/Components/HomePage3/homepage.js
// pages/index/Components/HomePage3/homepage.js
const request = require('../../../../utils/request')
Component({
  /** Component state bound in homepage.wxml. */
  data: {
    bannerList: [],
    sound: [],
    channel: []
  },

  methods: {
    /** Load the "new homepage 3" payload and split it into component state. */
    getHomePage3: function () {
      request({
        url: 'https://www.missevan.com/mobileWeb/newHomepage3',
        success: (response) => {
          const info = response.data.info
          this.setData({
            bannerList: info.banner,
            sound: info.sound,
            channel: info.channel
          })
        }
      })
    }
  },

  /** Lifecycle: fetch data as soon as the component is attached. */
  attached: function () {
    this.getHomePage3()
  }
})
<file_sep>/pages/index/index.js
//index.js
//获取应用实例
const request = require('../../utils/request')
Page({
  /** Page state: homepage music list. */
  data: {
    music: []
  },

  /** Lifecycle: load — trigger the initial fetch. */
  onLoad: function () {
    this.getMusic()
  },

  /** Fetch the homepage music data and store it in page state. */
  getMusic: function () {
    request({
      url: 'https://www.missevan.com/sound/newhomepagedata',
      success: (response) => {
        this.setData({ music: response.data.music })
      }
    })
  },

  /** User-info tap handler (intentionally empty for now). */
  getUserInfo: function (e) {}
})
<file_sep>/pages/index/Components/ListBar/listbar.js
const dataconfig = require('../../../../utils/dataconfig')
Component({
  /** Inputs supplied by the parent page. */
  properties: {
    // Items to render in the list bar.
    data: {
      type: Array,
      value: []
    },
    // Section title.
    title: String,
    // Key into dataconfig selecting per-type display settings.
    type: String
  },

  /** Component state: display settings resolved from dataconfig. */
  data: {
    type_info: {}
  },

  methods: {
  },

  /** Lifecycle: resolve the type-specific config once attached. */
  attached: function () {
    this.setData({ type_info: dataconfig[this.data.type] })
  }
})
<file_sep>/pages/sound/sound.js
// pages/sound/sound.js
const request = require('../../utils/request')
Page({
  /**
   * Initial page state: the sound record being shown and a playback
   * progress percentage (NOTE(review): "percent" is never updated here —
   * presumably wired up elsewhere or unfinished; confirm).
   */
  data: {
    sound: {},
    percent: 0
  },
  // Fetch the sound record by id and create an audio context for its URL.
  // NOTE(review): the created audio context is assigned to a local and
  // never played or stored — playback looks unfinished (see controlAudio).
  getSound (id) {
    request({
      url: 'https://www.missevan.com/sound/getsound?soundid=' + id,
      success: (res) => {
        this.setData({
          sound: res.data.info.sound
        })
        let src = 'https://static.missevan.com/' + res.data.info.sound.soundurl_64
        let radio = this.creatAudio(src)
      }
    })
  },
  // Create an inner audio context for the given source URL (autoplay off).
  // NOTE(review): "creatAudio" is a typo for "createAudio"; kept as-is since
  // getSound calls it by this name.
  creatAudio (src) {
    const radio = wx.createInnerAudioContext()
    radio.src = src
    radio.autoplay = false
    return radio
  },
  // Playback control — not implemented yet.
  controlAudio (radio) {
  },
  /**
   * Lifecycle: page load — read the sound id from the route and fetch it.
   */
  onLoad: function (options) {
    let id = options.id
    this.getSound(id)
  },
  /**
   * Lifecycle: first render complete (no-op).
   */
  onReady: function () {
  },
  /**
   * Lifecycle: page shown (no-op).
   */
  onShow: function () {
  },
  /**
   * Lifecycle: page hidden (no-op).
   */
  onHide: function () {
  },
  /**
   * Lifecycle: page unload (no-op).
   */
  onUnload: function () {
  }
})<file_sep>/pages/index/Components/Banner/banner.js
// pages/index/Components/Banner/banner.js
// Banner carousel component: purely declarative, renders the entries it is
// given by the parent page.
Component({
  properties: {
    // List of banner items to display (defaults to empty).
    bannerList: {
      type: Array,
      value: []
    }
  }
})
<file_sep>/pages/album/album.js
// pages/album/album.js
const request = require('../../utils/request')
Page({
/**
* 页面的初始数据
*/
data: {
albumList: {},
time: []
},
getAlbumList (id) {
request({
url: 'https://www.missevan.com/sound/soundalllist?albumid=' + id,
success: (res) => {
let time = this.formatTime(res.data.info.sounds)
this.setData({
albumList: res.data.info,
time: time
})
}
})
},
formatTime (array) {
return array.map(item => {
let min = Math.floor(Math.ceil(item.duration/1000)/60)
let sec = Math.ceil(item.duration/1000)%60
sec = sec < 10 ? '0' + sec : sec
let time = min + ':' + sec
return time
})
},
/**
* 生命周期函数--监听页面加载
*/
onLoad: function (options) {
let id = options.id
this.getAlbumList(id)
},
toSound (e) {
let id = e.currentTarget.id
wx.navigateTo({
url: '../sound/sound?id=' + id
})
},
/**
* 页面相关事件处理函数--监听用户下拉动作
*/
onPullDownRefresh: function () {
},
/**
* 页面上拉触底事件的处理函数
*/
onReachBottom: function () {
}
}) | 0027a5333cc2793074876edf99f6a4376097aa95 | [
"JavaScript"
] | 7 | JavaScript | LintheGH/mini-program | 735afc18f9177ae6c3bd24100e2985cd3d42ec9c | d514010d056671a89260638d0f92c2a6463eb5b2 |
refs/heads/master | <repo_name>bolidenghia/QuanlySanBay<file_sep>/README.md
# QuanlySanBay
đồ án
<file_sep>/frame.h
#include <iostream>
#include <string>
#include <fstream>
#include <ctime>
#include <cmath>
#include <conio.h>
#include <stdlib.h>
#include <stdio.h>
#include "mylib.h"
using namespace std;
// ==================== ADD FILE ====================== //
//Menu
const int itemmenuchinh = 5;
const int itemmenuMB=6;
//" "
char MENU[itemmenuchinh][100] = {" <NAME> BAY ",
" QUAN LY CHUYEN BAY ",
" QUAN LY DAT VE ",
" QUAN <NAME> ",
" THOAT "};
char MENUMAYBAY[itemmenuMB][50] = {
" THEM MAY BAY ",
" XOA MAY BAY ",
" HIEU CHINH THONG TIN ",
" IN DANH SACH MAY BAY ",
" SAVE FILE ",
" OPEN FILE "};
char MENUNHANVIEN[2][50] = {" 1.Nhap Nhan Vien "," 2.In Danh Sach SV "};
// ===== Giaodien =====
// Draws the fixed screen header: institute banner, separator rule and the
// boxed "QUAN LY MAY BAY" (aircraft management) title.
void header(){
    TextColor(117);
    gotoxy (45,2);
    cout<<" HOC VIEN CONG NGHE BUU CHINH VIEN THONG CO SO TP.HCM ";
    SetColor(4);
    gotoxy (42,3);
    cout<<"---------------------------------------*--*--*------------------------------------------";
    SetColor(4);
    gotoxy(61,5); cout<<"============================================";
    gotoxy(61,6); cout<<"|";
    SetColor(4);
    gotoxy(74,6); cout<<" QUAN LY MAY BAY ";
    SetColor(4);
    gotoxy(104,6); cout<<"|";
    gotoxy(61,7);cout<<"============================================";
}
// Draws the key-binding footer bar: ESC = quit, ENTER = select,
// arrow keys = navigation, each on its own coloured badge.
void footer1() {
    SetColor(8);
    gotoxy(15,46);
    cout<<"-------------------------------------------------------------------------------------------------------------------------------------------------------";
    // exit
    SetColor(15);
    SetBGColor(4);
    gotoxy(50,47); cout<< " ESC ";
    SetColor(6);
    SetBGColor(0);
    gotoxy(56,47); cout<< ": THOAT";
    // ok
    SetColor(15);
    SetBGColor(2);
    gotoxy(66,47); cout<< " ENTER ";
    SetColor(6);
    SetBGColor(0);
    gotoxy(73,47); cout<< ": CHON";
    // movement keys (up / down / left / right glyphs)
    SetColor(15);
    SetBGColor(9);
    gotoxy(83,47); cout<< (char)94<<" "<<(char)118<<" " <<(char)60<<" " <<(char)62;
    SetColor(6);
    SetBGColor(0);
    gotoxy(90,47); cout<< ": UP, DOWN, LEFT, RIGHT ";
}
// Paints the large decorative "FULL" block-letter banner in the middle of
// the screen (shown when no seats are available).
void frameFull(){
    SetColor(9);
    SetBGColor(7);
    gotoxy(53,16);cout<<" ============| ==== ==== ==== ==== ";
    gotoxy(53,17);cout<<" ============| ==== ==== ==== ==== ";
    gotoxy(53,18);cout<<" ===== ==== ==== ==== ==== ";
    gotoxy(53,19);cout<<" ===== ==== ==== ==== ==== ";
    gotoxy(53,20);cout<<" ===== ==== ==== ==== ==== ";
    gotoxy(53,21);cout<<" ============| ==== ==== ==== ==== ";
    gotoxy(53,22);cout<<" ============| ==== ==== ==== ==== ";
    gotoxy(53,23);cout<<" ====== ==== ==== ==== ==== ";
    gotoxy(53,24);cout<<" ====== ==== ==== ==== ==== ";
    gotoxy(53,25);cout<<" ====== ================ ============= ============== ";
    gotoxy(53,26);cout<<" ====== ================ ============= ============== ";
    // SetBGColor(7);
}
// Shows the "press any key to continue" prompt box and restores the
// default background colour afterwards.
void frameContinue(){
    SetColor(4);
    SetBGColor(15);
    gotoxy(55, 42);
    cout << " ";
    gotoxy(55, 43);
    cout << " NHAN PHIM BAT KY DE TIEP TUC ";
    gotoxy(55, 44);
    cout << " ";
    SetBGColor(7);
}
// Draws the "enter aircraft list" input screen: header, footer and the
// column grid (serial / type / rows / columns / total seats).
void frameAdd_MB(){
    header();footer1();
    //anConTro();
    SetColor(4);
    SetBGColor(7);
    gotoxy(45,14);cout<<"+----------------+----------------------+------------+-------------+---------------+";
    gotoxy(45,15);cout<<"| SO HIEU MB | LOAI MAY BAY | SO DAY | SO HANG | TONG SO GHE |";
    gotoxy(45,16);cout<<"+----------------+----------------------+------------+-------------+---------------+";
    SetColor(13);
    gotoxy(72, 11);
    cout << "+-------------------------+";
    gotoxy(72, 12);
    cout << "| NHAP DANH SACH MAY BAY |";
    gotoxy(72, 13);
    cout << "+-------------------------+";
}
// Shows the "do you want to add more?" prompt box and returns the key the
// user presses (via GetKey, so extended keys are resolved).
int frameNext(){
    SetColor(12);
    SetBGColor(11);
    gotoxy(55, 36);
    cout << " ";
    gotoxy(55, 37);
    cout << " BAN CO MUON THEM TIEP? ";
    gotoxy(55, 38);
    cout << " ";
    SetBGColor(7);
    return GetKey();
}
// Repaints entry 'vitri' of the main menu (global MENU array) with the
// given foreground/background colours; used to highlight the selection.
// Each entry occupies a 3-line band starting at row 12 + 4 * vitri.
void ToMauMenuChinh(int textColor, int bGColor, int vitri) {
    SetColor(textColor);
    SetBGColor(bGColor);
    gotoxy(0, 12 + vitri * 4); cout <<" ";
    gotoxy(0, 13 + vitri * 4); cout << MENU[vitri];
    gotoxy(0, 14 + vitri * 4); cout <<" ";
}
// Repaints entry 'vitri' of a 50-char-wide sub-menu with the given colours;
// the sub-menu is drawn at column 25, one 3-line band per entry.
void ToMauMenuCon(int textColor, int bGColor, char MenuCon[][50], int vitri) {
    SetColor(textColor);
    SetBGColor(bGColor);
    gotoxy(25, 8 + (vitri + 1) * 4); cout <<" ";
    gotoxy(25, 9 + (vitri + 1) * 4); cout << MenuCon[vitri];
    gotoxy(25, 10 + (vitri + 1) * 4); cout <<" ";
}
// Draws string s inside a single-line box of inner width 'dodai' whose
// top-left corner is at (x, y), using CP437 box-drawing glyphs.
// NOTE(review): 'background' is passed to SetColor (i.e. used as the text
// colour) and 'textcolor' is unused — the parameter names look swapped;
// confirm against the call sites before renaming.
void TextViewChuDe(char s[],int x , int y , int dodai ,int background,int textcolor)
{
    gotoxy(x,y); cout<<(char)218;
    for(int i= 0 ; i< dodai ; i++) cout<<(char)196; cout<<(char)191 ;
    gotoxy(x, y+1 ); cout<<(char)179;
    gotoxy(x + dodai + 1 , y+1 ); cout<<(char)179;
    gotoxy( x,y + 2 ); cout<<(char)192;
    for(int i=0; i< dodai;i++) cout<<(char)196; cout<<(char)217;
    SetColor( background);
    gotoxy(x + 1 , y + 1 ); cout<<s;
}
// Draws the ESC/ENTER hint badges at (hang, cot) with their ": THOAT" and
// ": OK" captions. NOTE(review): 'textcolor' is unused and the badges are
// drawn with hard-coded colour 7 regardless of the parameters.
void ButtonThoat(int hang ,int cot, int background ,int textcolor)
{
    gotoxy(hang,cot);
    SetColor(7);
    gotoxy(hang,cot); cout<<" ";
    gotoxy(hang,cot+1); cout<<" ESC ";
    gotoxy(hang,cot+2); cout<<" ";
    gotoxy(hang+25,cot); cout<<" ";
    gotoxy(hang+25,cot+1); cout<<" ENTER ";
    gotoxy(hang+25,cot+2); cout<<" ";
    SetColor(background);
    gotoxy(hang+6,cot+1); cout<<": THOAT ";
    gotoxy(hang+35,cot+1); cout<<": OK ";
}
// Draws the function-key hint row (ESC = quit, INSERT = edit, END = add,
// DELETE = remove) at fixed positions on rows 28-30.
// NOTE(review): 'textcolor' is unused; badges use hard-coded colour 14.
void ButtonChucNang(int background,int textcolor)
{
    SetColor(14);
    gotoxy(5,28); cout<<" ";
    gotoxy(5,29); cout<<" ESC ";
    gotoxy(5,30); cout<<" ";
    gotoxy(25,28); cout<<" ";
    gotoxy(25,29); cout<<" INSERT ";
    gotoxy(25,30); cout<<" ";
    gotoxy(45,28); cout<<" ";
    gotoxy(45,29); cout<<" END ";
    gotoxy(45,30); cout<<" ";
    gotoxy(65,28); cout<<" ";
    gotoxy(65,29); cout<<" DELETE ";
    gotoxy(65,30); cout<<" ";
    SetColor(background);
    gotoxy(15,29); cout<<": THOAT ";
    gotoxy(36,29); cout<<": SUA ";
    gotoxy(56,29); cout<<": THEM ";
    gotoxy(76,29); cout<<": XOA ";
}
//-----------------CHILDREN MENU-------------------------
<file_sep>/mylib.h
#include <iostream>
#include <conio.h>
#include <stdio.h>
#include <stdlib.h>
#include <dos.h>
#include <string.h>
#include <windows.h>
//#include "frame.h"
//#define Enter 13
//const int WHITE=15;
#define PASSWORD "<PASSWORD>"
//const int WHITE=15;
//
// Reads a password from the console without echoing it (each keystroke is
// shown as '*') until ENTER, and returns it as a NUL-terminated C string.
//
// Fix: the original returned a pointer to a stack-local buffer — undefined
// behaviour, since the storage dies when the function returns. The buffer
// is now static, so the returned pointer stays valid; each call overwrites
// the previous result, which is fine for the sequential use in CheckPwd().
// A bounds check also prevents overflowing the 40-byte buffer.
char* Pwd () {
    static char S[40]; int i=0;
    while (i < 39 && (S[i]= getch()) != ENTER )
    { printf ("%c", '*') ; i++ ;
    }
    S[i]='\0';
    return S;
}
// Prompts for the password up to three times; returns 1 as soon as the
// input matches PASSWORD, or 0 after three failed attempts.
// (The retry message is user-facing Vietnamese: "wrong password, try again".)
int CheckPwd () {
    int dem =0;
    for ( dem =1 ; dem <=3 ; dem++)
    { printf( "Password :");
      if (strcmp(Pwd(),PASSWORD) ==0) return 1;
      else printf ( "\nPassword sai. Hay nhap lai\n") ;
    }
    return 0;
}
// Moves the console cursor to column x, row y (Win32 replacement for the
// Borland conio gotoxy; coordinates are 0-based here).
void gotoxy(short x,short y)
{
    HANDLE hConsoleOutput;
    COORD Cursor_an_Pos = { x,y};
    hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE);
    SetConsoleCursorPosition(hConsoleOutput , Cursor_an_Pos);
}
// Returns the current console cursor column (0-based).
int wherex( void )
{
    HANDLE hConsoleOutput;
    hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE);
    CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
    GetConsoleScreenBufferInfo(hConsoleOutput, &screen_buffer_info);
    return screen_buffer_info.dwCursorPosition.X;
}
// Returns the current console cursor row (0-based).
int wherey( void )
{
    HANDLE hConsoleOutput;
    hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE);
    CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
    GetConsoleScreenBufferInfo(hConsoleOutput, &screen_buffer_info);
    return screen_buffer_info.dwCursorPosition.Y;
}
// Clears the console from the cursor position to the end of the current
// line (Win32 replacement for Borland's clreol), leaving the cursor in
// place.
void clreol( ) {
    COORD coord;
    DWORD written;
    CONSOLE_SCREEN_BUFFER_INFO info;
    GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &info);
    coord.X = info.dwCursorPosition.X;
    coord.Y = info.dwCursorPosition.Y;
    // Fix: the original computed dwSize.X - X * Y (operator-precedence bug),
    // producing a wrong — potentially huge or negative — cell count. Only the
    // cells from the cursor to the end of the current line must be blanked.
    FillConsoleOutputCharacter (GetStdHandle(STD_OUTPUT_HANDLE), ' ',
        info.dwSize.X - info.dwCursorPosition.X, coord, &written);
    gotoxy (info.dwCursorPosition.X , info.dwCursorPosition.Y );
}
// Sets the console text (foreground) colour to the low 4 bits of 'color',
// preserving the current background attribute bits.
void SetColor(WORD color)
{
    HANDLE hConsoleOutput;
    hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE);
    CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
    GetConsoleScreenBufferInfo(hConsoleOutput, &screen_buffer_info);
    WORD wAttributes = screen_buffer_info.wAttributes;
    color &= 0x000f;        // keep only the foreground nibble
    wAttributes &= 0xfff0;  // clear the old foreground bits
    wAttributes |= color;
    SetConsoleTextAttribute(hConsoleOutput, wAttributes);
}
// Sets the console background colour to the low 4 bits of 'color',
// preserving the current foreground attribute bits.
void SetBGColor(WORD color)
{
    HANDLE hConsoleOutput;
    hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE);
    CONSOLE_SCREEN_BUFFER_INFO screen_buffer_info;
    GetConsoleScreenBufferInfo(hConsoleOutput, &screen_buffer_info);
    WORD wAttributes = screen_buffer_info.wAttributes;
    color &= 0x000f;
    color <<= 4;            // background occupies bits 4-7 of the attribute
    wAttributes &= 0xff0f;  // clear the old background bits
    wAttributes |= color;
    SetConsoleTextAttribute(hConsoleOutput, wAttributes);
}
// Resizes the console window to width x height pixels, keeping its current
// top-left screen position.
void resizeConsole(int width, int height)
{
    HWND console = GetConsoleWindow();
    RECT r;
    GetWindowRect(console, &r);
    MoveWindow(console, r.left, r.top, width, height, TRUE);
}
// Reads one keypress without echo. Extended keys (arrows, function keys)
// arrive from _getch() as a 0 or -32 prefix byte followed by the scan
// code; in that case the scan code is read and returned instead.
int GetKey() {
    char key;
    fflush(stdin);
    key = _getch();
    if (key == -32 || key == 0)
        return _getch();
    else return key;
}
// Shows the error message 's' at (50, 24) for 2.5 seconds, then clears a
// line and restores the cursor to where it was before the call.
// NOTE(review): the message is printed on row 24 but the clear targets
// row 26 — looks like a mismatch; confirm which row should be wiped.
void BaoLoi (char *s){
    int x=wherex() , y=wherey();
    gotoxy (50,24);
    cout <<s;
    Sleep(2500);
    gotoxy(50,26);
    clreol();
    gotoxy(x,y);
}
// Reads a non-empty line from stdin into S and upper-cases it in place.
// The 'tieude' (title) parameter is accepted for compatibility but is no
// longer printed (the cout was already commented out in the original).
//
// SECURITY: gets() cannot bound-check and was removed from the standard —
// replace it with fgets(S, <buffer size>, stdin) once the callers' buffer
// sizes are known.
void NhapChuoi (char *tieude, char *S) {
    fflush(stdin);
    do{
        fflush(stdin);
        gets(S);
    }while (strcmp(S,"")==0);
    // Fix: strupr() converts in place and returns its own argument, so the
    // old "a = strupr(S); strcpy(S, a);" was a self-copy over overlapping
    // buffers — pointless and undefined behaviour for strcpy.
    strupr(S);
}
// Displays 'str' at (x, y) in black text on 'bgColor' for 'time' seconds,
// then overwrites it with spaces on the default grey background.
void Alert(string str ,int x, int y, float time, int bgColor = 4) {
    gotoxy(x, y);
    SetColor(0);
    SetBGColor(bgColor);
    std::cout << str;
    Sleep(time*1000);
    gotoxy(x, y);
    SetBGColor(7);
    std::cout << " ";
}
// Hides the blinking console cursor ("an con tro" = hide cursor).
void anConTro()
{
    HANDLE hOut;
    CONSOLE_CURSOR_INFO ConCurInf;
    hOut = GetStdHandle(STD_OUTPUT_HANDLE);
    ConCurInf.dwSize = 10;
    ConCurInf.bVisible = FALSE;
    SetConsoleCursorInfo(hOut, &ConCurInf);
}
// Sets the full console attribute word (foreground + background) at once;
// values > 15 therefore also change the background.
void TextColor(int color) {
    SetConsoleTextAttribute(GetStdHandle(STD_OUTPUT_HANDLE), color);
}
// Restores the scheme used for unselected items: red text on grey.
void Normal () {
    SetColor(4);
    SetBGColor(7);
}
// Switches to the highlight scheme for the selected item: cyan on blue.
void HighLight () {
    SetColor(11);
    SetBGColor(1);
}
// Clears the whole console (Borland-style clrscr via the cls shell command).
void clrscr() {
    system("cls");
}
| f22716460806ac9ee41489bc38177451c8badbd5 | [
"Markdown",
"C++"
] | 3 | Markdown | bolidenghia/QuanlySanBay | 776cdf3cacc6e1d030669fba25b20f447920a174 | 2455886d8956a22caacb221c59a7b0da0ecf7cfa |
refs/heads/master | <file_sep>SELECT *
INTO db_work..ПозицияДокументаУчета_backup_160426_0946
FROM ПозицияДокументаУчета
/*
--restore
UPDATE ПозицияДокументаУчета
SET ПереходИсточник = b.ПереходИсточник
FROM ПозицияДокументаУчета a INNER JOIN db_work..ПозицияДокументаУчета_backup_160426_0909 b
ON a.Oid = b.oid
*/<file_sep>SELECT Наименование, КонструкторскоеОбозначение
FROM НоменклатурнаяПозиция WITH (NOLOCK)
WHERE Oid IN (SELECT НП
FROM СпецификацияКомпонент WITH (NOLOCK)
WHERE Работа IN (SELECT Работа
FROM СпецификацияОперация WITH (NOLOCK)
WHERE МестоВыполнения IN (SELECT Oid
FROM ПроизводственнаяЕдиница WITH (NOLOCK)
WHERE Код = '1240')
AND isnull(GCRecord, 0) = 0)
AND isnull(GCRecord, 0) = 0)
--and LEN(RTRIM(LTRIM(isnull(КонструкторскоеОбозначение, ''))))=0
AND isnull(GCRecord, 0) = 0
ORDER BY Наименование<file_sep>--Создание заказ-нарядов
-- сборки 62, 59Н6, 20508490, 234
-- select * from _im309L
-- select a.[Служебная] from _im309L a INNER JOIN tblWorkNotes b on a.[Служебная] = b.n_doc
-- update _im309L set заказ='20509392' where isnull([Заказ], '') = ''
/*
SELECT *
INTO db_work..tblWorkNotes_vlm309_170630_1000
FROM db_normativ..tblWorkNotes
SELECT *
INTO db_work..tblWorkNotesPlus_vlm309_170630_1000
FROM db_normativ..tblWorkNotesPlus
SELECT *
INTO db_work..tblWorkNotesCorrect_vlm309_170630_1000
FROM db_normativ..tblWorkNotesCorrect
SELECT *
INTO db_work..pdm_tblnorderarchiv_vlm309_170630_1000
FROM db_pdm_calc..pdm_tblnorderarchiv
SELECT *
INTO db_work..pdm_tblnorder_specification_vlm309_170630_1000
FROM db_pdm_calc..pdm_tblnorder_specification
SELECT *
INTO db_work..pdm_tblrvcalcarchiv_vlm309_170630_1000
FROM db_pdm_calc..pdm_tblrvcalcarchiv
SELECT *
INTO db_work..pdm_tblpreparatorylistarchiv_vlm309_170630_1000
FROM db_pdm_calc..pdm_tblpreparatorylistarchiv
SELECT *
INTO db_work..pdm_tbltestlistarchiv_vlm309_170630_1000
FROM db_pdm_calc..pdm_tbltestlistarchiv
*/
DELETE FROM _im309L
WHERE [сборка] IN ( SELECT [сборка]
FROM _im309L
GROUP BY [сборка]
HAVING COUNT(*) > 1
)
UPDATE _im309L
SET [Служебная] = 'убрать' + [Служебная]
FROM _im309L a
INNER JOIN db_normativ..tblWorkNotes b ON a.[Служебная] = b.N_doc
WHERE [Служебная] NOT LIKE '%убрать%'
INSERT INTO db_normativ..tblWorkNotes ( N_doc, Data_doc, Zakaz, Sborka, Izd, N_rsi, C_podr, R_podr, C_podr1, R_podr1 )
SELECT [Служебная], GETDATE(), /*'20309230'*/ [Заказ], [Сборка], '000-309VLM', [Сборка], sto_sekt ,
sto_sotr + ISNULL(' (' + sto_tele + ')', ''), kto_sekt, kto_sotr + ISNULL(' (' + kto_tele + ')', '')
FROM _im309L
WHERE [Служебная] NOT LIKE '%убрать%'
GROUP BY [Служебная], [Сборка], sto_sekt, sto_sotr, sto_tele, kto_sekt, kto_sotr, kto_tele, [Заказ]
INSERT INTO db_normativ..tblWorkNotesPlus ( WorkNote, Blank, NameBlank, Value, ValueZip )
SELECT b.[id], [Обозначение], [Наименование], [На изд], [На ЗИП]
FROM _im309L a, db_normativ..tblWorkNotes b
WHERE a.[Служебная] = b.N_doc
AND b.Izd = '000-309VLM'
AND LEN([Наименование]) < 125
INSERT INTO db_normativ..tblWorkNotesCorrect ( WorkNote, Blank, NameBlank, [Value], Sborka )
SELECT b1.[id], a1.[Обозначение], a1.[Наименование], 0, 'сб' + a1.[Входящая]
FROM ( SELECT b.Sborka, a.[Обозначение], a.[Наименование], a.[Основная], a.[Входящая]
FROM _im309E a, db_normativ..tblWorkNotes b
WHERE a.[Основная] LIKE b.Sborka
GROUP BY b.Sborka, a.[Обозначение], a.[Наименование], a.[Основная], a.[Входящая]
) a1 , db_normativ..tblWorkNotes b1
WHERE a1.[Основная] LIKE b1.Sborka
AND b1.Izd LIKE '000-309VLM'
INSERT INTO db_pdm_calc..pdm_tblnorderarchiv ( norder, crdate, definition )
SELECT [Служебная], GETDATE(), '000-309VLM'
FROM _im309L
WHERE [Служебная] NOT LIKE '%убрать%'
GROUP BY [Служебная]
INSERT INTO db_pdm_calc..pdm_tblnorder_specification ( norderarchiv_id, blank, definition, val, type_element_id, crdate )
SELECT b.[id], [Обозначение], [Наименование], ISNULL([Всего], 0), 1, GETDATE()
FROM _im309L a, db_pdm_calc..pdm_tblnorderarchiv b
WHERE a.[Служебная] = b.norder
AND b.definition = '000-309VLM'
INSERT INTO db_pdm_calc..pdm_tblrvcalcarchiv ( norder, crdate, definition )
SELECT [Служебная], GETDATE(), '000-309VLM'
FROM _im309L
WHERE [Служебная] NOT LIKE '%убрать%'
GROUP BY [Служебная]
INSERT INTO db_pdm_calc..pdm_tblpreparatorylistarchiv ( norder, crdate, definition )
SELECT [Служебная], GETDATE(), '000-309VLM'
FROM _im309L
WHERE [Служебная] NOT LIKE '%убрать%'
GROUP BY [Служебная]
INSERT INTO db_pdm_calc..pdm_tbltestlistarchiv ( norder, crdate, definition )
SELECT [Служебная], GETDATE(), '000-309VLM'
FROM _im309L
WHERE [Служебная] NOT LIKE '%убрать%'
GROUP BY [Служебная]<file_sep>IF OBJECT_ID('tempdb..#t1') IS NOT NULL
DROP TABLE #t1
SELECT doc1.Номер AS НомерДУ, doc1.СтроковоеПредставление AS ДУ, pdy1.Количество,
edizm1.Код as [ед.изм.],
/*doc1.ТипДокумента, */bdy1.ДатаПроведения ,
np1.Наименование,-- CAST(pdy1.Количество AS INT) AS am1 ,
--CAST(CAST(pdy1.Количество AS INT) AS DECIMAL(19, 9)) AS am2б
pe1.Код AS подразделение, np1.КонструкторскоеОбозначение, doc1.HistoryModifyActionExecutor
INTO #t1
FROM Документ doc1 ( NOLOCK )
INNER JOIN БазовыйДокументУчета bdy1 ( NOLOCK ) ON doc1.Oid = bdy1.Oid
INNER JOIN ПозицияДокументаУчета pdy1 WITH ( NOLOCK ) ON doc1.Oid = pdy1.ДокументУчета
INNER JOIN ( SELECT *
FROM НоменклатурнаяПозиция np1 ( NOLOCK )
WHERE ISNULL(КорректироватьКолвоПоНаличию, 0) = 0 AND (LTRIM(RTRIM(ISNULL(КонструкторскоеОбозначение, ''))) = ''
--OR LTRIM(RTRIM(ISNULL(КонструкторскоеОбозначение, ''))) like '%рст%'
)
) np1 ON pdy1.НоменклатурнаяПозиция = np1.Oid
INNER JOIN ПроизводственнаяЕдиница pe1
ON bdy1.МХПоставщик = pe1.Oid
INNER JOIN ЕдиницаИзмерения edizm1
ON pdy1.ЕИ = edizm1.Oid
--LEFT JOIN SecuritySystemUser users1
--ON doc1.HistoryModifyActionExecutor
WHERE doc1.ТипДокумента = 'Galaktika.PRM.ATP.Module.КорректировкаОстатков'
AND ISNULL(doc1.GCRecord, 0) = 0
AND ISNULL(pdy1.GCRecord, 0) = 0
AND bdy1.Проведен = 1
AND bdy1.ДатаПроведения > DATEADD(dd, -60, GETDATE())
--AND CAST(pdy1.Количество AS varchar)
--AND CEILING(pdy1.Количество) <> pdy1.Количество
AND CAST(CAST(pdy1.Количество AS INT) AS DECIMAL(19, 9)) <> pdy1.Количество
and pe1.Код <> '7800'
--ORDER BY bdy1.ДатаПроведения DESC
SELECT *
FROM #t1
ORDER BY ДатаПроведения DESC
SELECT подразделение, count(*)
FROM #t1
GROUP BY подразделение
ORDER BY подразделение
SELECT LEFT(подразделение,2), SUM(cc)
FROM (
SELECT подразделение, count(*) AS cc
FROM #t1
GROUP BY подразделение
--ORDER BY Код
) qwe
GROUP BY LEFT(подразделение,2)
ORDER BY SUM(cc)
--select CEILING($123.45)
SELECT НомерДУ,ДатаПроведения,подразделение,HistoryModifyActionExecutor AS Пользователь , Наименование, Количество, [ед.изм.],
ДУ
FROM #t1
WHERE LEFT(подразделение ,2)= '36'
AND ((Наименование like 'Клей%' AND Количество < 0.8) OR (Наименование NOT like 'Клей%'))
AND ((Наименование like 'Лак%' AND Количество < 0.8) OR (Наименование NOT like 'Лак%'))
AND ((Наименование like 'Эмаль%' AND Количество < 0.8) OR (Наименование NOT like 'Эмаль%'))
-- AND ((Наименование = '' AND Количество < 0.8) OR (Наименование <> ''))
AND ((Наименование like 'Провод%' AND Количество < 0.01) OR (Наименование NOT LIKE 'Провод%'))
AND ((Наименование like 'ПроволокаММ%' AND Количество < 0.001) OR (Наименование not like 'ПроволокаММ%'))
AND ((Наименование like 'Трубка%' AND Количество < 0.01) OR (Наименование NOT LIKE 'Трубка%'))
AND ((Наименование like 'Фольга%' AND Количество < 0.01) OR (Наименование NOT LIKE 'Фольга%'))
AND ((Наименование LIKE 'Пленка%' AND Количество < 0.001) OR (Наименование NOT LIKE 'Пленка%'))
AND ((Наименование = 'Клей - герметик кремнийорганический ЭЛАСИЛ 137-83 ТУ6-02-1237-83' AND Количество < 0.1) OR (Наименование <> 'Клей - герметик кремнийорганический ЭЛАСИЛ 137-83 ТУ6-02-1237-83'))
--0.010000000 Провод МГТФ 0,07 ТУ 16-505.185-71
/*
0.000001000 Проволока ММ
0.010000000 Трубка 305 ТВ-40, 4, белая, 1 сорта ГОСТ 19034-82
0.010000000 Трубка 305 ТВ-40, 5, белая, 1 сорта ГОСТ 19034-82
0.010000000 Фольга ДПРНТ
0.800000000 Эмаль
0.000100000 Пленка ПМ-А, 40 ТУ 6-19-121-85
*/
--AND Наименование LIKE 'Проволока ММ%'
--ORDER BY ДатаПроведения DESC
ORDER BY Наименование, ДатаПроведения <file_sep>SELECT count(*)
FROM (
SELECT count(*) AS cc , doc.Код
FROM СпецификацияОперация op
LEFT JOIN ОбъектПланирования pl1
ON op.Работа = pl1.Oid
LEFT JOIN ОбъектПланирования pl2
ON pl1.Вышестоящий = pl2.Oid
LEFT JOIN Документ doc ON pl2.oid = doc.oid
INNER JOIN (SELECT a.assembly
FROM EnterpriseToolkit..plan_tbldpr1 a
INNER JOIN EnterpriseToolkit..plan_tbldpr1archiv b
ON a.archiv_id = b.id AND b.ddate IS NULL AND b.iyear = 2015
GROUP BY a.assembly
) qwe
ON doc.Код = qwe.assembly
WHERE op.Статус = 4
AND НомерПерехода = 4
GROUP BY doc.Код
)qqq<file_sep>IF OBJECT_ID('db_work..ga_report_table') IS NOT NULL
DROP TABLE db_work..ga_report_table
--
SELECT IDENTITY( INT ,1,1 ) AS id, b.Сборка, op.ОкончаниеФакт, np.КонструкторскоеОбозначение, np.Наименование, a.ОписаниеМаршрута,
CASE op.Статус WHEN 0 THEN 'Неспланирована'
WHEN 1 THEN 'Спланирована'
WHEN 2 THEN 'КИсполнению'
WHEN 3 THEN 'Начата'
WHEN 4 THEN 'Закончена'
WHEN 5 THEN 'ЗафиксированоВсе'
WHEN 6 THEN 'ЗафиксированСрок'
WHEN 7 THEN 'ЗафиксированРесурс'
WHEN 8 THEN 'Проект'
WHEN 9 THEN 'Подпроект'
WHEN 10 THEN 'Остановлена'
WHEN 11 THEN 'Отменена'
WHEN 12 THEN 'Закрыта'
ELSE 'неизвестное состояние'
END AS Стутус, op.Начало AS Запуск
INTO db_work..ga_report_table
FROM ЗаказНаПроизводство a
LEFT JOIN Документ b ON a.Oid = b.Oid
LEFT JOIN ОбъектПланирования op ON a.Oid = op.Oid
--INNER JOIN [СпецификацияОперация] sop ON op.oid = sop.Работа
LEFT JOIN НоменклатурнаяПозиция np ON op.НоменклатурнаяПозиция = np.Oid
WHERE --op.Статус > 3
ISNULL(b.GCRecord, 0) = 0
AND ISNULL(np.GCRecord, 0) = 0
GROUP BY b.Сборка, op.ОкончаниеФакт, np.КонструкторскоеОбозначение, np.Наименование, a.ОписаниеМаршрута, CASE op.Статус WHEN 0 THEN 'Неспланирована'
WHEN 1 THEN 'Спланирована'
WHEN 2 THEN 'КИсполнению'
WHEN 3 THEN 'Начата'
WHEN 4 THEN 'Закончена'
WHEN 5 THEN 'ЗафиксированоВсе'
WHEN 6 THEN 'ЗафиксированСрок'
WHEN 7 THEN 'ЗафиксированРесурс'
WHEN 8 THEN 'Проект'
WHEN 9 THEN 'Подпроект'
WHEN 10 THEN 'Остановлена'
WHEN 11 THEN 'Отменена'
WHEN 12 THEN 'Закрыта'
ELSE 'неизвестное состояние'
END, op.Начало
ORDER BY b.Сборка, np.КонструкторскоеОбозначение, np.Наименование
SELECT COUNT(*)
FROM db_work..ga_report_table
/*
СпецификацияОперация - Статус
public enum СтатусОперации
{
Неспланирована,=0
Спланирована,=1
КИсполнению,=2
Зафиксирована,=3
Начата,=4
Закончена=5
}
*/
/*
Иконка статуса задания(ЗНП)
namespace Galaktika.PRM.WOM.Module
{
using System;
using System.ComponentModel;
[Description("WOM.ИзображениеСтатусаЗадания. Перечисление")]
public enum ИзображениеСтатусаЗадания
{
Неспланирована,
Спланирована,
КИсполнению,
Начата,
Закончена,
ЗафиксированоВсе,
ЗафиксированСрок,
ЗафиксированРесурс,
Проект,
Подпроект,
Остановлена,
Отменена,
Закрыта
}
}
*/<file_sep>-- документы могли не попасть в data sync из за проблем в сборке модуля. потому что исключения не всегда пишуться
-- Find posted production receipt/issue documents that never made it into the
-- warehouse data-sync log (nniirt_tbl_WhSyncData_log): dsl.id IS NULL marks the
-- missing ones. Two mirrored queries: incoming (ПолучениеВПроизводстве,
-- direction = 1) and outgoing (ПередачаВПроизводстве, direction = 2), both
-- restricted to a fixed list of production-unit codes.
-- DMY so the literal '05/09/2017' below parses as 5 September 2017.
SET DATEFORMAT DMY
SELECT --doc1.Номер, doc1.ДатаПоследнейМодификации,
--bdy1.МХПоставщик,
bdy1.МХПолучатель, CAST(pdy1.Количество AS VARCHAR), np1.КонструкторскоеОбозначение, pdy1.ЭтапПроектаПоставщик ,
doc1.Номер, 1 , -- holded
1 , -- direction = in
bdy1.ДатаПроведения, pdy1.Oid AS pdy_oid
FROM
--ПередачаВПроизводстве pvp1 WITH ( NOLOCK )
ПолучениеВПроизводстве pvp1 WITH ( NOLOCK )
INNER JOIN Документ doc1 ( NOLOCK ) ON pvp1.Oid = doc1.Oid
-- Anti-join target: documents already present in the sync log.
LEFT JOIN nniirt_tbl_WhSyncData_log dsl ( NOLOCK ) ON doc1.Номер = CAST(dsl.doc_id AS VARCHAR)
INNER JOIN БазовыйДокументУчета bdy1 ( NOLOCK ) ON doc1.Oid = bdy1.Oid
INNER JOIN ПозицияДокументаУчета pdy1 ( NOLOCK ) ON pdy1.ДокументУчета = bdy1.Oid
INNER JOIN НоменклатурнаяПозиция np1 ( NOLOCK ) ON pdy1.НоменклатурнаяПозиция = np1.Oid
WHERE bdy1.ДатаПроведения > '05/09/2017 00:00:00' --DATEADD(dd, -2, GETDATE())
-- Keep only documents missing from the sync log.
AND dsl.id IS NULL
--AND bdy1.МХПоставщик
-- Receiver must be one of the monitored production units, supplier must not be
-- (i.e. material coming INTO the monitored units from outside).
AND bdy1.МХПолучатель IN ( SELECT Oid
FROM ПроизводственнаяЕдиница in_pe1 WITH ( NOLOCK )
WHERE Код IN ( N'3055-30к', N'3055-30п', N'3055-41к', N'3055-01п', N'3054-ср' ,
N'3054-р' , N'3055-00к', N'3054-г', N'3055-01к', N'3054', N'3055' ,
N'3050' , N'3055-1'
)
)
AND bdy1.МХПоставщик NOT IN ( SELECT Oid
FROM ПроизводственнаяЕдиница in_pe1 WITH ( NOLOCK )
WHERE Код IN ( N'3055-30к', N'3055-30п', N'3055-41к', N'3055-01п', N'3054-ср' ,
N'3054-р' , N'3055-00к', N'3054-г', N'3055-01к', N'3054', N'3055' ,
N'3050' , N'3055-1'
)
)
-- Only posted documents with a supplier project stage set.
AND ISNULL(bdy1.Проведен, 0) > 0
AND pdy1.ЭтапПроектаПоставщик IS NOT NULL
ORDER BY bdy1.ДатаПроведения
-- Mirror query: material going OUT of the monitored units (direction = 2).
SELECT --doc1.Номер, doc1.ДатаПоследнейМодификации,
bdy1.МХПоставщик ,
--bdy1.МХПолучатель,
CAST(pdy1.Количество AS VARCHAR), np1.КонструкторскоеОбозначение, pdy1.ЭтапПроектаПоставщик, doc1.Номер, 1 , -- holded
2 , -- direction = out
bdy1.ДатаПроведения, pdy1.Oid AS pdy_oid
FROM ПередачаВПроизводстве pvp1 WITH ( NOLOCK )
--ПолучениеВПроизводстве pvp1 WITH ( NOLOCK )
INNER JOIN Документ doc1 ( NOLOCK ) ON pvp1.Oid = doc1.Oid
LEFT JOIN nniirt_tbl_WhSyncData_log dsl ( NOLOCK ) ON doc1.Номер = CAST(dsl.doc_id AS VARCHAR)
INNER JOIN БазовыйДокументУчета bdy1 ( NOLOCK ) ON doc1.Oid = bdy1.Oid
INNER JOIN ПозицияДокументаУчета pdy1 ( NOLOCK ) ON pdy1.ДокументУчета = bdy1.Oid
INNER JOIN НоменклатурнаяПозиция np1 ( NOLOCK ) ON pdy1.НоменклатурнаяПозиция = np1.Oid
WHERE bdy1.ДатаПроведения > '05/09/2017 00:00:00' --DATEADD(dd, -2, GETDATE())
AND dsl.id IS NULL
AND bdy1.МХПоставщик IN ( SELECT Oid
FROM ПроизводственнаяЕдиница in_pe1 WITH ( NOLOCK )
WHERE Код IN ( N'3055-30к', N'3055-30п', N'3055-41к', N'3055-01п', N'3054-ср' ,
N'3054-р' , N'3055-00к', N'3054-г', N'3055-01к', N'3054', N'3055' ,
N'3050' , N'3055-1'
)
)
AND bdy1.МХПолучатель NOT IN ( SELECT Oid
FROM ПроизводственнаяЕдиница in_pe1 WITH ( NOLOCK )
WHERE Код IN ( N'3055-30к', N'3055-30п', N'3055-41к', N'3055-01п', N'3054-ср' ,
N'3054-р' , N'3055-00к', N'3054-г', N'3055-01к', N'3054', N'3055' ,
N'3050' , N'3055-1'
)
)
AND ISNULL(bdy1.Проведен, 0) > 0
AND pdy1.ЭтапПроектаПоставщик IS NOT NULL
ORDER BY bdy1.ДатаПроведения<file_sep>IF OBJECT_ID('tempdb..#t3') IS NOT NULL
-- Backfill write-off / issue locations (МХСписания / МХОтпуска) for the
-- specification components of one assembly ('350813'), using a fallback chain:
-- component's own value -> item -> item group -> operation's execution place.
-- The IF OBJECT_ID guard on the preceding line makes the DROP rerunnable.
DROP TABLE #t3
-- Snapshot the components belonging to the target assembly's documents.
SELECT *
INTO #t3
from СпецификацияКомпонент with (nolock)
WHERE Работа IN (
SELECT oid
from Документ with (nolock)
WHERE сборка = '350813')
UPDATE component
SET [МХСписания] = COALESCE(component.МХСписания, Item.МХСписания, itemGroup.МХСписания, operation.МестоВыполнения) ,
[МХОтпуска] = COALESCE(component.МХОтпуска, Item.МХОтпуска)
--SELECT component.[МХСписания], component.[МХСписания]
-- NOTE(review): legacy comma-join mixes СпецификацияОперация / ЗаказНаПроизводство
-- into the JOIN list; they are only constrained by the last three WHERE
-- predicates -- keep the WHERE clause intact or this becomes a cross join.
FROM СпецификацияОперация operation, ЗаказНаПроизводство g ,
#t3 t3
INNER JOIN СпецификацияКомпонент component ON t3.oid = component.Oid
INNER JOIN НоменклатурнаяПозиция item ON component.НП = item.Oid
LEFT JOIN НоменклатурнаяГруппа itemGroup ON item.НоменклатурнаяГруппа = itemGroup.Oid
INNER JOIN ОбъектПланирования work ON component.Работа = work.Oid
LEFT JOIN НоменклатурнаяПозиция workitem ON work.НоменклатурнаяПозиция = workitem.Oid
WHERE
--(component.[МХСписания] IS NULL OR component.[МХСписания] IS null)
--AND
-- NOTE(review): with the NULL filter commented out, every matching component
-- row is rewritten; COALESCE keeps existing values, so this is idempotent but
-- touches more rows than strictly necessary.
component.GCRecord IS NULL
AND operation.GCRecord IS NULL
AND work.Тип = 4
AND work.Исключена = 0
AND work.Статус NOT IN ( 5, 6 )
AND operation.Работа = g.Oid
AND operation.Работа = component.Работа
AND operation.НомерПерехода = component.НомерОперацииКуда
<file_sep>SELECT docs.Сборка, Код, Наименование, so.НомерПерехода, CASE so.Статус
-- (Continuation of the duplicate-operation report; the SELECT header and the
-- start of the CASE are on the previous line, fused with the file separator.)
-- qwe finds (Работа, НомерПерехода) pairs that occur more than once; qwe2 finds
-- pairs duplicated even at the same status. WHERE qwe2.id IS NULL therefore
-- keeps duplicates whose copies DIFFER in status; the commented-out variant
-- below selects the opposite set.
WHEN 0 THEN 'Неспланирована'
WHEN 1 THEN 'Спланирована'
WHEN 2 THEN 'КИсполнению'
WHEN 3 THEN 'Начата'
WHEN 4 THEN 'Закончена'
WHEN 5 THEN 'ЗафиксированоВсе'
WHEN 6 THEN 'ЗафиксированСрок'
WHEN 7 THEN 'ЗафиксированРесурс'
WHEN 8 THEN 'Проект'
WHEN 9 THEN 'Подпроект'
WHEN 10 THEN 'Остановлена'
WHEN 11 THEN 'Отменена'
WHEN 12 THEN 'Закрыта'
ELSE 'неизвестное состояние'
END AS Статус--, qwe2.Статус
FROM Документ docs (NOLOCK)
INNER JOIN СпецификацияОперация so
ON docs.Oid = so.Работа
-- Work/transition pairs that have more than one live row.
INNER JOIN (SELECT ROW_NUMBER() OVER (ORDER BY Работа, НомерПерехода) AS id, Работа, НомерПерехода
FROM СпецификацияОперация WITH (NOLOCK)
WHERE ISNULL(GCRecord, 0) = 0
GROUP BY Работа, НомерПерехода
HAVING COUNT(*) > 1
) qwe
ON so.Работа = qwe.Работа
AND so.НомерПерехода = qwe.НомерПерехода
-- Same pairs, additionally grouped by status: present here only when the
-- duplicates share the same status value.
LEFT JOIN (SELECT ROW_NUMBER() OVER (ORDER BY Работа, НомерПерехода) AS id, Работа, НомерПерехода, Статус
FROM СпецификацияОперация WITH (NOLOCK)
WHERE ISNULL(GCRecord, 0) = 0
GROUP BY Работа, НомерПерехода, Статус
HAVING COUNT(*) > 1
--ORDER BY Работа, НомерПерехода, Статус
) qwe2
ON qwe.Работа = qwe2.Работа
AND qwe.НомерПерехода = qwe2.НомерПерехода
/************************************/
-- among started
WHERE qwe2.id IS NULL
--AND so.Статус IN (0,1)
/************************************/
---- among unstarted
--WHERE qwe2.id IS NOT NULL
--AND so.Статус IN (0,1)
/************************************/
GROUP BY Код, Наименование, docs.Сборка, so.НомерПерехода, so.Статус --, qwe2.Статус
ORDER BY docs.Сборка, docs.Код, НомерПерехода
<file_sep>select *
-- Metadata introspection: inspect the most recently added class and its
-- members. (The 'select *' for this first query is on the previous line.)
from object_tblClasses
WHERE id = (select MAX(id)
from object_tblClasses
)
-- Members of the newest class, alphabetically by field name.
select *
from object_tblClassmembers
WHERE class_id = (select MAX(id)
from object_tblClasses)
order by fname
-- Single member looked up by hard-coded id (ad-hoc probe).
select *
from object_tblClassmembers
WHERE id = 3162
order by fname<file_sep>DELETE FROM glx01.db_work.dbo.t_for_youtrack_report
-- Bulk-load assembly numbers from local Excel workbooks (via the ACE OLE DB
-- provider / OPENROWSET, HDR=NO so f1..f5 are positional columns) into staging
-- tables on linked servers glx01 / SQL241EX.
-- NOTE(review): several identifiers below ('—борка', 'Ћист1',
-- 'nniirt_tblќчередь—борокЌа«Ќѕ') are encoding-corrupted -- most likely
-- 'Сборка', 'Лист1' and a mangled Cyrillic table name. Verify the real names
-- against the live schema before running; this text will not execute as-is.
INSERT INTO glx01.db_work.dbo.t_for_youtrack_report ( —борка )
SELECT f1 from OPENROWSET('Microsoft.ACE.OLEDB.12.0','Excel 8.0;HDR=NO;Database=d:\hlam\_current\07.07.2017.xlsx','select f1 from [Ћист1$]')
-- Manual single-value append to the same staging table.
INSERT INTO glx01.db_work.dbo.t_for_youtrack_report ( —борка )
VALUES ( '512009' -- —борка - varchar(50)
)
-- Refresh the assembly queue table: wipe then reload from the same workbook.
DELETE FROM glx01.[Galaktika.AMM.5.0.17.06.NR1.Lite].dbo.nniirt_tblќчередь—борокЌа«Ќѕ
INSERT INTO glx01.[Galaktika.AMM.5.0.17.06.NR1.Lite].dbo.nniirt_tblќчередь—борокЌа«Ќѕ ( —борка )
SELECT f1 from OPENROWSET('Microsoft.ACE.OLEDB.12.0','Excel 8.0;HDR=NO;Database=d:\hlam\_current\07.07.2017.xlsx','select * from [Ћист1$]')
-- Five-column load into the amm_link staging table on SQL241EX.
INSERT INTO SQL241EX.db_work.dbo.amm_link_170720_1002 ( id, oper, checking_status , oper_order_manual, username)
SELECT f1,f2,f3,f4,f5 from OPENROWSET('Microsoft.ACE.OLEDB.12.0','Excel 8.0;HDR=NO;Database=d:\hlam\_current\amm.xlsx',
'select f1,f2,f3,f4,f5 from [Ћист1$]')
<file_sep>IF OBJECT_ID('db_work..ga_report_table_ylanov1') IS NOT NULL
-- Variant of the ga_report_table build (same source file family): identical
-- query plus the project name (Документ doc2 via op.Проект) as an extra column.
-- NOTE(review): near-duplicate of the base report above -- consider a view or a
-- parameterized procedure to avoid the two copies drifting apart.
DROP TABLE db_work..ga_report_table_ylanov1
--
SELECT IDENTITY( INT ,1,1 ) AS id, b.Сборка, op.ОкончаниеФакт, np.КонструкторскоеОбозначение, np.Наименование, a.ОписаниеМаршрута,
-- Numeric status -> human-readable label (codes in the trailing comment block).
CASE op.Статус WHEN 0 THEN 'Неспланирована'
WHEN 1 THEN 'Спланирована'
WHEN 2 THEN 'КИсполнению'
WHEN 3 THEN 'Начата'
WHEN 4 THEN 'Закончена'
WHEN 5 THEN 'ЗафиксированоВсе'
WHEN 6 THEN 'ЗафиксированСрок'
WHEN 7 THEN 'ЗафиксированРесурс'
WHEN 8 THEN 'Проект'
WHEN 9 THEN 'Подпроект'
WHEN 10 THEN 'Остановлена'
WHEN 11 THEN 'Отменена'
WHEN 12 THEN 'Закрыта'
ELSE 'неизвестное состояние'
-- NOTE(review): 'Стутус' looks like a typo for 'Статус'; kept for compatibility.
END AS Стутус, op.Начало AS Запуск,
doc2.Наименование AS Проект
INTO db_work..ga_report_table_ylanov1
FROM ЗаказНаПроизводство a
LEFT JOIN Документ b ON a.Oid = b.Oid
LEFT JOIN ОбъектПланирования op ON a.Oid = op.Oid
--INNER JOIN [СпецификацияОперация] sop ON op.oid = sop.Работа
LEFT JOIN НоменклатурнаяПозиция np ON op.НоменклатурнаяПозиция = np.Oid
LEFT JOIN Документ doc2
ON op.Проект = doc2.Oid
WHERE --op.Статус > 3
ISNULL(b.GCRecord, 0) = 0
AND ISNULL(np.GCRecord, 0) = 0
AND isnull(doc2.GCRecord, 0) = 0
-- CASE repeated because T-SQL GROUP BY cannot use a select-list alias.
GROUP BY b.Сборка, op.ОкончаниеФакт, np.КонструкторскоеОбозначение, np.Наименование, a.ОписаниеМаршрута, CASE op.Статус WHEN 0 THEN 'Неспланирована'
WHEN 1 THEN 'Спланирована'
WHEN 2 THEN 'КИсполнению'
WHEN 3 THEN 'Начата'
WHEN 4 THEN 'Закончена'
WHEN 5 THEN 'ЗафиксированоВсе'
WHEN 6 THEN 'ЗафиксированСрок'
WHEN 7 THEN 'ЗафиксированРесурс'
WHEN 8 THEN 'Проект'
WHEN 9 THEN 'Подпроект'
WHEN 10 THEN 'Остановлена'
WHEN 11 THEN 'Отменена'
WHEN 12 THEN 'Закрыта'
ELSE 'неизвестное состояние'
END, op.Начало, doc2.Наименование
ORDER BY b.Сборка, np.КонструкторскоеОбозначение, np.Наименование
-- Sanity check: row count of the freshly built table.
SELECT COUNT(*)
FROM db_work..ga_report_table_ylanov1
/*
СпецификацияОперация - Статус
public enum СтатусОперации
{
Неспланирована,=0
Спланирована,=1
КИсполнению,=2
Зафиксирована,=3
Начата,=4
Закончена=5
}
*/
/*
Иконка статуса задания(ЗНП)
namespace Galaktika.PRM.WOM.Module
{
using System;
using System.ComponentModel;
[Description("WOM.ИзображениеСтатусаЗадания. Перечисление")]
public enum ИзображениеСтатусаЗадания
{
Неспланирована,
Спланирована,
КИсполнению,
Начата,
Закончена,
ЗафиксированоВсе,
ЗафиксированСрок,
ЗафиксированРесурс,
Проект,
Подпроект,
Остановлена,
Отменена,
Закрыта
}
}
*/<file_sep>когда ТО просит массово обновить маршруты из Excel-файла
в этом случае <NAME>. создает новую таблицу blankrouteXX в базе db_work на sql04
<file_sep>SELECT is_read_committed_snapshot_on, snapshot_isolation_state_desc, snapshot_isolation_state
FROM sys.databases
WHERE name = 'Galaktika.AMM.5.0.17.06.NR1.Lite'<file_sep>SELECT a.* , b.C_PODR, b.TN, b.N_FAM, b.N_NAME, b.N_FATHERS, b.D_OPEN,
b.D_CLOSE
FROM tblRegister a
LEFT JOIN kop_tblUIDRLC b
ON a.uid = b.UID
WHERE a.sdate = '20160331'
ORDER BY a.stime
SELECT *
FROM (
SELECT a.uid
FROM tblRegister a
WHERE a.sdate = '20160331') qwe
GROUP BY uid
HAVING count(*) >1
SELECT count(*)
FROM tblRegister a
WHERE a.sdate = '20160331'
<file_sep>SELECT pee.Код AS участок, np1.Наименование as [материал на контроле]
FROM СпецификацияОперация so WITH ( NOLOCK )
INNER JOIN СпецификацияКомпонент sk ( NOLOCK ) ON so.Работа = sk.Работа
AND so.НомерПерехода = sk.НомерОперацииКуда
INNER JOIN ( SELECT *
FROM НоменклатурнаяПозиция WITH ( NOLOCK )
WHERE ISNULL(КорректироватьКолвоПоНаличию, 0) = 0
AND ISNULL(Наименование, '') <> ''
AND ISNULL(КонструкторскоеОбозначение, '') = ''
AND ISNULL(GCRecord, 0) = 0
) np1 ON sk.НП = np1.Oid
INNER JOIN ( SELECT pe_b.Oid, pe_b.Код
FROM ПроизводственнаяЕдиница pe_a ( NOLOCK )
INNER JOIN ПроизводственнаяЕдиница pe_b ( NOLOCK ) ON pe_a.Oid = pe_b.Вышестоящий
WHERE pe_a.Код IN ( '3600' )
AND ISNULL(pe_a.GCRecord, 0) = 0
AND ISNULL(pe_b.GCRecord, 0) = 0
) pee ON so.МестоВыполнения = pee.Oid
WHERE ISNULL(so.GCRecord, 0) = 0
GROUP BY Наименование, pee.Код
ORDER BY pee.Код<file_sep>
select *
INTO db_work..НоменклатурнаяПозиция_backup_170427_0952
from НоменклатурнаяПозиция with (nolock)
UPDATE dbo.НоменклатурнаяПозиция
SET КорректироватьКолвоПоНаличию = 0
WHERE Наименование in (
SELECT DEFINITION COLLATE Cyrillic_General_CI_AS
FROM SQL77OKOM.db_logist.dbo.vw_omts_ElementsForImportToAmm_checking
WHERE ISNULL(checking_state, 0) > 0 --remark = 'ок'
)
SELECT *
FROM dbo.НоменклатурнаяПозиция
WHERE Наименование in (
SELECT DEFINITION COLLATE Cyrillic_General_CI_AS
FROM SQL77OKOM.db_logist.dbo.vw_omts_ElementsForImportToAmm_checking
WHERE ISNULL(checking_state, 0) > 0 --remark = 'ок'
)<file_sep>SELECT COUNT(*) , sdate
FROM kop_tblRegister
GROUP BY sdate
ORDER BY sdate
SELECT *
INTO tblRegister_backup_160311
FROM tblRegister
DELETE FROM dbo.tblRegister
WHERE sdate NOT IN (
SELECT sdate
FROM tblRegister
WHERE sdate LIKE '2016%'
GROUP BY sdate
)
-- or
DELETE FROM dbo.tblRegister
WHERE CAST(SUBSTRING(sdate, 1, 4) AS INT) = 2016 AND
CAST(SUBSTRING(sdate, 5, 2) AS INT) < 10<file_sep>IF OBJECT_ID('tempdb..#t1') IS NOT NULL
DROP TABLE #t1
SELECT
[Ошибка], count(*) AS cc
INTO #t1
FROM [nniirt_tblПланЦехаСборкаОшибки] a
GROUP BY Ошибка
ORDER BY [Ошибка]
SELECT case when cc is not null then qwe.Ошибка ELSE '' END AS Ошибка_, isnull(CAST(cc AS varchar), '') AS [Кол-во ошибок_], isnull(Сборка, '') сборка_, isnull(Обозначение, '') обозначение_, isnull(ЕдиницаИзмерения, '') AS ЕдиницаИзмерения_
, isnull(Спецификация, '') AS Спецификация, isnull(Наименование, '') AS Наименование, isnull(CAST(Количество AS varchar), '') AS Количество,
isnull(qwe.Маршрут, '') AS Маршрут, isnull(qwe.Цех, '') AS Цех
from
(SELECT NULL AS OID, NULL AS Сборка, NULL AS Спецификация, NULL AS Обозначение, NULL AS Наименование, NULL AS Количество,
NULL AS ЕдиницаИзмерения, NULL AS Маршрут, NULL AS Цех, NULL AS Раздел, [Ошибка], cc
FROM #t1
UNION all
SELECT OID, Сборка, Спецификация, Обозначение, Наименование, Количество,
ЕдиницаИзмерения, Маршрут, Цех,
Раздел, Ошибка, NULL AS cc
FROM [nniirt_tblПланЦехаСборкаОшибки]
) qwe
--ON a.Ошибка = b.Ошибка
ORDER BY ошибка,Сборка
<file_sep>этим пользователям сбросили 26.04.16 сессии потому что они продлжали генерить кривые доки после замены dll на шаре
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>',
'<NAME>' | 4d92f96904916a7f5f9d9ceed74d6df1e362aeef | [
"SQL",
"Text"
] | 20 | SQL | isannn/MyWorkableSqlScripts | f5aec738c9e2b522d20a517317283f90c2f9c96f | d5fcc5488137cb335ab2bbbeae225fa7ce276e22 |
refs/heads/master | <repo_name>syedpeer/raink<file_sep>/content/parts/author.md
---
title: author
---
If you have questions, please contact me via email: [<EMAIL>](mailto:<EMAIL>)<file_sep>/Dockerfile
# Development/build image for the Gatsby site.
FROM node:slim
# gatsby-cli plus the toolchain node-gyp needs for native modules
# (libpng for image processing, make/g++/python for compilation);
# entr is a file-watcher used by the helper scripts. apt caches and
# temp files are removed in the same layer to keep the image small.
RUN npm install --global gatsby-cli && \
apt-get update && \
apt-get install entr libpng-dev make g++ python -y && \
# Clean
apt-get autoclean -y && \
rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \
mkdir -p /site
WORKDIR /site
# Site sources, content and build output are mounted from the host.
VOLUME ["/site", "/site/content", "/public"]
# Gatsby dev server default port.
EXPOSE 8000
COPY scripts/*.sh /
ENTRYPOINT ["bash", "/entry.sh"]
<file_sep>/src/styles/colors.js
// Color palette shared by the light and dark JSS themes.
// Keys are prefixed light_/dark_ and consumed by theme.light.js / theme.dark.js.
module.exports = {
  // --- light theme ---
  light_bg: "#F0F0D8",
  // light_bg: "#EEEEEE",
  light_base_text: "#333333",
  light_accent: "#58B2DC",
  light_gray: "#555555",
  light_lightGray: "#bbbbbb",
  light_lines: "#dedede",
  light_panel: "#FFF",
  light_scroll: `rgba(0,0,0,0.2)`,
  // --- dark theme ---
  dark_bg: "#333333",
  dark_base_text: "#FFCCCC",
  dark_accent: "#FFFF99",
  dark_gray: "#CCCCCC",
  dark_lightGray: "#CCCCCC",
  dark_lines: "#666666",
  dark_panel: "#616161",
  dark_scroll: `rgba(0,0,0,0.8)`
};
<file_sep>/src/components/Post/PostHeader.js
import React from "react";
import PropTypes from "prop-types";
import injectSheet from "react-jss";
import Chip from 'material-ui/Chip';
import Avatar from 'material-ui/Avatar';
import avatar from "../../images/avatar.svg";
import lang from "../../../content/meta/lang";
const styles = theme => ({
header: {
margin: "0 0 3em"
},
title: {
color: theme.main.colors.title,
fontSize: `${theme.main.fonts.title.size}em`,
letterSpacing: "-0.04em",
fontWeight: theme.main.fonts.title.weight,
lineHeight: theme.main.fonts.title.lineHeight,
margin: "0 0 0.4em",
[`@media (min-width: ${theme.mediaQueryTresholds.M}px)`]: {
fontSize: `${theme.main.fonts.title.sizeM}em`
},
[`@media (min-width: ${theme.mediaQueryTresholds.L}px)`]: {
fontSize: `${theme.main.fonts.title.sizeL}em`,
letterSpacing: "-0.05em"
}
},
subTitle: {
color: theme.main.colors.subTitle,
fontSize: `${theme.main.fonts.subTitle.size}em`,
lineHeight: theme.main.fonts.subTitle.lineHeight,
fontWeight: theme.main.fonts.subTitle.weight,
[`@media (min-width: ${theme.mediaQueryTresholds.M}px)`]: {
fontSize: `${theme.main.fonts.subTitle.sizeM}em`
},
[`@media (min-width: ${theme.mediaQueryTresholds.L}px)`]: {
fontSize: `${theme.main.fonts.subTitle.sizeL}em`
}
},
avatar: {
marginLeft: ".4em",
width: "1.5em",
height: "1.5em",
},
meta: {
fontSize: `${theme.main.fonts.meta.size}em`,
fontWeight: theme.main.fonts.meta.weight,
color: theme.main.colors.meta
},
tags: {
float: "right",
margin: ".21em",
[`@media (max-width: ${theme.mediaQueryTresholds.M}px)`]: {
display: "none"
},
},
alert: {
margin: "2em",
alignItems: "center",
textAlign: "center",
justifyContent: 'center',
flexWrap: 'wrap',
[`@media (max-width: ${theme.mediaQueryTresholds.M}px)`]: {
display: "none"
},
}
});
// Minimal date formatter supporting "y+" (year), "M+" (month) and "d+" (day)
// tokens, e.g. "yyyy-MM-dd". Multi-character tokens are zero-padded.
// NOTE(review): extending Date.prototype is a global side effect of loading
// this module; other files may rely on it, so it is left in place. A plain
// helper function would be safer.
Date.prototype.format = function(fmt) {
  // Token -> value map for month/day (month is 0-based in JS, hence +1).
  var o = {
    "M+" : this.getMonth()+1,
    "d+" : this.getDate(),
  };
  // Replace the year token with the last RegExp.$1.length digits of the year.
  if(/(y+)/.test(fmt)) {
    fmt=fmt.replace(RegExp.$1, (this.getFullYear()+"").substr(4 - RegExp.$1.length));
  }
  for(var k in o) {
    if(new RegExp("("+ k +")").test(fmt)){
      // Single-char token: raw value; longer token: left-pad with "00".
      fmt = fmt.replace(RegExp.$1, (RegExp.$1.length==1) ? (o[k]) : (("00"+ o[k]).substr((""+ o[k]).length)));
    }
  }
  return fmt;
}
const PostHeader = props => {
const { classes, title, subTitle, date, tags } = props;
// const ffdate = date.replace(/[^0-9]/g,"/"); // Firefox date format
const ago = parseInt((new Date() - (new Date(date)))/86400000);
function postDate(dateString) {
const dateObj = new Date(dateString);
const localDate = dateObj.format("yyyy-MM-dd");
if (ago === 0) {
return <p>{lang.published_today}({localDate})</p>;
} else if (ago === 1) {
return <p>{lang.published_yesterday}({localDate})</p>;
} else if (ago === 2) {
return <p>{lang.published_three_days}({localDate})</p>;
} else {
return <p>{lang.published_before}{ago}{lang.published_after}({localDate})</p>;
}
}
return (
<header className={classes.header}>
<h1 className={classes.title}>{title}</h1>
<h2 className={classes.subTitle}>{subTitle}</h2>
<div className={classes.meta}>
<Chip avatar={
<Avatar
alt="avatar"
src={avatar}
className={classes.avatar}
/>}
label={postDate(date)}
/>
{tags.map(data => {
return (
<Chip
label={data}
className={classes.tags}
/>
);
})}
</div>
<div className={classes.alert}>
{ago > 100 && <Chip label={lang.over_date} />}
</div>
</header>
);
};
PostHeader.propTypes = {
classes: PropTypes.object.isRequired,
title: PropTypes.string.isRequired,
subTitle: PropTypes.string,
date: PropTypes.string.isRequired
};
export default injectSheet(styles)(PostHeader);
<file_sep>/src/pages/search.js
import React from "react";
import PropTypes from "prop-types";
import injectSheet from "react-jss";
require("core-js/fn/array/find");
import Main from "../components/Main";
import Article from "../components/Main/Article";
import Search from "../components/Search";
import lang from "../../content/meta/lang";
import { ReactComponent as AlgoliaIcon } from "../images/svg-icons/algolia.svg";
const styles = theme => ({
footer: {
// margin: "0 0 3em",
display: "flex",
flexDirection: "row",
// justifyContent: "flex-start",
// alignContent: "center"
},
note: {
color: theme.main.colors.content,
fontSize: `${theme.main.fonts.content.size}em`,
letterSpacing: "-0.04em",
lineHeight: theme.main.fonts.content.lineHeight,
margin: "0 0 0.4em",
[`@media (min-width: ${theme.mediaQueryTresholds.M}px)`]: {
fontSize: `${theme.main.fonts.content.sizeM}em`
},
[`@media (min-width: ${theme.mediaQueryTresholds.L}px)`]: {
fontSize: `${theme.main.fonts.content.sizeL}em`,
letterSpacing: "-0.05em"
}
},
logo: {
width: "25px",
display: "block",
margin: "0.1em 0 0 0.5em",
[`@media (max-width: ${theme.mediaQueryTresholds.L}px)`]: {
width: "20px"
}
}
});
const SearchPage = props => {
const { data, classes } = props;
return (
<Main>
<Article>
<header className={classes.footer}>
<h1 className={classes.note}>{lang.search_by}</h1>
<a
className={classes.logo}
href="https://www.algolia.com"
rel="noopener noreferrer"
target="_blank"
>
<AlgoliaIcon />
</a>
</header>
<Search algolia={data.site.siteMetadata.algolia} />
</Article>
</Main>
);
};
SearchPage.propTypes = {
data: PropTypes.object.isRequired
};
export default injectSheet(styles)(SearchPage);
// eslint-disable-next-line no-undef
export const query = graphql`
query AlgoliaQuery {
site {
siteMetadata {
algolia {
appId
searchOnlyApiKey
indexName
}
}
}
}
`;
<file_sep>/src/layouts/index.js
import React from "react";
import injectSheet from "react-jss";
import { MuiThemeProvider } from "material-ui/styles";
import PropTypes from "prop-types";
import { connect } from "react-redux";
import theme from "../styles/theme";
import lightTheme from "../styles/theme.light";
import darkTheme from "../styles/theme.dark";
import globals from "../styles/globals";
import { setFontSizeIncrease, setIsWideScreen, setThemeMode } from "../state/store";
import asyncComponent from "../components/common/AsyncComponent/";
import Loading from "../components/common/Loading/";
import Navigator from "../components/Navigator/";
import ActionsBar from "../components/ActionsBar/";
import InfoBar from "../components/InfoBar/";
import { isWideScreen, timeoutThrottlerHandler } from "../utils/helpers";
const InfoBox = asyncComponent(
() =>
import("../components/InfoBox/")
.then(module => {
return module;
})
.catch(error => {}),
<Loading />
);
class Layout extends React.Component {
timeouts = {};
categories = [];
componentDidMount() {
this.props.setIsWideScreen(isWideScreen());
if (typeof window !== "undefined") {
window.addEventListener("resize", this.resizeThrottler, false);
}
}
componentWillMount() {
if (typeof localStorage !== "undefined") {
const localFSize = +localStorage.getItem("font-size-increase");
const storeFSize = this.props.fontSizeIncrease;
const storeTheme = this.props.themeMode;
const localTheme = localStorage.getItem("theme-mode");
if (localFSize && localFSize !== storeFSize && localFSize >= 1 && localFSize <= 1.5) {
this.props.setFontSizeIncrease(localFSize);
}
if (localTheme && localTheme !== storeTheme) {
this.props.setThemeMode(localTheme);
}
}
this.getCategories();
}
getCategories = () => {
this.categories = this.props.data.posts.edges.reduce((list, edge, i) => {
const category = edge.node.frontmatter.category;
if (category && !~list.indexOf(category)) {
return list.concat(edge.node.frontmatter.category);
} else {
return list;
}
}, []);
};
resizeThrottler = () => {
return timeoutThrottlerHandler(this.timeouts, "resize", 500, this.resizeHandler);
};
resizeHandler = () => {
this.props.setIsWideScreen(isWideScreen());
};
render() {
const { children, data, themeMode } = this.props;
const theme = themeMode == "light" ? lightTheme : darkTheme
// TODO: dynamic management of tabindexes for keybord navigation
return (
<MuiThemeProvider theme={theme}>
<div
style={{
position: "absolute",
top: 0,
left: 0,
bottom: 0,
right: 0,
overflow: "hidden",
background: theme.base.colors.background.color,
backgroundImage: theme.base.colors.background.image
}}
>
{children()}
<Navigator posts={data.posts.edges} />
<ActionsBar categories={this.categories} />
<InfoBar pages={data.pages.edges} parts={data.parts.edges} />
{this.props.isWideScreen && <InfoBox pages={data.pages.edges} parts={data.parts.edges} />}
</div>
</MuiThemeProvider>
);
}
}
Layout.propTypes = {
data: PropTypes.object.isRequired,
children: PropTypes.func.isRequired,
setIsWideScreen: PropTypes.func.isRequired,
isWideScreen: PropTypes.bool.isRequired,
fontSizeIncrease: PropTypes.number.isRequired,
setFontSizeIncrease: PropTypes.func.isRequired,
themeMode: PropTypes.string.isRequired,
setThemeMode: PropTypes.func.isRequired
};
const mapStateToProps = (state, ownProps) => {
return {
pages: state.pages,
isWideScreen: state.isWideScreen,
fontSizeIncrease: state.fontSizeIncrease,
themeMode: state.themeMode
};
};
const mapDispatchToProps = {
setIsWideScreen,
setFontSizeIncrease,
setThemeMode
};
export default connect(mapStateToProps, mapDispatchToProps)(injectSheet(globals)(Layout));
// eslint-disable-next-line no-undef
export const globalQuery = graphql`
query LayoutQuery {
posts: allMarkdownRemark(
filter: { id: { regex: "//posts//" } }
sort: { fields: [fields___prefix], order: DESC }
) {
edges {
node {
excerpt
fields {
slug
prefix
}
frontmatter {
title
subTitle
category
cover {
children {
... on ImageSharp {
resolutions(width: 90, height: 90, cropFocus: CENTER) {
...GatsbyImageSharpResolutions_tracedSVG
}
}
}
}
}
}
}
}
pages: allMarkdownRemark(
filter: { id: { regex: "//pages//" }, fields: { prefix: { regex: "/^\\d+$/" } } }
sort: { fields: [fields___prefix], order: ASC }
) {
edges {
node {
fields {
slug
prefix
}
frontmatter {
title
menuTitle
}
}
}
}
parts: allMarkdownRemark(filter: { id: { regex: "//parts//" } }) {
edges {
node {
html
frontmatter {
title
}
}
}
}
}
`;
<file_sep>/src/components/InfoBox/SocialIcons.js
import React from "react";
import PropTypes from "prop-types";
import injectSheet from "react-jss";
import config from "../../../content/meta/config";
import { ReactComponent as GithubIcon } from "../../images/svg-icons/github.svg";
import { ReactComponent as GitlabIcon } from "../../images/svg-icons/gitlab.svg";
import { ReactComponent as TelegramIcon } from "../../images/svg-icons/telegram.svg";
import { ReactComponent as TwitterIcon } from "../../images/svg-icons/twitter.svg";
import { ReactComponent as RssFeedIcon } from "../../images/svg-icons/rss.svg";
const styles = theme => ({
social: {
display: "flex",
justifyContent: "center",
flexWrap: "wrap"
},
link: {
display: "inline-block",
padding: "5px",
"&:hover": {
"& svg": {
fill: theme.info.colors.iconsHover
}
}
},
svg: {
width: "20px",
height: "20px",
fill: theme.info.colors.icons,
transition: "all .5s"
}
});
// Row of social-profile icon links, driven by config.authorSocialLinks.
// Each entry's `name` selects an SVG component from the icons map and is also
// used as the link title and React key.
// NOTE(review): a link whose name has no entry in `icons` would render
// <undefined /> and crash — assumes config only contains the five names below.
// NOTE(review): component name "Socialcons" looks like a typo for "SocialIcons";
// kept because it is referenced by the propTypes/export lines below.
const Socialcons = props => {
  const { classes } = props;
  const items = config.authorSocialLinks;
  // Supported platforms -> imported SVG components.
  const icons = {
    Github: GithubIcon,
    Gitlab: GitlabIcon,
    Telegram: TelegramIcon,
    Twitter: TwitterIcon,
    RSS: RssFeedIcon
  };
  return (
    <div className={classes.social}>
      {items.map(item => {
        const Icon = icons[item.name];
        return (
          <a
            href={item.url}
            key={item.name}
            className={classes.link}
            target="_blank"
            rel="noopener noreferrer"
            title={item.name}
          >
            <Icon className={classes.svg} />
          </a>
        );
      })}
    </div>
  );
};
Socialcons.propTypes = {
classes: PropTypes.object.isRequired
};
export default injectSheet(styles)(Socialcons);
<file_sep>/content/meta/lang.js
module.exports = {
// Menu and Info Box
home: "Home",
note: "Notes",
books: "Books",
reading: "Reading",
about: "About",
friends: "Friends",
contact: "Contact",
resume: "Resume",
build_with: "Build with:",
list_of_posts: "List Of Posts",
expand_the_list: "Expand the list",
expand_the_box: "Expand the box",
remove_filtering: "Remove filtering",
active_category_filter: "Active Category Filter:",
// Info Bar
more: "More",
// Action bar
back_to_home: "Back to home",
search: "Search",
filter: "Category Filter",
all_posts: "All Posts",
font_size: "Font size",
fullscreen: "Fullscreen",
theme: "Theme",
scroll_to_top: "Scroll to top",
// Search
search_by: "Search by",
search: "Search",
// Contact
email_to_me: "You can contact me via email: ",
contact_desc: "You can also contact me via my Telegram (https://t.me/zuolan), or contact me directly using the form below:",
send: "Send",
network_error: "Network error",
field_required: "Field required",
contact_name: "<NAME>",
contact_mail: "Your Email",
mail_not_valid: "Mail not valid",
contact_message: "Message",
// Post
tableOfContents: "Table Of Contents",
published_before: "Published ",
published_after: " days ago",
published_today: "Published Today",
published_yesterday: "Published Yesterday",
published_three_days: "Published 3 days ago",
over_date: "This post has been over 100 days since the date of publication.",
share_before: "Share",
share_after: "to",
}<file_sep>/content/parts/footnote.md
---
title: footnote
---
* Powered by [Gatsby](https://www.gatsbyjs.org/) | Theme by [Raink](https://github.com/izuolan/raink) (Forked from [PersonalBlog](https://github.com/greglobinski/gatsby-starter-personal-blog)) | Delivered by [Netlify](https://www.netlify.com/)<file_sep>/content/pages/2--friends/index.md
---
title: My Friends
menuTitle: Friends
---
Ok, my friends.
<file_sep>/content/pages/success/index.md
---
title: Send success
---
Thank you for your message. I will reply as soon as possible.<file_sep>/content/parts/info.md
---
title: info
---
<center>Go Python</center>
<center>Docker Kubernetes</center>
<center>React Vue</center><file_sep>/content/meta/zh-CN.js
module.exports = {
// Menu and Info Box
home: "首页",
note: "笔记",
books: "书架",
reading: "在读",
about: "关于",
friends: "邻居",
contact: "向我提问",
resume: "简历",
build_with: "网站基于以下技术构建:",
list_of_posts: "文章列表",
expand_the_list: "展开列表",
expand_the_box: "关闭列表",
remove_filtering: "清除过滤",
active_category_filter: "当前分类列表:",
// Info Bar
more: "更多",
// Action bar
back_to_home: "返回首页",
search: "搜索",
filter: "分类",
all_posts: "所有文章",
font_size: "字体大小",
fullscreen: "全屏",
theme: "主题",
scroll_to_top: "返回顶部",
// Search
search_by: "搜索基于",
search: "搜索",
// Contact
email_to_me: "您可以通过电子邮件联系我(即时收到):",
contact_desc: "您也可以通过我的 Telegram(https://t.me/zuolan)账号联系到我,或者直接使用下面表单与我联系(信息将发送到我的邮箱):",
send: "发送",
network_error: "网络错误",
field_required: "此输入框必填",
contact_name: "您的称呼",
contact_mail: "您的邮箱(我的回复将发送到您的邮箱中)",
mail_not_valid: "邮件格式无效",
contact_message: "您想对我说什么",
// Post
tableOfContents: "目录",
published_before: "文本于",
published_after: "天前发布",
published_today: "本文于今天发布",
published_yesterday: "本文于昨天发布",
published_three_days: "本文于前天发布",
over_date: "本文自发布日起已经超过100天,文章信息可能已经过时,请酌情阅读。",
share_before: "分享",
share_after: "到其他地方",
}<file_sep>/src/pages/books.js
import React from "react";
import PropTypes from "prop-types";
import injectSheet from "react-jss";
import Obfuscate from "react-obfuscate";
import Helmet from "react-helmet";
import Main from "../components/Main";
import Content from "../components/Main/Content";
import Article from "../components/Main/Article";
import config from "../../content/meta/config";
import lang from "../../content/meta/lang.js";
import books from "../../content/meta/books.js";
import ExpansionPanel, {
ExpansionPanelDetails,
ExpansionPanelSummary,
} from 'material-ui/ExpansionPanel';
import ExpandMoreIcon from 'material-ui-icons/ExpandMore';
import Grid from 'material-ui/Grid';
import Typography from 'material-ui/Typography';
import ButtonBase from 'material-ui/ButtonBase';
import Tooltip from 'material-ui/Tooltip';
const styles = theme => ({
root: {
flexGrow: 1,
[`@media (max-width: ${theme.mediaQueryTresholds.L}px)`]: {
paddingTop: `${theme.bars.sizes.infoBar}px`,
paddingBottom: `${theme.bars.sizes.actionsBar}px`
},
},
panel: {
backgroundColor: theme.base.colors.panel,
},
heading: {
fontSize: theme.typography.pxToRem(15),
flexBasis: '33.33%',
flexShrink: 0,
},
secondaryHeading: {
fontSize: theme.typography.pxToRem(15),
color: theme.palette.text.secondary,
},
image: {
position: 'relative',
height: 200,
width: 140,
boxShadow: "2px 8px 20px -6px hsla(170, 50%, 45%, 1)",
// [theme.breakpoints.down('xs')]: {
// width: '100% !important', // Overrides inline-style
// height: 100,
// },
'&:hover': {
zIndex: 0,
},
'&:hover $imageBackdrop': {
opacity: 0.6,
},
'&:hover $imageButton': {
display: 'flex',
},
},
imageButton: {
position: 'absolute',
left: 0,
right: 0,
top: 0,
bottom: 0,
display: 'none',
alignItems: 'center',
justifyContent: 'center',
color: theme.palette.common.white,
},
imageBackdrop: {
position: 'absolute',
left: 0,
right: 0,
top: 0,
bottom: 0,
backgroundColor: theme.palette.common.black,
opacity: 0.05,
transition: theme.transitions.create('opacity'),
},
imageSrc: {
position: 'absolute',
left: 0,
right: 0,
top: 0,
bottom: 0,
backgroundSize: 'cover',
backgroundPosition: 'center 40%',
}
});
class Books extends React.Component {
state = {
expanded: lang.reading,
};
handleChange = panel => (event, expanded) => {
this.setState({
expanded: expanded ? panel : false,
});
};
render() {
const { classes } = this.props;
const lists = books.lists;
const { expanded } = this.state;
return (
<Main>
<div className={classes.root}>
{lists.map(month => {
const books = month.books;
return (
<ExpansionPanel onChange={this.handleChange(month.time)} expanded={expanded === month.time}
className={classes.panel}
>
<ExpansionPanelSummary expandIcon={<ExpandMoreIcon />}>
<Typography className={classes.heading}>{month.time}</Typography>
<Typography className={classes.secondaryHeading}>{month.summary}</Typography>
</ExpansionPanelSummary>
<ExpansionPanelDetails>
<Grid container xs={12}>
{books.map(book => {
return (
<Grid item>
<ButtonBase focusRipple
href={book.url}
rel="noopener noreferrer"
target="_blank"
key={book.title}
className={classes.image}
>
{ book.coverUrl &&
<span className={classes.imageSrc}
style={{
backgroundImage: `url(${book.coverUrl})`
}}
/>
}
<span className={classes.imageSrc}
style={{
backgroundImage: `url(https://img1.doubanio.com/mpic/${book.doubanID}.jpg)`
}}
/>
<span className={classes.imageBackdrop} />
<span className={classes.imageButton}>
{book.title}
</span>
</ButtonBase>
</Grid>
);
})}
</Grid>
</ExpansionPanelDetails>
</ExpansionPanel>
);
})}
</div>
<Helmet
htmlAttributes={{
lang: config.siteLanguage,
prefix: "og: http://ogp.me/ns#"
}}
>
{/* General tags */}
<title>{lang.books} - {config.shortSiteTitle}</title>
<meta name="description" content={books.description} />
{/* OpenGraph tags */}
<meta property="og:url" content="/books" />
<meta property="og:title" content={lang.books} />
<meta property="og:description" content={books.description} />
<meta property="og:image" content={books.cover} />
<meta property="og:type" content="website" />
{/* Twitter Card tags */}
<meta name="twitter:card" content={books.description} />
<meta
name="twitter:creator"
content={config.authorTwitterAccount ? config.authorTwitterAccount : ""}
/>
</Helmet>
</Main>
);
}
}
// Runtime prop validation: `classes` is injected by react-jss (injectSheet).
Books.propTypes = {
  classes: PropTypes.object.isRequired
};
export default injectSheet(styles)(Books);<file_sep>/content/pages/1--about/index.md
---
title: About Me
menuTitle: About
---
Ok, about me.<file_sep>/scripts/entry.sh
#!/bin/bash
# Docker entrypoint for the Raink Gatsby site.
# Usage: entry.sh develop|build|serve|deploy|<arbitrary command>
set -e

export GATSBY_DIR="/site"
Separator="============================================================="

# Initialize Gatsby or run a package install if needed.
if [ ! -f "$GATSBY_DIR/package.json" ]; then
  echo "Raink: package.json not found, check your docker command and volume."
  echo $Separator
  exit 1
elif [ ! -e "$GATSBY_DIR/node_modules/" ]; then
  echo "Raink: Node modules is empty. Running yarn install..."
  echo $Separator
  yarn install
fi

# Quoted so the script keeps working if the path ever contains spaces.
cd "$GATSBY_DIR"
yarn clean
echo "Raink: Initialized."
echo $Separator

# Dispatch on the first argument.
if [ "$1" == "develop" ]; then
  gatsby develop --host 0.0.0.0
elif [ "$1" == "build" ]; then
  gatsby build
elif [ "$1" == "serve" ]; then
  gatsby serve --port 8000
elif [ "$1" == "deploy" ]; then
  echo "Raink: Generate app icons."
  echo $Separator
  bash /generate-app-icons.sh
  gatsby build
  rm -rf /public/* && cp -r public/* /public
  echo "Raink: Build success, now monitoring content folder."
  echo $Separator
  # Rebuild and republish whenever anything under content/ or src/ changes.
  while true; do
    find content src | entr sh -c 'gatsby build && rm -rf /public/* && cp -r public/* /public'
  done
else
  # Bug fix: "$@" must be quoted so forwarded arguments containing spaces
  # are not word-split before exec replaces this shell.
  exec "$@"
fi
<file_sep>/README.md
# Raink - Personal blog starter for Gatsby.js
[Chinese README](README_CN.md)
## Preview and feature
* [Netlify Demo](https://raink.netlify.com)
* [My Blog](https://zuolan.me/)
Lighthouse score:

* [x] Markdown posts, pages and fragments
* [x] Table of contents
* [x] Themes switch (dark and light)
* [x] Contact form
* [x] Searching (by Algolia)
* [x] Progressive Web App (PWA)
* [x] Favicons generator
* [x] RSS, Sitemap, SEO
* [x] Social sharing
* [x] Google analytics
* [x] Disqus and FB comments
* [ ] Headroom
* [ ] Filtering by tag
* [ ] multi-language
* [ ] Support PWA notification
* [ ] Resume page
## Getting started
#### Enable external services (Required)
The starter uses external services for some functions: contact form, comments, searching, analytics. To use them you have to obtain some access credentials. Don't worry — all services are free or have generous free tiers, big enough for a personal blog.
The starter needs an `.env` file like this in the root folder:
```
GOOGLE_ANALYTICS_ID = ...
ALGOLIA_APP_ID = ...
ALGOLIA_SEARCH_ONLY_API_KEY = ...
ALGOLIA_ADMIN_API_KEY = ...
ALGOLIA_INDEX_NAME = ...
FB_APP_ID = ...
DISQUS_ID = ...
```
The contact form does not need any settings it should work out of the box if you deploy the website to [Netlify](https://www.netlify.com/).
----
#### There are various ways to get started with Raink:
<details><summary>Deploying with Docker</summary>
NOTE: Your GatsbyJS site static files will be created into `~/raink/public` automatically.
Clone this repository:
```
$ git clone https://github.com/izuolan/raink.git ~/raink && cd $_
```
#### deploy (production)
This command will build your site and generate app icons, then run a monitor that watches the `content` folder, automatically rebuilding and redeploying when files change:
```shell
$ docker run -dit --restart=always --name raink \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink deploy
# Check the container build log
$ docker logs -f raink
```
Now everything is ready; you can host the `~/raink/public` folder on any HTTP service, such as GitHub Pages.
#### develop (development)
Use `develop` command to deploying your site, then open `SERVER_IP:8000`:
```shell
$ docker run -it --rm -p 8000:8000 \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink develop
```
#### build and serve
Use `build` command to building your site, then the static files will output the `public` folder:
```shell
$ docker run -it --rm \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink build
```
Use `serve` command to run a http serve:
```shell
$ docker run -dit --name raink-public \
-p 8000:8000 \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink serve
```
#### other
For example to install a new NPM-module:
```
$ docker run -it --rm \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink yarn add gatsby-transformer-yaml
```
</details>
<details><summary>Deploying with Netlify (Recommended, Serverless)</summary>
1. Fork this repository, and sign in [Netlify](https://www.netlify.com/).
2. [Create a new site](https://app.netlify.com/start) and select your forked repository.
3. Set `.ENV` in Netlify.
<details><summary>How</summary>

</details>
4. Keep all default **Basic build settings**, just click **Deploy site** button.
That's all.
</details>
<details><summary>Deploying from Source</summary>
```shell
$ git clone https://github.com/izuolan/raink.git && cd $_
$ npm install --global gatsby-cli
$ yarn install
$ yarn develop
```
</details>
## Thanks
**Note: This starter forked from an [educational project](https://github.com/greglobinski/gatsby-starter-personal-blog). Some features will be merged into the original project in the future (My code seems terrible, lol).**<file_sep>/src/pages/resume.js
import React from "react";
import PropTypes from "prop-types";
import injectSheet from "react-jss";
import Obfuscate from "react-obfuscate";
import Main from "../components/Main";
import Article from "../components/Main/Article";
import Content from "../components/Main/Content";
import config from "../../content/meta/config";
import lang from "../../content/meta/lang.js";
import SwipeableViews from 'react-swipeable-views';
import AppBar from 'material-ui/AppBar';
import Tabs, { Tab } from 'material-ui/Tabs';
import Card, { CardActions, CardContent } from 'material-ui/Card';
import Button from 'material-ui/Button';
import Typography from 'material-ui/Typography';
function TabContainer({ children, dir }) {
return (
<Typography component="div" dir={dir} style={{ padding: 8 * 3 }}>
{children}
</Typography>
);
}
TabContainer.propTypes = {
children: PropTypes.node.isRequired,
dir: PropTypes.string.isRequired,
};
// JSS style rules for the Resume page; `theme` comes from react-jss.
const styles = theme => ({
  root: {
    // backgroundColor: theme.base.colors.background.color,
    height: "100%",
  },
  // Personal-information card on the first tab.
  info: {
    minWidth: 100,
    maxWidth: 400,
  },
  bullet: {
    display: 'inline-block',
    margin: '0 2px',
    transform: 'scale(0.8)',
  },
  title: {
    marginBottom: 16,
    fontSize: 14,
  },
  // Sub-heading ("position") line inside the card.
  pos: {
    marginBottom: 12,
  },
});
// Resume page: a tabbed layout (AppBar + Tabs) whose panels can also be
// swiped through via SwipeableViews. Tab labels and card text are Chinese
// content strings, left as-is (they are user-facing data, not comments).
class Resume extends React.Component {
  state = {
    // Index of the currently selected tab / swipe panel.
    value: 0,
  };

  // Fired by the Tabs bar when a tab header is clicked.
  handleChange = (event, value) => {
    this.setState({ value });
  };

  // Fired by SwipeableViews when the user swipes between panels,
  // keeping the tab indicator in sync.
  handleChangeIndex = index => {
    this.setState({ value: index });
  };

  render() {
    const { classes } = this.props;
    return (
      <Main>
        <div className={classes.root}>
          <AppBar position="sticky" color="inherit">
            <Tabs
              value={this.state.value}
              onChange={this.handleChange}
              indicatorColor="primary"
              textColor="primary"
              fullWidth centered
            >
              <Tab label="个人信息" />
              <Tab label="工作经历" />
              <Tab label="项目经验" />
              <Tab label="社区贡献" />
            </Tabs>
          </AppBar>
          <SwipeableViews
            // axis={theme.direction === 'rtl' ? 'x-reverse' : 'x'}
            index={this.state.value}
            onChangeIndex={this.handleChangeIndex}
          >
            <TabContainer>
              <Card className={classes.info}>
                <CardContent>
                  <Typography className={classes.title} color="textSecondary">
                    <p>{lang.email_to_me}<Obfuscate email={config.authorEmail} /></p>
                  </Typography>
                  <Typography variant="headline" component="h2">
                    左蓝
                  </Typography>
                  <Typography className={classes.pos} color="textSecondary">
                    Docker &amp; DevOps
                  </Typography>
                  <Typography component="p">
                    简单介绍。
                  </Typography>
                </CardContent>
              </Card>
            </TabContainer>
            {/* Remaining tabs are placeholders awaiting real content. */}
            <TabContainer>Item Two</TabContainer>
            <TabContainer>Item Three</TabContainer>
            <TabContainer>Item Four</TabContainer>
          </SwipeableViews>
        </div>
      </Main>
    );
  }
}
// Runtime prop validation: `classes` is injected by react-jss (injectSheet).
Resume.propTypes = {
  classes: PropTypes.object.isRequired
};
export default injectSheet(styles)(Resume);
<file_sep>/README_CN.md
# Raink - Gatsby.js 个人博客主题
[English README](README.md)
## 预览与功能
* [Netlify 的演示站点](https://raink.netlify.com)
* [我的博客](https://zuolan.me/)
Lighthouse 评分:

* [x] 文章和页面使用 Markdown 编写
* [x] 文章目录
* [x] 主题切换(目前只有黑白两种)
* [x] 联系表单
* [x] 搜索(基于 Algolia)
* [x] 全站支持 PWA,可离线访问
* [x] Favicons 生成器
* [x] RSS, 站点地图, SEO
* [x] 社交分享
* [x] Google analytics
* [x] Disqus 和 Facebook 评论集成
* [ ] 滑动隐藏上下栏
* [ ] 文章标签
* [ ] 多语言
* [ ] 桌面级更新提醒
* [ ] 简历密码
* [ ] 博客后台
## 入门
#### 开启外部服务(必须)
主题的一些功能使用了外部服务,例如联系表单,评论,搜索,分析等等。要使用这些服务,你必须申请相应的 API/KEY。不要担心,所有服务都是免费的,或者有足够配额的免费套餐供个人博客使用。
首先在相应的网站上申请接口,然后在项目根目录新建一个`.env`文件,内容如下:
```
GOOGLE_ANALYTICS_ID = ...
ALGOLIA_APP_ID = ...
ALGOLIA_SEARCH_ONLY_API_KEY = ...
ALGOLIA_ADMIN_API_KEY = ...
ALGOLIA_INDEX_NAME = ...
FB_APP_ID = ...
DISQUS_ID = ...
```
如果你将网站部署到 [Netlify](https://www.netlify.com/),那么联系表单不需要任何设置即可使用,提交数据会发送到 Netlify 后台。
----
#### 下面有几种不同的方式部署 Raink:
<details><summary>使用 Docker 部署</summary>
提醒:你的 Gatsby.js 网站静态文件将自动创建到 `~/raink/public` 中。
克隆这个仓库:
```
$ git clone https://github.com/izuolan/raink.git ~/raink && cd $_
```
#### deploy(生产级部署)
这个命令首先会生成一些 PWA 必须的图标,然后构建静态文件,构建结束后会进入监视状态,一旦 `content` 文件夹内容有变动便会触发再次构建:
```shell
$ docker run -dit --restart=always --name raink \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink deploy
# 查看构建日志
$ docker logs -f raink
```
现在一切准备就绪,你可以把 `~/raink/public` 目录放到任意一种 HTTP 服务中,例如 Github Pages。
#### develop(开发)
使用 `develop` 命令部署可以在修改主题文件时快速看到修改结果,打开 `SERVER_IP:8000` 即可看到页面:
```shell
$ docker run -it --rm -p 8000:8000 \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink develop
```
#### build 和 serve
使用 `build` 命令用于构建生产级的静态页面,构建后的内容会输出到 `public` 文件夹:
```shell
$ docker run -it --rm \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink build
```
使用 `serve` 命令运行一个 HTTP 服务:
```shell
$ docker run -dit --name raink-public \
-p 8000:8000 \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink serve
```
#### other
安装一个新的 npm 包:
```
$ docker run -it --rm \
-v ~/raink:/site \
-v ~/content:/site/content \
zuolan/raink yarn add gatsby-transformer-yaml
```
</details>
<details><summary>通过 Netlify 部署(推荐, 不需要服务器)</summary>
1. Fork 这个仓库,注册登录 [Netlify](https://www.netlify.com/)。
2. 点击 [Create a new site](https://app.netlify.com/start) 然后选择你刚才 forked 的仓库。
3. 在 Netlify 构建页面设置 `.ENV` 变量,不懂就看下面那张图。
<details><summary>如何配置环境变量</summary>

</details>
4. 其他设置保持默认(**Basic build settings** 不需要改动),点击 **Deploy site** 即可开始部署。
稍等片刻就可以看到网站已经部署完成,你可以克隆你的 Forked 仓库,修改 `content` 文件夹里面的内容然后提交,Netlify 会自动触发构建。
</details>
<details><summary>从源代码构建部署</summary>
```shell
$ git clone https://github.com/izuolan/raink.git && cd $_
$ npm install --global gatsby-cli
$ yarn install
$ yarn develop
```
</details>
| 73ac0d93d724b5164af2bdc151c0c029e4babd58 | [
"Markdown",
"JavaScript",
"Dockerfile",
"Shell"
] | 19 | Markdown | syedpeer/raink | ffbc49b8877d20fdf5d1ae3bb89c57e85ff86f18 | b6dbc1712b931743ae08263146eb95c17c504da7 |
refs/heads/master | <file_sep>## Plant Water Level
Skill to report on plant water level in the home
## Description
Skill to report on plant water level in home
## Examples
- "Plant water level"
- "Plant need water"
## Credits
janbugge
<file_sep>from mycroft import MycroftSkill, intent_file_handler
class PlantWaterLevel(MycroftSkill):
    """Mycroft skill that reports on the water level of plants in the home."""

    def __init__(self):
        # Plain (non-super) delegation, as generated by the Mycroft skill template.
        MycroftSkill.__init__(self)

    @intent_file_handler('level.water.plant.intent')
    def handle_level_water_plant(self, message):
        # Answer the "plant water level" intent with the matching dialog line.
        self.speak_dialog('level.water.plant')
def create_skill():
    # Entry point used by the Mycroft skill loader to instantiate the skill.
    return PlantWaterLevel()
| 3aa475057307ff1b481f8fdffaab628dd99791a3 | [
"Markdown",
"Python"
] | 2 | Markdown | janbugge/plant-water-level-skill | 9b7f5712266d5d4c39bbac4c2561a279d2c9e049 | bdd4b1b13f0e4b23e978cb0292a5dda23f7691e7 |
refs/heads/master | <file_sep><!DOCTYPE html>
<html lang="en">
<head>
<!-- Basic Page Needs
================================================== -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Sentinelle</title>
<meta name="description" content="">
<meta name="author" content="">
<meta name="keywords" content="">
<!-- Mobile Specific Metas
================================================== -->
<meta name="viewport" content="width=device-width, minimum-scale=1.0, maximum-scale=1.0">
<meta name="apple-mobile-web-app-capable" content="yes" />
<!-- Fonts -->
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700" rel="stylesheet">
<!-- Favicon
================================================== -->
<link rel="apple-touch-icon" sizes="180x180" href="/img/cible.png">
<link rel="icon" type="assets/image/png" sizes="16x16" href="assets/img/cible.png">
<!-- Stylesheets
================================================== -->
<!-- Bootstrap core CSS -->
<link href="assets/css/bootstrap.min.css" rel="stylesheet">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.1.0/css/v4-shims.min.css" rel="stylesheet">
<link href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css" rel="stylesheet">
<!-- Custom styles for this template -->
<link href="assets/css/style.css" rel="stylesheet">
<link href="assets/css/responsive.css" rel="stylesheet">
<link href="assets/css/custom.css" rel="stylesheet">
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
<script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<?php
// One-shot flash message: addFlash() (see config/framework.php) stores
// ['type' => ..., 'msg' => ...] in the session; render it here as a
// dismissible Bootstrap alert, then clear it so it shows only once.
if (isset($_SESSION['msg-flash']) && !empty($_SESSION['msg-flash'])) {
echo '<div class="alert alert-' . $_SESSION['msg-flash']['type'] . '" role="alert">' . $_SESSION['msg-flash']['msg'] . '
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>';
$_SESSION['msg-flash'] = [];
}
?>
<header id="masthead" class="site-header" data-anchor-target=".hero" data-top="background: rgba(255,255,255,0); padding: 30px 0; box-shadow: 0px 0px 20px 6px rgba(0, 0, 0, 0);" data-top-bottom="background: rgba(255,255,255,1); padding: 10px 0; box-shadow: 0px 0px 20px 6px rgba(0, 0, 0, 0.2);">
<nav id="primary-navigation" class="site-navigation">
<div class="container">
<div class="navbar-header page-scroll">
<button type="button" class="navbar-toggle collapsed" data-target="#portfolio-perfect-collapse" aria-expanded="false">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a href="#hero" class="site-logo"><img src="assets/img/logo_sentinelle.png" alt="logo"></a>
</div><!-- /.navbar-header -->
<div class="main-menu" id="portfolio-perfect-collapse">
<ul class="nav navbar-nav navbar-right">
<li class="page-scroll"><a href="index.php">Accueil</a></li>
<li class="page-scroll"><a href="#about">Portrait</a></li>
<li class="page-scroll"><a href="#service">Objectifs</a></li>
<li class="page-scroll"><a href="#portfolio">Portfolio</a></li>
<li class="page-scroll"><a href="#contact">Contactez Moi</a></li>
<?php if (isset($_SESSION['user'])) { ?>
<li class="page-scroll"><a href="/deconnexion.php">Déconnexion</a></li>
<?php } else { ?>
<li class="page-scroll"><a href="/login.php">Se connecter/ S'incrire</a></li>
<?php } ?>
</ul><!-- /.navbar-nav -->
</div><!-- /.navbar-collapse -->
</div>
</nav><!-- /.primary-navigation -->
</header><!-- /#header --><file_sep><?php
require_once 'config/framework.php';
require_once 'config/connect.php';
require_once 'part/header.php';

// Load the single project matching the slug in the URL (query(..., true)
// returns one row). NOTE(review): $_GET['slug'] is interpolated into the
// query unescaped — SQL injection risk; it should go through
// $mysqli->real_escape_string() or a prepared statement.
$sql = "SELECT P.id, P.titre, P.slug, P.content, P.image, P.creation, U.pseudo
FROM projets AS P
INNER JOIN users AS U
ON U.id = P.auteur
WHERE P.slug = '" . $_GET['slug'] . "' LIMIT 1";
$projets = query($sql, true);

// If the project exists, load its comments with each author's pseudo.
if (!empty($projets)) {
$sql = "SELECT C.comment, U.pseudo
FROM commentaires AS C
INNER JOIN users AS U
ON U.id = C.user
WHERE C.projet = '" . $projets['id'] . "'";
$commentaires = query($sql);
}
// Insert a new comment once the submitted CSRF token matches the session one.
if (isset($_POST['token_comment']) && $_POST['token_comment'] === $_SESSION['token_comment']) {
    // Bug fix: the comment text comes from $_POST['comment'], not from the CSRF
    // token field, and the sanitized value must be the one that is persisted
    // (the original sanitized the token and inserted the raw POST value).
    $commentaire = addslashes(htmlentities(strip_tags($_POST['comment'])));
    $sql = "INSERT INTO commentaires(user,projet,comment) VALUES ('" . $_SESSION['user']['id'] . "', '" . $projets['id'] . "','" . $commentaire . "')";
    if ($mysqli->query($sql)) {
        addFlash('success', 'Votre commentaire a bien été envoyé !!');
        redirectToRoute('/projet.php?slug=' . $_GET['slug']);
    } else {
        addFlash('danger', 'Votre commentaire n\'a pas été envoyé !!');
    }
}
?>
<div id="hero" class="hero">
<div class="container">
<div class="row">
<div class="col-6">
<div class="comprojet">
<div class="jumbotron jumbotron-fluid">
<div class="container">
<?php if (!empty($projets)) { ?>
<h1 class="display-4"><?= $projets['titre'] ?></h1>
<p class="lead"><img src="<?= $projets['image'] ?>"></p>
<?= $_GET['slug']; ?>
<br>
<?= $page = isset($_GET['page']) && (int) $_GET['page'] ? $_GET['page'] : 1;
$projets['image']; ?>
<br>
<br>
<?= $projets['content'];
$page; ?>
<br>
<?= $projets['creation']; ?>
<?php } else {
echo 'Le projet est indisponible';
}
?>
<br>
</div>
</div>
</div>
<?php if (!empty($projets)) { ?>
<div class="comcom">
<div class="container">
<div class='row'>
<div class='col-md-offset-2 col-md-8'>
<div class="carousel slide" data-ride="carousel" id="quote-carousel">
<!-- Bottom Carousel Indicators -->
<ol class="carousel-indicators">
<?php $i = 0;
foreach ($commentaires as $commentaire) : ?>
<li data-target="#quote-carousel" data-slide-to="<?= $i; ?>" class="<?= $i === 0 ? 'active' : ''; ?>"></li>
<?php $i++;
endforeach; ?>
</ol>
<!-- Carousel Slides / Quotes -->
<div class="carousel-inner">
<!-- comS -->
<?php $i = 0;
foreach ($commentaires as $commentaire) : ?>
<div class="item<?= $i === 0 ? ' active' : ''; ?>">
<blockquote>
<div class=" row">
<div class="col-sm-9">
<p>"<?= $commentaire['comment']; ?>"
<br>
__<br>
<strong>De <?= $commentaire['pseudo']; ?></strong>
</p>
</div>
</div>
</blockquote>
</div>
<?php $i++;
endforeach; ?>
</div>
<!-- Carousel Buttons Next/Prev -->
<a data-slide="prev" href="#quote-carousel" class="left carousel-control"><i class="fa fa-chevron-left"></i></a>
<a data-slide="next" href="#quote-carousel" class="right carousel-control"><i class="fa fa-chevron-right"></i></a>
</div>
</div>
</div>
</div>
</div>
<div class="comcom">
<a class="btn btn-dark" data-toggle="collapse" href="#collapseExample" role="button" aria-expanded="false" aria-controls="collapseExample">
Commentez
</a>
<div class="collapse" id="collapseExample">
<div class="card card-body">
<?php if (isset($_SESSION['user'])) { ?>
<form method="post">
    <input type="hidden" name="token_comment" value="<?= miniToken('token_comment'); ?>">
    <div class="form-group">
        <br><label for="Textarea1">Entrez votre commentaire</label><br>
        <!-- Bug fix: the textarea had no name attribute, so $_POST['comment']
             was never populated and submitted comments were always empty. -->
        <textarea class="form-control" name="comment" id="Textarea1" rows="3"></textarea>
    </div>
    <button type="submit" class="btn btn-primary">Envoyer</button>
</form>
<?php
} else {
echo '<div class="my-5">Réservé aux membres ! <a href="/login.php">Veulliez vous identifier svp </a></div>';
}
?>
</div>
</div>
</div>
<?php }
?>
</div>
</div>
</div>
</div>
<?php require_once "part/footer.php"; ?><file_sep><?php
use Faker\Factory;
require_once '../vendor/autoload.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
$faker = Factory::create();
for ($i = 0; $i < 200; $i++){
$roles = ['ROLE_USER'];
$role = [false, true, false];
shuffle($role);
if ($role[0] === true) {
array_push($roles, 'ROLE_ADMIN');
}
$pseudo = str_replace([' '],[''],strtolower($faker->name));
$email = $pseudo."@".$faker->freeEmailDomain;
$datetime = $faker->dateTimeBetween('-12 month', 'now');
$datetime = date_format($datetime, 'Y-m-d H:i:s');
$password_hash = password_hash($pseudo, PASSWORD_DEFAULT);
$roles = json_encode($roles);
$sql= "INSERT INTO users(email, password, pseudo, roles, register) VALUES ('".$email."','".$password_hash."','".$pseudo."','".$roles."','".$datetime."')";
if ($mysqli->query($sql) === true) {
echo $roles. "<br>";
echo $pseudo. "<br>";
echo $email."<br>";
echo $datetime ."<br>";
echo $password_hash. "<br>";
echo "<br>";
} else {
echo 'une erreur est survenue. Veuillez recommencer';
}
}
<file_sep><?php
require_once '../config/framework.php';
require_once '../config/connect.php';
require_once 'header.php';
?>
</nav>
<!-- /. NAV SIDE -->
<div id="page-wrapper">
<div id="page-inner">
<div class="row">
<div class="col-md-12">
<h2>USERS </h2>
</div>
</div>
<!-- /. ROW -->
<hr />
<!-- ------------------------------------------------- PAGE ---------------------------------------->
<table id="example" class="display" width="100%"></table>
<!-- ------------------------------------------------- PAGE ---------------------------------------->
<!-- /. ROW -->
</div>
<!-- /. PAGE INNER -->
</div>
</body>
</html>
<?php require_once "footer.php" ?><file_sep><?php
// Démarrage session
session_start();
// Ajoute le fichier defines.php
require_once 'defines.php';
/*
* Met a jour l'horloge avec le timezone par default
* avec la constante TIMEZONE_DEFAULT défini dans le fichier defines.php
*/
date_default_timezone_set(TIMEZONE_DEFAULT);
/**
* Redirige sur une autre page.
*/
function redirectToRoute(string $target = '/')
{
    // Send the redirect header, then stop the script so nothing else runs.
    header(sprintf('Location: %s', $target));
    exit;
}
/**
 * Generates an unguessable token, stores it in the session under $token
 * and returns it (used as a CSRF token by the site's forms).
 *
 * @param string $token session key under which the token is stored
 *
 * @return string the generated token
 */
function miniToken(string $token = 'token')
{
    // Security fix: str_shuffle() relies on a non-cryptographic, seedable PRNG,
    // so the previous token was predictable. random_bytes() is CSPRNG-backed;
    // bin2hex() keeps the token URL/HTML-safe. Callers only compare the stored
    // string with the submitted one, so the new format is transparent to them.
    $alpha = bin2hex(random_bytes(16));
    $_SESSION[$token] = $alpha;

    return $alpha;
}
/**
* Function var_dump().
*
* Affiche les var_dump seulement si l'application
* est en environnement développement.
*
* APP_ENV est definie dans le fichier defines.php
* APP_ENV = dev (environnement développement)
* APP_ENV = prod (environnement production)
*
* @param void $variable (varibale a tester, peu être de type bool,array,string,int,float...)
* @param bool $type (false pour le print_r, true pour le var_dump)
*/
function dump($variable, bool $type = false)
{
    // Debug output is restricted to the development environment.
    if (APP_ENV !== 'dev') {
        return;
    }

    if ($type) {
        var_dump($variable);
        return;
    }

    echo '<pre class="my-4">' . print_r($variable, true) . '</pre>';
}
/**
 * Stores a one-shot flash message (Bootstrap alert type + text) in the
 * session; the layout header displays and clears it on the next page.
 */
function addFlash(string $type, string $message)
{
    $_SESSION['msg-flash'] = ['type' => $type, 'msg' => $message];
}
<file_sep><?php
define('APP_ENV', 'dev');
define('APP_ROOT', str_replace('\config', '', __DIR__));
define('TIMEZONE_DEFAULT', 'Europe/Paris');
<file_sep><?php
// iCloud SMTP credentials (placeholder values).
// NOTE(review): keep real credentials out of version control.
define('ICLOUD_EMAIL', 'votre email'); // bug fix: missing closing quote after ICLOUD_EMAIL caused a PHP parse error
define('ICLOUD_PASSWORD', '<PASSWORD>');
?><file_sep><?php
require_once '../vendor/autoload.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
?>
<file_sep><?php
require_once 'config/framework.php';
require_once 'config/connect.php';
require_once 'part/header.php';
$errors = [];

// Login handler: find the user by email, verify the password, open the session.
if (isset($_POST['email'])) {
    $email = strip_tags(stripslashes($_REQUEST['email']));
    // Security fix: escape the user-supplied value before interpolating it into
    // the query (the previous version was open to SQL injection).
    $query = "SELECT * FROM `users` WHERE email='" . $mysqli->real_escape_string($email) . "'";
    if ($result = $mysqli->query($query)) {
        if ($result->num_rows > 0) {
            while ($row = $result->fetch_assoc()) {
                // NOTE(review): the field is read as 'password1' — confirm it matches
                // the name attribute of the password input in the template below.
                if (password_verify($_POST['password1'], $row['password'])) {
                    $_SESSION['user'] = $row;
                    // Bug fix: the checkbox is absent from $_POST when unchecked,
                    // so guard with isset() to avoid an undefined-index notice.
                    if (isset($_POST['souvenir']) && $_POST['souvenir'] === 'on') {
                        $expire = time() + 1 * 1 * 900; // 15 minutes
                        setcookie('souvenir', $row['security'], $expire, '/', '', false, true);
                    }
                    redirectToRoute('/compte.php');
                } else {
                    echo 'compte non reconnu';
                }
            }
        }
        $result->close();
    }
}
?>
<div id="hero" class="hero">
<main id="main">
<section class="site-section section-about text-center">
<div class="container my-5">
<div class="row">
<div class="col-md-4 col-md-offset-4">
<p class="text-center h1 fw-bold mb-5 mx-1 mx-md-4 mt-4">
Connectez vous
</p>
<form class="mx-1 mx-md-4" method="post">
<input type="hidden" name="token" value="<?= miniToken(); ?>">
<label class="form-label" for="email">E-mail</label>
<div class="d-flex flex-row align-items-center mb-4">
<div class="form-outline flex-fill mb-0">
<input type="email" class="form-control" name="email" id="email" />
</div>
</div>
<label class="form-label" for="password">Mot de passe</label><br>
<div class="d-flex flex-row align-items-center mb-4">
<div class="form-outline flex-fill mb-0">
<input type="password" class="form-control" name="<PASSWORD>" id="<PASSWORD>" />
</div>
</div>
<div id="hero" class="hero">
<div class="d-flex justify-content-center m-4 my-5">
<button type="submit" class="btn btn-primary">Se connecter</button>
</div>
<input type="checkbox" name="souvenir" id="souvenir" />
<label for="souvenir">
<p>Rester connecté</p>
</label>
</form>
<div><a href="/register.php">Pas de compte ? S'inscrire</a></div>
</div>
</div>
</div>
</div>
</section>
</main><file_sep><?php
require_once '../config/connect.php';
$sql = "SELECT email FROM users";
$users = query($sql);
foreach ($users as $user) {
$security = sha1($user['email']);
$sql = "UPDATE users SET security='" . $security . "' WHERE email='" . $user['email'] . "'";
if ($mysqli->query($sql) === true) {
echo "('" . sha1($user['email']) . "')";
echo "<br>";
}
}
<file_sep><?php
require_once '../config/framework.php';
require_once '../config/connect.php';
require_once "header.php";
$errors = [];
// CRUD dispatcher for projects, selected by ?edition=new|edit|delete.
if (isset($_GET['edition'])) {
    switch ($_GET['edition']) {
        case 'new':
            // FIXME: "INSERT INTO projets FROM projets WHERE ..." is not valid SQL
            // (INSERT takes a column/VALUES list or INSERT ... SELECT); this query
            // always fails and lands in $errors['sql'] below.
            $sql = "INSERT INTO projets FROM projets WHERE id='" . $_GET['id'] . "'";
            break;
        case 'edit':
            // FIXME: "UPDATE FROM projets ..." is not valid SQL either (UPDATE takes
            // a SET clause and no FROM); this branch also always fails.
            $sql = "UPDATE FROM projets WHERE id='" . $_GET['id'] . "'";
            break;
        case 'delete':
            // NOTE(review): $_GET['id'] is interpolated unescaped — SQL injection risk.
            $sql = "DELETE FROM projets WHERE id='" . $_GET['id'] . "'";
            break;
    }
    // NOTE(review): if ?edition has an unexpected value, $sql is undefined here.
    if ($mysqli->query($sql) === true) {
        redirectToRoute('/admin/projets.php');
    } else {
        $errors['sql'] = $mysqli->error;
    }
} else {
    $errors['sql'] = 'tutu'; // placeholder message left over from debugging
}
?>
<div class="container">
<div class="row">
<div class="md-6">
<h1>MODIFICATION DE PROJET</h1>
<?= !empty($errors) ? $errors['sql'] : ''; ?>
<div class="formprojet">
<form method="post">
<div class="form-group">
<label for="titre">Titre du Projet</label>
<input type="text" class="form-control" id="titre" placeholder="" value="<?= $projets['titre'] ?>">
</div>
<div class="form-group">
<label for="auteur">En ligne</label>
<input type="checkbox" name="statut" id="statut" />
<label for="auteur">Fictif</label>
<input type="checkbox" name="statut" id="statut" />
</div>
<div class="form-group">
<label for="description">Description</label>
<input type="text" class="form-control" id="description" placeholder=" " value="<?= $projet['content'] ?>">
</div>
<div class="form-group">
<label for="exampleInputFile">Modifier l'image</label>
<input type="file" id="file">
</div>
<button type="submit" class="btn btn-secondary" name="projet">Envoyer le Projet</button><br>
</form><br>
</div>
<div>
</div>
</div>
<?php require_once "footer.php" ?><file_sep>-- phpMyAdmin SQL Dump
-- version 4.9.2
-- https://www.phpmyadmin.net/
--
-- Hôte : 127.0.0.1:3306
-- Généré le : jeu. 07 oct. 2021 à 07:25
-- Version du serveur : 8.0.18
-- Version de PHP : 7.2.31
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Base de données : `portfolio`
--
-- --------------------------------------------------------
--
-- Structure de la table `projets`
--
DROP TABLE IF EXISTS `projets`;
CREATE TABLE IF NOT EXISTS `projets` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`description` text CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci,
`image` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`time_at` datetime NOT NULL,
`slug` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL,
`technos` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`github` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
`lien` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci DEFAULT NULL,
`status` tinyint(1) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-- --------------------------------------------------------
--
-- Structure de la table `roles`
--
DROP TABLE IF EXISTS `roles`;
CREATE TABLE IF NOT EXISTS `roles` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`role` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
`statut` tinyint(1) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
--
-- Déchargement des données de la table `roles`
--
INSERT INTO `roles` (`id`, `role`, `statut`) VALUES
(1, 'admin', 1),
(2, 'moderateur', 1),
(3, 'user', 1),
(4, 'validateur', 0);
-- --------------------------------------------------------
--
-- Structure de la table `users`
--
DROP TABLE IF EXISTS `users`;
CREATE TABLE IF NOT EXISTS `users` (
`id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT,
`email` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`password` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`pseudo` varchar(30) COLLATE utf8mb4_general_ci NOT NULL,
`roles` json NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `id` (`id`),
UNIQUE KEY `pseudo` (`pseudo`),
UNIQUE KEY `email` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
require_once '../config/framework.php';
require_once '../config/connect.php';
require_once "header.php";
if (!isset($_SESSION['user'])) {
redirectToRoute();
}
if (isset($_SESSION['user'])) {
$roles = json_decode($_SESSION['user']['roles']);
if (!in_array('ROLE_ADMIN', $roles)) {
redirectToRoute();
}
}
/* if (isset($_POST['projet'])) {
$sql = "UPDATE projets SET titre,content,image,auteur ='" . $projets . "' WHERE projets='" . $projets['titre'] . $projets['content'] . $projets['image'] . $projets['auteur'] . "'";
if ($mysqli->query($sql)) {
echo " Votre projet à bien été modifié";
} else {
echo " Modification à echouer";
}
} */
?>
<h1> Tous les projets </h1>
<br>
<br>
<br>
<!-- ------------------------------------------------- TABLE ---------------------------------------->
<div class="container">
<div class="row">
<div class="col-12">
<table>
<br>
<button type="button" class="btn btn-success mb-2">
<bold>+</bold> Ajouter un projet
</button><br>
<thead>
<tr>
<th></th>
<th></th>
<th>ID</th>
<th>Nom</th>
<th>Image</th>
<th>Activé</th>
<th>Fictif</th>
</tr>
</thead>
<tbody>
<?php foreach (query('SELECT * FROM projets LIMIT 10') as $projet) : ?>
<tr>
<td>
<a href="/admin/formprojet.php?edition=edit&id=<?= $projet['id']; ?>"><span class="fa fa-edit"></span></a>
</td>
<td>
<a href="/admin/formprojet.php?edition=delete&id=<?= $projet['id']; ?>"> <span class="fa fa-trash-o"></span></a>
</td>
<td>
<?= $projet['id'] ?>
</td>
<td>
<?= $projet['titre'] ?>
</td>
<td>
<img src="<?= $projet['image'] ?>" width="32" height="32">
</td>
<td>
<span class="fa fa-check"></span>
</td>
<td>
<span class="fa fa-times"></span>
</td>
</tr>
<?php endforeach; ?>
</tbody>
</tbody>
</table>
</div>
</div>
</div>
<!-- ------------------------------------------------- TABLE ---------------------------------------->
<br>
</body>
<br>
<?php require_once "footer.php" ?><file_sep><?php
require_once '../vendor/autoload.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
$mysqli;
$sql= "SELECT COUNT(*) as nbProjets FROM projets";
$data= query($sql);
$nbProjets= $data[0]['nbProjets'];
$parPage = 10;
$nbPage = ceil($nbProjets/$parPage);
$cPage= 1;
$sql = "SELECT P.id, P.titre, P.slug, P.content, P.image, P.creation, U.id AS id_user, U.email, U.pseudo
FROM projets AS P
INNER JOIN users AS U
ON P.auteur = U.id
WHERE P.statut = 1
ORDER BY creation DESC
LIMIT ".(($cPage-1)*$parPage)." , $parPage";
$projets = query($sql);
dump($projets);
echo "$cPage sur $nbPage" ;
?>
<file_sep><?php
require_once 'config/framework.php';
require_once 'config/connect.php';
$errors = [];

// Access control: this account page is for logged-in members only.
if (!isset($_SESSION['user'])) {
    redirectToRoute();
}

// Account deletion (CSRF token check first).
if (isset($_POST['token_delete']) && $_POST['token_delete'] === $_SESSION['token_delete']) {
    $sql = "DELETE FROM users WHERE id = '" . $_SESSION['user']['id'] . "'";
    if ($mysqli->query($sql) === true) {
        // Log the now-deleted user out.
        redirectToRoute('/deconnexion.php');
    }
}
// pour modifier le pseudo
if (isset($_POST['token_pseudo']) && $_POST['token_pseudo'] === $_SESSION['token_pseudo']) {
if (strlen($_POST['pseudo']) < 3 || strlen($_POST['pseudo']) > 30) {
$errors['pseudo'] = 'Votre Pseudo doit contenir minimum 3 caractères et maximum 30 caracteres !';
}
if (empty($error)) {
$sql = "UPDATE users SET pseudo='" . $_POST['pseudo'] . "' WHERE id='" . $_SESSION['user']['id'] . "'";
if ($mysqli->query($sql) === true) {
$_SESSION['user']['pseudo'] = $_POST['pseudo'];
} else {
$errors['sql'] = 'une erreur est survenue. Veuillez recommencer';
}
}
}
// pour modifier l'email
if (isset($_POST['token_email']) && $_POST['token_email'] === $_SESSION['token_email']) {
if (isset($_POST['email']) && preg_match('#^[\w.-]+@[\w.-]+\.[a-z]{2,6}$#i', $_POST['email'])) {
$errors['email'] = 'Votre Pseudo doit contenir minimum 3 caractères et maximum 30 caracteres !';
$sql = "UPDATE users SET email='" . $_POST['email'] . "' WHERE id = '" . $_SESSION['user']['id'] . "'";
if ($mysqli->query($sql) === true) {
redirectToRoute('/deconnexion.php');
} else {
echo 'une erreur est survenue. Veuillez recommencer';
}
}
}
// pour modifier le mdp
if (isset($_POST['token_password']) && $_POST['token_password'] === $_SESSION['token_password']) {
$password_hash = password_hash($_POST['password'], PASSWORD_DEFAULT);
$sql = "UPDATE users SET password='" . $password_hash . "' WHERE id = '" . $_SESSION['user']['id'] . "'";
if ($mysqli->query($sql) === true) {
redirectToRoute('/deconnexion.php');
} else {
echo 'une erreur est survenue. Veuillez recommencer';
}
}
require_once 'part/header.php';
?>
<div id="hero" class="hero">
<header></header>
<main id="main">
<section class="site-section section-about text-center">
<div class="container my-5">
<h2> <strong> Ravi de vous revoir <?= $_SESSION['user']['pseudo']; ?></strong></h2><br>
<div class="row">
<div class="col-md-4 col-md-offset-4">
<form class="form-inline" method="post">
<div class="form-group mx-sm-3 mb-3">
<label for="emailmodif" class="sr-only">Email</label>
<input type="hidden" name="token_email" value="<?= miniToken('token_email'); ?>">
<input type="text" class="form-control" name="email" id="modifemail" placeholder="Nouvelle email">
</div>
<button type="submit" class="btn btn-secondary mb-2">Modifier L'email</button><br>
</form>
<form class="form-inline" method="post">
<div class="form-group mx-sm-3 mb-3">
<label for="pseudomodif" class="sr-only">Pseudo</label>
<input type="hidden" name="token_pseudo" value="<?= miniToken('token_pseudo'); ?>">
<input type="text" class="form-control" name="pseudo" id="modifpseudo" placeholder="Nouveau Pseudo">
</div>
<button type="submit" class="btn btn-secondary mb-2">Modifier le Pseudo</button><br>
</form>
<form class="form-inline" method="post">
<div class="form-group mx-sm-3 mb-3">
<label for="passwordmodif" class="sr-only">Mot de passe</label>
<input type="hidden" name="token_password" value="<?= miniToken('token_motdepasse'); ?>">
<input type="<PASSWORD>" class="form-control" name="password" id="modifpassword" placeholder="<PASSWORD>">
</div>
<button type="submit" class="btn btn-secondary mb-2">Modifier le mot de passe</button><br>
</form><br>
<em>OU</em><br>
<form method="post" onclick="return confirm('Vous êtes sûre de vouloir nous quitter ? :(')">
<input type="hidden" name="token_delete" value="<?= miniToken('token_delete'); ?>">
<input type="submit" name="delete" value="Supprimer le Compte" class="btn btn-danger">
</form><br>
<a href="politiquergpd.php">Politique de confidentialité</a>
</section>
</main>
</div>
</html><file_sep><?php
require_once 'config/framework.php';
require_once 'config/connect.php';
require_once 'part/header.php';
$errors = [];
if (isset($_POST['token']) && $_POST['token'] === $_SESSION['token']) {
if (strlen($_POST['pseudo']) < 3 || strlen($_POST['pseudo']) > 30) {
$errors['pseudo'] = 'Votre Pseudo doit contenir minimum 3 caractères et maximum 30 caracteres !';
}
if (isset($_POST['email']) && !preg_match('#^[\w.-]+@[\w.-]+\.[a-z]{2,6}$#i', $_POST['email'])) {
$errors['email'] = 'Votre Pseudo doit contenir minimum 3 caractères et maximum 30 caracteres !';
}
if (isset($_POST['password1']) && !empty($_POST['password1']) && $_POST['password1'] === $_POST['password2']) {
$password_hash = password_hash($_POST['password1'], PASSWORD_DEFAULT);
} else {
$errors['password1'] = 'Les mots de passe ne sont pas identiques !';
}
if (empty($errors)) {
$sql = "INSERT INTO users(email, password, pseudo, roles) VALUES ('" . $_POST['email'] . "','" . $password_hash . "','" . $_POST['pseudo'] . "','" . json_encode(['ROLE_USER']) . "')";
if ($mysqli->query($sql) === true) {
redirectToRoute();
} else {
echo 'une erreur est survenue. Veuillez recommencer';
}
}
}
?>
<title>Inscrivez Vous !</title>
<main id="main">
<section class="site-section section-about text-center">
<div class="container my-5">
<div class="row">
<div class="col-md-4 col-md-offset-4">
<p class="text-center h1 fw-bold mb-5 mx-1 mx-md-4 mt-4">
Inscrivez vous !
<form method="POST">
<input type="hidden" name="token" value="<?= miniToken(); ?>">
<div class="form-group">
<label for="pseudo">Pseudo</label>
<input type="text" class="form-control" id="pseudo" name="pseudo">
<label for="exampleInputEmail1">Adresse e-mail</label>
<input type="email" class="form-control" id="Email1" aria-describedby="emailHelp" name="email">
</div>
<div class="form-group">
<label for="exampleInputPassword1">Mot de passe</label>
<input type="password" class="form-control" id="exampleInputPassword1" name="password1">
</div>
<label for="exampleInputPassword2">Répétez le mot de passe</label>
<input type="<PASSWORD>" class="form-control" id="exampleInputPassword2" name="password2"><br>
<button type="submit" class="btn btn-secondary">M'inscrire</button>
</div>
</form>
</div>
</div>
</div>
</section>
</main>
<?php
$pseudo = valid_donnees($_POST["pseudo"]);
$email = valid_donnees($_POST["email"]);
$password = <PASSWORD>($_POST["password"]);
function valid_donnees($donnees)
{
$donnees = trim($donnees);
$donnees = stripslashes($donnees);
$donnees = htmlspecialchars($donnees);
return $donnees;
}
?>
<?php require_once 'part/footer.php';
<file_sep><?php
/**
* Cette méthode permet de supprimer tout
* les caractères spéciaux d'une chaîne.
*
* @param string $text comment
*
* @return string
*/
function removeSpecialChar(string $text): string
{
return preg_replace('/[^A-Za-z0-9\-]/', '', $text);
}
/**
* Supprime les balises HTML et PHP d'une chaîne.
*
* @param string $text comment
*
* @return string
*/
function stripTags(string $text): string
{
return strip_tags($text);
}
/**
* Remplace tous les accents par leur équivalent sans accent.
*
* @param string $text comment
*
* @return string
*/
function enleveAccents(string $text): string
{
return str_replace(
[
'À', 'Á', 'Â', 'Ã', 'Ä', 'Å', 'à', 'á', 'â', 'ã', 'ä', 'å',
'Ò', 'Ó', 'Ô', 'Õ', 'Ö', 'Ø', 'ò', 'ó', 'ô', 'õ', 'ö', 'ø',
'È', 'É', 'Ê', 'Ë', 'è', 'é', 'ê', 'ë',
'Ç', 'ç',
'Ì', 'Í', 'Î', 'Ï', 'ì', 'í', 'î', 'ï',
'Ù', 'Ú', 'Û', 'Ü', 'ù', 'ú', 'û', 'ü',
'ÿ',
'Ñ', 'ñ',
],
[
'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a',
'o', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'o', 'o',
'e', 'e', 'e', 'e', 'e', 'e', 'e', 'e',
'c', 'c',
'i', 'i', 'i', 'i', 'i', 'i', 'i', 'i',
'u', 'u', 'u', 'u', 'u', 'u', 'u', 'u',
'y',
'n', 'n',
],
$text
);
}
/**
* Formate un text pour un slug bdd.
* Ex: mon-dossier-perso.
*
* @param string $text comment
* @param string $replace comment
*
* @return string
*/
function slug(string $text, string $replace = '-'): string
{
$text = strtolower(
removeSpecialChar(
str_replace(
[' ', '_', ',', '.'],
[$replace, $replace, $replace, $replace],
enleveAccents($text)
)
)
);
if (substr($text, strlen($text) - 1, strlen($text)) === $replace) {
$text = rtrim($text, $replace);
slug($text);
}
return $text;
}<file_sep><?php
use Faker\Factory;
require_once '../vendor/autoload.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
require_once '../config/formtext.php';
$faker = Factory::create('fr_FR');
for ($i=0; $i < 200; $i++) {
// generer une image //
$image = "https://lorempixel.com/640/480/";
// generer un titre entre 10 et 20 caractere //
$titre=$faker->sentence(rand(2, 4));
// generer un username/ID //
$id=$faker->userName;
// generer du contenu //
$content=addslashes(htmlentities(htmlspecialchars($faker->realText(255, 4))));
// generer un statut //
$status = [true, false];
shuffle($status);
$statut = $status[0];
// date creation du projet //
$creation = $faker->dateTimeBetween('-6 month', 'now');
$creation = date_format($creation, 'Y-m-d H:i:s');
//slug//
$slug = slug($titre);
$titre = addslashes(htmlentities(htmlspecialchars($titre)));
// generer un username/ID //
if ($result = $mysqli->query("SELECT id FROM users ORDER BY RAND() LIMIT 1")) {
if ($result->num_rows > 0) {
while ($row = $result->fetch_assoc()) {
$auteur = $row['id'];
}
}
$result->close();
}
?>
<?php
$sql= "INSERT INTO projets(titre,slug,content,image,statut,creation,auteur) VALUES ('".addslashes(htmlentities(htmlspecialchars($titre)))."','".$slug."','".$content."','".$image."','".$statut."','".$creation."','".$auteur."')";
if ($mysqli->query($sql)) {
echo $id. "<br>";
echo addslashes(htmlentities(htmlspecialchars($titre))). "<br>";
echo $slug. "<br>";
echo $content. "<br>";
echo $statut. "<br>";
echo $creation. "<br>";
echo $auteur. "<br>";
echo $image. "<br>";
} else {
printf("Message d'erreur : %s\n", $mysqli->error);
}
}
<file_sep><?php
use Faker\Factory;
require_once '../vendor/autoload.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
require_once '../config/formtext.php';
$faker = Factory::create('fr_FR');
for ($i=0; $i < 3000; $i++) {
//generer un commentaire
$commentaire = addslashes(htmlentities($faker->realText(100, 2)));
// generer un id user
$result = $mysqli->query("SELECT id FROM users ORDER BY RAND() LIMIT 1");
$auteur = $result->fetch_assoc();
$result->close();
// generer un id projet
$result = $mysqli->query("SELECT id FROM projets ORDER BY RAND() LIMIT 1");
$projet = $result->fetch_assoc();
$result->close();
$sql = "INSERT INTO commentaires(user,projet,comment) VALUES ('".$auteur['id']."','".$projet['id']."','".$commentaire."')";
if ($mysqli->query($sql)) {
echo $auteur['id']. "<br>";
echo $projet['id']. "<br>";
echo $commentaire. "<br>";
} else {
printf("Message d'erreur : %s\n", $mysqli->error);
}
}<file_sep><?php
require_once 'config/framework.php';
require_once 'config/connect.php';
require_once 'part/header.php';
?>
<div id="hero" class="hero">
<div class="container">
<div class="row">
<div class="col-md-6">
<h1><NAME></h1>
<div class="page-scroll">
<p class="job-title">Developpeur Web Junior</p>
<a href="#contact" class="btn btn-fill ">Me contacter</a>
<div class="clearfix visible-xxs"></div>
<a href="#portfolio" class="btn btn-border">Réalisations</a>
</div>
</div>
<div class="col-md-6 text-right">
.
</div>
</div>
</div>
</div>
</div><!-- /.hero -->
<main id="main" class="site-main">
<section id="about" class="site-section section-about text-center">
<div class="container">
<div class="row">
<div class="col-md-6 col-md-offset-3">
<h2>Portrait</h2>
<img src="assets/img/lines.svg" class="img-lines" alt="lines">
<p> Developpeur Web Junior, Je debute ma carrière , un seul mot d'ordre, perseverance. Actuellement
en remise à niveau dans les métiers du numerique. Je serais bientot apte à être votre solution.
Vous avez un projet et vous voulez vous lancer, besoin d'aide ? Je suis votre meilleur atout.
</p>
<?php if (isset($_SESSION['user'])) { ?>
<a href="CDC.docx" class="btn btn-fill">Votre devis en Ligne !</a>
<?php
} else {
echo '<div class="my-5">Réservé aux membres ! <a href="/login.php">Veulliez vous identifier svp </a></div>';
}
?>
</div>
</div>
</div>
</section><!-- /.secton-about -->
<section class="site-section section-skills">
<div class="container">
<div class="text-center">
<h3>COMPETENCES</h3>
<img src="assets/img/lines.svg" class="img-lines" alt="lines">
</div>
<div class="row">
<div class="col-md-4">
<div class="skill">
<h4><img src="assets/img/competence.png"> Html <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
<div class="skill">
<h4><img src="assets/img/competence.png"> Css <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
</div>
<div class="col-md-4">
<div class="skill">
<h4><img src="assets/img/competence.png"> Php <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
<div class="skill">
<h4><img src="assets/img/competence.png"> Python <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
</div>
<div class="col-md-4">
<div class="skill">
<h4><img src="assets/img/competence.png"> Excel <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
<div class="skill">
<h4><img src="assets/img/competence.png"> Javascript <img src="assets/img/competence.png"></h4>
</div><!-- /.skill -->
</div>
</div>
</div>
</section><!-- /.secton-skills -->
<section id="service" class="site-section section-services overlay text-center">
<div class="container">
<div class="row">
<div class="col-md-12">
<h3>Mes Objectifs</h3>
<img src="assets/img/lines.svg" class="img-lines" alt="lines">
</div>
<div class="col-sm-4">
<div class="service">
<img src="assets/img/front-end.svg" alt="Front End Developer">
<h4>Création Projet Vitrine</h4>
<p>Je souhaite dévelloper un site vitrine</p>
</div><!-- /.service -->
</div>
<div class="col-sm-4">
<div class="service">
<img src="assets/img/back-end.svg" alt="Back End Developer">
<h4>Projet Back-office</h4>
<p>Manier avec précision le language PHP et les mettre en lien avec des base de données
MYSQL; Créer des interfaces ADMINISTRATEUR</p>
</div><!-- /.service -->
</div>
<div class="col-sm-4">
<div class="service">
<img src="assets/img/consultancy.svg" alt="Coding">
<h4>Consultant</h4>
<p>Definir un cahier des charges, et proposer des solutions adaptées à votre projet.</p>
</div><!-- /.service -->
</div>
</div>
</div>
</section><!-- /.secton-services -->
<section id="portfolio" class="site-section section-portfolio">
<div class="container">
<div class="text-center">
<h3>Mes Dernieres réalisations</h3>
<img src="assets/img/lines.svg" class="img-lines" alt="lines">
</div>
<?php $sql = "SELECT P.id, P.titre, P.slug, P.content, P.image, P.creation, U.pseudo
FROM projets AS P
INNER JOIN users AS U
ON P.auteur = U.id
WHERE P.statut = 1
ORDER BY creation DESC
LIMIT 5";
$projets = query($sql);
?>
<div class="row">
<?php $i = 1;
foreach ($projets as $projet) { ?>
<div class="col-md-4 col-xs-6">
<div class="portfolio-item">
<img src="<?= $projet['image']; ?>" class="img-res" alt="<?= $projet['titre']; ?>">
<div class="portfolio-item-info">
<h4><?= $projet['titre']; ?>
<a href="/projet.php?slug=<?= $projet['slug']; ?>"><span class="glyphicon glyphicon-eye-open"></span></a>
<a href="https://github.com/Johan667"><span class="glyphicon glyphicon-link"><img src=../assets/img/logo-github.png></span></a>
</div>
</div>
</div>
<?php $i++;
} ?>
<div class="col-md-4 col-xs-6">
<div class="portfolio-item">
<img src="assets/img/portfolio-5.jpg" class="img-res" alt="">
<div class="portfolio-item-info">
<h4>Project personnelle en cours</h4>
<a href="/projet.php?slug=<?= $projet['slug']; ?>" data-target="#portfolioItem<?= $i; ?>"><span class="glyphicon glyphicon-eye-open"></span></a>
<a href="https://github.com/Johan667"><span class="glyphicon glyphicon-link"><img src=../assets/img/logo-github.png></span></a>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
<section class="site-section section-counters text-center">
<div class="container">
<div class="row">
<div class="col-sm-4 col-xs-12">
<p class="counter start" data-to="10" data-speed="2000">3</p>
<h4>Mois D'experience</h4>
</div>
<div class="col-sm-4 col-xs-12">
<p class="counter start" data-to="1" data-speed="2000">3</p>
<h4>Projet réalisées</h4>
</div>
<div class="col-sm-4 col-xs-12">
<p id="infinity" class="counter" data-from="0" data-to="1" data-speed="1000">204</p>
<h4>Cafés</h4>
</div>
</div>
</div>
</section><!-- /.section-counters -->
<section id="contact" class="site-section section-form text-center">
<div class="container">
<h3>Contactez moi</h3>
<img src="assets/img/lines.svg" class="img-lines" alt="lines">
<form method="post">
<div class="row">
<div class="col-sm-12">
<input type="text" name="nom" placeholder="Prénom/Nom" maxlength="50" size="30" value="<?php if (
isset($_POST['nom'])
) echo htmlspecialchars($_POST['nom']); ?>">
<input type="text" name="email" placeholder="E-mail" maxlength="80" size="30" value="<?php if (isset($_POST['email'])) echo htmlspecialchars($_POST['email']); ?>">
</div>
<div class="col-sm-12">
<textarea name="commentaire" placeholder=" Contenu de votre demande" cols="40" rows="5">
<?php if (isset($_POST['commentaire'])) echo htmlspecialchars($_POST['commentaire']); ?></textarea>
</div>
</div>
<button href="#" class="btn btn-border" type="submit">Envoyer <span class="glyphicon glyphicon-send"></span></button>
</form>
</div>
</section><!-- /.section-form -->
</main><!-- /#main -->
<?php $i = 1;
foreach ($projets as $projet) { ?>
<div id="portfolioItem<?= $i; ?>" class="modal fade" role="dialog">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<a class="close" data-dismiss="modal"><span class="glyphicon glyphicon-remove"></span></a>
<img class="img-res" src="<?= $projet['image']; ?>" alt="<?= $projet['titre']; ?>">
</div>
<div class="modal-body">
<h4 class="modal-title"><?= $projet['titre']; ?></h4>
<p><?= $projet['content']; ?></p>
</div>
<div class="modal-footer">
<a href="https://github.com/Johan667" class="btn btn-fill"><span class="fa fa-github mr-2"></span>Github</a>
<a class="btn btn-dark" data-toggle="collapse" href="#multiCollapse-<?= $i; ?>-1" role="button" aria-expanded="false" aria-controls="multiCollapse-<?= $i; ?>-1">Commentaires</a>
<a class="btn btn-dark" data-toggle="collapse" href="#multiCollapse-<?= $i; ?>-2" role="button" aria-expanded="false" aria-controls="multiCollapse-<?= $i; ?>-2">Votre avis</a>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<?php $i++;
} ?>
<?php require_once 'part/footer.php'; ?><file_sep><?php
require_once 'header.php';
require_once '../config/framework.php';
require_once '../config/connect.php';
if (!isset($_SESSION['user'])) {
redirectToRoute();
}
if (isset($_SESSION['user'])) {
$roles = json_decode($_SESSION['user']['roles']);
if (!in_array('ROLE_ADMIN', $roles)) {
redirectToRoute();
}
}
?>
</nav>
<!-- /. NAV SIDE -->
<div id="page-wrapper">
<div id="page-inner">
<div class="row">
<div class="col-lg-12">
<h2>ADMIN DASHBOARD</h2>
</div>
</div>
<!-- /. ROW -->
<hr />
<div class="row">
<div class="col-lg-12 ">
<div class="alert alert-info">
<strong>Welcome <?= $_SESSION['user']['pseudo']; ?> ! </strong> Travail bien aujourd'hui !
</div>
</div>
</div>
<!-- /. ROW -->
<div class="row text-center pad-top">
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="mail.php">
<i class="fa fa-envelope-o fa-5x"></i>
<h4>Mail</h4>
</a>
</div>
</div>
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="users.php">
<i class="fa fa-users fa-5x"></i>
<h4>Users</h4>
</a>
</div>
</div>
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="admin.php">
<i class="fa fa-key fa-5x"></i>
<h4>Admin </h4>
</a>
</div>
</div>
</div>
<div class="row text-center pad-top">
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="projets.php">
<i class="fa fa-clipboard fa-5x"></i>
<h4>Projets</h4>
</a>
</div>
</div>
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="support.php">
<i class="fa fa-comments-o fa-5x"></i>
<h4>Support</h4>
</a>
</div>
</div>
<div class="col-lg-2 col-md-2 col-sm-2 col-xs-6">
<div class="div-square">
<a href="settings.php">
<i class="fa fa-gear fa-5x"></i>
<h4>Settings</h4>
</a>
</div>
</div>
</div>
</div>
<!-- /. PAGE INNER -->
</div>
<!-- /. PAGE WRAPPER -->
</div>
<div class="footer">
<div class="row">
<div class="col-lg-12">
© 2021 <NAME>| Administrator
</div>
</div>
</div>
<!-- /. WRAPPER -->
<!-- SCRIPTS -AT THE BOTOM TO REDUCE THE LOAD TIME-->
<!-- JQUERY SCRIPTS -->
<script src="assets/js/jquery-1.10.2.js"></script>
<!-- BOOTSTRAP SCRIPTS -->
<script src="assets/js/bootstrap.min.js"></script>
<!-- CUSTOM SCRIPTS -->
<script src="assets/js/custom.js"></script>
</body>
</html>
re | d9e3ad02652569a30359584f684bba0eee0a0bc8 | [
"SQL",
"PHP"
] | 21 | PHP | Johan667/Portfolio | 67025a24a417d5e57db6d4089bc116cb67b0e2b4 | 0691e92577ff01b552b51288258c6bd61aaf43d9 |
refs/heads/master | <repo_name>asiojs/finotek<file_sep>/EDMS3/WebContent/js/popup/registMail.js
var registMail = {
treeObjectForRegistMail : null,
// URL메일송부 Table tr count
wMailDoc : 0,
uMailDoc : 0,
// 사용자선택 받는사람/참조/숨은참조 Table tr count
wReceiver : 0,
// 첨부파일
gPageList : null,
// 0. 초기화
initTree : function(rowDataList) {
// 작업카트 > URL 메일 송부 - 창 닫기
$('.url_emailSend_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_emailSend').addClass('hide');
$('.url_emailSend_wrapper').addClass('hide');
});
// 작업카트 > URL 메일 송부 창 닫기 : 음영진 부분 클릭 시 닫기
$('.url_emailSend_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.url_emailSend').addClass('hide');
});
// 작업카트 > URL 메일 송부 > 찾기 - 창 닫기
$('.user_choose_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.user_choose').addClass('hide');
$('.user_choose_wrapper').addClass('hide');
});
// 작업카트 > URL 메일 송부 > 찾기 창 닫기 : 음영진 부분 클릭 시 닫기
$('.user_choose_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.user_choose').addClass('hide');
});
// 1. 트리 초기화
if (registMail.treeObjectForRegistMail == undefined) {
var treeOption = {
divId : "#user_tree",
context : exsoft.contextRoot,
url : "/group/groupList.do"
};
registMail.treeObjectForRegistMail = new XFTree(treeOption);
registMail.treeObjectForRegistMail.callbackSelectNode = function(e, data) {
// 검색 옵션 초기화
$("#mail_userName").val("");
$("#mail_groupName").val("");
var param = {
groupName : "",
userName : "",
groupId : data.node.id
}
// 부서 사용자 목록 조회
exsoft.util.grid.gridPostDataRefresh('#mail_user_table', exsoft.contextRoot + '/user/searchUserList.do', param);
}
registMail.treeObjectForRegistMail.init();
registMail.event.mailDocData(rowDataList);
} else {
registMail.treeObjectForRegistMail.refresh();
registMail.event.mailDocData(rowDataList);
}
// 2. 사용자 목록 그리드 초기화
if ($("#mail_user_table")[0].grid == undefined) {
$('#mail_user_table').jqGrid({
url:exsoft.contextRoot + '/user/searchUserList.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['','group_nm','user_name_ko','position_nm','role_nm','email','user_status_nm'],
colModel:[
{name:'user_id',index:'user_id',width:80, editable:false,sortable:false,resizable:true,align:'center',key:true},
{name:'group_nm',index:'group_nm',width:90, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
{name:'user_name_ko',index:'user_name_ko',width:100, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
{name:'position_nm',index:'position_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'role_nm',index:'role_nm',width:10, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'email',index:'email',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'user_status_nm',index:'user_status_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'}
],
autowidth:true,
height:200,
viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
sortname : "user_name_ko",
sortorder:"asc",
scroll:true, // virtual Scrolling
scrollOffset : 0,
rowNum : 10,
rowList : exsoft.util.grid.listArraySize(),
emptyDataText: "데이터가 없습니다.",
caption:'사용자 목록',
loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
,loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('mail_user_table');
}
,loadComplete: function() {
exsoft.util.grid.gridInputInit(false);
}
});
// Grid 컬럼정렬 처리
var headerData = '{"user_name_ko":"사용자명","user_id":"ID","group_nm":"그룹명"}';
exsoft.util.grid.gridColumHeader('mail_user_table',headerData,'center');
headerData = null;
}
},
// 1. 팝업
open : {
// URL메일송부 - 창열기
open : function() {
exsoft.util.layout.divLayerOpen("url_emailSend_wrapper", "url_emailSend");
},
// 사용자선택 - 창열기
addrOpen : function() {
// 받는사람 / 참조 / 숨은참조 테이블 초기화
$('#receiver_table tr:gt(0)').remove();
$('#cc_table tr:gt(0)').remove();
$('#hcc_table tr:gt(0)').remove();
// 받는사람 / 참조 / 숨은참조 체크박스 초기화
exsoft.util.common.checkboxInit("checkAll_receiver_table");
exsoft.util.common.checkboxInit("checkAll_cc_table");
exsoft.util.common.checkboxInit("checkAll_hcc_table");
exsoft.util.layout.divLayerOpen("user_choose_wrapper", "user_choose");
},
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : {
// URL메일송부 - 창열기
close : function() {
exsoft.util.layout.divLayerClose("url_emailSend_wrapper", "url_emailSend");
},
// 사용자선택 - 창닫기
addrClose : function() {
exsoft.util.layout.divLayerClose("user_choose_wrapper", "user_choose");
},
},
//4. 화면 이벤트 처리
event : {
// URL메일송부 - 보내기버튼 클릭시
sendButtonClick : function() {
if($("#receiver_email").val() == "") {
jAlert("받는사람을 입력해주세요", "URL메일송부", 0);
return;
} else if($("#email_subject").val() == "") {
jAlert("메일 제목을 입력해주세요", "URL메일송부", 0);
return;
} else {
urlCopyPeriod.init("URL_SEND");
}
},
// 사용자선택 - 확인버튼 클릭시
okButtonClick : function() {
var receiver_mail_add = "";
var cc_mail_add = "";
var hcc_mail_add = "";
var receiver_user_id = $("input[name=chb_receiver_table]");
var cc_user_id = $("input[name=chb_cc_table]");
var hcc_user_id = $("input[name=chb_hcc_table]");
// 받는사람
if(receiver_user_id.length > 0) {
$(receiver_user_id).each(function(index) {
if(receiver_user_id.length > 1) {
receiver_mail_add += $("#mail_"+$(this).val()).val() + ";";
} else {
receiver_mail_add = $("#mail_"+$(this).val()).val();
}
});
if(receiver_user_id.length > 1) {
receiver_mail_add = receiver_mail_add.substring(0, receiver_mail_add.length-1);
}
$("#receiver_email").val(receiver_mail_add);
} else {
jAlert("받는사람을 선택하세요", "사용자선택", 0);
return false;
}
// 참조
$(cc_user_id).each(function(index) {
if(cc_user_id.length > 1) {
cc_mail_add += $("#mail_"+$(this).val()).val() + ";";
} else {
cc_mail_add = $("#mail_"+$(this).val()).val();
}
});
if(cc_user_id.length > 1) {
cc_mail_add = cc_mail_add.substring(0, cc_mail_add.length-1);
}
$("#cc_email").val(cc_mail_add);
// 숨은참조
$(hcc_user_id).each(function(index) {
if(hcc_user_id.length > 1) {
hcc_mail_add += $("#mail_"+$(this).val()).val() + ";";
} else {
hcc_mail_add = $("#mail_"+$(this).val()).val();
}
});
if(hcc_user_id.length > 1) {
hcc_mail_add = hcc_mail_add.substring(0, hcc_mail_add.length-1);
}
$("#hcc_email").val(hcc_mail_add);
// 사용자 선택 창 닫기
registMail.close.addrClose();
},
// URL메일송부 - 취소버튼 클릭시
sendCancelButtonClick : function() {
registMail.close.close();
},
// 사용자선택 - 취소버튼 클릭시
selectUserCancelButtonClick : function() {
// 받는사람 / 참조 / 숨은참조 테이블 초기화
$('#receiver_table tr:gt(0)').remove();
$('#cc_table tr:gt(0)').remove();
$('#hcc_table tr:gt(0)').remove();
// 받는사람 / 참조 / 숨은참조 체크박스 초기화
exsoft.util.common.checkboxInit("checkAll_receiver_table");
exsoft.util.common.checkboxInit("checkAll_cc_table");
exsoft.util.common.checkboxInit("checkAll_hcc_table");
registMail.close.addrClose();
},
// URL메일송부 - Table List Print
mailDocData : function(mailDocList) {
// 화면 초기화
$('#mailDocTable tr:gt(0)').remove();
$("#receiver_email").val("");
$("#cc_email").val("");
$("#hcc_email").val("");
$("#email_subject").val("");
$("#email_msgTxt").val("");
var buffer = "";
var doc_id_list = "";
$(mailDocList).each(function(index) {
if(mailDocList.length > 1) {
doc_id_list += "'"+mailDocList[index].doc_id+"'" + ",";
} else {
doc_id_list = "'"+mailDocList[index].doc_id+"'";
}
});
if(mailDocList.length > 1) {
doc_id_list = doc_id_list.substring(0, doc_id_list.length-1);
}
exsoft.util.ajax.ajaxDataFunctionWithCallback({doc_id_list:doc_id_list}, exsoft.contextRoot+'/document/documentAttachFileByIDList.do', "attachFile", function(data, param) {
if (data.result == "false") {
jAlert(data.message, "URL메일송부", 0);
return;
} else {
registMail.gPageList = data.pageList;
for(var j = 0; j < data.pageList.length; j++){
buffer += "<tr id='MAIL_{0}'>".format(data.pageList[j].page_id);
buffer += " <td class='left'>" + exsoft.util.common.stripHtml(data.pageList[j].page_name) + "</td>";
buffer += " <td>" + data.pageList[j].fsize + "</td>";
buffer += " <td> <img src='"+ exsoft.contextRoot +"/img/icon/window_close3.png' onclick=javascript:registMail.event.mailDocDelete('"+data.pageList[j].page_id+"')></td> ";
buffer += "</tr>";
}
}
$("#mailDocTable").append(buffer);
// 보내는사람 / Email 세팅
$("#sender_name").val(exsoft.user.user_name);
$("#sender_email").val(exsoft.user.user_email);
registMail.open.open();
});
},
// URL메일송부 - Table TR delete
mailDocDelete : function(delId){
$("#MAIL_"+delId).remove();
},
// 받는사람 / 참조 / 숨은참조 사용자 추가
mailReceiverPlus : function(target_name, table_name) {
registMail.wReceiver = 0;
if(!exsoft.util.grid.gridSelectCheck('mail_user_table')){
jAlert("사용자를 선택하세요", target_name, 0);
return false;
} else {
var docIdList = exsoft.util.grid.gridSelectData('mail_user_table', 'user_id');
var rowDataList = new Array();
$(docIdList.split(",")).each(function(i) {
if(this != ""){
var row = $("#mail_user_table").getRowData(this);
rowDataList.push(row);
registMail.wReceiver += rowDataList.length;
}
});
registMail.event.setMailReceiverTable(rowDataList, table_name);
}
},
// 받는사람 / 참조 / 숨은참조 사용자리스트 테이블 세팅
setMailReceiverTable : function(receiverList, table_name) {
var buffer = "";
for(var i = 0; i < receiverList.length; i++){
registMail.uMailDoc++;
buffer += "<tr id='RCV_{0}'>".format(receiverList[i].user_id);
buffer += " <td class='left'><input type='checkbox' name='chb_"+table_name+"' value='{0}'></td>".format(receiverList[i].user_id);
buffer += " <td>" + receiverList[i].user_name_ko + "<input type='hidden' id='mail_"+receiverList[i].user_id+"' value='{0}'/> </td>".format(receiverList[i].email);
buffer += " <td>" + receiverList[i].email + "</td>";
buffer += "</tr>";
}
$('#'+table_name).append(buffer);
},
// 받는사람 / 참조 / 숨은참조 사용자 제외
mailReceiverMinus : function(target_name, table_name) {
var checkedVal = "";
var selected_user = $("input[name=chb_"+table_name+"]:checked");
if(selected_user.length > 0) {
$(selected_user).each(function(index) {
if(selected_user.length > 1) {
checkedVal += $(this).val() + ",";
} else {
checkedVal = $(this).val();
}
});
if(selected_user.length > 1) {
checkedVal = checkedVal.substring(0, checkedVal.length-1);
}
var a = checkedVal.split(',');
for(i=0; i< a.length; i++) {
$("#"+table_name+" #RCV_"+a[i]).remove();
}
} else {
jAlert("사용자를 선택하세요", target_name, 0);
return false;
}
},
// 사용자 검색
searchGroupUser : function() {
var param = {
groupName : $("#mail_groupName").val(),
userName : $("#mail_userName").val(),
groupId : ''
}
// 부서 사용자 목록 조회
exsoft.util.grid.gridPostDataRefresh('#mail_user_table', exsoft.contextRoot + '/user/searchUserList.do', param);
},
// 엔터키 입력시
enterKeyPress : function(e) {
if (e.keyCode == 13) {
registMail.event.searchGroupUser();
return false;
}
},
	// Sends the URL mail: builds the copied-URL link list and the message body
	// into the hidden #copyToMail template, then posts it to /common/sendURLMail.do.
	sendOperation : function() {
		// Build the attached-document (copied URL) list for the mail body.
		var buffer = "";
		var params = "";
		if(registMail.gPageList.length == 0) {
			$("#copy_file_list").html("첨부된 파일이 없습니다.");
		} else {
			$(registMail.gPageList).each(function(index) {
				// Each link payload is "<page_id>#<expiry-date>"; 0 means no expiry
				// (9999-12-31), otherwise today + urlCopyPeriod.setUrlValue days.
				if(urlCopyPeriod.setUrlValue == 0) {
					params = this.page_id + "#" + "9999-12-31";
				}else {
					params = this.page_id + "#" + exsoft.util.date.addDate("d",urlCopyPeriod.setUrlValue, exsoft.util.date.todayStr(),"-");
				}
				// The payload is base64-encoded and appended to the download server URL.
				buffer += "<br><a href='" + urlCopyPeriod.gServerUrl + base64Encode(params) + "'>" + this.page_name + "</a><br>";
				params = "";
			});
			$("#copy_file_list").html(buffer);
		}
		// Copy the free-text message (or a "no content" placeholder) into the template.
		if($("#email_msgTxt").val() == "") {
			$("#copy_msg_txt").html("입력한 내용이 없습니다.");
		} else {
			$("#copy_msg_txt").html($("#email_msgTxt").val());
		}
		// The mail body is the rendered HTML of the #copyToMail container.
		var postData = {
			subject : $("#email_subject").val(),
			receiver_address : $("#receiver_email").val(),
			cc_address : $("#cc_email").val(),
			hcc_address : $("#hcc_email").val(),
			messageText : $("#copyToMail").html()
		}
		exsoft.util.ajax.ajaxDataFunctionWithCallback(postData, exsoft.contextRoot+'/common/sendURLMail.do', "sendURLMail", function(data, param) {
			if (data.result == "success") {
				// Close the popup before notifying success.
				registMail.close.close();
				jAlert("메일 발송 완료", "URL메일송부", 0);
			} else {
				jAlert(data.message, "URL메일송부", 0);
				return;
			}
		});
	}
},
//5. 화면 UI 변경 처리
ui : {
},
//6. callback 처리
callback : {
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/external/service/ExternalServiceImpl.java
package kr.co.exsoft.external.service;
import java.util.HashMap;
import java.util.List;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import kr.co.exsoft.external.dao.ExternalDao;
/***
 * External service implementation — syncs user/group data from an external DB.
 * Comment out the @Autowired/@Qualifier wiring below when no external DB is used.
 * @author Package development team
 * @since 2014.07.17
 * @version 3.0
 *
 */
@Service("externalService")
public class ExternalServiceImpl extends ExsoftAbstractServiceImpl implements ExternalService {

	// MyBatis session bound to the external (imported) data source.
	@Autowired
	@Qualifier("sqlSessionImp")
	private SqlSession sqlSessionImp;

	// MyBatis session bound to the main EDMS data source.
	@Autowired
	@Qualifier("sqlSession")
	private SqlSession sqlSession;

	/**
	 * Looks up a single external user record.
	 * @param map query parameters passed through to the mapper
	 * @return the user row (case-insensitive keys)
	 * @throws Exception "result.nodata.msg" when no row is found
	 */
	@Override
	public CaseInsensitiveMap externalUserDetail(HashMap<String,Object> map) throws Exception {
		ExternalDao externalDao = sqlSessionImp.getMapper(ExternalDao.class);
		// Fix: the original allocated an empty CaseInsensitiveMap that was
		// immediately overwritten by the DAO result.
		CaseInsensitiveMap ret = externalDao.externalUserDetail(map);
		if (ret == null)
			throw processException("result.nodata.msg");
		return ret;
	}

	/**
	 * Inserts a single external user record.
	 * @return the DAO-reported affected row count
	 */
	@Override
	public int externalUserWrite(HashMap<String,Object> map) throws Exception {
		ExternalDao externalDao = sqlSessionImp.getMapper(ExternalDao.class);
		return externalDao.externalUserWrite(map);
	}

	/**
	 * Inserts a user and its group membership as one transactional unit.
	 * Throws "result.insert.fail" if either insert does not affect exactly one row.
	 * @return the affected row count of the group-membership insert (1 on success)
	 */
	@Override
	public int externalUserWriteTx(HashMap<String,Object> map) throws Exception {
		ExternalDao externalDao = sqlSessionImp.getMapper(ExternalDao.class);
		int ret = externalDao.externalUserWrite(map);
		if(ret != 1)
			throw processException("result.insert.fail");
		ret = externalDao.externalGroupedWrite(map);
		if(ret != 1)
			throw processException("result.insert.fail");
		return ret;
	}

	/**
	 * Batch-writes the given records via the external mapper.
	 * NOTE(review): despite the method name, this calls externalGroupedWrite
	 * (group membership) rather than externalUserWrite for each entry — this
	 * matches the original behavior; confirm against callers before renaming.
	 */
	@Override
	public void batchUserWrite(List<HashMap<String,Object>> userList) throws Exception {
		ExternalDao externalDao = sqlSessionImp.getMapper(ExternalDao.class);
		for(HashMap<String,Object> map : userList) {
			externalDao.externalGroupedWrite(map);
		}
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/statistics/service/StatisticsServiceImpl.java
package kr.co.exsoft.statistics.service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.HashSet;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.PagingAjaxUtil;
import kr.co.exsoft.folder.service.FolderService;
import kr.co.exsoft.statistics.dao.StatisticsDao;
import kr.co.exsoft.statistics.vo.DocumentDecadeVO;
import kr.co.exsoft.statistics.vo.DocumentGroupHtVO;
import kr.co.exsoft.statistics.vo.DocumentStatusVO;
import kr.co.exsoft.statistics.vo.DocumentUserHtVO;
import kr.co.exsoft.user.vo.ConnectLogVO;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
/**
 * Statistics service implementation.
 * Provides per-user / per-group / per-folder document statistics and the
 * login-log listing, all returned as grid-ready paged result maps.
 * @author Na Hyun Soo
 * @since 2014.07.17
 * @version 3.0
 *
 */
@Service("statisticsService")
public class StatisticsServiceImpl extends ExsoftAbstractServiceImpl implements StatisticsService {

	/** Ajax paging callback shared by every paged statistics grid. */
	private static final String PAGING_LINK = "javascript:exsoftStatisticsFunc.event.gridPage";

	@Autowired
	@Qualifier("sqlSession")
	private SqlSession sqlSession;

	@Autowired
	private FolderService folderService;

	/**
	 * Fills the standard paged-grid entries (page/records/total/list/pagingInfo)
	 * into resultMap. Extracted because the identical boilerplate was copy-pasted
	 * across every paged method in the original.
	 * @param resultMap target map returned to the controller
	 * @param map request parameters; must contain "nPage" and "page_size"
	 * @param total total row count for the query
	 * @param list the current page of rows
	 */
	private void putPagedResult(Map<String, Object> resultMap, HashMap<String, Object> map, int total, Object list) {
		int nPage = Integer.parseInt(map.get("nPage").toString());
		int pageSize = Integer.parseInt(map.get("page_size").toString());
		resultMap.put("page", map.get("nPage").toString());
		resultMap.put("records", total);
		resultMap.put("total", CommonUtil.getTotPageSize(total, pageSize));
		resultMap.put("list", list);
		String contextRoot = map.get("contextRoot") != null ? map.get("contextRoot").toString() : "";
		resultMap.put("pagingInfo", new PagingAjaxUtil(nPage, total, pageSize, 10, PAGING_LINK, contextRoot));
	}

	/** Deduplicated list of all descendant folder ids (self included per folderService contract — TODO confirm). */
	private List<String> distinctSubFolderIds(String folderId) throws Exception {
		return new ArrayList<String>(new HashSet<String>(folderService.childFolderIdsByfolderId(folderId, null)));
	}

	/** Per-user document holdings, paged. */
	@Override
	public Map<String, Object> userDocStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		int total = statisticsDao.userDocStatisticsCnt(map);
		List<DocumentUserHtVO> ret = statisticsDao.userDocStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/** Per-group document holdings, paged. */
	@Override
	public Map<String, Object> groupDocStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		int total = statisticsDao.groupDocStatisticsCnt(map);
		List<DocumentGroupHtVO> ret = statisticsDao.groupDocStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/** Ten-day-interval per-user document statistics (unpaged chart data). */
	@Override
	public Map<String, Object> decadeUserDocStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		List<DocumentDecadeVO> ret = statisticsDao.decadeUserDocStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("records", ret.size());
		resultMap.put("list", ret);
		return resultMap;
	}

	/** Ten-day-interval per-group document statistics (unpaged chart data). */
	@Override
	public Map<String, Object> decadeGroupDocStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		List<DocumentDecadeVO> ret = statisticsDao.decadeGroupDocStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("records", ret.size());
		resultMap.put("list", ret);
		return resultMap;
	}

	/** Per-folder document holdings for a user, paged. */
	@Override
	public Map<String, Object> userFolderStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		// NOTE: "userFoldertatisticsCnt" is the mapper's actual (misspelled) id; do not "fix" here.
		int total = statisticsDao.userFoldertatisticsCnt(map);
		List<DocumentStatusVO> ret = statisticsDao.userFolderStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/**
	 * Cabinet/folder holdings: aggregates the raw per-folder status rows up to
	 * each base folder (including all of its descendants), paged.
	 */
	@Override
	public Map<String, Object> folderStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		// 1.1 Project group: resolve the project root id first (1 depth).
		if(map.get("strFolderIdx").toString().equals(Constant.MAP_ID_PROJECT)) {
			map.put("project_root_id", statisticsDao.statisticsProjectRootId(map));
		}
		// 1.2 Raw per-folder document holdings.
		List<DocumentStatusVO> statusList = statisticsDao.userFolderStatisticsList(map);
		// 2. Base folder list: drives on-screen ordering and row count.
		int total = statisticsDao.statisticsFolderListCnt(map);
		List<CaseInsensitiveMap> folderList = statisticsDao.statisticsFolderList(map);
		List<DocumentStatusVO> ret = new ArrayList<DocumentStatusVO>();
		if(folderList != null && folderList.size() > 0) {
			for(CaseInsensitiveMap caseMap : folderList) {
				int page_cnt = 0;
				long page_total = 0;
				int doc_cnt = 0;
				String folder_id = caseMap.get("folder_id").toString();
				List<String> subFolders = distinctSubFolderIds(folder_id);
				// Folders without documents must still appear (counts stay 0).
				if(statusList != null && statusList.size() > 0) {
					for(DocumentStatusVO documentStatusVO : statusList) {
						// subFolders is deduplicated, so contains() is equivalent
						// to the original's match-and-accumulate inner loop.
						if(subFolders.contains(documentStatusVO.getFolder_id())) {
							page_cnt += documentStatusVO.getPage_cnt();
							page_total += documentStatusVO.getPage_total();
							doc_cnt += documentStatusVO.getDoc_cnt();
						}
					}
				}
				DocumentStatusVO docStVO = new DocumentStatusVO();
				docStVO.setGroup_nm(caseMap.get("folder_nm").toString());
				docStVO.setDoc_cnt(doc_cnt);
				docStVO.setPage_cnt(page_cnt);
				docStVO.setPage_total(page_total);
				ret.add(docStVO);
			}
		}
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/** Document counts grouped by document type (unpaged). */
	@Override
	public Map<String, Object> statisticsTypeList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		List<DocumentStatusVO> ret = statisticsDao.statisticsTypeList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("records", ret.size());
		resultMap.put("list", ret);
		return resultMap;
	}

	/** Document counts grouped by security level, paged. */
	@Override
	public Map<String, Object> statisticsSecurityList(HashMap<String,Object> map) throws Exception{
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		int total = statisticsDao.statisticsSecurityCnt(map);
		List<DocumentStatusVO> ret = statisticsDao.statisticsSecurityList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/**
	 * Quota usage per user/department: sums current page_total over each
	 * subject's folder subtree and pairs it with the configured quota, paged.
	 */
	@Override
	public Map<String, Object> statisticsQuotaList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		// 1. Current usage per folder.
		List<DocumentStatusVO> quotaList = statisticsDao.statisticsQuotaList(map);
		// 2. Subject (user/department) list: PART_ID, PART_NM, STORAGE_QUOTA.
		int total = statisticsDao.statisticsQuotaPageCnt(map);
		List<CaseInsensitiveMap> gridList = statisticsDao.statisticsQuotaPageList(map);
		List<DocumentStatusVO> ret = new ArrayList<DocumentStatusVO>();
		// Statistics can only be rendered when both queries returned rows
		// (matches original behavior: an empty quotaList yields an empty grid).
		if(gridList != null && gridList.size() > 0 && quotaList != null && quotaList.size() > 0) {
			for(CaseInsensitiveMap caseMap : gridList) {
				long page_total = 0; // current usage
				String folder_id = caseMap.get("part_id").toString(); // user or group id
				List<String> subFolders = distinctSubFolderIds(folder_id);
				for(DocumentStatusVO documentStatusVO : quotaList) {
					if(subFolders.contains(documentStatusVO.getFolder_id())) {
						page_total += documentStatusVO.getPage_total();
					}
				}
				DocumentStatusVO docStVO = new DocumentStatusVO();
				docStVO.setPart_id(folder_id);
				docStVO.setPart_nm(caseMap.get("part_nm").toString());
				docStVO.setStorage_quota(Long.parseLong(caseMap.get("storage_quota").toString()));
				docStVO.setPage_total(page_total);
				ret.add(docStVO);
			}
		}
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/** Login/logout connection log, paged. */
	@Override
	public Map<String, Object> loginLogList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		int total = statisticsDao.loginLogPageCnt(map);
		List<ConnectLogVO> ret = statisticsDao.loginLogPageList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		putPagedResult(resultMap, map, total, ret);
		return resultMap;
	}

	/** Current user's own statistics (unpaged). */
	@Override
	public Map<String, Object> myStatisticsList(HashMap<String,Object> map) throws Exception {
		StatisticsDao statisticsDao = sqlSession.getMapper(StatisticsDao.class);
		List<DocumentUserHtVO> ret = statisticsDao.myStatisticsList(map);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		resultMap.put("records", ret.size());
		resultMap.put("list", ret);
		return resultMap;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/process/vo/ProcessVO.java
package kr.co.exsoft.process.vo;
import java.sql.Date;
import java.util.ArrayList;
import java.util.List;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.eframework.vo.VO;
/**
 * Collaboration (process) VO.
 * Status flow: request(Q) > writing(W) > approving(A) > approved(AE) > revising(M) > done(E).
 * @author Package development team
 * @since 2015.03.12
 * @version 3.0
 *
 */
public class ProcessVO extends VO {

	private String process_id;            // process key
	private String doc_root_id;           // root_id of the collaboration target document
	private String creator_id;            // creator user_id
	private String creator_name;          // creator name
	private String create_date;           // process registration date
	private String name;                  // process name
	private String status;                // Q > W > A > AE > M > E (see class doc)
	private String expect_date;           // expected completion date
	private Date expect_dateDB;           // java.sql.Date used for DB insert/update
	private String complete_date;         // actual completion date
	private Date complete_dateDB;         // java.sql.Date used for DB insert/update
	private String status_nm;             // localized status label
	private String author_nm;             // representative author name
	private String write_count;           // writing progress, e.g. "1/3"
	private List<String> write_list;      // writer list
	private String approval_count;        // approval progress, e.g. "1/3"
	private List<String> approval_list;   // approver list
	private String receiver_count;        // read progress, e.g. "2/4"
	private List<String> receiver_list;   // reader list
	private String recently_id;           // recently-registered collaboration list id
	private String content;               // request content
	private String status_number;         // step-image <li> index on the detail view

	public ProcessVO(){
		this.process_id = "";
		this.doc_root_id = "";
		this.creator_id = "";
		this.creator_name = "";
		this.create_date = "";
		this.name = "";
		this.status = Constant.PROCESS_STATUS_REQUEST; //Q
		this.expect_date = "";
		this.expect_dateDB = null;
		this.complete_date = "";
		this.complete_dateDB = null;
		this.status_nm = "요청";
		this.author_nm = "없음";
		this.write_count = "0/0";
		this.write_list = new ArrayList<String>();
		this.approval_count = "0/0";
		this.approval_list = new ArrayList<String>();
		this.receiver_count = "0/0";
		this.receiver_list = new ArrayList<String>();
		this.recently_id = "";
		this.content = "";
		this.status_number = "1";
	}

	/**
	 * Trims a timestamp string down to its first 10 characters (the date part).
	 * Extracted: this expression was duplicated across the three date getters.
	 */
	private static String toDateOnly(String date) {
		return (!StringUtil.isEmpty(date) && date.length() > 10) ? date.substring(0, 10) : date;
	}

	public String getProcess_id() {
		return process_id;
	}
	public void setProcess_id(String process_id) {
		this.process_id = process_id;
	}
	public String getDoc_root_id() {
		return doc_root_id;
	}
	public void setDoc_root_id(String doc_root_id) {
		this.doc_root_id = doc_root_id;
	}
	public String getCreator_id() {
		return creator_id;
	}
	public void setCreator_id(String creator_id) {
		this.creator_id = creator_id;
	}
	public String getCreator_name() {
		return creator_name;
	}
	public void setCreator_name(String creator_name) {
		this.creator_name = creator_name;
	}
	/** Returns the registration date truncated to yyyy-MM-dd. */
	public String getCreate_date() {
		return toDateOnly(create_date);
	}
	public void setCreate_date(String create_date) {
		this.create_date = create_date;
	}
	public String getName() {
		return name;
	}
	public void setName(String name) {
		this.name = name;
	}
	public String getStatus() {
		return status;
	}
	/**
	 * Sets the status code and derives the localized label (status_nm) plus,
	 * for W/A/M/E, the step-image index (status_number). Unknown codes fall
	 * back to the request label, matching the original behavior.
	 */
	public void setStatus(String status) {
		this.status = status;
		switch (status) {
			case Constant.PROCESS_STATUS_REQUEST : setStatus_nm("업무요청"); break;
			case Constant.PROCESS_STATUS_WRITE : setStatus_nm("작성중"); setStatus_number("1"); break;
			case Constant.PROCESS_STATUS_APPROVAL : setStatus_nm("승인중"); setStatus_number("2"); break;
			case Constant.PROCESS_STATUS_APPROVAL_END : setStatus_nm("승인완료"); break;
			case Constant.PROCESS_STATUS_MODIFY : setStatus_nm("보완중"); setStatus_number("3"); break;
			case Constant.PROCESS_STATUS_END : setStatus_nm("완료"); setStatus_number("4"); break;
			default:
				setStatus_nm("업무요청"); break;
		}
	}
	/** Returns the expected completion date truncated to yyyy-MM-dd. */
	public String getExpect_date() {
		return toDateOnly(expect_date);
	}
	public void setExpect_date(String expect_date) {
		this.expect_date = expect_date;
	}
	/** Returns the actual completion date truncated to yyyy-MM-dd. */
	public String getComplete_date() {
		return toDateOnly(complete_date);
	}
	public void setComplete_date(String complete_date) {
		this.complete_date = complete_date;
	}
	public String getStatus_nm() {
		return status_nm;
	}
	public void setStatus_nm(String status_nm) {
		this.status_nm = status_nm;
	}
	public String getAuthor_nm() {
		return author_nm;
	}
	public void setAuthor_nm(String author_nm) {
		this.author_nm = author_nm;
	}
	public String getWrite_count() {
		return write_count;
	}
	public void setWrite_count(String write_count) {
		this.write_count = write_count;
	}
	public List<String> getWrite_list() {
		return write_list;
	}
	public void setWrite_list(List<String> write_list) {
		this.write_list = write_list;
	}
	public String getApproval_count() {
		return approval_count;
	}
	public void setApproval_count(String approval_count) {
		this.approval_count = approval_count;
	}
	public List<String> getApproval_list() {
		return approval_list;
	}
	public void setApproval_list(List<String> approval_list) {
		this.approval_list = approval_list;
	}
	public String getReceiver_count() {
		return receiver_count;
	}
	public void setReceiver_count(String receiver_count) {
		this.receiver_count = receiver_count;
	}
	public List<String> getReceiver_list() {
		return receiver_list;
	}
	public void setReceiver_list(List<String> receiver_list) {
		this.receiver_list = receiver_list;
	}
	public String getRecently_id() {
		return recently_id;
	}
	public void setRecently_id(String recently_id) {
		this.recently_id = recently_id;
	}
	public String getContent() {
		return content;
	}
	public void setContent(String content) {
		this.content = content;
	}
	public Date getExpect_dateDB() {
		return expect_dateDB;
	}
	public void setExpect_dateDB(Date expect_dateDB) {
		this.expect_dateDB = expect_dateDB;
	}
	public String getStatus_number() {
		return status_number;
	}
	public void setStatus_number(String status_number) {
		this.status_number = status_number;
	}
	public Date getComplete_dateDB() {
		return complete_dateDB;
	}
	public void setComplete_dateDB(Date complete_dateDB) {
		this.complete_dateDB = complete_dateDB;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/configuration/Constant.java
package kr.co.exsoft.eframework.configuration;
import java.util.HashMap;
/**
* 전역 상수값 정의
* @author 패키지 개발팀
* @since 2014.07.15
* @version 3.0
*
*/
public class Constant {
//Ehcache
public static final String EHCACHE_CACHE_NAME_FOLDERLIST = "FolderListCache"; //ehcace-default.xml 설정값
public static final String EHCACHE_CACHE_KEY_FOLDERIDS = "FOLDERIDS"; //factory에 저장 되어 있는 캐쉬 key
public static final String EHCACHE_CACHE_KEY_FOLDERNAMES = "FOLDERNAMES"; //factory에 저장 되어 있는 캐쉬 key
//document
public static final String DOCUMENT_DOC_ID = "doc_id";
// ---------------------------------------------------------------------------
// ACL
// ---------------------------------------------------------------------------
public static final String ACL_ACL_TYPE_ALL = "ALL";
public static final String ACL_ACL_TYPE_TEAM = "TEAM";
public static final String ACL_ACL_TYPE_DEPT = "DEPT";
public static final String ACL_ACL_TYPE_PRIVATE = "PRIVATE";
public static final String ACL_ACL_TYPE_ALL_NAME = "전사";
public static final String ACL_ACL_TYPE_TEAM_NAME = "부서";
public static final String ACL_ACL_TYPE_DEPT_NAME = "하위부서포함";
public static final String ACL_ACL_TYPE_PRIVATE_NAME = "공유안함";
public static final String ACL_IS_TYPE_FOLDER = "F";
public static final String ACL_IS_TYPE_DOCUMENT = "D";
public static final String ACL_DELETE = "DELETE";
public static final String ACL_UPDATE = "UPDATE";
public static final String ACL_READ = "READ";
public static final String ACL_BROWSE = "BROWSE";
public static final String ACL_NONE = "NONE";
// Folder type 정의
public static final String FOLDER_TYPE_DOCUMENT = "DOCUMENT";
// Locale 정의
public static final String KOR = "KO";
public static final String ENG = "EN";
public static final String JPN = "JA";
public static final String CHN = "ZH";
// 라이센스 관련 정의
public static final String LICENSE_TYPE_CONCURRENT = "CONCURRENT";
public static final String LICENSE_TYPE_NAMED = "NAMED";
// 접속로그
public static final String NORMAL_LOGIN_TYPE = "NORAML";
public static final String SSO_LOGIN_TYPE = "SSO";
public static final String CONNECT_TYPE_LOGIN = "LOGIN";
public static final String CONNECT_TYPE_LOGOUT = "LOGOUT";
// 코드값 정의
public static final String YES = "Y";
public static final String NO = "N";
public static final String NOTHING = "F";
public static final String T = "T";
public static final String F = "F";
public static final String C = "C";
public static final String D = "D";
// 세션 구분 코드값
public static final String SESSION_ADMIN = "admin";
public static final String SESSION_USER = "user";
public static final String LOCATION_ADMIN = "A";
public static final String LOCATION_USER = "U";
// ResultMap 정의
public static final String RESULT_TRUE = "true";
public static final String RESULT_FALSE = "false";
public static final String RESULT_SUCCESS = "success";
public static final String RESULT_FAIL = "fail";
// 시퀀스 테이블 카운터값 정의
public static final String COUNTER_ID_FILE = "ID_FILE"; // 문서, 폴더, 그룹.
public static final String COUNTER_ID_PAGE = "ID_PAGE"; // 파일
public static final String COUNTER_ID_CONNECT_LOG = "XR_CONNECT_LOG"; // 접속로그
public static final String COUNTER_ID_HISTORY = "XR_HISTORY"; // 폴더/권한/문서 이력
public static final String COUNTER_ID_GROUP_HT = "XR_GROUP_HT"; // 그룹 히스토리
public static final String COUNTER_ID_DOCUMENT_HT = "XR_DOCUMENT_HT"; // 문서이력
public static final String COUNTER_ID_PAGE_HT = "XR_PAGE_HT"; // 문서이력
public static final String COUNTER_ID_USER_HT = "XR_USER_HT"; // 사용자 히스토리
public static final String COUNTER_ID_BATCH_WORK = "XR_BATCHWORK"; // 사용자 히스토리
public static final String COUNTER_ID_ACL = "XR_ACL"; // 권한
public static final String COUNTER_ID_NOTE = "XR_NOTE"; // 쪽지 정보
public static final String COUNTER_ID_NOTEMANAGE = "XR_NOTEMANAGE"; // 쪽지 수발신 정보
public static final String COUNTER_ID_PROCESS = "XR_PROCESS"; // 협업 정보
public static final String COUNTER_ID_PROCESS_EXECUTOR = "XR_PROCESS_EXECUTOR"; // 협업실행자 정보
public static final String COUNTER_ID_COMMENT = "XR_COMMENT"; // 댓글
public static final String COUNTER_ID_RECENTLY = "XR_RECENTLY_OBJECT"; // 최근 등록한 문서, 폴더, 협업
// prefix 및 시퀀스 테이블 관련 prefix
public static final String ID_PREFIX_FOLDER = "FOL";
public static final String ID_PREFIX_DOCUMENT = "DOC";
public static final String ID_PREFIX_REF = "REF";
public static final String ID_PREFIX_PAGE = "PAG";
public static final String ID_PREFIX_GROUP = "GRP";
public static final String ID_PREFIX_ACL = "ACL";
public static final String ID_PREFIX_NOTE = "NNI"; // XR_NOTE
public static final String ID_PREFIX_NOTEMANAGE = "NMI"; // XR_NOTEMANAGE
public static final String ID_PREFIX_PROCESS = "PMI"; // XR_PROCESS
public static final String ID_PREFIX_PROCESS_EXECUTOR = "PEI"; // XR_PROCESS_EXECUTE
public static final String ID_PREFIX_COMMENT = "COM"; // XR_COMMONT
public static final String ID_PREFIX_RECENTLY = "REC"; // XR_RECENTLY_OBJECT
// 사용자 업무처리 구분
public static final String TYPE = "type";
public static final String STYPE = "stype";
public static final String DELETE = "delete";
public static final String EXPIRED = "expired";
public static final String INSERT = "insert";
public static final String UPDATE = "update";
public static final String REPLY = "reply";
public static final String UPDATE_TYPE = "updateType";
public static final String COPY = "copy";
public static final String SELECT = "select";
public static final String MOVE = "move";
public static final String UPDATE_STATUS = "update_status";
public static final String RESET_PASS = "reset_pass";
public static final String PTRASH = "privateTrash";
public static final String STRASH = "systemTrash";
public static final String OWNER = "OWNER";
public static final String WORLD = "WORLD";
// 코드값 정의
public static final String CODE_ROLE = "ROLE";
public static final String CODE_POSITION = "POSITION";
public static final String CODE_DUTY = "DUTY";
public static final String CODE_VERSION = "VERSION";
public static final String CODE_SECURITY_LEVEL = "SECURITY_LEVEL";
public static final String CODE_ACTION_ID = "ACTION_ID";
public static final String CODE_PRESERVATION_YEAR = "YEAR";
// 메뉴접근권한
public static final String MENU_ALL = "ALL";
public static final String MENU_GROUP = "GROUP";
public static final String MENU_TEAM= "TEAM";
public static final String MENU_DISABLE= "DISABLE";
public static final String RESULT = "result";
public static final String SUCCESS = "success";
// 로컬저장금지 관련
public static final String LSC_CONTROL = "LSC_CONTROL";
public static final String LSC_ENABLE_USB = "LSC_ENABLE_USB";
public static final String LSC_EPROC = "LSC_EPROC";
public static final String LSC_NETDRIVE_ADDR = "LSC_NETDRIVE_ADDR";
public static final String RGC_UNINSTALL_PASS = "RGC_UNINSTALL_PASS";
public static final String LSC_EXTENSION = "LSC_EXTENSION";
public static final String LSC_WDIRS_PROC = "LSC_WDIRS_PROC";
public static final String LSC_POLICY_UPDATE_CYCLE = "LSC_POLICY_UPDATE_CYCLE";
// 로컬저장금지 테이블명
public static final String LSC_RGATE_PROCESS = "XR_RGATE_PROCESS";
public static final String LSC_RGATE_MAPPING = "XR_RGATE_MAPPING";
public static final String MANAGE_EXT = "EXT";
public static final String MANAGE_PROC = "PROC";
public static final String MANAGE_IP = "IP";
// 정책 활성/비활성
public static final String IS_OFF = "OFF";
public static final String IS_ON = "ON";
// 구분(사용자/부서)
public static final String IS_USER = "USER";
public static final String IS_GROUP = "GROUP";
public static final String IS_ALL = "ALL";
// 시스템계정
public static final String SYSTEM_ACCOUNT = "edmsadmin";
public static final String SYSTEM_ROLE = "SYSTEM_OPERATOR";
// 사용자 ROLE 정의
public static final String USER_ROLE = "CREATOR";
// 문서유형 테이블 PREFIX
public static final String TABLE_PREFIX = "XR_";
public static final String DOC_TABLE = "XR_DOCUMENT";
public static final String DOC_DEL_TABLE = "XR_DOCUMENT_DEL";
public static final String DOC_FAVORITE_TABLE = "XR_FAVORITE_DOC";
public static final String DOC_REF_TABLE = "XR_REF_DOC";
public static final String DOC_TABLE_ALL_TYPE = "ALL_TYPE";
// 이력관리
public static final String ACTION_PLACE = "EDMS";
// 이력관리 TARGET_TYPE
public static final String TARGET_TYPE = "TYPE";
public static final String TARGET_FOLDER = "FOLDER";
public static final String TARGET_ACL = "ACL";
// 환경설정 관리
public static final String PRIVATE_TRASH = "PTRASH";
public static final String SYSTEM_TRASH= "STRASH";
// URL 유효기간
public static final String SYSTEM_EXPIRED= "EXPIRED";
// 볼륨타입
public static final String VOLUME_FILE = "FILE";
public static final String VOLUME_INDEX= "INDEX";
// 문서상태
public static final String DOC_STATUS_CREATE = "C";
public static final String DOC_STATUS_DELETE = "D";
public static final String DOC_STATUS_ERASE = "E";
// 메뉴관리 :: 상위메뉴코드
public static final String SYS_MENU = "M001";
public static final String DOC_MENU = "M002";
public static final String RGATE_MENU = "M003";
public static final String STATICS_MENU = "M004";
public static final String USERAUTH_MENU = "M005";
public static final String DOC_AUTH_MENU = "M052";
public static final String ACL_AUTH_MENU = "M051";
// 관리자 초기메뉴
public static final String MANAGER_INDEX_MENU = "M015";
// 사용자 권한메뉴
public static final String USER_FOLDER_MENU_CODE = "M050";
public static final String USER_ACL_MENU_CODE = "M051";
public static final String USER_DOC_MENU_CODE = "M052";
// 문서등록 :: 파일첨부관련
public static final String FILE_EXT = "EXT";
public static final String FILE_CNT = "FILECNT";
public static final String FILE_SIZE = "FILESIZE";
public static final String FILE_TOTAL= "FILETOTAL";
// 배치프로그램 TYPE
public static final String WORK_BATCH = "BATCH";
public static final String WORK_WEB = "WEB";
public static final String BATCH_TEMP_LOG_FOLDER_DELETE = "LogTempFolderDelete";
public static final String BATCH_AUDIT = "Audit";
public static final String BATCH_EXPIRED_DOC = "ExpiredDoc";
public static final String BATCH_PTRASH_DOC = "PTrashDoc";
public static final String BATCH_STRASH_DOC = "STrashDoc";
public static final String BATCH_STATISITCS = "Statistics";
public static final String BATCH_TERMINATED = "DeletePage";
public static final String BATCH_TEMP_DOC = "TempDocDelete";
public static final String BATCH_IP = "127.0.0.1";
// 제품버전 정보
public static final String PRODUCT_EDMS = "EDMS";
public static final String PRODUCT_EDMS_RGATE = "EDMS_RGATE";
public static final String PRODUCT_EDMS_FC = "EDMS_FC";
public static final String PRODUCT_EDMS_APPLIANCE = "EDMS_APPLIANCE_RGATE";
// 문서유형 확장속성 DISPLAY_TYPE
public static final String DISPLAY_TYPE_CHECK = "CHECK";
public static final String DISPLAY_TYPE_INPUT = "INPUT";
public static final String DISPLAY_TYPE_SELECT = "SELECT";
public static final String DISPLAY_TYPE_RADIO = "RADIO";
// EXREP
public static final String EXREP_VOLUME_NM = "EXREP_VOLUME";
public static final String EXREP_ROOT_EDMS_NM = "EXREP_ROOT_EDMS";
// EXCEL DOWN OPTION
public static final String EXCEL_FORMAT = "excel";
public static final String EXCEL_LIST = "list";
public static final String CHART = "chart";
public static final String FAIL = "fail";
public static final String FAIL_UNKNOWN = "fail_unknown";
public static final String NEXT = "next";
public static final String INVALID_SESSION = "invalid_session";
public static final String EXCEPTION = "exception";
public static final String FAIL_MESSAGE = "fail_message";
public static final String EXCEPTION_MESSAGE = "exception_message";
public static final String SESSION_CHECK = "session_check";
public static final String SESSION_USER_ID = "session_user_id";
public static final String USER_DTO = "user_dto";
public static final String TRUE = "TRUE";
public static final String FALSE = "FALSE";
public static final String DATE_START = "date_start";
public static final String DATE_END = "date_end";
public static final String KEYWORD1 = "keyword1";
public static final String KEYWORD2 = "keyword2";
public static final String KEYWORD3 = "keyword3";
public static final String SEARCH_TYPE = "search_type";
public static final String SEARCH_RANGE = "search_range";
// 문서목록 미리보기 타입
public static final String PREVIEW_LIST = "LIST";
public static final String PREVIEW_RIGHT = "RIGHT";
public static final String PREVIEW_BOTTOM = "BOTTOM";
public static final String STREAM = "stream";
/** Persistence operation status used to tag a pending change (insert / update / delete). */
public static enum Status {
INSERT,
UPDATE,
DELETE
}
// ---------------------------------------------------------------------------
// Tree.
// ---------------------------------------------------------------------------
public static final String TREE_MAP_ID = "map_id";
public static final String TREE_PARENT_ID = "parent_id";
public static final String TREE_ROOT_ID = "root_id";
public static final String TREE_WORK_TYPE = "work_Type";
public static final String TREE_WORKTYPE_MYDEPT = "WORK_MYDEPT"; // 업무 문서함 부서 work type
public static final String TREE_WORKTYPE_ALLDEPT = "WORK_ALLDEPT"; // 업무 문서함 전사 work type
public static final String TREE_WORKTYPE_PROJECT = "WORK_PROJECT"; // 업무 문서함 프로젝트 work type
// ---------------------------------------------------------------------------
// Performance.
// ---------------------------------------------------------------------------
public static final String ACTION_LOGIN = "LOGIN";
public static final String ACTION_VIEW = "VIEW";
public static final String ACTION_BROWSE = "BROWSE";
public static final String ACTION_READ = "READ";
public static final String ACTION_CREATE = "CREATE";
public static final String ACTION_CREATE_FOLDER = "CREATE_FOLDER";
public static final String ACTION_UPDATE = "UPDATE";
public static final String ACTION_DELETE = "DELETE";
public static final String ACTION_CHECKIN = "CHECKIN";
public static final String ACTION_CHECKOUT = "CHECKOUT";
public static final String ACTION_MODIFY = "MODIFY";
public static final String ACTION_CANCEL_CHECKOUT = "CANCEL_CHECKOUT";
public static final String ACTION_CHANGE_PERMISSION = "CHANGE_PERMISSION";
public static final String ACTION_PAGE = "PAGE";
public static final String ACTION_CHANGE_ACL_ID = "CHANGE_ACL_ID";
public static final String ACTION_ADD_TO_FAVORITES = "ADD_TO_FAVORITES";
public static final String ACTION_DELETE_FAVORITES = "DELETE_FAVORITES";
public static final String ACTION_COPY = "COPY";
public static final String ACTION_MOVE = "MOVE";
public static final String ACTION_CHANGE_PASSWORD = "<PASSWORD>";
public static final String ACTION_CHANGE_CREATOR = "CHANGE_CREATOR";
public static final String ACTION_CHANGE_OWNER = "CHANGE_OWNER";
public static final String ACTION_CHANGE_OWNER_WORK = "CHANGE_OWNER_WORK";
public static final String ACTION_RESTORE = "RESTORE";
public static final String ACTION_ERASE = "ERASE";
public static final String ACTION_CHECK_EXISTS = "CHECK_EXISTS";
public static final String ACTION_TIME_EXPIRED = "TIME_EXPIRED";
public static final String ACTION_TIME_EXTEND = "TIME_EXTEND";
public static final String ACTION_TERMINATE = "TERMINATE";
public static final String ACTION_VERSION_TERMINATE = "VERSION_DELETE";
public static final String ACTION_ADD_TO_TEMPWORK = "ADD_TO_TEMPWORK";
public static final String ACTION_SWAP_INDEX = "SWAP_INDEX";
public static final String ACC_MODE_FILE = "420";
public static final String ACC_MODE_FOLDER = "493";
public static final String MAP_ID = "map_id";
public static final String MAP_ID_MYPAGE = "MYPAGE";
public static final String MAP_ID_DEPT = "MYDEPT";
public static final String MAP_ID_PROJECT = "PROJECT";
public static final String MAP_ID_WORKSPACE = "WORKSPACE";
// ---------------------------------------------------------------------------
// Document.
// ---------------------------------------------------------------------------
public static final String VERSION_NEW_DOCUMENT = "NEW";
public static final String VERSION_SAME_VERSION = "SAME";
public static final String VERSION_MAJOR_VERSION = "MAJOR";
public static final String VERSION_MINOR_VERSION = "MINOR";
public static final String DOCUMENT_DEFALUT_ACCESSGRADE = "P001"; // 문서 기본 접근등급(사원)
public static final String DOCUMENT_DEFALUT_SECURITY_LEVEL = "COMMON"; // 문서 기본 보안등급(일반)
public static final String DOCUMENT_STATUS_PROCESS_ING = "P"; // 프로세스 진행 단계 완료 후 C로 전환
public static final String DEFAULT_VERSION_NO = "1.0";
public static final String MAJOR_VERSION_UP = "1";
public static final String MINOR_VERSION_UP = "1";
public static final String DEFAULT_BRANCH_VERSION_NO = "1.0";
// 문서 리스트 관련 정의
public static final String DOCUMENT_LIST_TYPE = "LIST_TYPE"; // 리스트 타입 KEY 명칭
public static final String DOCUMENT_LIST_TYPE_GENERAL = "GENERAL"; // 보편적인 문서 리스트
public static final String DOCUMENT_LIST_TYPE_EXPIRED = "EXPIRED"; // 만기 문서 관리 문서 리스트
public static final String DOCUMENT_LIST_TYPE_TRASHCAN = "TRASHCAN"; // 관리자 / 사용자 휴지통 관리 문서 리스트
public static final String DOCUMENT_LIST_TYPE_OWNER = "OWNER"; // 소유권 변경 문서 리스트
public static final String DOCUMENT_LIST_TYPE_DUPLICATE = "DUPLICATE"; // 중복 관리 문서 리스트
public static final String DOCUMENT_LIST_TYPE_AUDIT = "AUDIT"; // 대량 문서 리스트
public static final String DOCUMENT_LIST_TYPE_CHECKOUT = "CHECKOUT"; // 내 수정 중 문서 리스트
public static final String DOCUMENT_LIST_TYPE_FAVORITE = "FAVORITE"; // 즐겨찾기(가상폴더) 문서 리스트
public static final String DOCUMENT_LIST_TYPE_SHARE = "SHARE"; // 공유 문서
public static final String DOCUMENT_LIST_TYPE_SHARE_FOLDER = "SHARE_FOLDER"; // 공유 폴더 문서
public static final String DOCUMENT_LIST_TYPE_TEMPDOC = "TEMPDOC"; // 작업카트 문서 리스트
public static final String DOCUMENT_LIST_TYPE_RECENTLYDOC = "RECENTLYDOC"; // 최신문서 리스트
// 메인페이지 문서목록 타입정의
public static final String MAIN_NEWDOC = "NEWDOC"; // 새로운문서
public static final String MAIN_MOSTVIEWDOC = "MOSTVIEWDOC"; // 최다조회 문서
public static final String MAIN_MOSTMYDOC = "MOSTMYDOC"; // 최다조회 내문서
public static final String MAIN_RECENTLYDOC = "RECENTLYDOC"; // 최근조회 문서
public static final String MAIN_CHECKOUTDOC = "CHECKOUTDOC"; // 내수정중인 문서
// 소유권 변경 관련
public static final String CHANGE_SELECT_DOC = "SELECT_DOC"; // 선택한 문서를 소유권 이전함
public static final String CHANGE_SEARCH_DOC = "SEARCH_DOC"; // 검색 결과 문서를 소유권 이전함
public static final String CHANGE_ALL_DOC = "ALL_DOC"; // 소유자의 전체문서를 소유권 이전함
public static final String CHANGE_SCOPE_MYWORK = "MYWORK"; // 개인 문서함
public static final String CHANGE_SCOPE_WORKSPACE = "WORKSPACE"; // 업무 문서함
// ---------------------------------------------------------------------------
// Config. (구 kr.co.exsoft.xframework.configuration.Config)
// ---------------------------------------------------------------------------
public static final String ACL_DEPT_DEFAULT = "부서 기본 권한";
public static final String ACL_ID_OWNER = "acl_public_owner";
public static final String ACL_ID_WORLD = "acl_public_world";
public static final String GROUP_TOP_ID = ID_PREFIX_GROUP + "000000000000"; // 최상위 그룹ID
public static final String FOLDER_TOP_ID = ID_PREFIX_FOLDER + "000000000000"; // 최상위 그룹ID
// ---------------------------------------------------------------------------
// Folder
// ---------------------------------------------------------------------------
public static final String FOLDER_TABLE = "XR_FOLDER";
public static final String FOLDER_TYPE_DEPT = "MYDEPT";
public static final String FOLDER_TYPE_ALL_TYPE = "ALL_TYPE";
public static final String FOLDER_STATUS_CREATE = "C";
public static final String FOLDER_STATUS_UPDATE = "U";
public static final String FOLDER_STATUS_DELETE = "D";
public static final String FOLDER_SAVE_YES = "Y";
public static final String FOLDER_SAVE_NO = "N";
// ---------------------------------------------------------------------------
// History
// ---------------------------------------------------------------------------
public static final String HISTORY_STATUS_CREATE = "C"; // 생성
public static final String HISTORY_STATUS_UPDATE = "U"; // 수정
public static final String HISTORY_STATUS_CLOSE = "D"; // 폐쇄
public static final String HISTORY_STATUS_DELETE = "E"; // 삭제
// ---------------------------------------------------------------------------
// trash
// ---------------------------------------------------------------------------
public static final String TRASH_DELETE = "DELETE"; //휴지통 문서 삭제(선택삭제)
public static final String TRASH_ALL_DELETE = "ALL_DELETE"; //휴지통 비우기 (전체삭제)
// EXCEL DOWNLOAD - 통계
public static final String SUM_CREATE_CNT = "create_cnt"; // 등록건수
public static final String SUM_READ_CNT = "read_cnt"; // 조회건수
public static final String SUM_UPDATE_CNT = "update_cnt"; // 수정건수
public static final String SUM_DELETE_CNT = "delete_cnt"; // 삭제건수
public static final String SUM_DOC_CNT = "doc_cnt"; // 문서수
public static final String SUM_FILE_CNT = "page_cnt"; // 파일수
public static final String SUM_FSIZE = "fsize"; // 용량::치환
public static final String SUM_PAGE_TOTAL = "page_total"; // 용량
// CHART GRAPH TYPE - 통계
public static final String LINE_CHART = "line"; // 라인차트
public static final String BAR_CHART = "bar"; // 막대차트
public static final String PIE_CHART = "pie"; // 파이차트
// CHART GRAPH 구분 - 통계
public static final String CHART_DOC_TYPE = "typeChart"; // 문서유형별 보유현황 차트
public static final String CHART_GROUP_STATUS = "groupChart"; // 부서별 등록/활용 현황
public static final String CHART_DECADE_STATUS = "decadeChart"; // 기간별 등록/활용 현황
public static final String CHART_FOLDER_STATUS = "folderChart"; // 문서함/폴더별 등록/활용 현황
public static final String CHART_SECURITY_STATUS = "securityChart"; // 보안등급별 보유현황
public static final String USER_DOC_STATISTICS = "user_nm"; // 사용자명
public static final String GROUP_DOC_STATISTICS = "group_nm"; // 그룹명
public static final String FOLDER_DOC_STATISTICS = "folder_nm"; // 폴더명
public static final String TYPE_DOC_STATISTICS = "type_name"; // 타입명
public static final String CODE_DOC_STATISTICS = "code_nm"; // 보안등급
public static final String DAY_TERM = "daily"; // 일별
public static final String MONTH_TERM = "monthly"; // 월별
public static final String WORK_TYPE_USER = "USER"; // 사용자/문서함별
public static final String WORK_TYPE_FOLDER = "FOLDER"; // 문서함/폴더별
// 시스템 환경 설정 구분 값
public static final String SYS_TYPE_VERSION = "VERSION"; // 버전관리 정책
public static final String SYS_TYPE_URL = "URL"; // URL 복사 기간 정책
public static final String SYS_TYPE_AUDIT = "AUDIT"; // 감사정책
public static final String SYS_TYPE_TRASH = "TRASH"; // 휴지통관리 정책
public static final String SYS_TYPE_FILE = "FILE"; // 문서첨부 정책
// 사용자메뉴
public static final String TOPMENU_MYDOC = "myDocMenu"; // 내문서
public static final String TOPMENU_MYWORK = "myWorkMenu"; // 개인문서함
public static final String TOPMENU_WORKSPACE = "workSpaceMenu"; // 업무문서함
public static final String TOPMENU_WORKPROCESS = "workProcessMenu"; // 협업함
public static final String TOPMENU_STATISTICS = "statisticsMenu"; // 통계
// 관리자메뉴
public static final String TOPMENU_SYSTEM = "systemMenu"; // 시스템관리
public static final String TOPMENU_DOCUMENT = "documentMenu"; // 문서관리
public static final String TOPMENU_RGATE = "rGateMenu"; // RGATE관리
// 관리자 시스템 메뉴
public static final String SYSTEM_CONFMANAGER = "confManager"; // 시스템관리-환경설정관리
public static final String SYSTEM_MENUMANAGER = "menuAuthManager"; // 시스템관리-메뉴접속관리
public static final String SYSTEM_USERMANAGER = "userManager"; // 시스템관리-사용자관리
public static final String SYSTEM_GROUPMANAGER = "groupManager"; // 시스템관리-그룹관리
public static final String DOCUMENT_ACLMANAGER = "aclManager"; // 문서관리-권한관리
public static final String DOCUMENT_TYPEMANAGER = "typeManager"; // 문서관리-문서유형관리
public static final String DOCUMENT_FOLDERMANAGER = "folderManager"; // 문서관리-폴더관리
public static final String DOCUMENT_EXPIREDMANAGER = "expiredManager"; // 문서관리-만기문서관리
public static final String DOCUMENT_OWNERMANAGER = "ownerManager"; // 문서관리-소유권변경관리
public static final String DOCUMENT_WASTEMANAGER = "wasteManager"; // 문서관리-휴지통관리
public static final String DOCUMENT_DUPLICATEMANAGER = "duplicateManager"; // 문서관리-중복문서관리
public static final String DOCUMENT_AUDITMANAGER = "auditManager"; // 문서관리-대량문서열람감사관리
public static final String RGATE_EXTMANAGER = "extManager"; // 저장금지 확장자 관리
public static final String RGATE_PROCMANAGER = "procManager"; // 저장 허용 프로그램 관리
public static final String RGATE_CONTROLMANAGER = "controlManager"; // 로컬 저장 허용관리
public static final String RGATE_USBMANAGER = "usbManager"; // USB 저장 허용관리
public static final String RGATE_NETWORKMANAGER = "networkManager"; // USB 저장 허용관리
public static final String RGATE_EXCEPTIONMANAGER = "exceptionManager"; // 프로세스 예외폴더 설정
public static final String RGATE_UNINSTALLMANAGER = "uninstallManager"; // CLIENT 제거 비밀번호 설정
// 협업메뉴
public static final String PROCESS_REQUEST_MENU = "REQUEST"; // 요청 문서
public static final String PROCESS_WRITE_ING_MENU = "WRITE_ING"; // 작성중 문서
public static final String PROCESS_APPROVAL_ING_MENU = "APPROVAL_ING"; // 승인중 문서
public static final String PROCESS_WRITE_END_MENU = "WRITE_END"; // 작성한 문서
public static final String PROCESS_APPROVAL_END_MENU = "APPROVAL_END"; // 승인한 문서
public static final String PROCESS_RECEIVE_MENU = "RECEIVE"; // 수신문서
public static final String PROCESS_STATUS_REQUEST = "Q"; // 업무요청
public static final String PROCESS_STATUS_WRITE = "W"; // 작성중
public static final String PROCESS_STATUS_APPROVAL = "A"; // 승인중
public static final String PROCESS_STATUS_APPROVAL_END = "AE"; // 승인완료
public static final String PROCESS_STATUS_MODIFY = "M"; // 보완중
public static final String PROCESS_STATUS_END = "E"; // 완료
public static final String PROCESS_TYPE_REQUESTOR = "Q"; // 업무 요청자
public static final String PROCESS_TYPE_AUTHOR = "R"; // Responsible(대표 작성자)
public static final String PROCESS_TYPE_COAUTHOR = "C"; // 공동 작성자
public static final String PROCESS_TYPE_APPROVER = "A"; // Accountable(의사결정권자::승인자)
public static final String PROCESS_TYPE_RECEIVER = "I"; // Informed(사후에 결과를 통보 반는자::수신자)
public static final String PROCESS_EXECUTOR_WAIT = "N"; // 실행자 해당 단계 대기
public static final String PROCESS_EXECUTOR_START = "S"; // 실행자 해당 단계 시작
public static final String PROCESS_EXECUTOR_END = "E"; // 실행자 해당 단계 종료
public static final String PROCESS_ACTION_APPROVEREQUEST = "APPROVEREQUEST"; // 승인요청
public static final String PROCESS_ACTION_APPROVE = "APPROVE"; // 승인
public static final String PROCESS_ACTION_APPROVEREJECT = "APPROVEREJECT"; // 반려
// 쪽지관리 TAB
public static final String NOTE_TAB_TALK = "TALK"; // 대화함
public static final String NOTE_TAB_RECEIVE = "RECEIVE"; // 받은쪽지함
public static final String NOTE_TAB_SEND = "SEND"; // 보낸쪽지함
public static final String NOTE_TAB_BOX = "BOX"; // 쪽지보관함
// 환경설정 TAB
public static final String CONFIG_TAB_MYINFO = "myinfo"; // 환경설정-내정보
public static final String CONFIG_TAB_PASSWD= "<PASSWORD>"; // 환경설정-비밀번호 관리
public static final String CONFIG_TAB_CONFIG = "myconfig"; // 환경설정-기본환경
public static final String CONFIG_USERNM = "usernm"; // 환경설정-내정보 기본정보 수정
// 환경설정 AUDIT
public static final String READ_COUNT_THRESHOLD = "read_count_threshold";
public static final String SEND_REPORT_MAIL = "send_report_mail";
public static final String REPORT_MAIL_RECEIVER_ADDRESS = "report_mail_receiver_address";
// 환경설정 새창크기
public static final String CONFIG_WIDTH = "740"; // 환경설정-내정보 창넓이
public static final HashMap<String, Object> CONFIG_HEIGHT = new HashMap<String, Object>() {
private static final long serialVersionUID = 1L;
{
put(Constant.CONFIG_TAB_MYINFO,572);
put(Constant.CONFIG_TAB_PASSWD,592);
put(Constant.CONFIG_TAB_CONFIG,552);
}
};
// 환경설정 TABLE
public static final String XR_USER = "XR_USER";
public static final String XR_USER_DT = "XR_USER_DT";
public static final String XR_USER_CONFIG = "XR_USER_CONFIG";
// 에러메세지 코드
public static final String ERROR_403 = "403";
public static final String ERROR_404 = "404";
public static final String ERROR_503 = "503";
public static final String ERROR_505 = "505";
// ---------------------------------------------------------------------------
// 기타 시스템 상수
// ---------------------------------------------------------------------------
public static final String INCLUDE_SUB_FOLDER = "includeSubFolder";
public static final String NOTE_RECENT_CNT="5";
// 최근 문서,폴더,업무(협업) 등록 현황
public static final String RECENTLY_TYPE_DOCUMENT = "D";
public static final String RECENTLY_TYPE_FOLDER = "F";
public static final String RECENTLY_TYPE_PROCESS = "P";
}
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/vo/AuditConfigVO.java
package kr.co.exsoft.quartz.vo;
/**
 * Audit-policy configuration VO - scheduled for removal.
 * @author Package development team
 * @since 2014.07.31
 * @version 3.0
 *
 */
public class AuditConfigVO {

    // Maps table XR_AUDIT_CONFIG.
    // Audit threshold: number of document reads that triggers a report
    // (policy default 100, maximum 99999 per the original notes).
    private int read_count_threshold = 0;
    // Whether a report mail should be sent.
    private String send_report_mail = "";
    // Report recipient address(es); multiple addresses are separated by ';'.
    private String report_mail_receiver_address = "";

    /** Creates an instance with all fields at their neutral defaults. */
    public AuditConfigVO() {
        // Fields are initialized inline above; nothing further to do here.
    }

    public int getRead_count_threshold() {
        return read_count_threshold;
    }

    public void setRead_count_threshold(int read_count_threshold) {
        this.read_count_threshold = read_count_threshold;
    }

    public String getSend_report_mail() {
        return send_report_mail;
    }

    public void setSend_report_mail(String send_report_mail) {
        this.send_report_mail = send_report_mail;
    }

    public String getReport_mail_receiver_address() {
        return report_mail_receiver_address;
    }

    public void setReport_mail_receiver_address(String report_mail_receiver_address) {
        this.report_mail_receiver_address = report_mail_receiver_address;
    }
}
<file_sep>/EDMS3/WebContent/js/docadmin/folderManager.js
var folderManager = {
gWorkType : "WORK_MYDEPT", // 현재 선택된 탭 workType
groupTree : null, // 그룹 트리
projectTree : null, // 프로젝트 트리
gFolderTree : null, // 나의 부서 JStree object
binder : new DataBinder("#form_details"),
// 0. 초기화
init : function(workType) {
var treeOption = null;
// 트리 초기화
if (workType == Constant.WORK_MYDEPT) {
treeOption = {
divId : "#myDeptFolderTree",
context : exsoft.contextRoot,
url : "/folder/folderList.do",
mapId : Constant.MAP_MYDEPT,
workType : Constant.WORK_MYDEPT
};
if (folderManager.groupTree == undefined) {
folderManager.groupTree = new XFTree(treeOption);
folderManager.groupTree.callbackSelectNode = folderManager.callback.selectFolder;
folderManager.groupTree.init(); //부서 rootId는 서버에서 처리
} else {
folderManager.groupTree.refresh();
}
folderManager.gFolderTree = folderManager.groupTree;
} else {
treeOption = {
divId : "#projectFolderTree",
context : exsoft.contextRoot,
url : "/folder/folderList.do",
mapId : Constant.MAP_PROJECT,
workType : Constant.WORK_PROJECT
};
if (folderManager.projectTree == undefined) {
folderManager.projectTree = new XFTree(treeOption);
folderManager.projectTree.callbackSelectNode = folderManager.callback.selectFolder;
folderManager.projectTree.init(); //부서 rootId는 서버에서 처리
} else {
folderManager.projectTree.refresh();
}
folderManager.gFolderTree = folderManager.projectTree;
}
},
// 1. 팝업
open : {
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : {
},
//4. 화면 이벤트 처리
event : {
// 구분 셀렉트박스 변경시
changeWorkType : function(work_type) {
if(work_type == "WORK_MYDEPT") {
$("#myDeptFolderTree").removeClass("hide");
$("#projectFolderTree").addClass("hide");
} else {
$("#projectFolderTree").removeClass("hide");
$("#myDeptFolderTree").addClass("hide");
}
folderManager.init(work_type);
},
// 스토리지 할당량 체크박스 클릭시
folderQuotaCheckBox : function(){
if($("input:checkbox[id='folder_storage_quota_chk']").is(":checked") == false) {
$('#storage_quota').prop("readonly", false);
$('#storage_quota').prop("disabled", false);
$('#storage_quota').removeClass("readonly");
$('#storage_quota').val("");
} else {
$('#storage_quota').prop("readonly", true);
$('#storage_quota').prop("disabled", true);
$('#storage_quota').addClass("readonly");
$('#storage_quota').val("무제한");
}
},
//폴더 상세조회 저장 버튼
submitUpdate : function() {
if(folderManager.event.validate('form_details')) {
folderManager.binder.set("type","UPDATE");
folderManager.binder.set("is_save",exsoft.util.layout.getSelectBox('is_save','option'));
folderManager.binder.set("folder_status",exsoft.util.layout.getSelectBox('folder_status','option'));
folderManager.binder.set("is_type",exsoft.util.layout.getSelectBox('is_type','option'));
var jsonObject = folderManager.binder.getDataToJson();
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject ,exsoft.contextRoot + '/folder/folderControl.do', "folderUpdate",
function(data, param) {
if(data.result == 'true') {
folderManager.gFolderTree.refreshNodeForAddChildren(data.refresh_id);
folderManager.gFolderTree.refreshNodeForAddChildren(data.target_refresh_id);
jAlert(data.message, "폴더관리", 0);
}else {
jAlert(data.message, "폴더관리", 0);
}
});
}
},
//폴더 상세조회 취소 버튼
cancelUpdate : function() {
// 폴더 기본정보 조회
var jsonData = folderManager.binder.getDataToJson();
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonData, exsoft.contextRoot + "/folder/folderDetail.do", "folderDetail", folderManager.callback.showDetail);
jsonData = null;
},
// Validates the create/update form identified by formId.
// NOTE(review): besides validating, this also writes the ACL-inheritance flag
// into #create_is_inherit_acl ('T'/'F') as a side effect — callers rely on it.
validate : function(formId) {
    var obj = $('#'+formId);
    // folder name is required
    if($.trim(obj.find('input[name=folder_name_ko]').val()) === ''){
        jAlert('폴더명을 입력 하세요.', "폴더관리", 0);
        return false;
    }
    // sort order is required
    if($.trim(obj.find('input[name=sort_index]').val()) === ''){
        jAlert('정렬값을 입력 하세요.', "폴더관리", 0);
        return false;
    }
    // storage quota is required
    if($.trim(obj.find('input[name=storage_quota]').val()) === ''){
        jAlert('스토리지 할당량을 입력 하세요.', "폴더관리", 0);
        return false;
    }
    // mirror the inheritance checkbox into the hidden field ('T' = inherit)
    if(obj.find('input[name=is_inherit_acl_chk]').is(':checked')){
        $('#create_is_inherit_acl').val('T');
    }else{
        $('#create_is_inherit_acl').val('F');
    }
    return true;
},
// 등록버튼 클릭시
registFolder : function() {
var node = folderManager.gFolderTree.getCurrentNode();
folderWindow.callback = folderManager.callback.refreshTree;
folderWindow.initForm(node);
folderWindow.binder.set("type", Constant.ACTION_CREATE);
folderWindow.open();
},
// 이동버튼 클릭시
moveFolder : function() {
var folder_type = $('#folder_type').val();
if(folder_type == 'DOCUMENT') {
selectSingleFolderWindow.init(folderManager.callback.moveFolderCallback, "MYDEPT", "WORK_MYDEPT", true, "ALL_TYPE");
} else {
jAlert('부서 및 프로젝트 Type 폴더는 이동 할 수 없습니다.', "폴더관리", 0);
}
folder_type = null;
},
// 삭제버튼 클릭시
deleteFolder : function() {
var folder_type = $('#folder_type').val();
if(folder_type != 'DOCUMENT') {
jAlert('부서 및 프로젝트 Type 폴더는 삭제 할 수 없습니다.', "폴더관리", 0);
return;
}
var jsonData = {
type : Constant.ACTION_DELETE,
folder_id : $("#folder_id").val(),
folder_name_ko : $("#folder_name_ko").val()
};
// 폴더 기본정보 조회
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonData, exsoft.contextRoot + "/folder/folderControl.do", "folderDelete",
function(data, param) {
if(data.result == 'true'){
folderManager.callback.refreshTree();
jAlert('폴더 삭제에 성공하였습니다.', "폴더관리", 0);
}else {
jAlert(data.message, "폴더관리", 0);
}
});
},
// 권한변경버튼 클릭시
changeAcl : function() {
selectAclWindow.init($("#acl_id").val(), Constant.ACL.TYPE_FOLDER, folderManager.callback.selectAclSubmit);
var obj = {
current_acl_id : $('#acl_id').val(),
current_acl_name : $('#acl_name_title').text(),
parent_folder_id : $('#parent_id').val(),
folder_id : "",
type : "folder"
}
selectAclWindow.initInherit(obj);
}
},
//5. 화면 UI 변경 처리
ui : {
// 기본권한, 추가권한 출력
aclItemData : function(data,divIds) {
exsoft.util.table.tableFolderAclItemPrintList('aclDocTable',data.aclItemList);
},
// 저장문서유형 세팅.
docTypeData : function(data, param) {
$("#is_type").remove();
$('#docType_template').append('<select id="is_type" data-bind="is_type" data-select="true">');
$.each(data.typeList, function(){
$("#is_type").append("<option value='{0}'>{1}</option>".format(this.type_id, this.type_name));
});
$('#docType_template').append('</select>');
}
},
//6. callback 처리
callback : {
// 폴더 선택시
selectFolder : function(e, data) {
//1. 문서목록에 폴더명 title 변경
$("#folder_name_title").html(data.node.text);
exsoft.util.ajax.ajaxDataFunctionWithCallback({folder_id : data.node.id}, exsoft.contextRoot + "/folder/folderDetail.do", "folderDetail", folderManager.callback.showDetail);
},
// 폴더상세
showDetail : function(folderInfo, param) {
if(folderInfo.result == 'true'){
var folder = folderInfo.folderDetail;
// storage Quota/Usage 사이즈 변환
var getQuota = folder.storage_quota;
var getUsage = folder.storage_usage;
if(getQuota != -1){
getQuota = getQuota/1024/1024/1024;
}
if(getUsage != -1){
getUsage = exsoft.util.common.bytesToSize(getUsage, 1);
}
$("#folder_name_title").html(folder.folder_name_ko);
$("#acl_name_title").html(folderInfo.aclDetail.acl_name);
folderManager.binder.set("folder_name_ko", folder.folder_name_ko);
folderManager.binder.set("folder_name_en", folder.folder_name_en);
folderManager.binder.set("is_save", folder.is_save);
folderManager.binder.set("folder_status", folder.folder_status);
exsoft.util.ajax.ajaxDataFunctionWithCallback('', exsoft.contextRoot+'/folder/makeTypeSelectbox.do', '', function(data, param){
$.when(folderManager.ui.docTypeData(data, param)).then(exsoft.util.common.ddslick('#is_type', 'srch_type1', 'is_type', 98, function(divId, selectedData){
folderManager.binder.set("is_type", selectedData.selectedData.value);
})).done(folderManager.binder.set("is_type", folder.is_type));
});
folderManager.binder.set("sort_index", folder.sort_index);
$('#is_inherit_acl_chk').prop('checked',folder.is_inherit_acl == 'T');
folderManager.binder.set("is_inherit_acl", folder.is_inherit_acl);
$("#folder_full_path").text(folderManager.gFolderTree.getCurrentNodeFullPath().join(" > "));
$("#create_date").text(folder.create_date);
$("#create_name").text(folder.creator_name);
$("#storage_usage").text(getUsage);
var node = folderManager.gFolderTree.getCurrentNode();
if((folder.map_id == folder.folder_type)
|| (folder.folder_type == 'DOCUMENT' && folder.map_id == "PROJECT" && node.parents.length < 3 )) {
$('#folder_storage_quota_chk').prop("disabled", false);
// storage_quota 셋팅
if(folder.storage_quota != -1) {
$('#folder_storage_quota_chk').prop("checked", false);
$('#storage_quota').val(getQuota);
$('#storage_quota').prop("readonly", false);
$('#storage_quota').prop("disabled", false);
$('#storage_quota').removeClass("readonly");
} else {
$('#folder_storage_quota_chk').prop("checked", true);
$('#storage_quota').val("무제한");
$('#storage_quota').prop("readonly", true);
$('#storage_quota').prop("disabled", true);
$('#storage_quota').addClass("readonly");
}
} else {
$('#folder_storage_quota_chk').prop("checked", true);
$('#folder_storage_quota_chk').prop("disabled", true);
$('#storage_quota').val("무제한");
$('#storage_quota').prop("readonly", true);
$('#storage_quota').prop("disabled", true);
$('#storage_quota').addClass("readonly");
}
//hidden값 set
folderManager.binder.set("folder_id", folder.folder_id);
folderManager.binder.set("parent_id", folder.parent_id);
folderManager.binder.set("map_id", folder.map_id);
folderManager.binder.set("acl_id", folder.acl_id);
folderManager.binder.set("folder_type", folder.folder_type);
// 접근자 List 셋팅
$("#acl_name_title").html(folderInfo.aclDetail.acl_name);
// 기본권한 :: default - 기본권한 사용안함 권한
folderManager.ui.aclItemData(folderInfo,"aclDocTable");
} else {
jAlert(data.message, "폴더관리", 0);
}
},
// 트리 새로고침
refreshTree : function (e, data) {
folderManager.gFolderTree.refresh();
},
// 권한변경이후 콜백
selectAclSubmit : function(aclItemList) {
folderManager.binder.set("acl_id", aclItemList.aclId);
$("#acl_name_title").html(aclItemList.aclDetail.acl_name);
exsoft.util.table.tableFolderAclItemPrintList('aclDocTable',aclItemList.aclItems);
},
// 폴더트리에서 이동 버튼 callback 함수
moveFolderCallback : function(returnFolder) {
if (returnFolder != null) {
var isLoop = false;
// 이동하려는 대상 폴더가 자신 및 자신의 하위 폴더인지 체크함
var current_folder_id = $("#folder_id").val();
$(returnFolder.parentIdList).each(function(index) {
if (this == current_folder_id ) {
alert("현재 폴더 및 현재 폴더 하위로 이동할 수 없습니다.", "폴더관리", 0);
isLoop = true;
return false;
}
});
var current_folder_object = folderManager.gFolderTree;
var originalRootFolder = current_folder_object.getFolderGroupId(current_folder_object.selectedNode[0]);
var targetRootFolder;
var changeRootFolder = 'F';
if(returnFolder.mapId == "PROJECT" && returnFolder.parentGroup == null){
targetRootFolder = returnFolder.id;
} else {
targetRootFolder = returnFolder.parentGroup.id;
}
if(originalRootFolder.id != targetRootFolder) {
changeRootFolder = 'T';
}
if (!isLoop) {
$("#move_folder_id").val(current_folder_id); // 변경할 현재 ID 저장
$("#move_parent_id").val(returnFolder.id); // 선택된 부모 ID 저장
$("#move_map_id").val(returnFolder.mapId); // 선택된 맵 ID 저장
$("#move_folder_name_ko").val($("#folder_name_ko").val()); // 변경할 폴더 이름
$("#parentGroup_id").val(targetRootFolder);
$("#root_folder_change").val(changeRootFolder);
exsoft.util.ajax.ajaxFunctionWithCallback('form_move', exsoft.contextRoot + '/folder/folderControl.do','folderUpdate',
function(data, param) {
if(data.result == 'true'){
folderManager.gFolderTree.refreshNodeForAddChildren(data.refresh_id);
folderManager.gFolderTree.refreshNodeForAddChildren(data.target_refresh_id);
jAlert(data.message, "폴더관리", 0);
}else {
jAlert(data.message, "폴더관리", 0);
}
});
}
}
}
},
}<file_sep>/EDMS3/src/kr/co/exsoft/statistics/vo/DocumentStatusVO.java
package kr.co.exsoft.statistics.vo;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.eframework.vo.VO;
/**
 * Document-holdings statistics - per user/document, per cabinet/folder,
 * per document type, and per security level.
 *
 * @author Package team
 * @since 2014. 11. 21.
 * @version 1.0
 *
 */
public class DocumentStatusVO extends VO {
// VIEW : VW_DOC_STATUS => ALIAS : DocumentStatus
private String user_nm; // 사용자명
private String owner_id; // 사용자ID :: 소유자ID
private String group_nm; // 부서명
private String group_id; // 부서ID
private String map_nm; // 맵명
private String map_id; // 맵ID
private int doc_cnt; // 문서수
private int page_cnt; // 파일수
private long page_total; // 용량
private String folder_nm; // 폴더명 :: 문서함/폴더별 보유현황 기준
private String folder_id; // 폴더ID
private String doc_type; // 문서유형ID
private String type_name; // 문서유형명
private String code_nm; // 코드명
private String security_levle; // 보안등급
private String part_id; // 문서Quota 현황(개인ID/부서ID)
private String part_nm; // 문서Quota 현황(사용자명/부서명)
private long storage_quota; // 개인/부서함 할당량
@SuppressWarnings("unused")
private String fsize; // 파일사이즈
@SuppressWarnings("unused")
private String ssize; // 할당량사이즈
public DocumentStatusVO() {
this.user_nm = "";
this.owner_id = "";
this.group_nm = "";
this.group_id = "";
this.map_nm = "";
this.map_id = "";
this.doc_cnt = 0;
this.page_cnt = 0;
this.page_total = 0;
this.fsize = "";
this.folder_nm = "";
this.folder_id = "";
this.doc_type = "";
this.type_name = "";
this.code_nm = "";
this.security_levle = "";
this.part_id = "";
this.part_nm = "";
this.storage_quota = 0;
this.ssize = "";
}
public String getSsize() {
return StringUtil.fileSize(storage_quota);
}
public void setSsize(String ssize) {
this.ssize = ssize;
}
public long getStorage_quota() {
return storage_quota;
}
public void setStorage_quota(long storage_quota) {
this.storage_quota = storage_quota;
}
public String getPart_id() {
return part_id;
}
public void setPart_id(String part_id) {
this.part_id = part_id;
}
public String getPart_nm() {
return part_nm;
}
public void setPart_nm(String part_nm) {
this.part_nm = part_nm;
}
public String getSecurity_levle() {
return security_levle;
}
public void setSecurity_levle(String security_levle) {
this.security_levle = security_levle;
}
public String getCode_nm() {
return code_nm;
}
public void setCode_nm(String code_nm) {
this.code_nm = code_nm;
}
public String getDoc_type() {
return doc_type;
}
public void setDoc_type(String doc_type) {
this.doc_type = doc_type;
}
public String getType_name() {
return type_name;
}
public void setType_name(String type_name) {
this.type_name = type_name;
}
public String getFolder_id() {
return folder_id;
}
public void setFolder_id(String folder_id) {
this.folder_id = folder_id;
}
public String getFolder_nm() {
return folder_nm;
}
public void setFolder_nm(String folder_nm) {
this.folder_nm = folder_nm;
}
public String getFsize() {
return StringUtil.fileSize(page_total);
}
public void setFsize(String fsize) {
this.fsize = fsize;
}
public String getUser_nm() {
return user_nm;
}
public void setUser_nm(String user_nm) {
this.user_nm = user_nm;
}
public String getOwner_id() {
return owner_id;
}
public void setOwner_id(String owner_id) {
this.owner_id = owner_id;
}
public String getGroup_nm() {
return group_nm;
}
public void setGroup_nm(String group_nm) {
this.group_nm = group_nm;
}
public String getGroup_id() {
return group_id;
}
public void setGroup_id(String group_id) {
this.group_id = group_id;
}
public String getMap_nm() {
return map_nm;
}
public void setMap_nm(String map_nm) {
this.map_nm = map_nm;
}
public String getMap_id() {
return map_id;
}
public void setMap_id(String map_id) {
this.map_id = map_id;
}
public int getDoc_cnt() {
return doc_cnt;
}
public void setDoc_cnt(int doc_cnt) {
this.doc_cnt = doc_cnt;
}
public int getPage_cnt() {
return page_cnt;
}
public void setPage_cnt(int page_cnt) {
this.page_cnt = page_cnt;
}
public long getPage_total() {
return page_total;
}
public void setPage_total(long page_total) {
this.page_total = page_total;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/vo/AuditTrailVO.java
package kr.co.exsoft.quartz.vo;
/**
 * Audit-trail value object (table XR_AUDIT_TRAIL): one row per user per day,
 * tracking how many documents the user viewed and when/where the audit report
 * mail was sent.
 *
 * @author package team
 * @since 2014.07.31
 * @version 3.0
 */
public class AuditTrailVO {
	// Table columns (XR_AUDIT_TRAIL)
	private String audit_date;                  // audit date, stored as YYYYMMDD
	private String user_id;                     // user id
	private int read_count;                     // number of documents viewed
	private String report_mail_sent_date;       // date the report mail was sent
	private String report_mail_receiver_address; // audit mail recipient address
	// Join/display-only fields (not initialized by the constructor)
	private String user_name;
	private String group_name;
	private String group_id;

	/** Creates an empty row: strings blank, counter zero. */
	public AuditTrailVO() {
		this.audit_date = "";
		this.user_id = "";
		this.read_count = 0;
		this.report_mail_sent_date = "";
		this.report_mail_receiver_address = "";
	}

	public String getReport_mail_sent_date() {
		return report_mail_sent_date;
	}

	public void setReport_mail_sent_date(String report_mail_sent_date) {
		this.report_mail_sent_date = report_mail_sent_date;
	}

	public String getUser_name() {
		return user_name;
	}

	public void setUser_name(String user_name) {
		this.user_name = user_name;
	}

	public String getGroup_name() {
		return group_name;
	}

	public void setGroup_name(String group_name) {
		this.group_name = group_name;
	}

	public String getGroup_id() {
		return group_id;
	}

	public void setGroup_id(String group_id) {
		this.group_id = group_id;
	}

	/**
	 * Returns the audit date formatted as YYYY-MM-DD.
	 *
	 * Fix: the previous implementation called substring() unconditionally and
	 * threw StringIndexOutOfBoundsException for the constructor default ""
	 * (or any value shorter than 8 characters). Such values are now returned
	 * unchanged instead of crashing.
	 */
	public String getAudit_date() {
		if (this.audit_date == null || this.audit_date.length() < 8) {
			return this.audit_date; // not a YYYYMMDD value; return as-is
		}
		return this.audit_date.substring(0, 4) + "-" + this.audit_date.substring(4, 6) + "-" + this.audit_date.substring(6);
	}

	public void setAudit_date(String audit_date) {
		this.audit_date = audit_date;
	}

	public String getUser_id() {
		return user_id;
	}

	public void setUser_id(String user_id) {
		this.user_id = user_id;
	}

	public int getRead_count() {
		return read_count;
	}

	public void setRead_count(int read_count) {
		this.read_count = read_count;
	}

	public String getReport_mail_receiver_address() {
		return report_mail_receiver_address;
	}

	public void setReport_mail_receiver_address(String report_mail_receiver_address) {
		this.report_mail_receiver_address = report_mail_receiver_address;
	}
}
<file_sep>/EDMS3/WebContent/js/popup/registUserWindow.js
/**
 * User create/view/edit popup ("사용자 추가/수정").
 * Backed by a DataBinder over #pop_user_form; talks to
 * /admin/userInfoManager.do for select/insert/update/delete.
 */
var registUserWindow = {
	type : null, // distinguishes create ("userWrite") vs edit ("userUpdate")
	callbackFunction : null, // caller-supplied callback, invoked after a successful update
	binder : new DataBinder("#pop_user_form"),
	// 0. initialisation
	init : {
		// Open the popup in "create user" mode for the given department.
		initRegistUserWindow : function(groupName, groupId) {
			// Reset the popup form and preset defaults.
			exsoft.util.common.formClear("pop_user_form");
			registUserWindow.binder.set("jobtitle", "P001"); // default job title (staff)
			registUserWindow.binder.set("position", "D001"); // default position (team member)
			registUserWindow.binder.set("role_id", "CREATOR"); // default role (author)
			registUserWindow.binder.set("user_status", "C"); // default status (active)
			// User id is editable when creating a new user.
			$("#pop_user_id").prop("readonly", false);
			// Seed the form with the department passed by the caller.
			registUserWindow.binder.set("group_id", groupId);
			registUserWindow.binder.set("manage_group", groupId);
			registUserWindow.binder.set("group_nm", groupName);
			registUserWindow.binder.set("manage_group_text", groupName);
			// UI setup: title, hide delete button, quota defaults to unlimited (-1).
			$("#popupTitle").html("사용자 추가");
			$("#pop_btn_deleteUser").hide();
			$('#pop_storage_quota_chk').prop("checked", true);
			$('#pop_storage_quota').val("무제한");
			registUserWindow.binder.set("storage_quota", -1);
			$('#pop_storage_quota').prop("readonly", true);
			$('#pop_storage_quota').prop("disabled", true);
			$("#pop_storage_usage_info").addClass("hide");
			$("#pop_storage_quota").addClass("readonly");
			$("#pop_group_nm").addClass("readonly");
			$("#pop_user_id").removeClass("readonly");
			// Mode flags used by registUser() and the server.
			registUserWindow.type = "userWrite";
			registUserWindow.binder.set("type", "insert");
			exsoft.util.filter.maxNumber();
			registUserWindow.open();
		},
		// Open the popup in "view/edit user" mode for an existing user.
		initUpdateUserWindow : function(userId, callback) {
			// Remember the caller's callback (fired after a successful update).
			registUserWindow.callbackFunction = callback;
			// Load the user's details asynchronously; the callback fills the form.
			exsoft.util.ajax.ajaxDataFunctionWithCallback({user_id:userId, type:'select'}, exsoft.contextRoot + "/admin/userInfoManager.do", "userDetail", registUserWindow.callback)
			// Reset the popup form.
			exsoft.util.common.formClear("pop_user_form");
			// The user id must not be edited for an existing user.
			$("#pop_user_id").prop("readonly", true);
			// UI setup: title, show delete button and current usage.
			$("#popupTitle").html("사용자 수정");
			$("#pop_btn_deleteUser").show();
			$("#pop_storage_usage_info").show();
			$("#pop_user_id").addClass("readonly");
			$("#pop_group_nm").addClass("readonly");
			// Mode flags used by registUser() and the server.
			registUserWindow.type = "userUpdate";
			registUserWindow.binder.set("type", "update");
			exsoft.util.filter.maxNumber();
			registUserWindow.open();
		}
	},
	// 1. open the popup layer
	open : function() {
		exsoft.util.layout.divLayerOpen("user_regist_wrapper", "user_regist");
	},
	// 2. layer / show helpers (none)
	layer : {
	},
	// 3. close / hide the popup layer
	close : function() {
		exsoft.util.layout.divLayerClose('user_regist_wrapper', 'user_regist');
	},
	// 4. UI event handlers
	event : {
		// OK button: validate, collect the form, and submit (insert or update).
		registUser : function() {
			// Validate required fields first.
			if (!registUserWindow.event.validationForm()) {
				return;
			}
			// Convert the quota input (GB) into bytes.
			// NOTE(review): when the field holds "무제한" the numeric comparison is
			// false and the else branch reads "#storage_quota" (no "pop_" prefix) —
			// confirm this is an intentional hidden field and not a typo for
			// "#pop_storage_quota".
			var quota = 0;
			if($("#pop_storage_quota").val() > -1) {
				quota = $("#pop_storage_quota").val() *1024*1024*1024; // a number was entered
			} else {
				quota = $("#storage_quota").val() *1024*1024*1024; // unlimited case
			}
			registUserWindow.binder.set("storage_quota", quota);
			// Copy the visible form fields into the binder.
			registUserWindow.binder.set("user_id", $("#pop_user_id").val());
			registUserWindow.binder.set("user_name_ko", $("#pop_user_name_ko").val());
			registUserWindow.binder.set("user_name_en", $("#pop_user_name_en").val());
			registUserWindow.binder.set("user_name_zh", $("#pop_user_name_zh").val());
			registUserWindow.binder.set("jobtitle", exsoft.util.layout.getSelectBox('pop_jobtitle','option'));
			registUserWindow.binder.set("position", exsoft.util.layout.getSelectBox('pop_position','option'));
			registUserWindow.binder.set("role_id", exsoft.util.layout.getSelectBox('pop_role_id','option'));
			registUserWindow.binder.set("telephone", $("#pop_telephone").val());
			registUserWindow.binder.set("user_status", exsoft.util.layout.getSelectBox('pop_user_status','option'));
			registUserWindow.binder.set("email", $("#pop_email").val());
			var jsonObject = registUserWindow.binder.getDataToJson();
			// Submit to the server; registUserWindow.type selects insert vs update.
			exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject ,exsoft.contextRoot + '/admin/userInfoManager.do',registUserWindow.type,
				function(data,param){
					if (data.result == "success") {
						// Refresh the user list.
						userManager.event.searchUserList();
						if (param == "userUpdate") {
							jAlert("사용자 수정이 완료 됐습니다.", "사용자 관리", 0);
							registUserWindow.callbackFunction(data, param);
						} else if(param == "userWrite") {
							jAlert("사용자 등록이 완료 됐습니다.", "사용자 관리", 0);
						}
					} else {
						jAlert(data.message, "사용자 관리", 0);
					}
				});
			// Hide the popup (note: fires immediately, before the Ajax response).
			registUserWindow.close();
		},
		// Open the department picker for the "managed department" field.
		selectManageGroup : function() {
			// Only the head-office document operator role may manage a department.
			if (exsoft.util.layout.getSelectBox('pop_role_id','option') != "HEAD_DOC_OPERATOR") {
				jAlert("선택한 역할에 폴더 관리 권한이 없어서 관리부서를 선택할 수 없습니다.", "사용자 관리", 0);
				return;
			}
			selectGroupWindow.init.initPage(registUserWindow.selectManageGroupCallback, "GROUP");
		},
		// Form validation: returns true only when all required fields are valid.
		// (Failure paths return undefined, which is also falsy for the caller.)
		validationForm : function() {
			// User id: required, alphanumeric only.
			if ($("#pop_user_id").val().length == 0) {
				jAlert("사용자 ID를 입력해주세요", "사용자 관리", 0);
				$("#pop_user_id").focus();
				return;
			} else if (!exsoft.util.check.userIdCheck($("#pop_user_id").val())) {
				jAlert("사용자 아이디는 영문이나 숫자로만 입력해주세요", "사용자 관리", 0);
				return;
			}
			// User name (Korean): required.
			if ($("#pop_user_name_ko").val().length == 0) {
				jAlert("사용자 성명을 입력해주세요", "사용자 관리", 0);
				$("#pop_user_name_ko").focus();
				return false
			}
			// Email: validated only when provided.
			if ($("#pop_email").val().length > 0 && !exsoft.util.check.emailCheck($("#pop_email").val())) {
				jAlert("이메일주소가 정확하지 않습니다. 다시입력해주세요.", "사용자 관리", 0);
				return;
			}
			// Telephone: validated only when provided.
			if ($("#pop_telephone").val().length > 0 && !exsoft.util.check.phoneCheck($("#pop_telephone").val())) {
				jAlert("전화번호가 정확하지 않습니다. 다시 입력해주세요.", "사용자 관리", 0)
				return;
			}
			// Derived value sent to the server: position display name.
			$("#pop_position_nm").val($("#pop_position option:selected").text());
			return true;
		},
		// "Unlimited quota" checkbox: toggles the quota input between an
		// editable number (GB) and the fixed unlimited marker (-1 / "무제한").
		userQuotaCheckBox : function() {
			if($("input:checkbox[id='pop_storage_quota_chk']").is(":checked") == false){
				$('#pop_storage_quota').prop("readonly", false);
				$('#pop_storage_quota').prop("disabled", false);
				$("#pop_storage_quota").removeClass("readonly");
				$("#pop_storage_quota").val("");
			} else {
				$('#pop_storage_quota').prop("readonly", true);
				$('#pop_storage_quota').prop("disabled", true);
				$("#pop_storage_quota").addClass("readonly");
				registUserWindow.binder.set("storage_quota", "-1");
				$("#pop_storage_quota").val("무제한");
			}
		},
		// Delete the user after confirmation.
		deleteUser : function() {
			jConfirm($("#pop_user_name_ko").val() + "를 삭제 하시겠습니까 ?", "사용자 관리", 0,
				function(ret) {
					if (ret) {
						exsoft.util.ajax.ajaxDataFunctionWithCallback({userIdList:$("#pop_user_id").val(), type:"delete"}, exsoft.contextRoot + "/admin/userInfoManager.do", "userDelete", registUserWindow.callback);
					}
				});
		}
	},
	// 5. UI-change helpers (none)
	ui : {
	},
	// 6. Ajax callbacks for userDetail (fill the form) and userDelete.
	callback : function(data, param) {
		if (data.result == "success") {
			if (param == "userDetail") {
				// Convert quota/usage bytes into display values
				// (-1 means unlimited; usage is shown human-readable).
				var quota = data.userInfo.storage_quota;
				var usage = data.userInfo.storage_usage;
				if (quota != -1) {
					quota = quota/1024/1024/1024;
				}
				if (usage > 0){
					usage = exsoft.util.common.bytesToSize(usage, 1);
				}
				// Populate the binder with the loaded user details.
				registUserWindow.binder.set("type", "update");
				registUserWindow.binder.set("group_id", data.userInfo.group_id);
				registUserWindow.binder.set("group_nm", data.userInfo.group_nm);
				registUserWindow.binder.set("user_id", data.userInfo.user_id);
				registUserWindow.binder.set("user_name_ko", data.userInfo.user_name_ko);
				registUserWindow.binder.set("user_name_en", data.userInfo.user_name_en);
				registUserWindow.binder.set("user_name_zh", data.userInfo.user_name_zh);
				registUserWindow.binder.set("jobtitle", data.userInfo.jobtitle);
				registUserWindow.binder.set("position", data.userInfo.position);
				registUserWindow.binder.set("role_id", data.userInfo.role_id);
				registUserWindow.binder.set("telephone", data.userInfo.telephone);
				registUserWindow.binder.set("user_status", data.userInfo.user_status);
				registUserWindow.binder.set("email", data.userInfo.email);
				registUserWindow.binder.set("manage_group", data.userInfo.manage_group);
				registUserWindow.binder.set("manage_group_text", data.userInfo.manage_group_nm);
				$("#pop_storage_usage").text(usage);
				// Quota field: editable number vs fixed "unlimited".
				if(quota > -1){
					$('#pop_storage_quota_chk').prop("checked", false);
					registUserWindow.binder.set("storage_quota", quota);
					$("#pop_storage_quota").val(quota);
					$('#pop_storage_quota').prop("readonly", false);
					$('#pop_storage_quota').prop("disabled", false);
					$("#pop_storage_quota").removeClass("readonly");
				} else {
					$('#pop_storage_quota_chk').prop("checked", true);
					registUserWindow.binder.set("storage_quota", "-1");
					$("#pop_storage_quota").val("무제한");
					$('#pop_storage_quota').prop("readonly", true);
					$('#pop_storage_quota').prop("disabled", true);
					$("#pop_storage_quota").addClass("readonly");
				}
			} else if (param == "userDelete") {
				// Hide the popup and refresh the list after a delete.
				registUserWindow.close();
				userManager.event.searchUserList();
				jAlert("사용자를 삭제했습니다.", "사용자 관리", 0);
			}
		} else {
			jAlert(data.message, "사용자 관리", 0);
		}
	},
	// Callback from the department picker: store the chosen managed department.
	selectManageGroupCallback : function(returnGroup) {
		registUserWindow.binder.set("manage_group", returnGroup[0].original.id);
		registUserWindow.binder.set("manage_group_text", returnGroup[0].original.text);
	}
}
package kr.co.exsoft.process.service;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.common.vo.SessionVO;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.ui.Model;
/**
 * Collaboration-process service interface.
 *
 * Covers the "process" (collaboration workflow) feature: counters for the left
 * menu, process lists, recently registered processes, creation, detail view
 * and approval actions.
 *
 * @author package team
 * @since 2015.03.12
 * @version 3.0
 */
@Transactional
public interface ProcessService {
	/**
	 * Returns the collaboration-related document count shown above the left menu.
	 * Counted per user id (taken from the parameter map).
	 *
	 * @param map query parameters (expects the session user id)
	 * @return result map with "count" and "result"
	 * @throws Exception on DAO/service failure
	 */
	public Map<String, Object> processCount(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns a paged process list for one of the collaboration menus,
	 * selected by the "type" entry in the parameter map.
	 *
	 * @param map query/paging parameters ("type", "nPage", "page_size", ...)
	 * @return result map with the row list, record counts and paging info
	 * @throws Exception on DAO/service failure
	 */
	public Map<String, Object> processList(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns the most recently registered collaboration processes.
	 *
	 * @param map query parameters
	 * @return result map with the recent-process list
	 * @throws Exception on DAO/service failure
	 */
	public Map<String, Object> processRecentlyList(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns the base information (name, folder path, executors with their
	 * departments) of one recently registered process.
	 *
	 * @param map query parameters (expects the process id)
	 * @return result map with process name, folder info and the executor list
	 * @throws Exception on DAO/service failure
	 */
	public Map<String, Object> selectProcessRecently(HashMap<String, Object> map) throws Exception;
	/**
	 * Registers a new collaboration process: creates the root document, the
	 * process record, its executors and a "recently used" entry.
	 * Runs in a single transaction; any failure rolls everything back.
	 *
	 * @param sessionVO current session user
	 * @param model Spring MVC model
	 * @param map request parameters (name, folder, executors, dates, ...)
	 * @param request HTTP request (used for extended attributes / uploads)
	 * @return result map with "result"
	 * @throws Exception on validation or persistence failure (triggers rollback)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> processWrite(SessionVO sessionVO, Model model, HashMap<String, Object> map, HttpServletRequest request) throws Exception;
	/**
	 * Returns the detail of one process including per-executor progress.
	 *
	 * @param map query parameters (expects the process id)
	 * @return result map with the process VO and its executor list
	 * @throws Exception on DAO/service failure
	 */
	public Map<String, Object> processDetail(HashMap<String, Object> map) throws Exception;
	/**
	 * Performs an approval action (approve request / approve / reject) on a
	 * process. Runs in a single transaction.
	 *
	 * @param map action parameters ("actionType", "process_id", "content", ...)
	 * @param sessionVO current session user (comment author)
	 * @return result map with "result"
	 * @throws Exception on persistence failure (triggers rollback)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> approveAction(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
}
<file_sep>/EDMS3/src/kr/co/exsoft/process/service/ProcessServiceImpl.java
package kr.co.exsoft.process.service;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.common.dao.CommonDao;
import kr.co.exsoft.common.service.CacheService;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.CommentVO;
import kr.co.exsoft.common.vo.RecentlyObjectVO;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.dao.DocumentDao;
import kr.co.exsoft.document.service.DocumentService;
import kr.co.exsoft.document.vo.DocumentVO;
import kr.co.exsoft.document.vo.PageVO;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.handler.ExsoftServiceExceptionHandler;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.PagingAjaxUtil;
import kr.co.exsoft.folder.dao.FolderDao;
import kr.co.exsoft.folder.vo.FolderVO;
import kr.co.exsoft.permission.dao.AclDao;
import kr.co.exsoft.process.dao.ProcessDao;
import kr.co.exsoft.process.vo.ProcessExecutorVO;
import kr.co.exsoft.process.vo.ProcessVO;
import kr.co.exsoft.statistics.dao.StatisticsDao;
import kr.co.exsoft.statistics.vo.DocumentUserHtVO;
import kr.co.exsoft.user.dao.GroupDao;
import oracle.jdbc.Const;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.ui.Model;
/**
* Process 서비스 구현 부분
* @author 패키지 개발팀
* @since 2015.03.12
* @version 3.0
*
*/
@Service("processService")
public class ProcessServiceImpl extends ExsoftAbstractServiceImpl implements ProcessService {
@Autowired
@Qualifier("sqlSession")
private SqlSession sqlSession;
@Autowired
private CacheService cacheService;
@Autowired
private CommonService commonService;
@Autowired
private DocumentService documentService;
/**
* public method 구현
*/
/**
 * Loads the executor rows for every process in {@code processList} and
 * delegates to {@link #setExecutorInfo} to build the approval/read tooltip
 * data. Used by the process list and the process step view.
 *
 * @param processList processes to decorate (updated in place)
 * @param map DAO parameter map; gains a "processIdList" entry as a side effect
 * @throws Exception propagated from the decoration step
 */
public void setProcessTooltip(List<ProcessVO> processList, HashMap<String, Object> map) throws Exception{
	ProcessDao dao = sqlSession.getMapper(ProcessDao.class);
	// Collect the ids of all processes we need executor rows for.
	List<String> ids = new ArrayList<String>();
	for (int i = 0; i < processList.size(); i++) {
		ids.add(processList.get(i).getProcess_id());
	}
	// Nothing to decorate for an empty list.
	if (ids.isEmpty()) {
		return;
	}
	map.put("processIdList", ids);
	List<ProcessExecutorVO> executorRows = dao.processExcutorList(map);
	// Shared decoration logic (also used by the step/detail views).
	setExecutorInfo(processList, executorRows);
}
/**
 * Decorates each process in {@code processList} with per-executor progress
 * data: tooltip entries ("name|status") for authors/co-authors, approvers and
 * receivers, plus "done/total" counter strings. Used by the process list, the
 * step view and the detail view.
 *
 * Fix: the done/in-progress counters were previously declared once before the
 * process loop and never reset, so every process after the first reported
 * counts accumulated over all preceding processes. They are now per-process.
 *
 * @param processList processes to decorate (entries mutated in place)
 * @param peVoList executor rows for those processes; the caller's list is not
 *                 modified — a working copy is consumed instead
 * @throws Exception declared for symmetry with the callers
 */
public void setExecutorInfo(List<ProcessVO> processList, List<ProcessExecutorVO> peVoList) throws Exception{
	// Work on a copy so matched rows can be removed without touching the caller's list.
	List<ProcessExecutorVO> remaining = new ArrayList<ProcessExecutorVO>();
	remaining.addAll(peVoList);
	for(int i=0; i < processList.size(); i++) {
		ProcessVO processVo = processList.get(i);
		// Counters are per process (previously leaked across iterations — see fix note).
		int writeCntIng = 0, approveCntIng = 0, receiveCntIng = 0;
		int writeCntEnd = 0, approveCntEnd = 0, receiveCntEnd = 0;
		for(Iterator<ProcessExecutorVO> iter = remaining.iterator(); iter.hasNext();){
			ProcessExecutorVO executorVo = iter.next();
			if(processVo.getProcess_id().equals(executorVo.getProcess_id())){
				// getWrite_list()/getApproval_list()/getReceiver_list() return the
				// live backing lists, so adding to them mutates the VO directly.
				switch (executorVo.getType()) {
				case Constant.PROCESS_TYPE_AUTHOR:
					processVo.setAuthor_nm(executorVo.getExecutor_name());
					// intentional fall-through: the author is counted with co-authors
				case Constant.PROCESS_TYPE_COAUTHOR: {
					if(executorVo.getStatus().equals(Constant.PROCESS_EXECUTOR_END)){
						writeCntEnd++; // finished writing
						processVo.getWrite_list().add(executorVo.getExecutor_name()+"|작성완료");
					}else{
						writeCntIng++; // still writing
						processVo.getWrite_list().add(executorVo.getExecutor_name()+"|작성중");
					}
				}; break;
				case Constant.PROCESS_TYPE_APPROVER: {
					if(executorVo.getStatus().equals(Constant.PROCESS_EXECUTOR_END)){
						approveCntEnd++; // approved
						processVo.getApproval_list().add(executorVo.getExecutor_name()+"|승인완료");
					}else{
						approveCntIng++; // waiting for approval
						processVo.getApproval_list().add(executorVo.getExecutor_name()+"|승인대기");
					}
				}; break;
				case Constant.PROCESS_TYPE_RECEIVER: {
					if(executorVo.getStatus().equals(Constant.PROCESS_EXECUTOR_END)){
						receiveCntEnd++; // read
						processVo.getReceiver_list().add(executorVo.getExecutor_name()+"|열람완료");
					}else{
						receiveCntIng++; // waiting to read
						processVo.getReceiver_list().add(executorVo.getExecutor_name()+"|열람대기");
					}
				}; break;
				default:
					break;
				}
				iter.remove(); // consumed — shrink the working list for the next process
			}
		}
		// "done/total" display strings.
		processVo.setWrite_count(writeCntEnd+"/"+(writeCntIng+writeCntEnd));
		processVo.setApproval_count(approveCntEnd+"/"+(approveCntIng+approveCntEnd));
		processVo.setReceiver_count(receiveCntEnd+"/"+(receiveCntIng+receiveCntEnd));
		processList.set(i, processVo);
	}
}
/**
* Interface override
*/
@Override
public Map<String, Object> processCount(HashMap<String, Object> map) throws Exception {
	// Count collaboration items for the current user/filter and wrap the
	// result in the standard response map.
	final ProcessDao dao = sqlSession.getMapper(ProcessDao.class);
	final Map<String, Object> response = new HashMap<String, Object>();
	response.put("count", dao.processCount(map));
	response.put("result", Constant.RESULT_TRUE);
	return response;
}
@Override
public Map<String, Object> processList(HashMap<String, Object> map) throws Exception {
	ProcessDao dao = sqlSession.getMapper(ProcessDao.class);
	// 1. total record count plus the current page of processes
	int recordCount = dao.processListCount(map);
	List<ProcessVO> rows = dao.processList(map);
	// 2. attach approval/read tooltip data (skipped when there are no rows,
	//    e.g. for Excel export with an empty result)
	if (!rows.isEmpty()) {
		setProcessTooltip(rows, map);
	}
	int pageSize = Integer.parseInt(map.get("page_size").toString());
	int currentPage = Integer.parseInt(map.get("nPage").toString());
	Map<String, Object> response = new HashMap<String, Object>();
	response.put("result", Constant.RESULT_TRUE);
	response.put("page", map.get("nPage").toString());
	response.put("records", recordCount);
	response.put("total", CommonUtil.getTotPageSize(recordCount, pageSize));
	response.put("list", rows);
	// Ajax paging widget: 10 page links, jumping via the client-side grid function.
	String linkFunction = "javascript:exsoftProcessFunc.event.gridPage";
	String contextRoot = map.get("contextRoot") != null ? map.get("contextRoot").toString() : "";
	response.put("pagingInfo", new PagingAjaxUtil(currentPage, recordCount, pageSize, 10, linkFunction, contextRoot));
	return response;
}
@Override
public Map<String, Object> processRecentlyList(HashMap<String, Object> map) throws Exception {
	// Most recently registered collaboration processes, wrapped in the
	// standard response map.
	ProcessDao dao = sqlSession.getMapper(ProcessDao.class);
	Map<String, Object> response = new HashMap<String, Object>();
	response.put("result", Constant.RESULT_TRUE);
	response.put("list", dao.processRecentlyRegistList(map));
	return response;
}
/**
 * Loads the base information of one recently registered process: its name,
 * its folder (resolved from the root document), and its executors enriched
 * with their department names.
 *
 * NOTE: the incoming {@code map} is mutated ("doc_id", "folder_id",
 * "userIdList") and reused as the parameter object for the successive DAO
 * calls, so the call order below is significant.
 */
@Override
public Map<String, Object> selectProcessRecently(HashMap<String, Object> map) throws Exception {
	Map<String, Object> resultMap = new HashMap<String, Object>();
	List<ProcessExecutorVO> processExecutorList = new ArrayList<ProcessExecutorVO>();
	ProcessDao processDao = sqlSession.getMapper(ProcessDao.class);
	GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
	FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
	// 1. load the process record to get its name
	ProcessVO processInfo = processDao.processInfo(map);
	// 1-1. resolve the folder id from the process's root document
	map.put("doc_id", processInfo.getDoc_root_id());
	String processName = processInfo.getName();
	String folderId = processDao.processFolderIdByDocId(map);
	map.put("folder_id", folderId);
	FolderVO folderVo = folderDao.folderDetail(map);
	// 2. load the executors of the process
	processExecutorList = processDao.processExcutorList(map);
	// 3. look up each executor's department name via the executor's user id
	List<String> userIdList = new ArrayList<String>();
	for(ProcessExecutorVO processExecutorVO : processExecutorList){
		userIdList.add(processExecutorVO.getExecutor_id());
	}
	map.put("userIdList", userIdList);
	List<CaseInsensitiveMap> groupList = groupDao.groupInfoByUserId(map);
	// index the lookup result by user id, then stamp the names onto the executors
	HashMap<String, String> groupInfoMap = new HashMap<String, String>();
	for(CaseInsensitiveMap tempMap : groupList){
		groupInfoMap.put((String)tempMap.get("user_id"), (String)tempMap.get("group_nm"));
	}
	for(int i=0; i<processExecutorList.size(); i++){
		processExecutorList.get(i).setGroup_nm(groupInfoMap.get((processExecutorList.get(i).getExecutor_id())));
	}
	resultMap.put("result",Constant.RESULT_TRUE);
	resultMap.put("processName",processName);
	resultMap.put("full_path",cacheService.getFolderFullpathNameByFolderId(folderId, true));
	resultMap.put("folder_id",folderId);
	resultMap.put("map_id",folderVo.getMap_id());
	resultMap.put("acl_id",folderVo.getAcl_id());
	resultMap.put("list",processExecutorList);
	return resultMap;
}
/**
 * Registers a new collaboration process inside one transaction:
 * (1) creates the root document, (2) inserts the process record,
 * (3) inserts all executors (author/co-authors/approvers/receivers plus the
 * requestor), and (4) records a "recently used" entry for the session user.
 *
 * The statement order matters: IDs are drawn from counters first, the document
 * must exist before the process row references it, and executor status/date
 * values depend on the executor type.
 */
@Override
public Map<String, Object> processWrite(SessionVO sessionVO, Model model, HashMap<String, Object> map, HttpServletRequest request) throws Exception {
	ProcessDao processDao = sqlSession.getMapper(ProcessDao.class);
	Map<String, Object> resultMap = new HashMap<String, Object>();
	// Draw sequence values and build the string process id (prefix + number).
	int process_id = commonService.commonNextVal(Constant.COUNTER_ID_PROCESS);
	int recently_id = commonService.commonNextVal(Constant.COUNTER_ID_RECENTLY);
	String strProcessId = CommonUtil.getStringID(Constant.ID_PREFIX_PROCESS, process_id);
	String strContent = map.get("content") != null ? map.get("content").toString() : "";
	// 1. populate the DocumentVO for the process's root document
	DocumentVO documentVo = new DocumentVO();
	//documentVo.setDoc_id(strDocumentId);
	documentVo.setDoc_name(map.get("name").toString());
	documentVo.setAcl_id(map.get("acl_id").toString());
	documentVo.setDoc_type(map.get("doc_type").toString());
	documentVo.setFolder_id(map.get("folder_id").toString());
	documentVo.setAccess_grade(Constant.DOCUMENT_DEFALUT_ACCESSGRADE);
	documentVo.setSecurity_level(Constant.DOCUMENT_DEFALUT_SECURITY_LEVEL);
	documentVo.setDoc_status(Constant.DOCUMENT_STATUS_PROCESS_ING); // 'P' while the process runs, 'C' when complete
	// flags required by the document validation step
	map.put("isType", Constant.INSERT);
	map.put("version_type", Constant.VERSION_NEW_DOCUMENT);
	// 1-1. collect extended attributes when the document type declares them
	List<HashMap<String,Object>> attrList = new ArrayList<HashMap<String,Object>>();
	if(map.get("is_extended") != null && map.get("is_extended").toString().equals(Constant.T)) {
		attrList = documentService.docExtendedAttrList(request,documentVo.getDoc_type());
	}
	// 1-2. validation also fills in doc_id, extended ACL items and the upload file list
	documentService.writeDocValid(map, documentVo, sessionVO);
	// 1-3. persist the document
	documentService.writeDocProc(map, documentVo, attrList, sessionVO);
	// 2. populate and insert the process record
	ProcessVO processVo = new ProcessVO();
	processVo.setProcess_id(strProcessId);
	processVo.setDoc_root_id(documentVo.getDoc_id());
	processVo.setCreator_id(sessionVO.getSessId());
	processVo.setCreator_name(sessionVO.getSessName());
	processVo.setName(map.get("name").toString()); // process (task) name
	processVo.setStatus(Constant.PROCESS_STATUS_WRITE);
	//processVo.setExpect_date(map.get("expect_date").toString()); // expected completion date
	processVo.setExpect_dateDB(CommonUtil.getCurruentTimeByDate(map.get("expect_date").toString()));
	processVo.setContent(strContent);
	// 2-1. DAO insert
	processDao.insertProcess(processVo);
	// 3. executors
	// 3-1. author, co-authors, approvers and receivers come from the request JSON
	List<ProcessExecutorVO> proExecutorList = CommonUtil.jsonArrayToProcessExecutorList(map);
	// 3-2. the requestor is appended explicitly and is already "done"
	ProcessExecutorVO processExecutorVo = new ProcessExecutorVO();
	processExecutorVo.setType(Constant.PROCESS_TYPE_REQUESTOR);
	processExecutorVo.setExecutor_id(map.get("requestorId").toString());
	processExecutorVo.setExecutor_name(map.get("requestorName").toString());
	processExecutorVo.setStatus(Constant.PROCESS_EXECUTOR_END);
	processExecutorVo.setSort_index(0);
	proExecutorList.add(processExecutorVo);
	for(ProcessExecutorVO tempVo : proExecutorList){
		String execute_id = CommonUtil.getStringID(Constant.ID_PREFIX_PROCESS_EXECUTOR, commonService.commonNextVal(Constant.COUNTER_ID_PROCESS_EXECUTOR));
		tempVo.setExecute_id(execute_id);
		tempVo.setProcess_id(strProcessId);
		tempVo.setDoc_root_id(documentVo.getDoc_id());
		// status and start date depend on the executor type:
		// authors/co-authors start immediately; everyone else waits
		// (the far-future date "9999-01-01" marks "not started yet")
		if(tempVo.getType().equals(Constant.PROCESS_TYPE_AUTHOR) || tempVo.getType().equals(Constant.PROCESS_TYPE_COAUTHOR)){
			if(tempVo.getType().equals(Constant.PROCESS_TYPE_AUTHOR)){
				documentVo.setOwner_id(tempVo.getExecutor_id());
			}
			tempVo.setStatus(Constant.PROCESS_EXECUTOR_START);
			tempVo.setStart_dateDB(CommonUtil.getCurruentTime()); // today
		}else{
			tempVo.setStatus(Constant.PROCESS_EXECUTOR_WAIT);
			tempVo.setStart_dateDB(CommonUtil.getCurruentTimeByDate("9999-01-01")); // placeholder date
		}
		// DAO insert per executor
		processDao.insertProcessExecutor(tempVo);
	}
	// 4. record a "recently used" entry for the creator
	RecentlyObjectVO recentlyObjectVo = new RecentlyObjectVO();
	recentlyObjectVo.setIdx(CommonUtil.getStringID(Constant.ID_PREFIX_RECENTLY, recently_id));
	recentlyObjectVo.setUser_id(sessionVO.getSessId());
	recentlyObjectVo.setTarget_id(processVo.getProcess_id());
	recentlyObjectVo.setTarget_type(Constant.RECENTLY_TYPE_PROCESS);
	// 4-1. DAO insert
	commonService.insertRecentlyObject(recentlyObjectVo);
	resultMap.put("result",Constant.RESULT_TRUE);
	return resultMap;
}
@Override
public Map<String, Object> processDetail(HashMap<String, Object> map) throws Exception {
	ProcessDao dao = sqlSession.getMapper(ProcessDao.class);
	// Load the process master record and all of its executor rows.
	ProcessVO process = dao.processInfo(map);
	List<ProcessExecutorVO> executors = dao.processExcutorList(map);
	// setExecutorInfo operates on a list, so wrap the single process before
	// decorating it with progress counters and tooltip entries.
	List<ProcessVO> wrapper = new ArrayList<ProcessVO>();
	wrapper.add(process);
	setExecutorInfo(wrapper, executors);
	Map<String, Object> response = new HashMap<String, Object>();
	response.put("processVo", wrapper.get(0));
	response.put("processExecutorList", executors);
	response.put("result", Constant.RESULT_TRUE);
	return response;
}
/**
 * Handles an approval-workflow action (approval request, approve, or reject)
 * on a process: records the action text as a comment, updates the acting
 * executor's row, advances (or rewinds) the process status, and activates the
 * next executor where applicable.
 *
 * @param map       expects "actionType", "process_id", "content" (no null checks — NPE if absent)
 * @param sessionVO current user session (comment author)
 * @return map with "result" = Constant.RESULT_TRUE
 * @throws Exception on any DAO failure (transaction handled by caller/AOP)
 */
@Override
public Map<String, Object> approveAction(HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
	ProcessDao processDao = sqlSession.getMapper(ProcessDao.class);
	DocumentDao documentDao = sqlSession.getMapper(DocumentDao.class);
	Map<String, Object> resultMap = new HashMap<String, Object>();
	String actionType = map.get("actionType").toString(); // no null check (NullPointerException if missing)
	String process_id = map.get("process_id").toString();
	boolean isNextExecutor = true;
	// 1. Insert xr_comment :: com_step is incremented, com_order keeps its default(0)
	//    i.e. the action note is a top-level comment, not a reply.
	map.put("doc_root_id", process_id);
	CommentVO commentVo = new CommentVO();
	commentVo.setCom_id( CommonUtil.getStringID(Constant.ID_PREFIX_COMMENT, commonService.commonNextVal(Constant.COUNTER_ID_COMMENT)));
	commentVo.setDoc_root_id(process_id);
	commentVo.setCom_step(String.valueOf(documentDao.checkMaxStep(map)+1)); // doc_root_id => process_id
	commentVo.setCreator_id(sessionVO.getSessId());
	commentVo.setCreator_name(sessionVO.getSessName());
	commentVo.setParent_creator_name(sessionVO.getSessName());
	commentVo.setContent(map.get("content").toString());
	documentDao.docCommentWrite(commentVo);
	// 2. Prepare xr_process status change (only process_id/status/complete_date populated)
	ProcessVO processVo = new ProcessVO(); // all DB fields except process_id and status stay empty
	processVo.setProcess_id(process_id);
	processVo.setComplete_dateDB(CommonUtil.getCurruentTime());
	// 3. Update the acting executor's row in xr_process_executor
	ProcessExecutorVO processExecutorVo = new ProcessExecutorVO();
	processExecutorVo.setUpdateDBType(actionType);
	processExecutorVo.setProcess_id(process_id);
	if(actionType.equals(Constant.PROCESS_ACTION_APPROVEREJECT)){
		// Reject: process goes back to "modify", executors reset to waiting
		isNextExecutor = false;
		processVo.setStatus(Constant.PROCESS_STATUS_MODIFY);
		processExecutorVo.setStatus(Constant.PROCESS_EXECUTOR_WAIT);
		processExecutorVo.setEnd_dateDB(CommonUtil.getCurruentTimeByDate("8888-01-01"));
	}else{
		// Approval request or approve: mark the acting executor finished
		processExecutorVo.setStatus(Constant.PROCESS_EXECUTOR_END);
		processExecutorVo.setEnd_dateDB(CommonUtil.getCurruentTime());
	}
	// [3] persist the acting executor's update
	processDao.updateProcessExecutor(processExecutorVo);
	// 4. Activate the next approver in xr_process_executor :: status(S), start_date(sysdate)
	if(isNextExecutor){
		ProcessExecutorVO currentApprover = processDao.currentApproverInfo(map);
		if(currentApprover != null){
			processVo.setStatus(Constant.PROCESS_STATUS_APPROVAL);
			processExecutorVo = new ProcessExecutorVO();
			processExecutorVo.setExecute_id(currentApprover.getExecute_id());
			processExecutorVo.setStatus(Constant.PROCESS_EXECUTOR_START);
			processExecutorVo.setStart_dateDB(CommonUtil.getCurruentTime());
			// update the next approver
			processDao.updateProcessExecutor(processExecutorVo);
		}else{
			// No approver left: the whole process is complete
			processVo.setStatus(Constant.PROCESS_STATUS_END);
		}
	}else if(processVo.getStatus().equals(Constant.PROCESS_STATUS_MODIFY)){
		// Reject path: re-open the author/co-author rows.
		// NOTE(review): this VO carries no process_id/execute_id — presumably the
		// mapper keys off updateDBType(APPROVEREQUEST); verify the SQL actually
		// scopes the update to this process and does not touch other rows.
		processExecutorVo = new ProcessExecutorVO();
		processExecutorVo.setUpdateDBType(Constant.PROCESS_ACTION_APPROVEREQUEST);
		processExecutorVo.setStatus(Constant.PROCESS_EXECUTOR_START);
		processExecutorVo.setStart_dateDB(CommonUtil.getCurruentTime());
		processExecutorVo.setEnd_dateDB(CommonUtil.getCurruentTimeByDate("8888-01-01"));
		// update the re-opened executors
		processDao.updateProcessExecutor(processExecutorVo);
	}
	// [2] persist the process status change
	processDao.updateProcess(processVo);
	// public static final String PROCESS_ACTION_APPROVEREQUEST = "APPROVEREQUEST"; // approval request
	// public static final String PROCESS_ACTION_APPROVE = "APPROVE"; // approve
	// public static final String PROCESS_ACTION_APPROVEREJECT = "APPROVEREJECT"; // reject
	///////
	//// Approval-request handling:
	// 0. register the request text in xr_comment
	// 1. author/co-authors -> status E, completion date set
	// 2. process stage -> approval (A)
	// 3. activate the next approver
	//////
	////////
	//// Approve handling:
	// 0. register the approval text in xr_comment
	// 1. acting approver -> status E, completion date set
	// 2. check whether another approver follows
	// 2-1 : if one exists -> status S, start date set
	// 2-2 : otherwise -> process stage complete (E)
	////////
	///////
	//// Reject handling:
	// 0. register the rejection text in xr_comment
	// 1. all approvers -> status N
	// 2. process stage -> modify (M)
	// 3. author/co-authors -> status S
	///////
	resultMap.put("result",Constant.RESULT_TRUE);
	return resultMap;
}
}
<file_sep>/EDMS3/WebContent/js/common/initbind.js
/**
 * Binds handlers for elements shared by every common screen (menus, layout
 * splitters, context menus) and sets the jQuery UI datepicker defaults to the
 * Korean locale. Runs once on DOM ready.
 */
$(function(){
	// Block the browser context menu : remove once publishing (markup work) is finished
	document.oncontextmenu=function(){return false;}
	exsoft.common.bind.leftMenuToggle(); // expand/collapse the left menu
	exsoft.common.bind.quickMenuToggle(); // expand/collapse the quick menu
	exsoft.common.bind.layoutViewToggle(); // expand/collapse the split-view menu
	exsoft.common.bind.layoutViewDivide(); // show panes per the chosen horizontal/vertical split
	exsoft.common.bind.event.layoutDragHorizontal(); // adjust left/right pane ratio while dragging
	exsoft.common.bind.event.layoutDragVertical(); // adjust top/bottom pane ratio while dragging
	exsoft.common.bind.event.commentCentextMenu(); // context menu for document-detail comments
	exsoft.common.bind.event.divDropDown(); // div drop-down (expand/collapse menus)
	exsoft.common.bind.event.urlEmailClose(); // close the URL view
	exsoft.common.bind.event.docDetailWindowClose(); // close the document-detail tab
	// Datepicker defaults (labels shown in Korean — these are runtime strings)
	$.datepicker.regional['ko'] = {
		closeText: '닫기',
		prevText: '이전달',
		nextText: '다음달',
		currentText: '오늘',
		monthNames: ['1월','2월','3월','4월','5월','6월',
		'7월','8월','9월','10월','11월','12월'],
		monthNamesShort: ['1월','2월','3월','4월','5월','6월',
		'7월','8월','9월','10월','11월','12월'],
		dayNames: ['일','월','화','수','목','금','토'],
		dayNamesShort: ['일','월','화','수','목','금','토'],
		dayNamesMin: ['일','월','화','수','목','금','토'],
		buttonImageOnly: false,
		weekHeader: 'Wk',
		dateFormat: 'yy-mm-dd',
		firstDay: 0,
		isRTL: false,
		duration:200,
		showAnim:'show',
		showMonthAfterYear: false
	};
	$.datepicker.setDefaults($.datepicker.regional['ko']);
});
package kr.co.exsoft.common.service;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.annotation.Propagation;
import java.sql.SQLException;
import kr.co.exsoft.common.vo.CodeVO;
import kr.co.exsoft.common.vo.MenuAuthVO;
import kr.co.exsoft.common.vo.HistoryVO;
import kr.co.exsoft.common.vo.RecentlyObjectVO;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.vo.PageVO;
/**
 * Common menu/code service interface: counters, code tables, menu
 * authorization, system configuration and shared attachment/recently-used
 * helpers.
 *
 * @author <NAME>
 * @since 2014.07.17
 * @version 3.0
 */
@Transactional
public interface CommonService {
	/**
	 * Returns the next value of the given counter (merged nextValTable/nextVal
	 * handling). Runs in its own transaction so the counter advances even if
	 * the caller rolls back.
	 *
	 * @param counter_id counter identifier
	 * @return next counter value
	 * @throws Exception on DB failure
	 */
	@Transactional(propagation = Propagation.REQUIRES_NEW,rollbackFor ={ Exception.class,SQLException.class})
	public int commonNextVal(String counter_id) throws Exception ;
	/**
	 * Returns the current counter value via a DB function (no increment).
	 *
	 * @param counter_id counter identifier
	 * @return current counter value
	 * @throws Exception on DB failure
	 */
	public int currentVal(String counter_id) throws Exception ;
	/**
	 * Returns the next counter value using the counter table. Runs in its own
	 * transaction (REQUIRES_NEW) like {@link #commonNextVal(String)}.
	 *
	 * @param counter_id counter identifier
	 * @return next counter value
	 * @throws Exception on DB failure
	 */
	@Transactional(propagation = Propagation.REQUIRES_NEW,rollbackFor ={ Exception.class,SQLException.class})
	public int nextValTable(String counter_id) throws Exception;
	/**
	 * Fetches a single code entry.
	 *
	 * @param map lookup criteria
	 * @return the matching code row
	 * @throws Exception on DB failure
	 */
	public CodeVO codeDetail(HashMap<String,Object> map) throws Exception;
	/**
	 * Fetches a list of code entries.
	 *
	 * @param map lookup criteria
	 * @return matching code rows
	 * @throws Exception on DB failure
	 */
	public List<CodeVO> codeList(HashMap<String,Object> map) throws Exception;
	/**
	 * Persists Daum-editor content (sample implementation).
	 *
	 * @param map editor payload
	 * @return affected row count
	 * @throws Exception on DB failure
	 */
	public int editorWrite(HashMap<String,Object> map) throws Exception;
	/**
	 * Loads previously saved Daum-editor content.
	 *
	 * @param map lookup criteria
	 * @return editor content row (case-insensitive keys)
	 * @throws Exception on DB failure
	 */
	public CaseInsensitiveMap editorDetailInfo(HashMap<String,Object> map) throws Exception;
	/**
	 * Validates the EDMS user-license count.
	 *
	 * @return true when the license is still valid
	 * @throws Exception on DB failure
	 */
	public boolean checkUserLicense() throws Exception;
	/**
	 * Fetches the code list as a paged result.
	 *
	 * @param map paging/search criteria
	 * @return paged code list
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> codePageList(HashMap<String,Object> map) throws Exception;
	/**
	 * Fetches the menu-authorization list.
	 *
	 * @param map search criteria
	 * @return menu-authorization result map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> menuAuthList(HashMap<String,Object> map) throws Exception;
	/**
	 * Fetches the admin menu-authorization list.
	 *
	 * @param map search criteria
	 * @return menu-authorization rows
	 * @throws Exception on DB failure
	 */
	public List<MenuAuthVO> adminMenuAuthList(HashMap<String,Object> map) throws Exception;
	/**
	 * Creates/updates/deletes code entries. Declared @Transactional
	 * (REQUIRES_NEW) to cover the multi-language base rows as one unit.
	 *
	 * @param codeVO code row to manage
	 * @param map action parameters
	 * @return result map
	 * @throws Exception on DB failure (rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRES_NEW,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> codeManager(CodeVO codeVO,HashMap<String,Object> map) throws Exception;
	/**
	 * Creates/updates/deletes menu-authorization entries in one transaction.
	 *
	 * @param menuAuthList rows to manage
	 * @param map action parameters
	 * @return result map
	 * @throws Exception on DB failure (rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRES_NEW,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> menuAuthManager(List<MenuAuthVO> menuAuthList,HashMap<String,Object> map) throws Exception;
	/**
	 * Builds the common parameter list for menu-authorization update/delete.
	 *
	 * @param inputStr raw request values
	 * @return parsed menu-authorization rows
	 */
	public List<MenuAuthVO> setMenuAuthParam(String[] inputStr);
	/**
	 * Builds the common parameter list for menu-authorization creation.
	 *
	 * @param inputStr raw request values
	 * @param map additional parameters
	 * @return parsed menu-authorization rows
	 */
	public List<MenuAuthVO> setMenuAuthParam(String[] inputStr,HashMap<String,Object> map);
	/**
	 * Fetches the menu list for the menu-authorization screen.
	 *
	 * @param map search criteria
	 * @return menu list result map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> menuList(HashMap<String,Object> map) throws Exception;
	/**
	 * Returns the menu-authorization info for the connected administrator.
	 * (Marked for removal in the original source.)
	 *
	 * @param map lookup criteria
	 * @return authorization string
	 * @throws Exception on DB failure
	 */
	public String getMenuAuth(HashMap<String,Object> map) throws Exception;
	/**
	 * Records a folder/document-type/permission history entry.
	 *
	 * @param historyVO history row
	 * @return affected row count
	 * @throws Exception on DB failure
	 */
	public int historyWrite(HistoryVO historyVO) throws Exception;
	/**
	 * Reads the system environment configuration.
	 *
	 * @param map lookup criteria
	 * @return configuration map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> confDetail(HashMap<String,Object> map) throws Exception;
	/**
	 * Reads the trash-management policy (personal/system).
	 *
	 * @param map lookup criteria
	 * @return policy rows
	 * @throws Exception on DB failure
	 */
	public List<HashMap<String,Object>> trashConfig(HashMap<String,Object> map) throws Exception;
	/**
	 * Reads the audit configuration (used by the batch program).
	 *
	 * @param map lookup criteria
	 * @return audit configuration map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> auditConfig(HashMap<String,Object> map) throws Exception;
	/**
	 * Updates the system environment configuration.
	 *
	 * @param map new configuration values
	 * @return result map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> confProc(HashMap<String,Object> map) throws Exception;
	/**
	 * Returns page-navigation menu info.
	 * (Marked for removal in the original source.)
	 *
	 * @param map lookup criteria
	 * @return menu info map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> pageMenuInfo(HashMap<String,Object> map) throws Exception;
	/**
	 * Returns the authorization-group list for the given admin scope.
	 *
	 * @param part admin scope/part identifier
	 * @param sessionVO current session
	 * @return group ids
	 * @throws Exception on DB failure
	 */
	public List<String> authGroupList(String part,SessionVO sessionVO) throws Exception;
	/**
	 * Returns the full path of a folder.
	 *
	 * @param map folder lookup criteria
	 * @return full path string
	 * @throws Exception on DB failure
	 */
	public String folderFullPath(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns the attachment (page) list.
	 *
	 * @param map lookup criteria
	 * @return attachment rows
	 * @throws Exception on DB failure
	 */
	public List<PageVO> setPageList(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns the attachment (page) list for the Appliance deployment.
	 *
	 * @param map lookup criteria
	 * @return attachment rows
	 * @throws Exception on DB failure
	 */
	public List<PageVO> setPageLocalList(HashMap<String, Object> map) throws Exception;
	/**
	 * Writes history entries for the given attachments.
	 *
	 * @param pageList attachments to record
	 * @param sessionVO current session (actor)
	 * @throws Exception on DB failure
	 */
	public void pageHtWrite(List<PageVO> pageList,SessionVO sessionVO) throws Exception;
	/**
	 * Common handling that pushes admin-screen menu info and management scope
	 * into the model.
	 *
	 * @param map request parameters
	 * @param menuInfo menu info output
	 * @param partInfo management-scope output
	 * @param sessionVO current session
	 * @throws Exception on DB failure
	 */
	public void setPageToModel(HashMap<String, Object> map,Map<String, Object> menuInfo,Map<String, Object> partInfo,SessionVO sessionVO) throws Exception;
	/**
	 * Returns attachment info for the URL-copy feature.
	 *
	 * @param map lookup criteria
	 * @return attachment row
	 * @throws Exception on DB failure
	 */
	public PageVO urlPageInfo(HashMap<String, Object> map) throws Exception;
	/**
	 * Deletes entries from the recently-registered list (documents,
	 * collaborations, folders).
	 *
	 * @param map delete criteria
	 * @return result map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> deleteRecently(HashMap<String, Object> map) throws Exception;
	/**
	 * Queues an attachment for deletion. When a document/collaboration
	 * registration fails, files already stored in the exRep ECM are inserted
	 * into XR_DELETEFILE_QUEUE and removed later by a batch job.
	 *
	 * @param map file identification
	 * @return affected row count
	 * @throws Exception on DB failure
	 */
	public int insertDeleteFileQueue(HashMap<String, Object> map) throws Exception;
	/**
	 * Returns the file policy from the admin configuration: allowed file
	 * types, attachment count limit, per-file size limit and total size limit.
	 *
	 * @param map lookup criteria
	 * @return file-policy map
	 * @throws Exception on DB failure
	 */
	public Map<String, Object> configFileInfo(HashMap<String, Object> map) throws Exception;
	/**
	 * Records a per-user "recently registered" entry for documents, folders
	 * and collaborations.
	 *
	 * @param recentlyVo entry to insert
	 * @return affected row count
	 * @throws Exception on DB failure
	 */
	public int insertRecentlyObject(RecentlyObjectVO recentlyVo) throws Exception;
}
<file_sep>/EDMS3/WebContent/js/rgate/rgate.js
/**
 * RGATE (admin) related script: opens the layer pop-ups of the RGATE policy
 * screens. Each open.* helper shows one named layer via
 * exsoft.util.layout.divLayerOpen(wrapperId, layerId).
 */
var exsoftAdminRgateFunc = {
	contentType : "",
	init : {
		// Initial configuration page setup
		initPage : function(contentType) {
			exsoftAdminRgateFunc.contentType = contentType; // remember the current page
			exsoft.util.common.ddslick('#strIndex', 'rGate_policy', '', 98, function(divId){}); // apply the SELECT BOX plugin
		},
	},
	open : {
		// Add department/user :: branch per page as needed.
		addGroupWrite : function() {
			exsoft.util.layout.divLayerOpen('rGate_deptUserPolicy_wrapper','rGate_deptUserPolicy');
			// Branch per page here. TODO - implement during the centralization upgrade
		},
		// Add extension :: remove once implemented; called directly on company-wide add OR policy edit.
		addExtManager : function() {
			exsoft.util.layout.divLayerOpen('rGate_extension_wrapper','rGate_extension');
		},
		// List & select save-allowed programs
		selectProcManager : function() {
			exsoft.util.layout.divLayerOpen('rGate_appList_wrapper','rGate_appList');
		},
		// Add a program
		addProcManager : function() {
			exsoft.util.layout.divLayerOpen('rGate_registApplication_wrapper','rGate_registApplication');
		},
		// Select an exception program
		selectExceptProcManager : function() {
			exsoft.util.layout.divLayerOpen('rGate_application_wrapper','rGate_application');
		},
		// Manage the IP list
		selectIPList : function() {
			exsoft.util.layout.divLayerOpen('rGate_netList_wrapper','rGate_netList');
		},
		// Add an IP
		addIP : function() {
			exsoft.util.layout.divLayerOpen('rGate_registIPAddr_wrapper','rGate_registIPAddr');
		},
		// Configure the password
		configPasswd : function() {
			exsoft.util.layout.divLayerOpen('rGate_setPwd_wrapper','rGate_setPwd');
		}
	},
	layer : {
	},
	close : {
	},
	event : {
	},
	ui : {
	},
	callback : {
	}
}
/**
 * workDocList.js — workspace document-list screen (tree + jqGrid).
 */
var workDocList = {
	/**
	 * Member variables.
	 * workType : currently selected workspace tab; defaults to "my department".
	 * folderId : id of the folder selected in the active tree (null until a node is picked).
	 * tree     : lazily created XFTree instances, one per workspace tab.
	 */
	workType : Constant.WORK_MYDEPT,
	folderId : null,
	tree : {
		mydeptTree : null,
		allDeptTree : null,
		projectTree : null
	},
	// 0. Initialization
init : {
isInitialized : false,
//page 초기화
initPage : function(pageSize){
// 트리 초기화
workDocList.treeFunctions.initTree(Constant.WORK_MYDEPT);
// UI 초기화
workDocList.init.initUi(pageSize);
},
// 화면 구성 요소 초기화 (ddslick 등)
initUi : function(pageSize) {
if (!workDocList.init.isInitialized) {
//검색 selectbox
exsoft.util.common.ddslick('#workDoc_select', 'srch_type1', '', 79, function(divId, selectedData){
});
// 목록 출력 갯수
exsoft.util.common.ddslick('#workDocListRowCount', 'srch_type1', '', 68, function(divId, selectedData){
$("#workDocList").setGridParam({page:1, rowNum:selectedData.selectedData.value}).trigger("reloadGrid");
});
/**
* Tab index 클릭 이벤트 초기화
*/
$(".tree_menu_list li").each(function(idx) {
$(this).on("click", workDocList.event.tabClick);
})
//depth navigation
$('.depth_navi > span').mouseover(function(){
var path = $(this).parent().find(".depth_navi_path");
if(!path.is(":visible")) {
path.removeClass('hide');
}
}).mouseout(function(){
var path = $(this).parent().find(".depth_navi_path");
if(path.is(":visible")) {
path.addClass('hide');
}
});
workDocList.grid.pageSize = pageSize;
$("#treeRefresh").on("click", function() {
workDocList.treeFunctions.refresh();
});
// 페이지목록 값 설정
exsoft.util.layout.setSelectBox('workDocListRowCount',workDocList.grid.pageSize);
// 쿠키값에 설정된 화면 상하좌우 분활 자동으로 보이기
exsoft.common.bind.doFunction.layoutViewCookie();
isInitialized = true;
}
}
},
// Tree context-menu callbacks (create/modify/move/delete folder, add favorite).
// Passed to XFTree as `contextAction`; each receives the clicked tree node.
treeContextAction : {
	// Tree context : create-folder callback — opens the folder layer in "new" mode
	createFolder : function(node) {
		exsoft.util.layout.divLayerOpen(folderWindow.wrapperClass, folderWindow.layerClass);
		folderWindow.callback = workDocList.callback.refreshTree;
		folderWindow.initForm(node)
	},
	// Tree context : modify-folder callback — opens the folder layer with node.id preloaded
	modifyFolder : function(node) {
		exsoft.util.layout.divLayerOpen(folderWindow.wrapperClass, folderWindow.layerClass);
		folderWindow.callback = workDocList.callback.refreshTree;
		folderWindow.initForm(node, node.id)
	},
	// Tree context : move-folder callback — opens the single-folder picker
	moveFolder : function(node) {
		// selectSingleFolderWindow.callback = workDocList.callback.refreshTree;
		selectSingleFolderWindow.init(workDocList.callback.moveFolder);
	},
	// Tree context : delete-folder callback — confirms, then posts DELETE and refreshes the tree
	deleteFolder : function(node) {
		selectSingleFolderWindow.callback = workDocList.callback.refreshTree;
		var jo = {
			folder_id : node.id,
			folder_name_ko : node.text,
			type : "DELETE"
		}
		jConfirm("폴더를 삭제 하시겠습니까?", "폴더 삭제", 0, function(r) {
			if (r) {
				exsoft.util.ajax.ajaxDataFunctionWithCallback(jo, exsoft.contextRoot+"/folder/folderControl.do", "", function(data) {
					if (data.result == "true") {
						workDocList.treeFunctions.refresh();
					} else {
						jAlert(data.message);
					}
				});
			}
		});
	},
	// Tree context : add-favorite callback
	addFavoriteFolder : function(node) {
		// 1. Check whether the folder was already added as a favorite
		exsoft.util.ajax.ajaxDataFunctionWithCallback({folder_id : node.id, type : "CHECK_EXISTS"}, exsoft.contextRoot+"/folder/favoriteControl.do", "", function(data, param) {
			if (data.result == "true") {
				var jsonObject = {
					folder_id : node.id,
					folder_nm : node.text,
					mode : "ADD_FAVORITE",
					only_virtual : "Y"
				}
				// Favorite-parent picker popup (requires selectFavoriteFolderWindow.jsp)
				selectFavoriteFolderWindow.init(jsonObject, false, function(returnObject) {
					returnObject.type = "ADD_TO_FAVORITES";
					exsoft.util.ajax.ajaxDataFunctionWithCallback(returnObject, exsoft.contextRoot+"/folder/favoriteControl.do", "", function(data, param) {
						if (data.result == "true") {
							jAlert("즐겨찾기 폴더 등록 완료");
						} else {
							jAlert(data.message);
						}
					});
				});
			} else {
				jAlert("이미 즐겨찾기 폴더로 등록 됐습니다.");
				return;
			}
		});
	}
},
functions : {
updateWorkType : function (workType) {
switch (workType) {
case "mydeptTree" :
workDocList.workType = Constant.WORK_MYDEPT;
break;
case "alldeptTree" :
workDocList.workType = Constant.WORK_ALLDEPT;
break;
case "projectTree" :
workDocList.workType = Constant.WORK_PROJECT;
break;
}
}
},
grid : {
// page : 1,
pageSize : 10,
initGrid : function() {
// 초기화 여부
if ($("#workDocList")[0].grid != undefined) {
$('#workDocList').jqGrid('GridUnload');
}
var _postData = {
folder_id : workDocList.folderId,
strIndex : exsoft.util.layout.getSelectBox('workDoc_select','option'),
strKeyword1 : $("#strKeyword1").val(),
}
// Grid 세팅
$('#workDocList').jqGrid({
url: exsoft.contextRoot + '/document/workDocumentList.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames : ['doc_id','page_cnt','relation_doc','is_locked','doc_name','type_name','creator_name','create_date',
'acl_create','acl_changePermission','acl_checkoutCancel','root_id','doc_type','lock_date','lock_owner','is_inherit_acl','lock_status', 'folder_id','acl_level'],
colModel : [
{name:'doc_id', index:'doc_id', width:1, editable:false, sortable:false, key:true, align:'center', hidden:true},
{name:'page_cnt', index:'page_cnt', width:10, editable:false, sortable:false, resizable:true, align:'center',
formatter : function(cellValue, option, rowObject) {
return cellValue > 0 ? "<li class='icon' id='file_"+rowObject.doc_id+"'><img src='"+ exsoft.contextRoot +"/img/icon/attach.png' class='attach_file'></li>" : "";
}
},
{name:'relation_doc', index:'relation_doc', width:10, editable:false, sortable:false, resizable:true, align:'center',
formatter : function(cellValue, option, rowObject) {
return cellValue > 0 ? "<li class='icon' id='relation _"+rowObject.doc_id+"'><img src='"+ exsoft.contextRoot +"/img/icon/link.png' class='relative_docs'></li>" : "";
}
},
{name:'is_locked', index:'is_locked', width:10, editable:false, sortable:false, resizable:true, align:'center',
formatter : function(cellValue, option) {
return cellValue == 'T' ? "<li class='icon'><img src='"+ exsoft.contextRoot +"/img/icon/lock1.png' alt='' class='doc_lock'></li>" : "";
},
cellattr : function(rowId, cellValue, rowObject) {
var tooltip = '반출자 : '+rowObject.lock_owner+'\n';
tooltip += '반출일시 : '+rowObject.lock_date+'\n';
return rowObject.is_locked == 'T' ? ' title="'+tooltip+'"' : "";
}
},
{name:'doc_name', index:'doc_name', width:150, editable:false, sortable:true, resizable:true, title:true,
formatter : function(cellValue, option, rowObject){
return "<img src='{0}{1}' class='extension'>".format(exsoft.contextRoot, rowObject.page_extension_img) +
"<a href='#' onclick='exsoft.preview.event.getPreview(\"{0}\")'>{1}</a>".format(rowObject.doc_id, cellValue) +
"<a href='#' onclick='workDocList.event.popDocDetail(\"{0}\")'><img src='{1}/img/icon/new_window.png'></a>".format(rowObject.doc_id, exsoft.contextRoot);
},
cellattr : function(rowId, cellValue, rowObject) {
return ' title="'+rowObject.doc_name+'"';
}
},
{name:'type_name', index:'type_name', width:20, editable:false, sortable:true, resizable:true, align:'center'},
{name:'creator_name', index:'creator_name', width:30, editable:false, sortable:true, resizable:true, align:'center'},
{name:'create_date', index:'create_date', width:30, editable:false, sortable:true, resizable:true, align:'center'},
{name:'acl_level', index:'acl_level', width:20, editable:false, sortable:false, resizable:true, align:'center',
formatter : function(cellValue, option) {
return "<li class='previlege'><img src='"+ exsoft.contextRoot +"/img/icon/prev_"+ (cellValue.toLowerCase()).substring(0,1) +".png' class='previlege_grade'><label class='hide'>" + exsoft.util.grid.getAclItemTitle(cellValue) + "</label</li>";
},
cellattr: function (rowId, cellValue, rowObject) {
var tooltip = '소유자 : '+rowObject.owner_name+'\n';
tooltip += '기본권한 : '+ exsoft.util.grid.getAclItemTitle(rowObject.acl_level) + '\n';
tooltip += '반출취소 : '+(rowObject.acl_checkoutCancel == 'T' ? "가능" : "없음")+'\n';
tooltip += '권한변경 : '+(rowObject.acl_changePermission == 'T' ? "가능" : "없음");
return ' title="'+tooltip+'"';
}
},
{name:'acl_create', index:'acl_create', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'acl_changePermission', index:'acl_changePermission', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'acl_checkoutCancel', index:'acl_checkoutCancel', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'root_id', index:'root_id', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'doc_type', index:'doc_type', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'lock_date', index:'lock_date', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'lock_owner', index:'lock_owner', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'is_inherit_acl', index:'is_inherit_acl', width:1, editable:false, sortable:false, align:'center', hidden:true},
{name:'lock_status',index:'lock_status',width:1,editable:false,sortable:false,align:'center',hidden:true},
{name:'folder_id',index:'folder_id',width:1, editable:false,sortable:false,align:'center',hidden:true},
],
autowidth:true,viewrecords: true,multikey: "ctrlKey",multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "create_date",
sortorder:"desc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : workDocList.grid.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'문서목록',
postData : _postData,
onCellSelect : function(rowid, iCol, cellcontent, e) {
var setCol = "";
var preview = 'doc_preview';
var file = 'attach_file';
var relation = 'relative_docs';
var lock = 'doc_lock';
if(~cellcontent.indexOf(preview)){
setCol = preview;
} else if(~cellcontent.indexOf(file)) {
setCol = file;
} else if (~cellcontent.indexOf(relation)) {
setCol = relation;
} else if (~cellcontent.indexOf(lock)) {
setCol = lock;
}
if(iCol == 0){
// 체크시 row값을 set한다.(선택시 : rowid셋팅, 해제시 : rowid제거)
$("#workDocList").jqGrid('setSelection',rowid);
} else if(setCol == preview){
} else if(setCol == file && cellcontent != ''){
var row = $("#workDocList").getRowData(rowid);
documentListLayerWindow.open.openAttachWindow(row);
} else if(setCol == relation && cellcontent != ''){
var row = $("#workDocList").getRowData(rowid);
documentListLayerWindow.open.openRelationWindow(row);
} else if(setCol == lock && cellcontent != ''){
}
// if(iCol != 0){
// // 선택된 row '>' 표시
// $("#select_list").remove();
// $("#"+rowid).find('#doc_preview').prepend("<span id='select_list' class='select_list_icon'></span>");
// }
},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('workDocList');
exsoft.util.grid.gridNoDataMsgInit('workDocList');
}
,loadComplete: function(data) {
if ($("#workDocList").getGridParam("records")==0) {
exsoft.util.grid.gridNoDataMsg("workDocList","nolayer_data");
}else {
exsoft.util.grid.gridViewRecords('workDocList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#workDocPager",data);
$("tr.jqgrow", this).contextMenu('documentListLayer_context_menu', {
bindings: {
// 수정
'documentListLayer_update' : function(trigger) {
var row = $("#workDocList").getRowData(trigger.id);
var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
jAlert("문서 수정 권한이 없습니다.", "수정", 0);
return false;
}
// documentUpdate(trigger.id, fRefreshDocumentList);
jAlert("문서 수정 연동 해야함.");
},
// 삭제
'documentListLayer_delete': function(trigger) {
var row = $("#workDocList").getRowData(trigger.id);
var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
var jsonArr = [{
doc_id : row.doc_id
, root_id : row.root_id
, is_locked : row.lock_status
, doc_type : row.doc_type
}];
if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("DELETE")) {
jAlert("문서 삭제 권한이 없습니다.", "삭제", 0);
return false;
}
documentListLayerWindow.gObjectID = "workDocList";
documentListLayerWindow.event.documentDeleteSend(jsonArr, "ONLY");
},
// 이동
'documentListLayer_move': function(trigger) {
var row = $("#workDocList").getRowData(trigger.id);
var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
var jsonArr = [{
doc_id : row.doc_id
, doc_name : exsoft.util.common.stripHtml(row.doc_name)
, is_locked : row.lock_status
, root_id : row.root_id
, doc_type : row.doc_type
, is_inherit_acl : row.is_inherit_acl
, folder_id : workDocList.folderId
}];
if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
jAlert("문서 이동 권한이 없습니다.", "이동", 0);
return false;
}
documentListLayerWindow.gObjectID = "workDocList";
documentListLayerWindow.gWorkType = null;
documentListLayerWindow.event.documentMove("ONLY", jsonArr);
},
// 복사
'documentListLayer_copy': function(trigger) {
var row = $("#workDocList").getRowData(trigger.id);
var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
var jsonArr = [{
doc_id : row.doc_id
, doc_name : exsoft.util.common.stripHtml(row.doc_name)
, is_locked : row.lock_status
, root_id : row.root_id
, doc_type : row.doc_type
, is_inherit_acl : row.is_inherit_acl
, folder_id : workDocList.folderId
}];
if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
jAlert("문서 복사 권한이 없습니다.", "복사", 0);
return false;
}
documentListLayerWindow.gObjectID = "workDocList";
documentListLayerWindow.gWorkType = null;
documentListLayerWindow.event.documentCopy("ONLY", jsonArr);
} ,
// 즐겨찾기 추가
'documentListLayer_favorite_add' : function(trigger) {
var row = $('#workDocList').getRowData(trigger.id);
var jsonArr = [{
doc_id : row.doc_id
,root_id : row.root_id
}];
documentListLayerWindow.event.documentAddFavoriteSend(jsonArr);
},
// 작업카트 추가
'documentListLayer_work_add': function(trigger) {
var row = $("#workDocList").getRowData(trigger.id);
var jsonArr = [{
doc_id : row.doc_id
, root_id : row.root_id
, is_locked : row.lock_status
}];
documentListLayerWindow.event.documentTempworkSend(jsonArr);
} ,
// 체크아웃 취소
'documentListLayer_checkout_cancel':function(trigger) {
var row = $('#workDocList').getRowData(trigger.id);
var jsonArr = [{
doc_id : row.doc_id
, root_id : row.root_id
, is_locked : row.lock_status
, doc_type : row.doc_type
}];
documentListLayerWindow.gObjectID = "workDocList";
documentListLayerWindow.event.documentCancelCheckoutSend(jsonArr, "ONLY");
},
},
onContextMenu: function(event) {
var row = $('#workDocList').getRowData(event.currentTarget.id);
$("#documentListLayer_update").removeClass('hide');
$("#documentListLayer_delete").removeClass('hide');
$("#documentListLayer_move").removeClass('hide');
$("#documentListLayer_copy").removeClass('hide');
$("#documentListLayer_favorite_add").removeClass('hide');
$("#documentListLayer_work_add").removeClass('hide');
if (row.lock_status == "T")
$("#documentListLayer_checkout_cancel").removeClass('hide');
else
$("#documentListLayer_checkout_cancel").addClass("hide");
return true;
}
});
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// 컬럼 헤더 정렬 및 다국어 변경 처리
var headerData = '{"doc_id":"doc_id","page_cnt":"<img src=\'{0}/img/icon/attach.png\' class=\'attach_file\'>","relation_doc":"<img src=\'{0}/img/icon/link.png\' class=\'relative_docs\'>","is_locked":"<img src=\'{0}/img/icon/lock.png\' class=\'doc_lock\'>","doc_name":"제목","type_name":"문서유형","creator_name":"등록자","create_date":"등록일","acl_level":"권한"}'.format(exsoft.contextRoot);
exsoft.util.grid.gridColumHeader('workDocList',headerData,'center');
headerData = null;
},
refresh : function(page) {
$("#workDocList").setGridParam({page:page}).trigger("reloadGrid");
}
},
treeFunctions : {
		// Creates the folder tree for the given work type on first use, or refreshes
		// the current folder id when the tree already exists.
		// workType: one of Constant.WORK_MYDEPT / WORK_ALLDEPT / WORK_PROJECT.
		initTree : function(workType) {
			var treeOption = {
				context : exsoft.contextRoot,
				contextAction : workDocList.treeContextAction,
				url : "/folder/folderList.do",
			};
			switch(workType) {
			case Constant.WORK_MYDEPT : // department workspace
				if (workDocList.tree.mydeptTree === null) {
					treeOption.divId = "#mydeptTree";
					treeOption.mapId = Constant.MAP_MYDEPT;
					treeOption.workType = Constant.WORK_MYDEPT;
					workDocList.tree.mydeptTree = new XFTree(treeOption);
					workDocList.tree.mydeptTree.template_context(); // right-click context menu
					workDocList.tree.mydeptTree.callbackSelectNode = workDocList.callback.selectTreeNode;
					workDocList.tree.mydeptTree.init(); // department rootId is resolved server-side
				} else {
					// refresh the document list of the currently selected folder
					workDocList.folderId = workDocList.tree.mydeptTree.getCurrentNodeId();
//					$('#folder_title').html(workDocList.mydeptTree.getCurrentNodeName());
//					fDocumentListByfolderId();
				}
				break;
			case Constant.WORK_ALLDEPT : // company-wide workspace
				if (workDocList.tree.allDeptTree === null) {
					treeOption.divId = "#alldeptTree";
					treeOption.mapId = Constant.MAP_MYDEPT;
					treeOption.workType = Constant.WORK_ALLDEPT;
					workDocList.tree.allDeptTree = new XFTree(treeOption);
					workDocList.tree.allDeptTree.template_context(); // right-click context menu
					workDocList.tree.allDeptTree.callbackSelectNode = workDocList.callback.selectTreeNode;
					workDocList.tree.allDeptTree.init();
				} else {
					// refresh the document list of the currently selected folder
					workDocList.folderId = workDocList.tree.allDeptTree.getCurrentNodeId();
//					$('#folder_title').html(gAlldeptFolderTree.getCurrentNodeName());
//					fDocumentListByfolderId();
				}
				break;
			case Constant.WORK_PROJECT : // project workspace
				if (workDocList.tree.projectTree === null) {
					treeOption.divId = "#projectTree";
					treeOption.mapId = Constant.MAP_PROJECT;
					treeOption.workType = Constant.WORK_PROJECT;
					workDocList.tree.projectTree = new XFTree(treeOption);
					workDocList.tree.projectTree.template_context(); // right-click context menu
					workDocList.tree.projectTree.callbackSelectNode = workDocList.callback.selectTreeNode;
					workDocList.tree.projectTree.init();
				} else {
					// refresh the document list of the currently selected folder
					workDocList.folderId = workDocList.tree.projectTree.getCurrentNodeId();
//					$('#folder_title').html(gProjectFolderTree.getCurrentNodeName());
//					fDocumentListByfolderId();
				}
				break;
			default :
				console.error("[workDocList] workType : {0} 이 올바르지 않습니다. ".format(workType));
				break;
			}
		},
getCurrentTree : function() {
switch(workDocList.workType) {
case Constant.WORK_MYDEPT :
return workDocList.tree.mydeptTree;
case Constant.WORK_ALLDEPT :
return workDocList.tree.allDeptTree;
case Constant.WORK_PROJECT :
return workDocList.tree.projectTree;
default :
console.error("[workDocList] workType : {0} 이 올바르지 않습니다. ".format(workType));
}
},
refresh : function() {
workDocList.treeFunctions.getCurrentTree().refresh();
}
},
ui : {
switchTab : function(obj) {
var siblings = $(obj).parent().children();
siblings.each(function(idx) {
if (obj === this) {
$(this).children().addClass("focus");
// 현재 선택된 Tab의 WorkType을 갱신
workDocList.functions.updateWorkType($(this).children().data("id"));
} else {
$(this).children().removeClass("focus");
}
})
},
switchTreeDiv : function() {
$("[data-group=TREE_DIV]").each(function(idx) {
if ($(this).attr("id") == workDocList.ui.getCurrentTabId()) {
$(this).removeClass("hide");
} else {
$(this).addClass("hide");
}
})
},
getCurrentTabId : function() {
var retID = "";
$(".tree_menu_list li").each(function(idx) {
if ($(this).children().hasClass("focus")) {
retID = $(this).children().data("id");
return false;
}
});
return retID;
},
setNavigationText : function(nodeTitle, path) {
$("#nav_title").text(nodeTitle);
$("#nav_fullpath").text(path);
}
},
event : {
// 탭 변경
tabClick : function() {
// UI 변경
workDocList.ui.switchTab(this);
// 선택한 WorkType의 Tree를 Show
workDocList.ui.switchTreeDiv();
// 선택한 WorkType의 Tree를 초기화 (초기화가 이미 됐을경우 Refresh)
workDocList.treeFunctions.initTree(workDocList.workType);
},
// 검색
searchDocument : function() {
var _post = {
strIndex:exsoft.util.common.getDdslick("#workDoc_select"),
strKeyword1:$("#strKeyword1").val(),
folder_id:workDocList.folderId,
is_search:'true'
};
exsoft.util.grid.gridPostDataInitRefresh("workDocList", exsoft.contextRoot + "/document/workDocumentList.do", _post);
},
popDocDetail : function(docId) {
exsoft.document.layer.docCommonFrm('doc_detail_wrapper', 'doc_detail', docId);
}
},
	callback : {
		// Invoked by registFolderWindow.js after a folder is created/updated.
		refreshTree : function (e, data) {
			workDocList.treeFunctions.refresh();
		},
		// Moves the currently selected folder under `parentFolder`.
		// Validation order matters: same-location check first, then ancestor check.
		moveFolder : function (parentFolder) {
			var _tree = workDocList.treeFunctions.getCurrentTree();
			// 1. reject a move to the folder's current location
			if (parentFolder.original.parentId == _tree.getCurrentNodeParentId()) {
				jAlert("동일한 위치로 이동할 수 없습니다.");
				return;
			}
			// 2. reject a move into the folder itself or one of its descendants
			for (var i = 0; i < parentFolder.parentIdList.length; i++) {
				if (parentFolder.parentIdList[i] == _tree.getCurrentNodeId() || parentFolder.id == _tree.getCurrentNodeParentId()) {
					jAlert("현재 폴더 및 현재 폴더 하위로 이동할 수 없습니다.");
					return;
				}
			}
			// target root: a top-level PROJECT node is its own root group
			var targetRootFolder = parentFolder.mapId == "PROJECT" && parentFolder.parentGroup == null ? parentFolder.id : parentFolder.parentGroup.id;
			// "T" when the move crosses into a different root folder group
			var changeRootFolder = _tree.getFolderGroupId(_tree.selectedNode[0]).id != targetRootFolder ? "T" : "F";
			var jsonObject = {
				type : "MOVE",
				folder_id : _tree.getCurrentNodeId(),
				folder_name_ko : _tree.getCurrentNodeName(),
				parent_id : parentFolder.id,
				map_id : parentFolder.mapId,
				parentGroup_id : targetRootFolder,
				root_folder_change : changeRootFolder
			};
			exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject, exsoft.contextRoot+"/folder/folderControl.do", "", function(data, param) {
				if (data.result == "true") {
					workDocList.treeFunctions.refresh();
				}
			});
		},
		// Tree node selection: sync folder id into collaborating modules,
		// reload the document grid, and update the navigation header.
		selectTreeNode : function (e, data) {
			workDocList.folderId = data.node.id;
			workDocList.grid.page = 1;
			documentListLayerWindow.gCurrentFolderId = data.node.id;
			docDetailSearch.functions.changeFolderId(data.node.id);
			// seed the detail-search post data
			docDetailSearch.folder_id = workDocList.folderId;
			docDetailSearch.url = "/document/workDocumentList.do";
			// 1. reload the document list
			workDocList.grid.initGrid();
			// 2. update the navigation text
			workDocList.ui.setNavigationText(data.node.text, data.node.full_path.join(" < "));
		},
	},
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/handler/DefaultTraceHandler.java
package kr.co.exsoft.eframework.handler;
/***
 * Default {@code ExsoftTraceHandler} implementation.
 * <p>
 * Placeholder handler: both arguments are currently ignored and a fixed
 * marker line is written to stdout.
 *
 * @author 패키지 개발팀 (package development team)
 * @since 2014.07.15
 * @version 3.0
 */
public class DefaultTraceHandler implements ExsoftTraceHandler{

	/**
	 * Trace callback.
	 * Note: {@code clazz} and {@code message} are not used by this default
	 * implementation; only the marker line below is printed.
	 */
	public void todo(Class<?> clazz, String message) {
		System.out.println("[DefaultTraceHandler] run...............");
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/folder/controller/FolderAdminController.java
package kr.co.exsoft.folder.controller;
import java.util.Map;
import java.util.HashMap;
import java.util.Locale;
import org.springmodules.validation.commons.DefaultBeanValidator;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.service.TypeService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.library.LocaleLibrary;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.ConfigData;
import kr.co.exsoft.folder.service.FolderService;
import kr.co.exsoft.permission.service.AclService;
/**
* Folder Admin Controller
* @author <NAME>
* @since 2014.07.17
* @version 3.0
*
*/
@Controller
@SessionAttributes("sessionVO")
@RequestMapping("/admin")
public class FolderAdminController {

	@Autowired
	private CommonService commonService;
	@Autowired
	private FolderService folderService;
	@Autowired
	private AclService aclService;
	@Autowired
	private TypeService typeService;
	@Autowired
	private MessageSource messageSource;
	@Autowired
	private DefaultBeanValidator beanValidator;

	// BUGFIX: was LogFactory.getLog(FolderAuthController.class) — log output from
	// this controller was attributed to FolderAuthController.
	protected static final Log logger = LogFactory.getLog(FolderAdminController.class);

	/**
	 * Renders the folder-management admin page.
	 * <p>
	 * Checks the admin ROLE and builds the page navigation (parent menu /
	 * current menu) via {@code commonService.setPageToModel}; a business error
	 * returns the error view with {@code Constant.ERROR_403}, any other failure
	 * with {@code Constant.ERROR_505}.
	 *
	 * @param sessionVO login session attributes
	 * @param model     Spring MVC model, populated by reference
	 * @param map       request parameters
	 * @return view name: {@code "docadmin/folderManager"} on success,
	 *         {@code "error/message"} on failure
	 */
	@RequestMapping("folderManager.do")
	public String folderAdminMainPage(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map) {
		// Resolve the user's locale, falling back to the configured default language.
		@SuppressWarnings("unused")
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> menuInfo = new HashMap<String, Object>();
		Map<String, Object> partInfo = new HashMap<String, Object>();
		try {
			// Admin ROLE access check and page navigation: parent menu / current menu.
			commonService.setPageToModel(map,menuInfo,partInfo,sessionVO);
		}catch(BizException e){
			logger.error(e.getMessage());
			CommonUtil.setErrorMsg(model, Constant.ERROR_403, e.getMessage(),sessionVO.getSessContextRoot());
			return "error/message";
		}catch(Exception e) {
			logger.error(e.getMessage());
			CommonUtil.setErrorMsg(model, Constant.ERROR_505, e.getMessage(),sessionVO.getSessContextRoot());
			return "error/message";
		}
		CommonUtil.setSessionToModel(model, sessionVO); // call by reference
		model.addAttribute("part",partInfo.get("part").toString());
		model.addAttribute("menuInfo",menuInfo);
		model.addAttribute("topSelect",Constant.TOPMENU_DOCUMENT);
		model.addAttribute("subSelect",Constant.DOCUMENT_FOLDERMANAGER);
		return "docadmin/folderManager";
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/user/dao/UserDao.java
package kr.co.exsoft.user.dao;
import java.util.List;
import java.util.HashMap;
import org.springframework.stereotype.Repository;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import kr.co.exsoft.user.vo.QuickMenuVO;
import kr.co.exsoft.user.vo.UserVO;
import kr.co.exsoft.user.vo.ConnectLogVO;
import kr.co.exsoft.user.vo.LoginLogVO;
/**
 * Mapper interface for user persistence: user CRUD, login/connection logging,
 * quick-menu configuration and per-user preferences.
 *
 * @author 패키지 개발팀 (package development team)
 * @since 2014.07.17
 * @version 3.0
 */
@Repository(value = "userDao")
public interface UserDao {

	/**
	 * Looks up a single user's basic information.
	 * @param map query parameters (expects {@code user_id})
	 * @return matching user
	 */
	public UserVO userDetail(HashMap<String,Object> map);

	/**
	 * Looks up a user's detail information together with group information.
	 * @param map query parameters
	 * @return matching user with group data
	 */
	public UserVO userGroupDetail(HashMap<String, Object> map);

	/**
	 * Inserts a user's detail record.
	 * @param userVO user to persist
	 * @return affected row count
	 */
	public int userDetailWrite(UserVO userVO);

	/**
	 * Inserts a user's basic (default) record.
	 * @param userVO user to persist
	 * @return affected row count
	 */
	public int userDefaultWrite(UserVO userVO);

	/**
	 * Inserts a user history record.
	 * @param userVO user snapshot to record
	 * @return affected row count
	 */
	public int userHistoryWrite(UserVO userVO);

	/**
	 * Inserts a user connection-log record.
	 * @param connectLogVO connection log entry
	 * @return affected row count
	 */
	public int connectLogWrite(ConnectLogVO connectLogVO);

	/**
	 * Inserts a user login-session record.
	 * @param loginLogVO login session entry
	 * @return affected row count
	 */
	public int loginLogWrite(LoginLogVO loginLogVO);

	/**
	 * Counts user connection-log entries.
	 * @param map filter parameters
	 * @return number of matching log entries
	 */
	public int connectLogCnt(HashMap<String,Object> map);

	/**
	 * Lists user connection-log entries.
	 * @param map filter/paging parameters
	 * @return matching log entries
	 */
	public List<ConnectLogVO> connectLogList(HashMap<String,Object> map);

	/**
	 * Looks up a user's login-session information.
	 * @param map query parameters
	 * @return matching login session
	 */
	public LoginLogVO loginLogDetail(HashMap<String,Object> map);

	/**
	 * Deletes a user's login-session information.
	 * @param map key parameters
	 * @return affected row count
	 */
	public int loginLogDelete(HashMap<String,Object> map);

	/**
	 * Counts users for the user list.
	 * @param map filter parameters
	 * @return number of matching users
	 */
	public int userExists(HashMap<String,Object> map);

	/**
	 * Lists users.
	 * @param map filter/paging parameters
	 * @return matching users
	 */
	public List<UserVO> userList(HashMap<String,Object> map);

	/**
	 * Performs the user login lookup.
	 * @param userVO login credentials
	 * @return matching user, used for login processing
	 */
	public UserVO userLogin(UserVO userVO);

	/**
	 * Returns the number of named (licensed) users.
	 * @return named-user count
	 */
	public int namedUserCount();

	/**
	 * Lists the users belonging to a department/group.
	 * @param map group key parameters
	 * @return users in the group
	 */
	public List<UserVO> groupUserList(HashMap<String,Object> map);

	/**
	 * Combined user/group search.
	 * @param map search parameters
	 * @return matching user/group rows (case-insensitive keyed maps)
	 */
	public List<CaseInsensitiveMap> userGroupSearch(HashMap<String,Object> map);

	/**
	 * Searches the user list.
	 * @param map search parameters
	 * @return matching users
	 */
	public List<UserVO> searchUserList(HashMap<String,Object> map);

	/**
	 * Updates a user's basic record.
	 * @param userVO user with updated values
	 * @return affected row count
	 */
	public int userUpdate(UserVO userVO);

	/**
	 * Looks up a user's detail information.
	 * @param map query parameters
	 * @return matching user detail
	 */
	public UserVO userDetailInfo(HashMap<String,Object> map);

	/**
	 * Updates a user's detail record.
	 * @param userVO user with updated detail values
	 * @return affected row count
	 */
	public int userDetailUpdate(UserVO userVO);

	/**
	 * Deletes a user's basic record.
	 * @param map key parameters
	 * @return affected row count
	 */
	public int userDelete(HashMap<String,Object> map);

	/**
	 * Deletes a user's detail record.
	 * @param map key parameters
	 * @return affected row count
	 */
	public int userDetailDelete(HashMap<String,Object> map);

	/**
	 * Deletes a user's personal preference (config) records.
	 * @param map key parameters
	 * @return affected row count
	 */
	public int userConfigDelete(HashMap<String,Object> map);

	/**
	 * Changes the department a user manages.
	 * @param map user/group key parameters
	 * @return affected row count
	 */
	public int updateManageGroup(HashMap<String, Object> map);

	/**
	 * Updates a user's preferences and/or password.
	 * @param map preference values
	 * @return affected row count
	 */
	public int userConfig(HashMap<String, Object> map);

	/**
	 * Changes a user's storage quota.
	 * @param userVO user carrying the new quota
	 * @return affected row count
	 */
	public int updateUserStorageQuota(UserVO userVO);

	/**
	 * Retrieves quick-menu definitions.
	 * @param map filter parameters
	 * @return quick-menu entries
	 */
	public List<QuickMenuVO> quickMenuInfo(HashMap<String, Object> map);

	/**
	 * Retrieves TOP-menu definitions.
	 * @param map filter parameters
	 * @return TOP-menu entries
	 */
	public List<QuickMenuVO> topQuickMenu(HashMap<String, Object> map);

	/**
	 * Retrieves a user's quick-menu configuration.
	 * @param map user key parameters
	 * @return per-user quick-menu rows (case-insensitive keyed maps)
	 */
	public List<CaseInsensitiveMap> userQuickMenu(HashMap<String, Object> map);

	/**
	 * Deletes quick-menu entries.
	 * @param map key parameters
	 * @return affected row count
	 */
	public int quickMenuDelete(HashMap<String, Object> map);

	/**
	 * Registers a user's quick-menu configuration.
	 * @param map quick-menu values
	 * @return affected row count
	 */
	public int quickMenuWrite(HashMap<String, Object> map);

	/**
	 * Inserts a user's personal preference (config) record.
	 * @param userVO user carrying the preference values
	 * @return affected row count
	 */
	public int userConfigWrite(UserVO userVO);
}
<file_sep>/EDMS3/src/kr/co/exsoft/permission/service/AclService.java
package kr.co.exsoft.permission.service;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.permission.vo.AclExItemVO;
import kr.co.exsoft.permission.vo.AclItemListVO;
import kr.co.exsoft.permission.vo.AclItemVO;
import kr.co.exsoft.permission.vo.AclVO;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
/**
 * ACL (access-control list) service interface: ACL CRUD, ACL item lookups and
 * per-document extra-accessor management. Mutating operations run in a
 * required transaction that rolls back on any exception.
 *
 * @author 패키지 개발팀 (package development team)
 * @since 2014.07.17
 * @version 3.0
 */
@Transactional
public interface AclService {

	/**
	 * Retrieves the ACL list. Admin-screen vs. user-screen filtering conditions
	 * are carried inside {@code map}.
	 * @param map filter/paging parameters
	 * @return result map containing the ACL list
	 * @throws Exception on lookup failure
	 */
	public Map<String, Object> aclList(HashMap<String,Object> map) throws Exception;

	/**
	 * Retrieves a single ACL's information.
	 * @param map ACL key parameters
	 * @return result map containing the ACL detail
	 * @throws Exception on lookup failure
	 */
	public Map<String, Object> aclDetail(HashMap<String,Object> map) throws Exception;

	/**
	 * Lists the accessors shown on the ACL detail screen.
	 * @param map ACL key parameters
	 * @return ACL item rows
	 * @throws Exception on lookup failure
	 */
	public List<AclItemListVO> aclItemList(HashMap<String,Object> map) throws Exception;

	/**
	 * Lists the extra accessors attached to a document.
	 * @param map document key parameters
	 * @return extra-accessor rows
	 * @throws Exception on lookup failure
	 */
	public List<AclExItemVO> exAclItemList(HashMap<String,Object> map) throws Exception;

	/**
	 * Updates an ACL and its item list.
	 * @param map request parameters
	 * @param aclVO ACL attributes
	 * @param aclItemList ACL items to apply
	 * @param sessionVO login session attributes
	 * @return result map
	 * @throws Exception on failure (transaction rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> aclUpdate(HashMap<String,Object> map,AclVO aclVO,List<AclItemVO> aclItemList,SessionVO sessionVO) throws Exception;

	/**
	 * Registers a new ACL and its item list.
	 * @param map request parameters
	 * @param aclVO ACL attributes
	 * @param aclItemList ACL items to apply
	 * @param sessionVO login session attributes
	 * @return result map
	 * @throws Exception on failure (transaction rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> aclWrite(HashMap<String,Object> map,AclVO aclVO,List<AclItemVO> aclItemList,SessionVO sessionVO) throws Exception;

	/**
	 * Validates whether the given ACLs can be deleted.
	 * @param map ACL key parameters
	 * @return ids/messages of ACLs that block deletion
	 * @throws Exception on lookup failure
	 */
	public List<String> aclDeleteValid(HashMap<String,Object> map) throws Exception;

	/**
	 * Deletes ACLs. Deletion is refused when documents (incl. trash) or folders
	 * still reference the ACL; removes the ACL attributes and its item list.
	 * @param map request parameters
	 * @param delList ACL ids to delete
	 * @param sessionVO login session attributes
	 * @return result map
	 * @throws Exception on failure (transaction rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> aclDelete(HashMap<String,Object> map,List<String> delList,SessionVO sessionVO) throws Exception;

	/**
	 * Retrieves ACL item information used during document registration/update.
	 * @param map ACL key parameters
	 * @return result map containing the ACL items
	 * @throws Exception on lookup failure
	 */
	public Map<String, Object> aclItem(HashMap<String,Object> map) throws Exception;

	/**
	 * Retrieves the parent (inherited) ACL information when changing the
	 * permissions of a document or folder.
	 * @param map document/folder key parameters
	 * @return result map containing the inherited ACL detail
	 * @throws Exception on lookup failure
	 */
	public Map<String, Object> aclInheritDetail(HashMap<String,Object> map) throws Exception;

	/**
	 * Registers an extra accessor for a document.
	 * @param map request parameters
	 * @param aclExItemVO extra-accessor attributes
	 * @param sessionVO login session attributes
	 * @return result map
	 * @throws Exception on failure (transaction rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> aclExItemWrite(HashMap<String,Object> map, AclExItemVO aclExItemVO, SessionVO sessionVO) throws Exception;

	/**
	 * Deletes a document's extra accessors, keyed by document id.
	 * @param map request parameters
	 * @param delDoc_id document id whose extra accessors are removed
	 * @param sessionVO login session attributes
	 * @return result map
	 * @throws Exception on failure (transaction rolls back)
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> aclExItemDelete(HashMap<String,Object> map, String delDoc_id, SessionVO sessionVO) throws Exception;
}
<file_sep>/EDMS3/WebContent/js/popup/relDocWindow.js
var gMultiFolder_list = new Array();		// list of currently selected folders
var gMultiFolder_isValidation = false;		// whether validation is enabled
var gMultiFolder_currentTreeObject;			// currently selected XFTree object
var gMultiFolder_currentTreeDivID;			// currently selected XFTree div id
var gMultiFolder_map_id = "MYDEPT";			// currently selected map
var selectrelativeDocWindow = {
	currentMapId : "",			// currently selected map id
	currentWorkType : "",		// currently selected workspace (work type)
	currentFolderID : "",		// currently selected folder id
	isValidation : false,		// whether validation is enabled
	docType : "",				// document type when chained from register/move flows
	callback : null,			// function receiving the result when OK is clicked
	// tree container div selectors, one per workspace
	treeDiv : {
		mypage : "#relativeAddMypageTree",
		mydept : "#relativeAddMydeptTree",
		alldept : "#relativeAddAlldeptTree",
		project : "#relativeAddProjectTree"
	},
	// Shows the "add related document" layer popup.
	open : function() {
		exsoft.util.layout.divLayerOpen("doc_relativeAdd_wrapper", "doc_relativeAdd");
	},
	// Hides the "add related document" layer popup.
	close : function() {
		exsoft.util.layout.divLayerClose("doc_relativeAdd_wrapper", "doc_relativeAdd");
	},
	// Lazily-created XFTree instances, one per workspace.
	tree : {
		mypageTree : null,		// personal workspace
		mydeptTree : null,		// department workspace
		allDeptTree : null,		// company-wide workspace
		projectTree : null		// project workspace
	},
treeFunctions : {
initTree : function(workType) {
var treeOption = {
context : exsoft.contextRoot,
url : "/folder/folderList.do",
};
switch(workType) {
case Constant.WORK_MYDEPT : // 부서 문서함
treeOption.divId = selectrelativeDocWindow.treeDiv.mydept;; //"#multiFolderMydeptTree";
treeOption.mapId = Constant.MAP_MYDEPT;
treeOption.workType = Constant.WORK_MYDEPT;
selectrelativeDocWindow.ui.activeTreeDiv("mydept");
selectrelativeDocWindow.currentMapId = Constant.MAP_MYDEPT;
selectrelativeDocWindow.currentWorkType = Constant.WORK_MYDEPT;
if (selectrelativeDocWindow.tree.mydeptTree == null) {
selectrelativeDocWindow.tree.mydeptTree = new XFTree(treeOption);
selectrelativeDocWindow.tree.mydeptTree.callbackSelectNode = selectrelativeDocWindow.event.relDoc_selectNode_callback;
selectrelativeDocWindow.tree.mydeptTree.init();
} else {
selectrelativeDocWindow.tree.mydeptTree.refresh();
}
break;
case Constant.WORK_ALLDEPT : // 전사 문서함
treeOption.divId = selectrelativeDocWindow.treeDiv.alldept;// "#multiFolderAlldeptTree";
treeOption.mapId = Constant.MAP_MYDEPT;
treeOption.workType = Constant.WORK_ALLDEPT;
selectrelativeDocWindow.ui.activeTreeDiv("alldept");
selectrelativeDocWindow.currentMapId = Constant.MAP_MYDEPT;
selectrelativeDocWindow.currentWorkType = Constant.WORK_ALLDEPT;
if (selectrelativeDocWindow.tree.allDeptTree == null) {
selectrelativeDocWindow.tree.allDeptTree = new XFTree(treeOption);
selectrelativeDocWindow.tree.allDeptTree.callbackSelectNode = selectrelativeDocWindow.event.relDoc_selectNode_callback;
selectrelativeDocWindow.tree.allDeptTree.init();
} else {
selectrelativeDocWindow.tree.allDeptTree.refresh();
}
break;
case Constant.WORK_PROJECT : // 프로젝트 함
treeOption.divId = selectrelativeDocWindow.treeDiv.project; //"#multiFolderProjectTree";
treeOption.mapId = Constant.MAP_PROJECT;
treeOption.workType = Constant.WORK_PROJECT;
selectrelativeDocWindow.ui.activeTreeDiv("project");
selectrelativeDocWindow.currentMapId = Constant.MAP_PROJECT;
selectrelativeDocWindow.currentWorkType = Constant.WORK_PROJECT;
if (selectrelativeDocWindow.tree.projectTree == null) {
selectrelativeDocWindow.tree.projectTree = new XFTree(treeOption);
selectrelativeDocWindow.tree.projectTree.callbackSelectNode = selectrelativeDocWindow.event.relDoc_selectNode_callback;
selectrelativeDocWindow.tree.projectTree.init();
} else {
selectrelativeDocWindow.tree.projectTree.refresh();
}
break;
default :
console.error("[selectrelativeDocWindow] workType : {0} 이 올바르지 않습니다. ".format(workType));
break;
}
},
getCurrentTree : function() {
switch(selectrelativeDocWindow.currentWorkType) {
case Constant.WORK_MYDEPT :
return selectrelativeDocWindow.tree.mydeptTree;
case Constant.WORK_ALLDEPT :
return selectrelativeDocWindow.tree.allDeptTree;
case Constant.WORK_PROJECT :
return selectrelativeDocWindow.tree.projectTree;
default :
console.error("[multiSingleFolderWindow] workType : {0} 이 올바르지 않습니다. ".format(selectrelativeDocWindow.currentWorkType));
}
}
},
	ui : {
		// Shows the tree pane matching `activeDivId` (a key of treeDiv), hides the
		// others, and adjusts the workspace title / map selector visibility.
		activeTreeDiv : function(activeDivId) {
			// 1. tree divs: show only the active one
			var keys = Object.keys(selectrelativeDocWindow.treeDiv);
			$(keys).each(function(idx) {
				if (this == activeDivId) {
					$(selectrelativeDocWindow.treeDiv[this]).removeClass("hide");
				} else {
					$(selectrelativeDocWindow.treeDiv[this]).addClass("hide");
				}
			});
			var _title = $("#lb_multiFolderWorkspace");
			var _selectOption = $("#doc_multifolder_list");
			// 2. titles: personal workspace hides the map selector
			if (activeDivId == "mypage") {
				_title.text("개인함");
				_selectOption.hide();
			} else {
				_title.text("문서함");
				_selectOption.show();
			}
		},
		// Intentionally empty — title switching is handled in activeTreeDiv.
		activeTitle : function(activeDivId) {
		}
	},
	// Opens the popup and initializes its state.
	// callback     : function invoked with the result on OK (required)
	// mapId        : initial map id (defaults to Constant.MAP_MYDEPT)
	// workType     : initial workspace (defaults to Constant.WORK_MYDEPT)
	// isValidation : enable validation (defaults to false)
	// docType      : document type when chained from other flows (defaults to "")
	init : function(callback, mapId, workType, isValidation, docType) {
		selectrelativeDocWindow.open();
		selectrelativeDocWindow.callback = callback;
		selectrelativeDocWindow.currentMapId = mapId == undefined ? Constant.MAP_MYDEPT : mapId;
		selectrelativeDocWindow.currentWorkType = (workType == undefined || workType == null || workType == "null") ? Constant.WORK_MYDEPT : workType;
		selectrelativeDocWindow.isValidation = isValidation == undefined ? false : isValidation;
		selectrelativeDocWindow.docType = docType == undefined ? "" : docType;
		selectrelativeDocWindow.treeFunctions.initTree(selectrelativeDocWindow.currentWorkType);
		/*//검색 selectbox
		exsoft.util.common.ddslick('#doc_folder_list', 'doc_folder_list', '', 262, function(divId,selectedData){
			// 콤보박스 이벤트
			selectSingleFolderWindow.event.changeMap(selectedData.selectedData.value);
		});*/
	},
event : {
		// Callback fired when a folder-tree node is clicked.
		relDoc_selectNode_callback : function(e, data) {
			// 1. update the folder-name title above the document list
			$('#relDoc_folderTitle').html(data.node.text);
			// 2. (re)load the ACL-filtered document list for the selected folder
			gRelDoc_currentFolderId = data.node.id;
			if ($("#relDoc_document_gridList").getGridParam("records") == undefined) {
				// grids not built yet: unload any stale instances and build fresh
				$("#relDoc_document_gridList").jqGrid('GridUnload');
				$("#relDoc_select_gridList").jqGrid('GridUnload');
				selectrelativeDocWindow.event.relDoc_initDocumentGridList(gRelDoc_currentFolderId);
			} else {
				selectrelativeDocWindow.event.relDoc_documentListByFolderId();
			}
		},
		// Builds the two jqGrid instances used by this popup: the folder's document
		// list and the "selected documents" list. Safe to call repeatedly — each
		// grid is only created if it does not exist yet.
		relDoc_initDocumentGridList : function(folderId) {
			// 1. document list grid
			if ($("#relDoc_document_gridList")[0].grid == undefined) {
				$('#relDoc_document_gridList').jqGrid({
					url:exsoft.contextRoot+'/document/workDocumentList.do',
					mtype:"post",
					datatype:'json',
					postData : {folder_id:folderId},
					jsonReader:{
						page:'page',total:'total',root:'list'
					},
					cmTemplate: { title: false }, // false disables jqGrid's built-in tooltips
					colNames:['doc_id','제목','문서유형','등록자','등록일','acl_create','acl_changePermission','acl_checkoutCancel','root_id','doc_type','lock_date','lock_owner'],
					colModel:[
						/** visible columns **/
						{name:'doc_id',index:'doc_id',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'doc_name',index:'doc_name',width:50, editable:false,sortable:true,resizable:true,title:true},
						{name:'type_name',index:'type_name',width:20, editable:false,sortable:true,resizable:true,align:'center'},
						{name:'creator_name',index:'creator_name',width:15, editable:false,sortable:true,resizable:true,align:'center'},
						{name:'create_date',index:'create_date',width:15, editable:false,sortable:true,resizable:true,align:'center'},
						/** hidden working fields **/
						{name:'acl_create',index:'acl_create',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'acl_changePermission',index:'acl_changePermission',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'acl_checkoutCancel',index:'acl_checkoutCancel',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'root_id',index:'root_id',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'doc_type',index:'doc_type',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'lock_date',index:'lock_date',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'lock_owner',index:'lock_owner',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
					],
					autowidth:true,
					viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "create_date",
					sortorder:"desc",
					scrollOffset : 0,
					viewsortcols:'vertical',
					//rowNum : parseInt('${pageSize}',10),
					rowNum : 10,
					rowList:exsoft.util.grid.listArraySize(),
					pager:'#relDoc_documentGridPager',
					emptyDataText: "데이터가 없습니다.",
					caption:'문서목록'
					,gridComplete : function() {
						// render multi-line tooltips for truncated cells
						$("td[title]").each(function(){
							$(this).tooltip({
								track : true, // tooltip follows the mouse
								content: $(this).attr("title"),
								tooltipClass: "table_tooltip" // table_tooltip :: custom css
							});
						});
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridNoDataMsgInit('relDoc_document_gridList');
						exsoft.util.grid.gridTitleBarHide('relDoc_document_gridList');
					}
					,loadComplete: function() {
						if ($("#relDoc_document_gridList").getGridParam("records")==0) {
							exsoft.util.grid.gridPagerViewHide('relDoc_document_gridList');
							exsoft.util.grid.gridNoDataMsg('relDoc_document_gridList','nolayer_data');
							exsoft.util.grid.gridPagerHide('relDoc_document_gridList');
							// "no data" image for the empty state
						}else {
							exsoft.util.grid.gridPagerViewHide('detaildocHistoryList');
							exsoft.util.grid.gridPagerShow('relDoc_document_gridList');
							// normal list display
						}
						exsoft.util.grid.gridInputInit(false); // pager input: digits only
						exsoft.util.grid.gridResize('relDoc_document_gridList','relDoc_targetDocumentGrid',55); // pin pager to the bottom of the div
					}
					,loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,onPaging: function (pgButton) {
						// page number the user typed into the pager input
						var pagerId = this.p.pager.substr(1);
						var inputPage = $('input.ui-pg-input', "#pg_" + $.jgrid.jqID(pagerId)).val();
						exsoft.util.grid.onPager('relDoc_document_gridList',inputPage,pgButton);
					}
				});
				// column header alignment and localized header labels
				var headerData = '{"doc_name":"제목","type_name":"문서유형","creator_name":"등록자","create_date":"등록일"}';
				exsoft.util.grid.gridColumHeader('relDoc_document_gridList',headerData,'center');
			}
			// 2. selection grid
			// 3. seeds the group's existing members
			if ($("#relDoc_select_gridList")[0].grid == undefined) {
				$('#relDoc_select_gridList').jqGrid({
					url:exsoft.contextRoot+'/document/workDocumentList.do',
					mtype:"post",
					datatype:'json',
					postData : {folder_id:folderId},
					jsonReader:{
						page:'page',total:'total',root:'list'
					},
					colNames:['doc_id','제목','문서유형','등록자','등록일','root_id'],
					colModel:[
						/** visible columns **/
						{name:'doc_id',index:'doc_id',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
						{name:'doc_name',index:'doc_name',width:50, editable:false,sortable:true,resizable:true,title:true},
						{name:'type_name',index:'type_name',width:20, editable:false,sortable:true,resizable:true,align:'center'},
						{name:'creator_name',index:'creator_name',width:15, editable:false,sortable:true,resizable:true,align:'center'},
						{name:'create_date',index:'create_date',width:15, editable:false,sortable:true,resizable:true,align:'center'},
						{name:'root_id',index:'root_id',width:0, align:'center',editable:false,sortable:false,key:true,hidden:true},
					],
					autowidth:true,
					height:"auto",
					viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "create_date",
					sortorder:"desc",
					rowNum : 10,
					rowList : exsoft.util.grid.listArraySize(),
					emptyDataText: "데이터가 없습니다.",
					caption:'사용자 목록',
					pagerpos: 'center',
					pginput: true,
					loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridNoDataMsgInit('relDoc_select_gridList');
						exsoft.util.grid.gridTitleBarHide('relDoc_select_gridList');
					}
					,loadComplete: function() {
						// the selection grid starts empty: drop any loaded rows
						$("#relDoc_select_gridList").jqGrid("clearGridData");
						exsoft.util.grid.gridResize('relDoc_select_gridList','relDoc_targetSelectGrid',55); // pin pager to the bottom of the div
					}
				});
				// column header alignment
				var headerData = '{"group_nm":"그룹명","user_name_ko":"사용자명", "user_id":"사용자 ID"}';
				exsoft.util.grid.gridColumHeader('relDoc_select_gridList',headerData,'center');
//				headerData = null;
			} else {
				// grid already exists: just drop any loaded rows
				$("#relDoc_select_gridList").jqGrid("clearGridData");
			}
		},
// Reload the current folder's document grid, reset to its first page
relDoc_documentListByFolderId : function () {
try {
var postData = {folder_id:gRelDoc_currentFolderId,page_init:'true'} ; // page_init:'true' forces the grid back to page 1
// Fully re-initialize the grid's postData and refresh it against the document-list endpoint
exsoft.util.grid.gridPostDataInitRefresh('relDoc_document_gridList',exsoft.contextRoot+'/document/workDocumentList.do',postData);
}finally{
postData = null;
}
},
// When the map (tab) changes, initialize or refresh the folder tree
relDoc_changeMap : function(workType) {
// Remember which tab is currently selected
selectrelativeDocWindow.currentWorkType = workType;
// Initialize (or refresh) the tree for the selected work type
selectrelativeDocWindow.treeFunctions.initTree(workType);
},
// Copy the rows selected in the source grid into the related-document (target) grid
relDoc_addDocument : function () {
var documentList = $("#relDoc_document_gridList").getGridParam("selarrrow");
var selectedDocumentList = exsoft.util.grid.gridSelectArrayDataAllRow("relDoc_select_gridList", "doc_id", "doc_id");
// Append each selected document row
$(documentList).each(function() {
var row = $("#relDoc_document_gridList").getRowData(this);
var isDuplicate = false;
// Skip documents that are already present in the target grid (matched by doc_id)
$(selectedDocumentList).each(function() {
if (this.doc_id == row.doc_id) {
isDuplicate = true;
}
});
if (!isDuplicate) {
$("#relDoc_select_gridList").jqGrid("addRowData", row.doc_id, row);
}
});
},
// Collect every row in the target grid and hand the list to the window's callback
submit : function() {
// Require at least one added item before submitting
var docIdList = $("#relDoc_select_gridList").jqGrid("getDataIDs");
var returnDataList = new Array();
if (docIdList.length == 0) {
jAlert("추가된 항목이 없습니다.");
return;
}
// Build the list of selected document rows
$(docIdList).each(function(index) {
if (this != "") {
var row = $("#relDoc_select_gridList").getRowData(this);
returnDataList.push(row);
}
});
// Deliver the result through the registered callback
//exsoft.document.callback.relDocWindow();
selectrelativeDocWindow.callback(returnDataList);
// Hide the popup window (handled elsewhere; kept for reference)
//base.layerClose("popRelDocWindow");
},
},//event END
callback : {
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/service/CacheService.java
package kr.co.exsoft.common.service;
import org.springframework.transaction.annotation.Transactional;
/**
 * In-memory cache access service.
 *
 * @author <NAME>
 * @since 2014. 11. 4.
 * @version 1.0
 *
 */
@Transactional
public interface CacheService {
/**
 * Returns the object stored in the named in-memory cache.
 *
 * @param cacheName cache name as configured in ehcache-default.xml
 * @param cacheKey key of the entry stored under that cache
 * @return the cached object
 * @throws Exception on cache access failure
 */
public Object getCache(String cacheName, String cacheKey) throws Exception;
/**
 * Replaces an existing cached value with a new one.
 *
 * @param cacheName cache name as configured in ehcache-default.xml
 * @param cacheKey key of the entry to replace
 * @param obj new value to store
 * @throws Exception on cache access failure
 */
public void replaceCache(String cacheName, String cacheKey, Object obj) throws Exception;
/**
 * Checks whether the given folder is a managed folder for the given group.
 *
 * @param folder_id folder ID to check
 * @param group_id group ID of the caller
 * @return true if the folder is subject to management by this group
 * @throws Exception on lookup failure
 */
public boolean menuAuthByFolderID(String folder_id, String group_id) throws Exception;
/**
 * Resolves the full path name of a folder by its ID.
 *
 * @param folder_id folder ID
 * @param isFristSlash whether the returned path starts with a leading slash
 * @return full folder path name
 * @throws Exception on lookup failure
 */
public String getFolderFullpathNameByFolderId(String folder_id, boolean isFristSlash) throws Exception;
}
<file_sep>/EDMS3/src/kr/co/exsoft/note/service/NoteService.java
package kr.co.exsoft.note.service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.note.vo.NoteVO;
import org.springframework.transaction.annotation.Transactional;
/**
 * Note (in-app message) service interface.
 *
 * @author package development team
 * @since 2015.03.02
 * @version 3.0
 *
 */
@Transactional
public interface NoteService {
/**
 * Registers (sends) a batch of notes.
 *
 * @param noteList notes to insert
 * @param map additional parameters
 * @param sessionVO current user session
 * @return result map
 * @throws Exception on failure
 */
public Map<String, Object> noteListForInserting(List<HashMap<String, Object>> noteList,HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
/**
 * Registers a reply to an existing note.
 *
 * @param map reply parameters
 * @param sessionVO current user session
 * @return result map
 * @throws Exception on failure
 */
public Map<String, Object> noteListForReInserting(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
/**
 * Fetches the latest unread notes (top N).
 *
 * @param map query parameters
 * @return result map
 */
public Map<String, Object> noteNewTopNInfoList(HashMap<String, Object> map);
/**
 * Fetches the combined received/sent note list.
 *
 * @param map query parameters
 * @return result map
 */
public Map<String, Object> noteAllReceiveSendInfoList(HashMap<String, Object> map);
/**
 * Updates a note's "saved to archive" flag.
 *
 * @param map update parameters
 * @param sessionVO current user session
 * @return affected row count
 * @throws Exception on failure
 */
public int noteSaveUpdate(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
/**
 * Updates a note's read status.
 *
 * @param map update parameters
 * @param sessionVO current user session
 * @return affected row count
 * @throws Exception on failure
 */
public int noteReadUpdate(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
/**
 * Deletes notes.
 *
 * @param map deletion parameters
 * @param sessionVO current user session
 * @return affected row count
 * @throws Exception on failure
 */
public int noteDelete(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;
/**
 * Fetches the conversation-box list.
 *
 * @param map query parameters
 * @return result map
 */
public Map<String, Object> noteTalkList(HashMap<String, Object> map);
/**
 * Fetches the detail list for a conversation box.
 *
 * @param map query parameters
 * @return result map
 */
public Map<String, Object> noteTalkDetailList(HashMap<String, Object> map);
/**
 * Resolves the target list for note processing (delete / purge / empty trash).
 *
 * @param map query parameters
 * @return list of target note rows
 * @throws Exception on failure
 */
public List<HashMap<String, Object>> noteValidList(HashMap<String,Object> map) throws Exception;
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/vo/SessionVO.java
package kr.co.exsoft.common.vo;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.util.ConfigData;
/**
 * Session VO: per-user login session state carried through the web tier.
 * @author <NAME>
 * @since 2014.07.16
 * @version 3.0
 *
 */
public class SessionVO implements Serializable {
private static final long serialVersionUID = -6551289232514106558L;
private String sessionId; // session object ID
private String sessId; // user ID
private String sessName; // user name
private String sessJobtitle; // job title
private String sessRole_id; // role ID
private String sessRole_nm; // role name
private String sessGroup_id; // primary department ID
private String sessGroup_nm; // primary department name
private String sessTheme; // theme/skin
private String sessPage_size; // default list page size
private String sessLocation; // access area: U-user, A-admin
private String sessLanguage; // default locale (KO-Korean, EN-English, JA-Japanese, ZH-Chinese)
private String sessRemoteIp; // client IP address
private String sessContent; // session content
private String sessMenuContent; // last accessed menu
private String sessContextRoot; // context root path
private List<String> sessProjectGroup; // project departments the user belongs to
private List<String> sessParentGroup; // user's ancestor departments; used for DEPT-type entries in ACL lists
private List<MenuAuthVO> sessMenuAuth; // all XR_MENU_AUTH entries matching the user's role_id
private String sessSearchYear; // default document-list search period
private String sessStartDt; // default document-list search start date
private String sessEndDt; // default document-list search end date
private String sessManage_group; // managing department ID
private String sessManage_group_nm; // managing department name
private String sessDocSearch; // document display period for the "My Documents" menu
private String sessViewType; // document-list preview type
private String sessIconPrewiew; // whether to preview the representative attachment in document lists
private String sessEmail; // user email
public SessionVO() {
this.sessionId = "";
this.sessId = "";
this.sessName = "";
this.sessJobtitle = "";
this.sessRole_id = "";
this.sessRole_nm = "";
this.sessGroup_id = "";
this.sessGroup_nm = "";
this.sessTheme = "";
this.sessPage_size = "";
this.sessLanguage = ConfigData.getString("LANGUAGE");
this.sessLocation = "";
this.sessRemoteIp = "";
this.sessContent = "";
this.sessMenuContent = "";
this.sessContextRoot = "";
this.sessProjectGroup = new ArrayList<String>();
this.sessParentGroup = new ArrayList<String>();
this.sessMenuAuth = new ArrayList<MenuAuthVO>();
this.sessSearchYear = "";
this.sessStartDt = "";
this.sessEndDt = "";
this.sessManage_group = "";
this.sessDocSearch = "";
this.sessViewType = Constant.PREVIEW_LIST;
this.sessIconPrewiew = Constant.NO;
this.sessEmail = "";
}
public String getSessViewType() {
return sessViewType;
}
public void setSessViewType(String sessViewType) {
this.sessViewType = sessViewType;
}
public String getSessIconPrewiew() {
return sessIconPrewiew;
}
public void setSessIconPrewiew(String sessIconPrewiew) {
this.sessIconPrewiew = sessIconPrewiew;
}
public String getSessDocSearch() {
return sessDocSearch;
}
public void setSessDocSearch(String sessDocSearch) {
this.sessDocSearch = sessDocSearch;
}
public String getSessRole_nm() {
return sessRole_nm;
}
public void setSessRole_nm(String sessRole_nm) {
this.sessRole_nm = sessRole_nm;
}
public List<String> getSessProjectGroup() {
return sessProjectGroup;
}
public void setSessProjectGroup(List<String> sessProjectGroup) {
this.sessProjectGroup = sessProjectGroup;
}
public String getSessSearchYear() {
return sessSearchYear;
}
public void setSessSearchYear(String sessSearchYear) {
this.sessSearchYear = sessSearchYear;
}
public String getSessStartDt() {
return sessStartDt;
}
public void setSessStartDt(String sessStartDt) {
this.sessStartDt = sessStartDt;
}
public String getSessEndDt() {
return sessEndDt;
}
public void setSessEndDt(String sessEndDt) {
this.sessEndDt = sessEndDt;
}
public String getSessionId() {
return sessionId;
}
public void setSessionId(String sessionId) {
this.sessionId = sessionId;
}
/**
 * Returns the logged-in user's ID.
 *
 * @return String user ID
 */
public String getSessId() {
return sessId;
}
public void setSessId(String sessId) {
this.sessId = sessId;
}
/**
 * Returns the logged-in user's display name.
 *
 * @return String user name
 */
public String getSessName() {
return sessName;
}
public void setSessName(String sessName) {
this.sessName = sessName;
}
public String getSessJobtitle() {
return sessJobtitle;
}
public void setSessJobtitle(String sessJobtitle) {
this.sessJobtitle = sessJobtitle;
}
public String getSessRole_id() {
return sessRole_id;
}
public void setSessRole_id(String sessRole_id) {
this.sessRole_id = sessRole_id;
}
public String getSessGroup_id() {
return sessGroup_id;
}
public void setSessGroup_id(String sessGroup_id) {
this.sessGroup_id = sessGroup_id;
}
public String getSessGroup_nm() {
return sessGroup_nm;
}
public void setSessGroup_nm(String sessGroup_nm) {
this.sessGroup_nm = sessGroup_nm;
}
public String getSessTheme() {
return sessTheme;
}
public void setSessTheme(String sessTheme) {
this.sessTheme = sessTheme;
}
public String getSessPage_size() {
return sessPage_size;
}
public void setSessPage_size(String sessPage_size) {
this.sessPage_size = sessPage_size;
}
public String getSessLocation() {
return sessLocation;
}
public void setSessLocation(String sessLocation) {
this.sessLocation = sessLocation;
}
public String getSessLanguage() {
return sessLanguage;
}
public void setSessLanguage(String sessLanguage) {
this.sessLanguage = sessLanguage;
}
public String getSessRemoteIp() {
return sessRemoteIp;
}
public void setSessRemoteIp(String sessRemoteIp) {
this.sessRemoteIp = sessRemoteIp;
}
public String getSessContent() {
return sessContent;
}
public void setSessContent(String sessContent) {
this.sessContent = sessContent;
}
public String getSessMenuContent() {
return sessMenuContent;
}
public void setSessMenuContent(String sessMenuContent) {
this.sessMenuContent = sessMenuContent;
}
public String getSessContextRoot() {
return sessContextRoot;
}
public void setSessContextRoot(String sessContextRoot) {
this.sessContextRoot = sessContextRoot;
}
public static long getSerialversionuid() {
return serialVersionUID;
}
public List<String> getSessParentGroup() {
return sessParentGroup;
}
public void setSessParentGroup(List<String> sessParentGroup) {
this.sessParentGroup = sessParentGroup;
}
public String getSessEmail() {
return sessEmail;
}
public void setSessEmail(String sessEmail) {
this.sessEmail = sessEmail;
}
/**
 * Returns menu access-permission entries for both admin and user menus.
 *
 * @return List<MenuAuthVO>
 */
public List<MenuAuthVO> getSessMenuAuth() {
return sessMenuAuth;
}
public void setSessMenuAuth(List<MenuAuthVO> sessMenuAuth) {
this.sessMenuAuth = sessMenuAuth;
}
/**
 * Returns the managing department ID.
 *
 * @return String
 */
public String getSessManage_group() {
return sessManage_group;
}
public void setSessManage_group(String sessManage_group) {
this.sessManage_group = sessManage_group;
}
public String getSessManage_group_nm() {
return sessManage_group_nm;
}
public void setSessManage_group_nm(String sessManage_group_nm) {
this.sessManage_group_nm = sessManage_group_nm;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/document/controller/PageAdminController.java
package kr.co.exsoft.document.controller;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.service.PageService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.library.LocaleLibrary;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.ConfigData;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springmodules.validation.commons.DefaultBeanValidator;
/**
 * Page Admin Controller: duplicate-file management screen and its data endpoint.
 *
 * @author package team
 * @since 2014. 9. 16.
 * @version 1.0
 *
 */
@Controller
@SessionAttributes("sessionVO")
@RequestMapping("/admin")
public class PageAdminController {
@Autowired
private MessageSource messageSource;
@Autowired
private PageService pageService;
@Autowired
private CommonService commonService;
@Autowired
private DefaultBeanValidator beanValidator;
// Fixed: logger previously used TypeAdminController.class (copy-paste defect),
// which attributed this controller's log output to the wrong class.
protected static final Log logger = LogFactory.getLog(PageAdminController.class);
/**
 * Renders the duplicate-file management page.
 *
 * @param sessionVO current user session
 * @param model Spring MVC model
 * @param map request parameters (expects "menu_cd")
 * @return view name, or the error view when access is denied
 */
@RequestMapping("duplicateManager.do")
public String duplicateManager(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map) {
Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
HashMap<String,Object> param = new HashMap<String,Object>();
Map<String, Object> menuInfo = new HashMap<String, Object>();
param.put("menu_cd",map.get("menu_cd") != null ? map.get("menu_cd").toString() : "" );
param.put("role_id",sessionVO.getSessRole_id());
String part = ""; // ALL/GROUP/TEAM - applied differently per admin page
try {
// 1. Admin ROLE access permission
part = commonService.getMenuAuth(param);
// 2. Page navigation: parent menu name / current menu name
menuInfo = commonService.pageMenuInfo(param);
}catch(BizException e){
logger.error(e.getMessage());
}catch(Exception e) {
logger.error(e.getMessage());
}
// No access permission or missing menu code: route to the 403 error page
if(part.equals("") || param.get("menu_cd").toString().equals("")) {
CommonUtil.setErrorMsg(model, Constant.ERROR_403, messageSource.getMessage("common.connect.error",new Object[0],locale),sessionVO.getSessContextRoot());
return "error/message";
}
CommonUtil.setSessionToModel(model, sessionVO);
model.addAttribute("part",part);
model.addAttribute("menuInfo",menuInfo);
model.addAttribute("topSelect",Constant.TOPMENU_DOCUMENT);
model.addAttribute("subSelect",Constant.DOCUMENT_DUPLICATEMANAGER);
return "docadmin/duplicateManager";
}
/**
 * Returns the duplicate-file list as JSON for the jqGrid on the page.
 *
 * @param model Spring MVC model
 * @param sessionVO current user session
 * @param map grid/request parameters (keyword, sort, paging, date range)
 * @param request HTTP request
 * @return result map with list data, or result=false plus a message on failure
 */
@RequestMapping(value="/duplicatePage.do", method=RequestMethod.POST)
@ResponseBody
public Map<String,Object> duplicatePage(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
HttpServletRequest request) {
Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
Map<String, Object> resultMap = new HashMap<String, Object>();
HashMap<String, Object> param = new HashMap<String, Object>();
// Validate and normalize input parameters (defaults applied for missing values)
param.put("contextRoot",sessionVO.getSessContextRoot());
param.put("strKeyword",map.get("strKeyword") != null ? map.get("strKeyword") : "" );
param.put("orderCol",map.get("sidx") != null ? map.get("sidx") : "page_name");
param.put("orderType",map.get("sord") != null ? map.get("sord") : "ASC");
param.put("page_size",map.get("rows") != null ? map.get("rows") : sessionVO.getSessPage_size());
param.put("sdate",map.get("sdate") != null ? map.get("sdate").toString() : "");
param.put("edate",map.get("edate") != null ? map.get("edate").toString() : "");
param.put("nPage",CommonUtil.getPage(map));
try {
// 1. When sorting by duplicate count, switch to the aggregate expression
if(param.get("orderCol").toString().toLowerCase().equals("page_count")) {
param.put("orderCol","COUNT(P.PAGE_NAME)");
}
// 2. Fetch the duplicate-file list
resultMap = pageService.duplicatePageList(param) ;
}catch(BizException e){
resultMap.put("result",Constant.RESULT_FALSE);
resultMap.put("message",e.getMessage());
}catch(Exception e) {
resultMap.put("result",Constant.RESULT_FALSE);
resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
}
return resultMap;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/CharConversion.java
package kr.co.exsoft.eframework.util;
import java.io.CharConversionException;
import java.io.UnsupportedEncodingException;
/**
 * Charset conversion utility.
 * Each method re-interprets a String's bytes from a source charset into a
 * target charset (a legacy "mojibake repair" pattern). On an unsupported
 * encoding the input string is returned unchanged.
 *
 * @author package development team
 * @since 2014.07.15
 * @version 3.0
 *
 */
public final class CharConversion {
/**
 * Don't let anyone instantiate this class.
 */
private CharConversion() {
}
/**
 * 8859_1 --> KSC5601.
 */
public static String E2K( String english ){
String korean = null;
if (english == null ) return null;
try {
korean = new String(english.getBytes("8859_1"), "KSC5601");
}catch( UnsupportedEncodingException e ){
korean = english;
}
return korean;
}
/**
 * KSC5601 --> 8859_1.
 */
public static String K2E( String korean ){
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("KSC5601"), "8859_1");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * KSC5601 --> UTF-8.
 */
public static String K2U( String korean ){
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("KSC5601"), "UTF-8");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * UTF-8 --> KSC5601.
 */
public static String U2K( String korean ){
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("UTF-8"), "KSC5601");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * ISO-8859-1 --> UTF-8.
 */
public static String E2U( String korean ){
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("ISO-8859-1"), "UTF-8");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * UTF-8 --> ISO-8859-1.
 */
public static String U2E( String korean ) throws CharConversionException{
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("UTF-8"), "ISO-8859-1");
}catch( UnsupportedEncodingException e ){
english = korean;
}catch( Exception e ){
english = korean;
}
return english;
}
/**
 * EUC-KR --> UTF-8.
 */
public static String EUC2U( String korean ){
String english = null;
if (korean == null ) return null;
try {
english = new String(korean.getBytes("EUC-KR"), "UTF-8");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * UTF-8 --> EUC-KR.
 */
public static String U2EUC( String korean ){
String english = null;
if (korean == null ) return null;
try {
// Bug fix: previously decoded as ISO-8859-1 (copied from U2E), which never
// produced EUC-KR output despite the method's name.
english = new String(korean.getBytes("UTF-8"), "EUC-KR");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
/**
 * UTF-8 --> 8859_1.
 */
public static String U28859( String korean ){
String english = null;
if (korean == null ) return null;
try {
english =new String(korean.getBytes("UTF-8"), "8859_1");
}catch( UnsupportedEncodingException e ){
english = korean;
}
return english;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/folder/dao/FolderDao.java
package kr.co.exsoft.folder.dao;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.vo.RecentlyObjectVO;
import kr.co.exsoft.folder.vo.FavoriteFolderVO;
import kr.co.exsoft.folder.vo.FolderVO;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.springframework.stereotype.Repository;
/**
 * Folder MyBatis mapper interface.
 * @author package development team
 * @since 2014.07.21
 * @version 3.0
 *
 */
@Repository(value = "folderDao")
public interface FolderDao {
/**
 * Checks whether an ACL is currently in use.
 * @param map query parameters
 * @return usage count (0 means unused)
 */
public int isUsingAcl(HashMap<String,Object> map);
/**
 * Lists root folders for a given map ID / parent ID.
 * @param map query parameters
 * @return root folder list
 */
public List<FolderVO> rootFolderList(HashMap<String, Object> map);
/**
 * Lists child folders of a parent folder.
 * @param map query parameters
 * @return child folder list
 */
public List<FolderVO> childFolderList(HashMap<String, Object> map);
/**
 * Fetches detailed information for a folder.
 * @param map query parameters
 * @return folder detail
 */
public FolderVO folderDetail(HashMap<String, Object> map);
/**
 * Inserts a folder.
 * @param folderVO folder to insert
 * @return affected row count
 */
public int folderWrite(FolderVO folderVO);
/**
 * Updates a folder.
 * @param folderVO folder to update
 * @return affected row count
 */
public int folderUpdate(FolderVO folderVO);
/**
 * Deletes a folder. Deletion is not allowed while child folders exist, or while
 * documents remain in the folder or in a personal trash referencing it.
 * @param map deletion parameters
 * @return affected row count
 */
public int folderDelete(HashMap<String, Object> map);
/**
 * Checks whether the folder has any child folders.
 * @param map query parameters
 * @return child count (0 means none)
 */
public int existChildFolder(HashMap<String, Object> map);
/**
 * Checks whether documents exist in the folder, including documents in a
 * personal trash that reference this folder.
 * @param map query parameters
 * @return document count (0 means none)
 */
public int existDocument(HashMap<String, Object> map);
/**
 * Checks whether a folder with the same name exists under the same parent.
 * @param folderVO folder name and parent ID to check
 * @return match count (0 means no duplicate)
 */
public int folderIsExistByFolderNameAndParentID(FolderVO folderVO);
/**
 * Deletes folder-document link rows.
 * @param map deletion parameters
 * @return affected row count
 */
public int xrLinkedDelete(HashMap<String, Object> map);
/**
 * Checks whether the folder is a management target for the ROLE holder.
 * @param map query parameters
 * @return match count (0 means not a managed folder)
 */
public int isAuthFolder(HashMap<String, Object> map);
/**
 * Looks up folder names by folder ID.
 * @param map query parameters
 * @return matching folders
 */
public List<FolderVO> getFolderName(HashMap<String, Object> map);
/**
 * Inserts folder-document link rows (supports multiple classification schemes).
 * @param map insert parameters
 * @return affected row count
 */
public int writeXrLinked(HashMap<String, Object> map);
/**
 * Fetches a folder's ACL by folder ID.
 * @param folderVO folder identifying the ACL to fetch
 * @return folder with ACL populated
 */
public FolderVO getFolderAcl(FolderVO folderVO);
/**
 * Fetches (folder_id, parent_id) pairs for every folder except personal folders.
 * @return list of case-insensitive id maps
 */
public List<CaseInsensitiveMap> folderIdsList();
/**
 * Fetches folder info to check ownership-transfer eligibility.
 * @param map query parameters
 * @return folder info row
 */
public CaseInsensitiveMap getFolderInfo(HashMap<String,Object> map);
/**
 * Lists root folders shared with the user.
 * @param map query parameters
 * @return shared root folder list
 */
public List<FolderVO> rootShareFolderList(HashMap<String, Object> map);
/**
 * Lists the user's favorite root folders.
 * @param map query parameters
 * @return favorite root folder list
 */
public List<FavoriteFolderVO> rootFavoriteFolderList(HashMap<String, Object> map);
/**
 * Lists child folders of a favorite folder.
 * @param map query parameters
 * @return favorite child folder list
 */
public List<FavoriteFolderVO> childFavoriteFolderList(HashMap<String, Object> map);
/**
 * Adds a favorite folder.
 * @param favoriteFolderVO favorite entry to insert
 * @return affected row count
 */
public int writeFavoriteFolder(FavoriteFolderVO favoriteFolderVO);
/**
 * Updates a favorite folder.
 * @param map update parameters
 * @return affected row count
 */
public int updateFavoriteFolder(HashMap<String, Object> map);
/**
 * Deletes a favorite folder.
 * @param map deletion parameters
 * @return affected row count
 */
public int deleteFavoriteFolder(HashMap<String, Object> map);
/**
 * Counts child folders of a favorite folder.
 * @param map query parameters
 * @return child count
 */
public int favoriteFolderChildCount(HashMap<String, Object> map);
/**
 * Fetches the IDs of all child folders.
 * @param map query parameters
 * @return child folder ID list
 */
public List<String> childFolderIds(HashMap<String,Object> map);
/**
 * Checks whether a favorite folder entry already exists (duplicate check).
 * @param map query parameters
 * @return match count (0 means no duplicate)
 */
public int existsFavoriteFolder(HashMap<String, Object> map);
/**
 * Swaps/updates favorite folder ordering indexes.
 * @param map update parameters
 * @return affected row count
 */
public int updateFavoriteFolderIndex(HashMap<String, Object> map);
/**
 * Fetches a favorite folder's detail.
 * @param map query parameters
 * @return favorite folder detail
 */
public FavoriteFolderVO favoriteFolderDetail(HashMap<String, Object> map);
/**
 * Counts folders created by the given user.
 * @param creatorId creator's user ID
 * @return folder count
 */
public int folderCountByCreatorId(String creatorId);
/**
 * Deletes all favorite folders belonging to a user.
 * @param map deletion parameters
 * @return affected row count
 */
public int deleteFavoriteFolderOfUser(HashMap<String, Object> map);
/**
 * Fetches a folder's storage quota/usage by folder ID.
 * @param folderVO folder identifying the storage row
 * @return folder with storage fields populated
 */
public FolderVO getFolderStorage(FolderVO folderVO);
/**
 * Fetches root folder info by map_id and folder_type.
 * @param folderVO lookup keys
 * @return root folder info
 */
public FolderVO getRootFolderId(FolderVO folderVO);
/**
 * Lists recently used folders for a user.
 * @param map query parameters (user_id)
 * @return recently used folder list
 */
public List<RecentlyObjectVO> recentlyFolderList(HashMap<String, Object> map);
/**
 * Deletes a user's recently used folder entries.
 * @param map deletion parameters (user_id)
 * @return affected row count
 */
public int recentlyFolderDelete(HashMap<String, Object> map);
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/library/ExsoftAbstractServiceImpl.java
package kr.co.exsoft.eframework.library;
import java.util.Locale;
import javax.annotation.Resource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.MessageSource;
import org.springframework.context.i18n.LocaleContextHolder;
import kr.co.exsoft.eframework.trace.ExsoftLeaveTrace;
import kr.co.exsoft.eframework.exception.BizException;
/**
* 비즈니스 서비스 구현체가 상속받는 추상추상클래스
* <p><b>NOTE:</b> 비즈니스 서비스 구현시 디폴드로 Exception 발생을 위한 processException 메소드와
* leaveaTrace 메소드를 가지고 있다. processException / leaveaTrace 를
* 여러스타일의 파라미터를 취할 수 있도록 제공하고 있다.</b>
* @author 패키지 개발팀
* @since 2014.07.15
* @version 3.0
*
*/
public abstract class ExsoftAbstractServiceImpl {
protected Log log = LogFactory.getLog(this.getClass());
@Resource(name = "messageSource")
private MessageSource messageSource;
@Resource(name = "leaveaTrace")
private ExsoftLeaveTrace traceObj;
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey) {
return processException(msgKey, new String[] {});
}
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @param e exception 발생한 Exception(내부적으로 취하고 있다가 에러핸들링시 사용)
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey, Exception e) {
return processException(msgKey, new String[] {}, e);
}
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @param msgArgs msgKey의 메세지에서 변수에 취환되는 값들
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey, final String[] msgArgs) {
return processException(msgKey, msgArgs, null);
}
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @param msgArgs msgKey의 메세지에서 변수에 취환되는 값들
* @param e exception 발생한 Exception(내부적으로 취하고 있다가 에러핸들링시 사용)
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey, final String[] msgArgs, final Exception e) {
return processException(msgKey, msgArgs, e, LocaleContextHolder.getLocale());
}
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @param msgArgs msgKey의 메세지에서 변수에 취환되는 값들
* @param e exception 발생한 Exception(내부적으로 취하고 있다가 에러핸들링시 사용)
* @param locale 명시적 국가/언어지정
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey, final String[] msgArgs, final Exception e, Locale locale) {
return processException(msgKey, msgArgs, e, locale, null);
}
/**
* BizException 발생을 위한 메소드
* @param msgKey 메세지리소스에서 제공되는 메세지의 키값
* @param msgArgs msgKey의 메세지에서 변수에 취환되는 값들
* @param e exception 발생한 Exception(내부적으로 취하고 있다가 에러핸들링시 사용)
* @param locale 명시적 국가/언어지정
* @param exceptionCreator 외부에서 별도의 Exception 생성기 지정
* @return Exception EgovBizException 객체
*/
protected Exception processException(final String msgKey, final String[] msgArgs, final Exception e,
final Locale locale, ExceptionCreator exceptionCreator) {
ExceptionCreator eC = null;
if (exceptionCreator == null) {
eC = new ExceptionCreator() {
public Exception createBizException(MessageSource messageSource) {
return new BizException(messageSource, msgKey, msgArgs, locale, e);
}
};
}
return eC.createBizException(messageSource);
}
	/** Factory for building the business exception from a MessageSource. */
	protected interface ExceptionCreator {
		Exception createBizException(MessageSource messageSource);
	}
	/**
	 * Runs the trace post-processing logic without raising an Exception.
	 * @param msgKey key of the message provided by the message resource
	 */
	protected void leaveaTrace(String msgKey) {
		leaveaTrace(msgKey, new String[] {});
	}
	/**
	 * Runs the trace post-processing logic without raising an Exception.
	 * @param msgKey key of the message provided by the message resource
	 * @param msgArgs values substituted into the placeholders of the msgKey message
	 */
	protected void leaveaTrace(String msgKey, String[] msgArgs) {
		leaveaTrace(msgKey, msgArgs, null);
	}
	/**
	 * Runs the trace post-processing logic without raising an Exception.
	 * @param msgKey key of the message provided by the message resource
	 * @param msgArgs values substituted into the placeholders of the msgKey message
	 * @param locale explicitly specified country/language
	 */
	protected void leaveaTrace(String msgKey, String[] msgArgs, Locale locale) {
		traceObj.trace(this.getClass(), messageSource, msgKey, msgArgs, locale, log);
	}
}
<file_sep>/EDMS3/WebContent/js/note/noteMain.js
/**
 * Note (private message) related scripts.
 */
$(function(){
	// Switch the visible form when a tab element is clicked.
	$('.tab_element').bind("click", function(){
		var idx = $(this).index();
		var targetFrm = $(this).parent().parent().parent().find('div[class^="tab_form"]');
		targetFrm.addClass('hide');
		targetFrm.eq(idx).removeClass('hide');
		$('.tab_element').removeClass('selected');
		$(this).addClass('selected');
	});
	// Note search: search-category dropdown (ddslick plugin).
	$('#myNote_srch_type').ddslick({
		width:93,
		background:"rgba(255, 255, 255, 0)",
		onSelected: function(selectedData){}
	});
	// Compose note (disabled).
	/*
	$('.myNote_compose').bind("click", function(){
		//작업카트 - URL 메일 송부
		$('.myNoteWrite').removeClass('hide');
		//$('.myNoteWrite').prev().removeClass('hide');
		//lyrPopupWindowResize($('.myNoteWrite'));
	});
	*/
	$('.note_reply').bind("click", function(){
		var tabIdx = $('[class^="tab_element"][class$="selected"]').index();
		// Show the appropriate popup for the reply target.
		// tabIdx == 2 : sent box -> open the "forward" window;
		// every other tab opens the "reply" window.
		if(tabIdx == 2) {
			$('.myNoteForward').removeClass('hide');
		} else {
			$('.myNoteReply').removeClass('hide');
		}
	});
	// Note manager - close window (disabled).
	/*
	$('.myNote_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.myNote').addClass('hide');
		$('.myNote_wrapper').addClass('hide');
	});
	*/
	/*
	//쪽지관리 - 창 닫기 : 음영진 부분 클릭 시 닫기
	$('.myNote_wrapper').bind("click", function(){
		$(this).addClass('hide');
		$('.myNote').addClass('hide');
	});
	*/
	// Compose note - close window (disabled).
	/*
	$('.myNoteWrite_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.myNoteWrite').addClass('hide');
		$('.myNoteWrite_wrapper').addClass('hide');
	});
	*/
	/*
	//쪽지쓰기 - 창 닫기 : 음영진 부분 클릭 시 닫기
	$('.myNoteWrite_wrapper').bind("click", function(){
		$(this).addClass('hide');
		$('.myNoteWrite').addClass('hide');
	});
	*/
	// Note user picker - close window (disabled).
	/*
	$('.note_choose_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.note_choose').addClass('hide');
		//$('.note_choose_wrapper').addClass('hide');
	});
	*/
	/*
	//쪽지 사용자 선택 - 창 닫기 : 음영진 부분 클릭 시 닫기
	$('.note_choose_wrapper').bind("click", function(){
		$(this).addClass('hide');
		$('.note_choose').addClass('hide');
	});
	*/
	// Note forward - close window.
	$('.myNoteForward_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.myNoteForward').addClass('hide');
	//	$('.note_choose_wrapper').addClass('hide');
	});
	// Note reply - close window.
	$('.myNoteReply_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.myNoteReply').addClass('hide');
	//	$('.note_choose_wrapper').addClass('hide');
	});
	// Note manager - clear whole conversation (confirm shown; callback body empty).
	$('.myNote_cnts').find('a.delete_myNote_chat').bind("click", function(e){
		e.preventDefault();
		jConfirm("전체 대화 내역을 삭제하시겠습니까?", "확인", 6, function(r){
		});
	});
	// Note manager - delete a note (confirm shown; callback body empty).
	$('.myNote_cnts').find('a.delete_myNote').bind("click", function(e){
		e.preventDefault();
		jConfirm("쪽지를 삭제하시겠습니까?", "확인", 6, function(r){
		});
	});
	// Note manager - archive a note (confirm shown; callback body empty).
	$('.myNote_cnts').find('a.inbox_myNote').bind("click", function(e){
		e.preventDefault();
		jConfirm("선택한 쪽지를 보관함에 보관 하시겠습니까?", "확인", 6, function(r){
		});
	});
});
// Selected-user rows; only referenced by the commented-out userSearch
// helper further below — appears unused by any active code path here.
var jsonArr = [];
var exsoftNoteFunc = {
	initAction : {
		url : exsoft.contextRoot,
		// Dispatch the initial list load for the tab at the given index
		// (0: talk box, 1: received, 2: sent, 3: archive).
		noteSelectAction : function(index) {
			$("#searchKeyword").val("");
			if(index==0){
				exsoftNoteFunc.event.talkboxInit("","");
			}else if(index==1){
				exsoftNoteFunc.event.reciveInit("","");
			}else if(index==2){
				exsoftNoteFunc.event.sendInit("","");
			}else if(index==3){
				exsoftNoteFunc.event.saveboxInit("","");
			}
		},
		// Reload the currently selected tab (index derived from the
		// span element carrying the "selected" class).
		noteRefresh : function() {
			$("#searchKeyword").val("");
			var select_index = ($('span[class*=selected]').index());
			if(select_index==0){
				exsoftNoteFunc.event.talkboxInit("","");
			}else if(select_index==1){
				exsoftNoteFunc.event.reciveInit("","");
			}else if(select_index==2){
				exsoftNoteFunc.event.sendInit("","");
			}else if(select_index==3){
				exsoftNoteFunc.event.saveboxInit("","");
			}
		},
	},
	// Note-related AJAX actions.
	event : {
		url : exsoft.contextRoot,
		// Initialize the received-notes list (optionally filtered by
		// content and/or sender name).
		reciveInit : function(content,rsender_name) {
			$('#noteRecive').empty();
			$('#noteSend').empty();
			$('#noteSave').empty();
			$('#noteTalk').empty();
			var buffer = "";
			exsoft.util.ajax.ajaxDataFunctionWithCallback({note_name:"Receive",content:content,rsender_name:rsender_name}, url+"/note/noteReceiveSendList.do", "note", function(data, e) {
			//exsoft.util.ajax.ajaxDataFunctionWithCallback({new_count:"ALL"}, url+"/note/noteTopNSelect.do", "note", function(data, e) {
				$(data.list).each(function(index){
					//serial =index;
					//$("#newNoteCnt").html((data.list[0].newnote_cnt));
					buffer += "<tr>";
					buffer += "<td class='tooltip'><img src='"+url+"/img/icon/mynote_receive.png' alt='' title=''></td>";
					buffer += "<input type='hidden' id='note_save' name='note_save' value='"+data.list[index].note_save+"'>";
					buffer += "<input type='hidden' id='note_manageid' name='note_manageid' value='"+data.list[index].manage_id+"'>";
					buffer += "<input type='hidden' id='note_id' name='note_id' value='"+data.list[index].note_id+"'>";
					buffer += "<input type='hidden' id='root_id' name='root_id' value='"+data.list[index].root_id+"'>";
					buffer += "<input type='hidden' id='creator_id' name='creator_id' value='"+data.list[index].creator_id+"'>";
					buffer += "<input type='hidden' id='rsender_name' name='rsender_name' value='"+data.list[index].rsender_name+"'>";
					buffer += "<td class='subject' colspan='3' onclick=\"javascript:exsoftNoteFunc.show_hide.showContent("+index+");\">";
					buffer += "<span class='title' >["+data.list[index].rsender_name+"]</span>";
					// Show at most 10 characters as the collapsed preview.
					var content ="";
					if(data.list[index].content.length > 10) {
						content = data.list[index].content.substring(0,10) + "...";
					}else {
						content = data.list[index].content;
					}
					// Unread notes are rendered in bold.
					if(data.list[index].note_read=='Y') {
						buffer += "<span class='abbr_contents' onclick=\"javascript:exsoftNoteFunc.event.updateNoteRead("+index+");\">"+content+"</span>";
					}else{
						buffer += "<span class='abbr_contents' onclick=\"javascript:exsoftNoteFunc.event.updateNoteRead("+index+");\"><b>"+content+"</b></span>";
					}
					buffer += "</td>";
					buffer += "<td class='noteDate'>"+data.list[index].create_date+"</td>";
					buffer += "<td class='listMenu'>";
					//buffer += "<img src='"+url+"/img/icon/note_compose.png' alt='' title='답장' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteReply','noteReplyFrm','RE','"+data.list[index].rsender_name+"');\" >";
					buffer += "<img src='"+url+"/img/icon/note_compose.png' alt='' title='답장' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteReply','noteReplyFrm','RE','"+index+"');\" >";
					buffer += " <img src='"+url+"/img/icon/note_inbox.png' alt='' title='보관함이동' onclick=\"javascript:exsoftNoteFunc.event.updateNoteSave('"+index+"');\" >";
					buffer += " <img src='"+url+"/img/icon/note_bin.png' alt='' title='삭제' onclick=\"javascript:exsoftNoteFunc.event.deleteSelectNote('"+index+"','Receive');\" >";
					buffer += "</td>";
					buffer += "</tr>";
					buffer += "<tr id='trhide' class='preview hide '>";
					buffer += "<td class='tooltip'></td>";
					buffer += "<td class='subject' colspan='5'>";
					buffer += "<span class='abbr_contents' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteWrite','noteWriteFrm');\">"+data.list[index].content+"</span>";
					buffer += "</td>";
					buffer += "</tr>";
				});
				$('#noteRecive').empty().append(buffer);
				// Paging Navigation
				/*$("#recivePageing").empty();
				$("#recivePageing").append(data.pagingInfo.strLinkPagePrev)
				.append(data.pagingInfo.strLinkPageList)
				.append(data.pagingInfo.strLinkPageNext);*/
				//paging
				var options ={
					pageSize: 20,
					naviSize: 10,
					currentPage: 1,
					holder: "#recivePageing",
					select: $("#noteRecive")
				};
				exsoftNoteFunc.event.quickPager(options);
			});
		}, // end reciveInit
		// Client-side pager: tags each child row of options.select with a
		// pageN class, shows the current page, and builds the numbered
		// navigation (prev/next links inserted every naviSize pages).
		quickPager : function(options) {
			$(options.holder).empty();
			var defaults = {
				pageSize: 6,
				naviSize: 2,
				currentPage: 1,
				holder: "",
				select:""
			};
			var options = $.extend(defaults, options);
			//leave this
			var selector = options.select; //$("#noteRecive");
			var totalRecords = selector.children().length;//$(this).children().length;
			//var totalRecords =(selector.children().length)/2;// selector.children().length;;//$(this).children().length;
			var pageCounter = 1;
			selector.children().each(function(i){
				if(i < pageCounter*options.pageSize && i >= (pageCounter-1)*options.pageSize) {
					$(this).addClass("page"+pageCounter);
				}
				else {
					$(this).addClass("page"+(pageCounter+1));
					pageCounter ++;
				}
			});
			//show/hide the appropriate regions
			selector.children().hide();
			$(".page"+options.currentPage).show();
			$(".preview").hide(); // hide collapsed content-preview rows
			$(".chat_view").hide();
			//first check if there is more than one page. If so, build nav
			if(pageCounter > 1) {
				//Build pager navigation
				var pageNav = "<ul class='pg_navi'>";
				for (i=1;i<=pageCounter;i++){
					if (i==options.currentPage) {
						pageNav += "<li class=\"currentPage curr pageNav"+i+"\"'><a rel='"+i+"' href='#'>"+i+"</a></li>";
					} else {
						pageNav += "<li class='pageNav"+i+"'><a rel='"+i+"' href='#'>"+i+"</a></li>";
					}
					// 1 2 ... 4 5 next prev 6 ... 10 next prev 11.... 20 next prev ... next ...
					if (0 == (i % options.naviSize)) {
						pageNav += "<li class='next-"+(i+1)+"'><a rel='"+(i+1)+"' href='#'>next</a></li>";
						pageNav += "<li class='prev-"+i+"'><a rel='"+i+"' href='#'>prev</a></li>";
					}
				}
				pageNav += "</ul>";
				if(options.holder == "") {
					selector.after(pageNav);
				}
				else {
					$(options.holder).append(pageNav);
				}
				var start = 1;
				var end = options.naviSize;
				// all hide and show start to end page with navigation.
				//$('.pageNav').children().hide();
				$(".pg_navi").find('a').hide();
				for (i=start; i<=end; i++) {
					if (i == end) {
						$('.prev-'+i).find('a').hide();
						$('.next-'+(i+1)).find('a').show();
					} else if (i == start) {
						$('.next-'+(i+1)).find('a').hide();
						$('.prev-'+i).find('a').show();
					}
					$('.pageNav'+i).find('a').show();
					//$('.pageNav'+i).show();
				}
				$('.curr').find('a').show();
				//pager navigation behaviour
				//$('.myNote_cnts').find('a.inbox_myNote').bind("click", function(e){
				//$(".pageNav a").bind("click", function() {
				$(".pg_navi").find('a').bind("click", function() {
					//pagemove: function(options) {
					//grab the REL attribute
					var clickedLink = $(this).attr("rel");
					/*if(clickedLink <= (totalRecords/naviSize)){
					}*/
					if($(this).text()=='prev') {
						end = Math.ceil(clickedLink / options.naviSize) * options.naviSize;
						start = end - options.naviSize+1;
						//$('.pageNav').children().hide();
						$(".pg_navi").find('a').hide();
						for (var i=start; i<=end; i++) {
							if (i == end) {
								$('.prev-'+i).find('a').hide();
								$('.next-'+(i+1)).find('a').show();
							} else if (i == start) {
								$('.next-'+(i+1)).find('a').hide();
								$('.prev-'+(i-1)).find('a').show();
							}
							$('.pageNav'+i).find('a').show();
						}
					} else if ($(this).text() == 'next') {
						start = Math.floor(clickedLink / options.naviSize) * options.naviSize + 1;
						end = start + (options.naviSize-1);
						//$('.pageNav').children().hide();
						$(".pg_navi").find('a').hide();
						for (i=start; i<=end; i++) {
							if (i == end) {
								$('.prev-'+i).find('a').hide();
								//if( (totalRecords/naviSize) != end){
								$('.next-'+(i+1)).find('a').show();
								//}
							} else if (i == start) {
								$('.next-'+(i+1)).find('a').hide();
								$('.prev-'+(i-1)).find('a').show();
							}
							$('.pageNav'+i).find('a').show();
						}
					}
					options.currentPage = clickedLink;
					//remove current current (!) page
					//$("li.currentPage").removeClass("currentPage");
					$("li.curr").removeClass("curr");
					//Add current page highlighting
					//$("ul.pg_navi").find("a[rel='"+clickedLink+"']").parent("li").addClass("currentPage");
					$("ul.pg_navi").find("a[rel='"+clickedLink+"']").parent("li").addClass("curr");
					//hide and show relevant links
					selector.children().hide();
					selector.find(".page"+clickedLink).show();
					$(".preview").hide(); // hide collapsed content-preview rows
					$(".chat_view").hide();
					return false;
				});
			}
		}, // end quickPager
		// Initialize the sent-notes list (optionally filtered by content
		// and/or recipient names).
		sendInit : function(content,note_from) {
			$('#noteRecive').empty();
			$('#noteSend').empty();
			$('#noteSave').empty();
			$('#noteTalk').empty();
			var buffer = "";
			exsoft.util.ajax.ajaxDataFunctionWithCallback({note_name:"Send",content:content,note_from:note_from}, url+"/note/noteReceiveSendList.do", "note", function(data, e) {
				$(data.list).each(function(index){
					buffer += "<tr>";
					buffer += "<td class='tooltip'><img src='"+url+"/img/icon/mynote_send.png' alt='' title=''></td>";
					buffer += "<input type='hidden' id='note_save' name='note_save' value='"+data.list[index].note_save+"'>";
					buffer += "<input type='hidden' id='note_manageid' name='note_manageid' value='"+data.list[index].manage_id+"'>";
					buffer += "<input type='hidden' id='note_id' name='note_id' value='"+data.list[index].note_id+"'>";
					buffer += "<input type='hidden' id='root_id' name='root_id' value='"+data.list[index].root_id+"'>";
					buffer += "<input type='hidden' id='content' name='content' value='"+data.list[index].content+"'>";
					buffer += "<td class='subject' onclick=\"javascript:exsoftNoteFunc.show_hide.showContent("+index+");\">";
					buffer += "<span class='title' >[ "+(data.list[index].note_from).replace(/;/gi," ")+"]</span>";
					// Show at most 10 characters as the collapsed preview.
					var content ="";
					if(data.list[index].content.length > 10) {
						content = data.list[index].content.substring(0,10) + "...";
					}else {
						content = data.list[index].content;
					}
					buffer += "<span class='abbr_contents'>"+content+"</span>";
					buffer += "</td>";
					buffer += "<td class='noteDate'>"+data.list[index].create_date+"</td>";
					buffer += "<td class='listMenu'>";
					buffer += "<img src='"+url+"/img/icon/note_compose.png' alt='' title='전달' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteForward','noteForwardFrm','Trans','"+index+"');\" >";
					buffer += " <img src='"+url+"/img/icon/note_inbox.png' alt='' title='보관함이동' onclick=\"javascript:exsoftNoteFunc.event.updateNoteSave('"+index+"');\" >";
					buffer += " <img src='"+url+"/img/icon/note_bin.png' alt='' title='삭제' onclick=\"javascript:exsoftNoteFunc.event.deleteSelectNote('"+index+"','Send');\" >";
					buffer += "</td>";
					buffer += "</tr>";
					buffer += "<tr class='preview hide'>";
					buffer += "<td class='tooltip'></td>";
					buffer += "<td class='subject' colspan='3'>";
					buffer += "<span class='abbr_contents' >"+data.list[index].content+"</span>";
					buffer += "</td>";
					buffer += "</tr>";
				});
				$('#noteSend').empty().append(buffer);
				//paging
				var options ={
					pageSize: 20,
					naviSize: 10,
					currentPage: 1,
					holder: "#sendPageing",
					select: $("#noteSend")
				};
				exsoftNoteFunc.event.quickPager(options);
			});
		}, // end sendInit
//쪽지 보관함 초기화
saveboxInit : function(content,rsender_name) {
$('#noteRecive').empty();
$('#noteSend').empty();
$('#noteSave').empty();
$('#noteTalk').empty();
var buffer = "";
exsoft.util.ajax.ajaxDataFunctionWithCallback({note_name:"Save",content:content,rsender_name:rsender_name}, url+"/note/noteReceiveSendList.do", "note", function(data, e) {
/*if($(data.list).size()==0){
buffer += "검색결과가 존재하지 않습니다.";
}*/
$(data.list).each(function(index){
var note_type= data.list[index].note_type;
buffer += "<tr>";
buffer += "<td class='tooltip'>";
if(note_type=="S"){
buffer += "<img src='"+url+"/img/icon/mynote_send.png' alt='' title=''></td>";
}else{
buffer += "<img src='"+url+"/img/icon/mynote_receive.png' alt='' title=''>";
}
buffer += "</td>";
buffer += "<input type='hidden' id='note_save' name='note_save' value='"+data.list[index].note_save+"'>";
buffer += "<input type='hidden' id='note_manageid' name='note_manageid' value='"+data.list[index].manage_id+"'>";
buffer += "<input type='hidden' id='note_id' name='note_id' value='"+data.list[index].note_id+"'>";
buffer += "<input type='hidden' id='root_id' name='root_id' value='"+data.list[index].root_id+"'>";
buffer += "<td class='subject' onclick=\"javascript:exsoftNoteFunc.show_hide.showContent("+index+");\">";
buffer += "<span class='title' >["+data.list[index].rsender_name+"]</span>";
var content ="";
if(data.list[index].content.length > 10) {
content = data.list[index].content.substring(0,10) + "...";
}else {
content = data.list[index].content;
}
buffer += "<span class='abbr_contents'>"+content+"</span>";
buffer += "</td>";
buffer += "<td class='noteDate'>"+data.list[index].create_date+"</td>";
buffer += "<td class='listMenu'>";
buffer += "<img src='"+url+"/img/icon/note_compose.png' alt='' title='답장' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteWrite','noteWriteFrm','RE','"+data.list[index].rsender_name+"');\" >";
buffer += " <img src='"+url+"/img/icon/note_bin.png' alt='' title='삭제' onclick=\"javascript:exsoftNoteFunc.event.deleteSelectNote('"+index+"','Save');\" >";
buffer += "</td>";
buffer += "</tr>";
buffer += "<tr class='preview hide'>";
buffer += "<td class='tooltip' ></td>";
buffer += "<td class='subject' colspan='3' >";
buffer += "<span class='abbr_contents' >"+data.list[index].content+"</span>";
buffer += "</td>";
buffer += "</tr>";
});
$('#noteSave').empty().append(buffer);
//paging
var options ={
pageSize: 20,
naviSize: 10,
currentPage: 1,
holder: "#savePageing",
select: $("#noteSave")
};
exsoftNoteFunc.event.quickPager(options);
});
},//쪽지 보관함 초기화 END
//대화함 초기화
talkboxInit : function(content,rsender_name) {
$('#noteRecive').empty();
$('#noteSend').empty();
$('#noteSave').empty();
$('#noteTalk').empty();
var buffer = "";
exsoft.util.ajax.ajaxDataFunctionWithCallback({note_name:"talk",content:content,rsender_name:rsender_name}, url+"/note/noteSelectTalk.do", "note", function(data, e) {
var note_id ="";
$(data.list).each(function(index){
note_id = data.list[index].note_id;
if(data.list[index].note_id == data.list[index].root_id){
buffer += "<tr>";
buffer += "<td class='tooltip'><img src='"+url+"/img/icon/mynote_tooltip.png' alt='' title=''></td>";
buffer += "<td class='subject' onclick=\"javascript:exsoftNoteFunc.show_hide.showTalkContent('show_cnt"+data.list[index].root_id+"');\">";
//buffer += "<td class='subject' onclick=\"javascript:exsoftNoteFunc.show_hide.showTalkContent('"+index+"');\">";
buffer += "<span class='title' >["+(data.list[index].note_from).replace(/;/gi," ")+"]</span>";
var content ="";
if(data.list[index].content.length > 10) {
content = data.list[index].content.substring(0,10) + "...";
}else {
content = data.list[index].content;
}
buffer += "<span class='abbr_contents'>"+content+"</span>";
buffer += "</td>";
buffer += "<td class='noteDate'>"+data.list[index].create_date+"</td>";
buffer += "<td class='listMenu'>";
buffer += "<a href=\"#\" class=\"note_reply\">";
buffer += "<img src='"+url+"/img/icon/note_compose.png' alt='' title='답장' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm('myNoteWrite','noteWriteFrm','RE','"+data.list[index].rsender_name+"');\" >";
buffer += "</a><a href=\"#\" class=\"delete_myNote_chat\">";
buffer += " <img src='"+url+"/img/icon/note_bin.png' alt='' title='삭제' onclick=\"javascript:exsoftNoteFunc.event.deleteSelectNote('"+index+"','TalkAll');\" >";
buffer += "</a>";
buffer += "</td>";
buffer += "</tr>";
}
if(note_id == data.list[index].root_id){
buffer += "<tr class='chat_view hide' id='show_cnt"+data.list[index].root_id+"'>";
buffer += "<td class='subject' colspan='4'>";
}
$(data.list).each(function(index2){
if(note_id == data.list[index2].root_id){
//rowcount++;
//var notekbn = data.list[index2].note_kbn;
//var note_type= data.list[index2].note_type;
buffer += "<input type='hidden' id='note_save' name='note_save' value='"+data.list[index2].note_save+"'>";
buffer += "<input type='hidden' id='note_manageid' name='note_manageid' value='"+data.list[index2].manage_id+"'>";
buffer += "<input type='hidden' id='note_id' name='note_id' value='"+data.list[index2].note_id+"'>";
buffer += "<input type='hidden' id='root_id' name='root_id' value='"+data.list[index2].root_id+"'>";
//수신자 발신자 좌우 구분
if(exsoft.user.user_id == data.list[index2].creator_id){
buffer += "<div class='me'>";
}else{
buffer += "<div class='opposite'>";
}
buffer += "<div>";
buffer += "<span class='chat_user'>"+data.list[index2].rsender_name+"</span>";
buffer += "<span>"+data.list[index2].create_date+"</span>";
buffer += " <img src='"+url+"/img/icon/note_bin.png' alt='' title='삭제' onclick=\"javascript:exsoftNoteFunc.event.deleteSelectNote('"+index2+"','Talk');\" >";
//buffer += "<a href=\"#\"><img src='"+url+"/img/icon/note_bin1.png' alt='' title='휴지통' onclick=\"javascript:exsoftNoteFunc.newOpen.noteCommonFrm();\" ></a>";
buffer += "</div>";
buffer += "<div class='chat_box'>"+data.list[index2].content;
buffer += "<div class='tooltip_tail'></div>";
buffer += "</div>";
buffer += "</div>";
}
});//data index2 END
if(note_id == data.list[index].root_id){
buffer += "</tr></td>";
}
});//data index END
$('#noteTalk').empty().append(buffer);
//paging
var options ={
pageSize: 20,
naviSize: 10,
currentPage: 1,
holder: "#talkPageing",
select: $("#noteTalk")
};
exsoftNoteFunc.event.quickPager(options);
});
},//대화함 초기화 END
		// Move the note at the given row index into the archive box.
		// Reads the hidden manage_id / note_save fields of the row inside
		// the currently selected tab's table.
		updateNoteSave : function(rowindex) {
			var mId ="";
			var nsave ="";
			var select_index = ($('span[class*=selected]').index());
			if(select_index==0){
				mId = $("#TalkTAB [id='note_manageid']").eq(rowindex).val();
				nsave = $("#TalkTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==1){
				mId = $("#ReciveTAB [id='note_manageid']").eq(rowindex).val();
				nsave = $("#ReciveTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==2){
				mId = $("#SendTAB [id='note_manageid']").eq(rowindex).val();
				nsave = $("#SendTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==3){
				mId = $("#SaveTAB [id='note_manageid']").eq(rowindex).val();
				nsave = $("#SaveTAB [id='note_save']").eq(rowindex).val();
			}
			//var mId = $("[name=recevenote_manageid]").eq(rowindex).val();
			//alert(mId);
			//var nsave = $("[name=note_save]").eq(rowindex).val();
			// Notes already archived cannot be archived again.
			if(nsave=='Y'){
				jAlert('이미 보관함에 보관중인 쪽지 입니다.', "확인", 0);
				return false;
			}
			exsoft.util.ajax.ajaxDataFunctionWithCallback({manage_id:mId}, url+"/note/noteSaveUpdate.do", "note",
				function(data,e) {
					if(data.result == "true") {
						jAlert('쪽지를 보관함에 저장했습니다.', "확인", 0);
						exsoftNoteFunc.initAction.noteRefresh();
					}else {
						jAlert(data.message, "확인", 0);
					}
			});
		}, // end updateNoteSave
		// Mark the received note at the given row index as read.
		updateNoteRead : function(rowindex) {
			var mId ="";
			mId = $("#ReciveTAB [id='note_manageid']").eq(rowindex).val();
			exsoft.util.ajax.ajaxDataFunctionWithCallback({manage_id:mId}, url+"/note/noteReadUpdate.do", "note",
				function(data,e) {
					if(data.result == "true") {
						//exsoftNoteFunc.initAction.noteRefresh();
					}else {
						jAlert(data.message, "확인", 0);
					}
			});
		}, // end updateNoteRead
		// Delete a note (or an entire conversation when kbn == "TalkAll").
		// Reads the hidden row fields from the currently selected tab and
		// confirms with the user before calling the server.
		deleteSelectNote : function(rowindex,kbn) {
			var mId ="";
			var nId ="";
			var rId = "";
			var nsave = "";
			var del_kbn = "";
			var select_index = ($('span[class*=selected]').index());
			if(select_index==0){
				mId = $("#TalkTAB [id='note_manageid']").eq(rowindex).val();
				nId = $("#TalkTAB [id='note_id']").eq(rowindex).val();
				rId = $("#TalkTAB [id='root_id']").eq(rowindex).val();
				nsave = $("#TalkTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==1){
				mId = $("#ReciveTAB [id='note_manageid']").eq(rowindex).val();
				nId = $("#ReciveTAB [id='note_id']").eq(rowindex).val();
				rId = $("#ReciveTAB [id='root_id']").eq(rowindex).val();
				nsave = $("#ReciveTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==2){
				mId = $("#SendTAB [id='note_manageid']").eq(rowindex).val();
				nId = $("#SendTAB [id='note_id']").eq(rowindex).val();
				rId = $("#SendTAB [id='root_id']").eq(rowindex).val();
				nsave = $("#SendTAB [id='note_save']").eq(rowindex).val();
			}else if(select_index==3){
				mId = $("#SaveTAB [id='note_manageid']").eq(rowindex).val();
				nId = $("#SaveTAB [id='note_id']").eq(rowindex).val();
				rId = $("#SaveTAB [id='root_id']").eq(rowindex).val();
				nsave = $("#SaveTAB [id='note_save']").eq(rowindex).val();
			}
			//alert("nsave =" + ":"+nsave+":");
			//alert("kbn =" + ":"+kbn+":" );
			// Archived notes cannot be deleted (except from the archive tab itself).
			if((kbn != "Save") && (nsave == "Y")){
				jAlert("보관함에 보관된 쪽지는 삭제할 수 없습니다.", "확인", 0);
				return false;
			}
			var show_massage = "";
			if(kbn == "TalkAll" ){
				del_kbn="ALL";
				show_massage= "전체 대화 내역을 삭제하시겠습니까?";
			}else{
				show_massage= "쪽지를 삭제하시겠습니까?";
			}
			jConfirm(show_massage, "확인", 6, function(r){
				if(r){
					exsoft.util.ajax.ajaxDataFunctionWithCallback({manage_id:mId,note_id:nId,root_id:rId,del_kbn:del_kbn}, url+"/note/noteDelete.do", "note",
						function(data,e) {
							if(data.result == "true") {
								jAlert('쪽지를 삭제했습니다.', "확인", 0);
								// Refresh the active tab.
								exsoftNoteFunc.initAction.noteRefresh();
							}else {
								jAlert(data.message, "확인", 0);
							}
					});
				}
			});
		}, // end deleteSelectNote
//쪽지 insert
insertNoteWrite : function(divId,formId) {
//var objForm = document.noteWriteFrm;
//$("#"+formId).find("#noteReciver").text(0); //글자 수 초기화
// validation check
//if (objForm.noteReciver.value.length == 0) {
if ($("#"+formId).find("#noteReciver").val().length == 0) {
jAlert("받는 사람을 입력(선택)하세요.", "확인", 0);
return false;
}
//if (objForm.noteContent.value.length == 0 || objForm.noteContent.value.length == 0) {
if ($("#"+formId).find("#noteContent").val().length == 0 || $("#"+formId).find("#noteContent").val() == null) {
jAlert("쪽지 내용을 입력 하세요.", "확인", 0);
return false;
}
var reciver = $("#"+formId).find("#noteReciver").val();
var content = $("#"+formId).find("#noteContent").val();
var jsonArr = [];
var jsonArrIndex = 0;
//var result = new Object();
var inputData = $("#"+formId).find("#reciverArrayList").val();
//var jsonObject = { "reciveList":JSON.stringify(result)};
var selectList = inputData.split("|");
for(var i=0;i<selectList.length;i++) {
result = selectList[i].split("#");
if(result.length == 3) {
var rowData = {accessor_id : "", accessor_isgroup : ""};
rowData['accessor_id'] = result[1];
rowData['accessor_isgroup'] = result[0] == 'GROUP' ? 'T' : 'F';
//rowData.accessor_isalias = result[0] == 'ALL' ? 'T' : 'F';
if(rowData.accessor_id){
jsonArr[jsonArrIndex] = rowData;
jsonArrIndex ++;
}
}
}
var jsonObject = { "reciveList":JSON.stringify(jsonArr),"content":content,"note_from":reciver};
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject, url+"/note/noteSendControl.do", 'note',
function(data, e){
if(data.result == 'true'){
jAlert('쪽지를 발송을 완료했습니다', "확인", 0);
$("#"+formId).find("#noteReciver").val("");
exsoft.util.layout.popDivLayerClose(divId);
//base.gridRefresh('authWasteGridList','${contextRoot}/mypage/authDocumentList.do');
} else {
jAlert(data.message, "확인", 0);
}
}
);
//alert(result.accessor_id);
/*exsoft.util.ajax.ajaxDataFunctionWithCallback({reciveList:jsonObject}, url+"/note/noteSendControl.do", "note",
//base.ajaxFunctionWithCallback("noteReciver",url+"/note/noteTopNSelect.do", "note",
function(data,e) {
if(data.result == "true") {
jAlert('쪽지를 발송을 완료했습니다', "확인", 0);
$(".myNoteWrite").addClass('hide');
}else {
jAlert(data.message, "확인", 0);
}
});*/
},//쪽지 insert END
		// Run a keyword search on the active tab. Search-type index 0
		// searches note content; any other index searches sender name.
		searchKeyword : function(){
			var index = exsoft.util.layout.getSelectBox('myNote_srch_type','option');
			//var index = $("#myNote_srch_type[value='2']").text();//$('#myNote_srch_type:selected').text();//$("#myNote_srch_type option").index($("#myNote_srch_type option:selected"));
			var content ="";
			var rsender_name="";
			if(index==0){
				// Search by content.
				content = $("#searchKeyword").val();
			}else{
				rsender_name= $("#searchKeyword").val();
			}
			var select_index = ($('span[class*=selected]').index());
			if(select_index==0){
				exsoftNoteFunc.event.talkboxInit(content,rsender_name);
			}else if(select_index==1){
				exsoftNoteFunc.event.reciveInit(content,rsender_name);
			}else if(select_index==2){
				exsoftNoteFunc.event.sendInit(content,rsender_name);
			}else if(select_index==3){
				exsoftNoteFunc.event.saveboxInit(content,rsender_name);
			}
		},
//쪽지 Reply
insertNoteReWrite : function() {
var objForm = document.noteReplyFrm;
// validation check
if (objForm.noteReReciver.value.length == 0) {
jAlert("받는 사람을 입력(선택)하세요.", "확인", 0);
return false;
}
if (objForm.noteReContent.value.length == 0 || objForm.noteReContent.value.length == 0) {
jAlert("쪽지 내용을 입력 하세요.", "확인", 0);
return false;
}
var root_id = $("#noteReplyroot_id").val();
var creator_id = $("#noteReplycreator_id").val();
var note_from_userid= $('#noteReplycreator_id').val();
var reciver = objForm.noteReReciver.value;
var content = objForm.noteReContent.value ;
var paramObject = { "root_id":root_id,"creator_id":creator_id,"note_from_userid":note_from_userid,"content":content,"note_from":reciver};
exsoft.util.ajax.ajaxDataFunctionWithCallback(paramObject, url+"/note/noteReSendControl.do", 'note',
function(data, e){
if(data.result == 'true'){
jAlert('쪽지를 발송을 완료했습니다', "확인", 0);
exsoft.util.layout.popDivLayerClose('myNoteReply');
//base.gridRefresh('authWasteGridList','${contextRoot}/mypage/authDocumentList.do');
} else {
jAlert(data.message, "확인", 0);
}
}
);
},//쪽지 Reply END
/*userSearch : function(divId) {
var groupnm="";
var usernm="";
//var usernm = $("#noteReciver").val();
//alert(usernm);
var user_id = "";
var user_name = "";
var jsonArrIndex = 0;
exsoft.util.ajax.ajaxDataFunctionWithCallback({groupName:groupnm,userName:usernm}, url+"/user/searchUserList.do", "note", function(data, e) {
//alert(data.list[0].user_id+":"+data.list[0].user_name_ko);
$(data.list).each(function(index){
//result[index] = {data.list[index].user_name_ko+";"+data.list[index].user_id;
user_id=data.list[index].user_id;
user_name=data.list[index].user_name_ko;
var rowData = {id : "", name : ""};
rowData['id'] = data.list[index].user_id;
rowData['name'] = data.list[index].user_name_ko;
//if(rowData.name){
jsonArr[jsonArrIndex] = rowData;
jsonArrIndex ++;
//}
});
});
$("#noteReciver").tokenInput(jsonArr,{theme: "facebook"});
},*/
},//event END
	show_hide : {
		// Toggle the expanded chat view for a conversation; rowindex is
		// the DOM id of the chat row (e.g. "show_cnt<root_id>").
		showTalkContent : function(rowindex) {
			$(exsoft.util.common.getIdFormat(rowindex)).toggle();
			//$('.chat_view:eq('+ rowindex +')').toggle();
			//$("#show_cnt").show(); // id 값이 sel인 태그를 불러와서 그 값을 추출하는 문
			//$('.chat_view:eq('+ rowindex +')').toggle();
			//alert(base.getIdFormat(rowindex + ' : stepan'));
			//$(exsoft.util.common.getIdFormat(rowindex)).addClass("show");
			//$(base.getIdFormat(rowindex)).toggle();
			//$(rowindx).removeclass("hide");
			//$('.chat_view').toggle();
			//$('.chat_view:eq('+ +')').toggle();
		},
		// Toggle the inline content-preview row at the given list index.
		showContent : function(rowindex) {
			$('.preview:eq('+ rowindex +')').toggle();
		},
	},
	newOpen : {
		// URL and window target for the note user-selection popup.
		userSelectUrl : "/note/noteUserSelect.do",
		userSelectTarget : "noteUserFrm",
		//contextRoot : "",
		// Open the user-selection popup and POST the selection form into it.
		userSelect : function() {
			this.contextRoot = exsoft.contextRoot;
			this.openWindow(this.userSelectTarget,920,630);
			this.formInit(document.noteUserFrm,this.contextRoot+this.userSelectUrl,this.userSelectTarget);
		},
		// Open a named popup window of the given size.
		openWindow : function(targetName,width,height) {
			var win= "";
			win = window.open("",targetName,"width="+width+", height="+height+", toolbar=no, menubar=no, scrollbars=no, resizable=no" );
			win.focus(); // always bring the popup to the front
		},
		// Point the given form at url/target and submit it (POST).
		formInit : function(formName,url,targetName) {
			var frm = formName;
			frm.action = url;
			frm.method = "post";
			frm.target = targetName;
			frm.submit();
		},
		// Open the compose/forward/reply layer. kbn 'RE' seeds the reply
		// fields from the received list; 'Trans' preloads the forwarded body.
		noteCommonFrm : function(divClass,formId,kbn,index) {
			$("#"+formId).find("span.current_count").text(0); // reset character counter
			$("."+divClass).removeClass('hide');
			exsoft.util.common.formClear("#"+formId);
			// Reply: seed hidden thread/creator fields and the recipient box.
			if(kbn=='RE'){
				$("#noteReplyroot_id").val($("#ReciveTAB [id='root_id']").eq(index).val());
				$("#noteReplycreator_id").val($("#ReciveTAB [id='creator_id']").eq(index).val());
				$('#noteReReciver').val($("#ReciveTAB [id='rsender_name']").eq(index).val());
			}
			// Forward: copy the original body into the textarea and sync the counter.
			if(kbn=='Trans'){
				$("#"+formId).find("textarea").val($("#SendTAB [id='content']").eq(index).val());
				// exsoftNoteFunc.ui.contentLength('noteForwardFrm'); //글자수 셋팅
				var $textarea = $('#noteForwardFrm textarea');
				$textarea.next().children('span').text($textarea.val().length);
			}
		},
	},
ui : {
	// NOTE(review): everything below is dead code — a commented-out legacy
	// byte-length counter (1000-byte limit, 2 bytes per multi-byte char via
	// escape()). Kept for reference only; the live character count is set
	// inline in newOpen.noteCommonFrm. Safe to delete once confirmed unused.
//		//쪽지 내용 길이 체크
//		contentLength : function(formId){
//			var tmpStr;
//			tmpStr = $("#"+formId).find("textarea").val();//$('#noteContent').val();
//			this.cal_byte(tmpStr,formId);
//
//		},
//		//글자 수 셋팅
//		cal_byte : function (aquery,formId)
//		{
//
//			var tmpStr;
//			var temp=0;
//			var onechar;
//			var tcount;
//			tcount = 0;
//
//			tmpStr = new String(aquery);
//			temp = tmpStr.length;
//			for (var k=0;k < temp; k++)
//			{
//				onechar = tmpStr.charAt(k);
//				if (escape(onechar) =='%0D') { } else if (escape(onechar).length > 4) { tcount += 2; } else { tcount++; }
//			}
//			//카운트 된 수 표시
//			//document.getElementById("current_textcnt").innerHTML = tcount;
//			$("#"+formId).find("span.current_count").text(tcount);
//			//$("#"+formId).find("span.current_count").val(tcount);
//			if(tcount>1000) {
//				reserve = tcount-1000;
//				jAlert("메시지 내용은 1000바이트 이상은 전송하실 수 없습니다.\r\n 쓰신 메세지는 "+reserve+"바이트가 초과되었습니다.\r\n 초과된 부분은 자동으로 삭제됩니다.", "확인", 0);
//				//this.nets_check($('#noteContent').val());
//				this.nets_check($("#"+formId).find("textarea").val(),formId);
//				return;
//			}
//
//		},
//
//		nets_check : function(aquery,formId)
//		{
//
//			var tmpStr;
//			var temp=0;
//			var onechar;
//			var tcount;
//			tcount = 0;
//
//			tmpStr = new String(aquery);
//			temp = tmpStr.length;
//
//			for(var k=0;k<temp;k++)
//			{
//				onechar = tmpStr.charAt(k);
//
//				if(escape(onechar).length > 4) {
//					tcount += 2;
//				} else {
//					// 엔터값이 들어왔을때 값(\r\n)이 두번실행되는데 첫번째 값(\n)이 들어왔을때 tcount를 증가시키지 않는다.
//					if(escape(onechar)=='%0A') {
//					} else {
//						tcount++;
//					}
//				}
//
//				if(tcount>1000) {
//					tmpStr = tmpStr.substring(0,k);
//					break;
//				}
//
//			}
//			//$("[name=vc_message]").val(tmpStr);
//			$("#"+formId).find("textarea").val(tmpStr);
//			cal_byte(tmpStr,formId);
//
//		},
}//ui
};<file_sep>/EDMS3/src/kr/co/exsoft/document/service/PageServiceImpl.java
package kr.co.exsoft.document.service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.document.dao.PageDao;
import kr.co.exsoft.document.vo.PageVO;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.PagingAjaxUtil;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
/**
 * Page service implementation: duplicate-page listing for the admin screen
 * and document-page lookups (viewer / URL-mail links).
 *
 * @author 패키지팀
 * @since 2014. 9. 16.
 * @version 1.0
 */
@Service("pageService")
public class PageServiceImpl extends ExsoftAbstractServiceImpl implements PageService{

	@Autowired
	@Qualifier("sqlSession")
	private SqlSession sqlSession;

	/**
	 * Returns one page of duplicate pages plus Ajax paging info.
	 * Expects in {@code map}: nPage, page_size, and optionally contextRoot.
	 *
	 * @return map with keys page / records / total / list / pagingInfo
	 */
	@Override
	public Map<String, Object> duplicatePageList(HashMap<String,Object> map) throws Exception {
		Map<String, Object> resultMap = new HashMap<String, Object>();
		PageDao pageDao = sqlSession.getMapper(PageDao.class);

		int total = pageDao.duplicatePageCount(map);
		List<PageVO> ret = pageDao.duplicatePageList(map);

		resultMap.put("page",map.get("nPage").toString());
		resultMap.put("records",total);
		resultMap.put("total",CommonUtil.getTotPageSize(total,Integer.parseInt(map.get("page_size").toString())));
		resultMap.put("list",ret);

		// Ajax paging widget info (page links call the admin grid's pager function)
		String strLink = "javascript:exsoftAdminDupFunc.event.gridMainPage";
		String contextRoot = map.get("contextRoot") != null ? map.get("contextRoot").toString() : "";
		PagingAjaxUtil pagingInfo = new PagingAjaxUtil(Integer.parseInt(map.get("nPage").toString()),total,Integer.parseInt(map.get("page_size").toString()),10,strLink,contextRoot);
		resultMap.put("pagingInfo",pagingInfo);
		return resultMap;
	}

	/**
	 * Page list of a single document; the DAO reads doc_id from {@code map}.
	 * (Removed an unused local "param" map that was built but never passed on.)
	 */
	@Override
	public Map<String, Object> comDocPageList(HashMap<String, Object> map) throws Exception {
		PageDao pageDao = sqlSession.getMapper(PageDao.class);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		List<PageVO> pageList = pageDao.comDocPageList(map);
		resultMap.put("pageList",pageList);
		return resultMap;
	}

	/**
	 * Page list for one or more documents for URL-mail; the DAO reads
	 * doc_id_list from {@code map}.
	 * (Removed an unused local "param" map that was built but never passed on.)
	 */
	@Override
	public Map<String, Object> docPageListForURLMail(HashMap<String, Object> map) throws Exception {
		PageDao pageDao = sqlSession.getMapper(PageDao.class);
		Map<String, Object> resultMap = new HashMap<String, Object>();
		List<PageVO> pageList = pageDao.docPageListForURLMail(map);
		resultMap.put("pageList",pageList);
		return resultMap;
	}
}
<file_sep>/EDMS3/WebContent/js/popup/selectMultiUserWindow.js
var selectMultiUserWindow = {
callbackFunction : null, // 확인
treeObject : null, // 부서 트리 객체
pageSize : 0,
// 0. 초기화
init : function(pageSize, callback) {
//그룹관리 > 구성원추가 - 창 닫기
$('.grpDeptUser_add_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.grpDeptUser_add').addClass('hide');
$('.grpDeptUser_add_wrapper').addClass('hide');
});
//그룹관리 > 구성원추가 창 닫기 : 음영진 부분 클릭 시 닫기
$('.grpDeptUser_add_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.grpDeptUser_add').addClass('hide');
});
// 콜백 함수 저장
selectMultiUserWindow.callbackFunction = callback;
selectMultiUserWindow.open();
// 1. 트리 초기화
if (selectMultiUserWindow.treeObject == undefined) {
treeOption = {
divId : "#pop_groupTree",
context : exsoft.contextRoot,
url : "/group/groupList.do",
treeType : "admin"
};
selectMultiUserWindow.treeObject = new XFTree(treeOption);
selectMultiUserWindow.treeObject.callbackSelectNode = function(e, data) {
// 부서 사용자 목록 조회
exsoft.util.grid.gridPostDataRefresh('#pop_userList', exsoft.contextRoot + '/admin/groupUserList.do', {groupId:data.node.id});
}
selectMultiUserWindow.treeObject.init();
} else {
selectMultiUserWindow.treeObject.refresh();
}
// 2. 검색 결과 테이블 초기화
if ($("#pop_userList")[0].grid == undefined) {
$('#pop_userList').jqGrid({
url: exsoft.contextRoot + '/user/searchUserList.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['group_nm','user_name_ko','user_id','position_nm','role_nm','email','user_status_nm'],
colModel:[
{name:'group_nm',index:'group_nm',width:30, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
{name:'user_name_ko',index:'user_name_ko',width:50, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
{name:'user_id',index:'user_id',width:50, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
{name:'position_nm',index:'position_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'role_nm',index:'role_nm',width:10, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'email',index:'email',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'user_status_nm',index:'user_status_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'}
],
autowidth:true,
viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
sortname : "group_nm",
sortorder:"desc",
scroll: true,
scrollOffset: 0,
rowNum : selectMultiUserWindow.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'사용자 목록',
pagerpos: 'center',
pginput: true,
loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
,loadBeforeSend: function() {
exsoft.util.grid.gridNoDataMsgInit('pop_userList');
exsoft.util.grid.gridTitleBarHide('pop_userList');
}
,loadComplete: function() {
exsoft.util.grid.gridInputInit(false);
}
});
// Grid 컬럼정렬 처리
var headerData = '{"group_nm":"그룹명","user_name_ko":"사용자명", "user_id":"사용자 ID"}';
exsoft.util.grid.gridColumHeader('pop_userList',headerData,'center');
headerData = null;
}
// 3. 그룹의 기존 멤버를 설정함
if ($("#memberList")[0].grid == undefined) {
$('#memberList').jqGrid({
url: exsoft.contextRoot + '/user/searchUserList.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['group_nm','user_name_ko','user_id','position_nm','role_nm','email','user_status_nm'],
colModel:[
{name:'group_nm',index:'group_nm',width:30, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
{name:'user_name_ko',index:'user_name_ko',width:50, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
{name:'user_id',index:'user_id',width:50, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
{name:'position_nm',index:'position_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'role_nm',index:'role_nm',width:10, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'email',index:'email',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
{name:'user_status_nm',index:'user_status_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'}
],
autowidth:true,
viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
sortname : "group_nm",
sortorder:"desc",
scroll: true,
scrollOffset: 0,
rowNum : selectMultiUserWindow.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'사용자 목록',
pagerpos: 'center',
pginput: true,
loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
,loadBeforeSend: function() {
exsoft.util.grid.gridNoDataMsgInit('memberList');
exsoft.util.grid.gridTitleBarHide('memberList');
}
,loadComplete: function() {
// 기존 데이터를 삭제함
$("#memberList").jqGrid("clearGridData");
exsoft.util.grid.gridInputInit(false);
}
});
// Grid 컬럼정렬 처리
var headerData = '{"group_nm":"그룹명","user_name_ko":"사용자명", "user_id":"사용자 ID"}';
exsoft.util.grid.gridColumHeader('memberList',headerData,'center');
headerData = null;
} else {
// 기존 데이터를 삭제함
$("#memberList").jqGrid("clearGridData");
}
},
// 1. 팝업
open : function() {
exsoft.util.layout.divLayerOpen("grpDeptUser_add_wrapper", "grpDeptUser_add");
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : function() {
exsoft.util.layout.divLayerClose("grpDeptUser_add_wrapper", "grpDeptUser_add");
},
//4. 화면 이벤트 처리
event : {
// 선택된 사용자를 목록에 추가
appendUser : function() {
if (!exsoft.util.grid.gridSelectCheck("pop_userList")) {
jAlert("추가할 사용자를 선택해주세요.", "구성원추가", 0);
return;
}
var rowDataList = new Array();
var selectedUserList = $("#pop_userList").getGridParam("selarrrow");
var memberIdList = exsoft.util.grid.gridSelectArrayDataAllRow("memberList", "user_id", "user_id");
// 1. 선택된 사용자를 추가한다
$(selectedUserList).each(function() {
var row = $("#pop_userList").getRowData(this);
var isDuplicate = false;
// 1-1. 이미 있는 사용자인지 체크한다
$(memberIdList).each(function() {
if (this.user_id == row.user_id) {
isDuplicate = true;
}
});
if (!isDuplicate) {
$("#memberList").jqGrid("addRowData", row.user_id, row);
}
});
},
// 목록에서 사용자를 제거
removeUser : function() {
if (!exsoft.util.grid.gridSelectCheck("memberList")) {
jAlert("제외할 사용자를 선택해주세요.", "구성원추가", 0);
return;
}
exsoft.util.grid.gridDeleteRow("memberList", null, null, true);
},
// 부서/사용자 조건 검색
searchGroupUser : function() {
if ($("#strKeyword").val().length == 0) {
alert("검색어를 입력해주세요", "구성원추가", 0);
return;
}
exsoft.util.grid.gridPostDataInitRefresh('pop_userList', exsoft.contextRoot + '/user/searchUserList.do', {userName : $("#strKeyword").val()});
},
// 엔터키 입력시
enterKeyPress : function(e) {
if (e.keyCode == 13) {
selectMultiUserWindow.event.searchGroupUser();
return false;
}
},
// 확인버튼 클릭시
submit : function() {
// 최종 사용자 목록 설정
var userIdList = $("#memberList").jqGrid("getDataIDs");
var rowDataList = new Array();
$(userIdList).each(function(i) {
if (this != "") {
var row = $("#memberList").getRowData(this);
rowDataList.push(row);
}
})
// 콜백함수로 리턴
selectMultiUserWindow.callbackFunction(rowDataList);
selectMultiUserWindow.close();
}
},
//5. 화면 UI 변경 처리
ui : {
},
//6. callback 처리
callback : {
},
}<file_sep>/EDMS3/WebContent/js/docadmin/auditManager.js
var auditManager = {
gAudit_date : null,
gUser_id : null,
gUser_name : null,
part : null,
// 0. 초기화
init : {
initPage : function(part,sdate,edate) {
auditManager.part = part;
auditManager.init.auditListGrid();
$("#sdate").val(sdate);
$("#edate").val(edate);
},
auditListGrid : function() {
$('#auditGridList').jqGrid({
url:exsoft.contextRoot + '/admin/auditPage.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',toatl:'toatl',root:'list'
},
colNames:['audit_date','user_id','user_name','read_count','report_mail_sent_date'],
colModel:[
{name:'audit_date',index:'audit_date',width:80, editable:false,sortable:true,resizable:true,align:'center'},
{name:'user_name',index:'user_name',width:80, editable:false,sortable:true,resizable:true,align:'center'},
{name:'user_id',index:'user_id',width:80, editable:false,sortable:true,resizable:true,align:'center'},
{name:'read_count',index:'read_count',width:70, editable:false,sortable:true,resizable:true,align:'center'},
{name:'report_mail_sent_date',index:'report_mail_sent_date',width:80, editable:false,sortable:true,resizable:true,align:'center',hidden:true}
],
autowidth:true,
height:"auto",
viewrecords: true,
multiselect:false,
sortname : "audit_date",
sortorder:"desc",
sortable: true,
shrinkToFit:true,
scrollOffset: 0,
gridview: true,
rowNum : 15,
emptyDataText: "데이터가 없습니다.",
caption:'대량문서 열람 목록',
postData : {sdate:$("#sdate").val(),edate:$("#edate").val(),is_search:'true',part:auditManager.part}
,onCellSelect : function(rowid,iCol,cellcontent,e){
auditManager.event.auditViewRefresh('auditGridList',rowid);
}
,loadBeforeSend: function() {
exsoft.util.grid.gridNoDataMsgInit('auditGridList');
exsoft.util.grid.gridTitleBarHide('auditGridList');
}
,loadComplete: function(data) {
if ($("#auditGridList").getGridParam("records")==0) {
$(".sub_right").addClass("hide");
exsoft.util.grid.gridNoRecords('auditGridList','no_data');
}else {
$(".sub_right").removeClass("hide");
exsoft.util.grid.gridViewRecords('auditGridList');
// 조회화면 DISPLAY
// 조회화면 DISPLAY
var rowId = $("#auditGridList").getDataIDs()[0];
auditManager.gAudit_date = $("#auditGridList").getRowData(rowId).audit_date;
auditManager.gUser_id = $("#auditGridList").getRowData(rowId).user_id;
auditManager.gUser_name = $("#auditGridList").getRowData(rowId).user_name;
auditManager.event.auditDetailCall(auditManager.gAudit_date,auditManager.gUser_id);
}
exsoft.util.grid.gridInputInit(true);
exsoft.util.grid.gridPager("#auditGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
var headerData = '{"audit_date":"감사일","user_name":"성명","user_id":"사용자ID","read_count":"조회수","report_mail_sent_date":"메일 발송일"}';
exsoft.util.grid.gridColumHeader('auditGridList',headerData,'center');
},
// 대량문서 열람 상세 목록
auditDetailListGrid : function(audit_date,user_id) {
$('#auditDetailList').jqGrid({
url:exsoft.contextRoot + '/admin/auditDetailPage.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',toatl:'toatl',root:'list'
},
colNames:['page_name','page_size','action_date'],
colModel:[
{name:'page_name',index:'page_name',width:220, editable:false,sortable:true,resizable:true,align:'left'},
{name:'page_size',index:'page_size',width:80, editable:false,sortable:true,resizable:true,align:'center'},
{name:'action_date',index:'action_date',width:80, editable:false,sortable:true,resizable:true,align:'center'}
],
autowidth:true,
height:"auto",
viewrecords: true,
multiselect:false,
sortname : "page_name",
sortorder:"desc",
sortable: true,
shrinkToFit:true,
scrollOffset: 0,
gridview: true,
rowNum : 15,
emptyDataText: "데이터가 없습니다.",
caption:'대량문서 열람 상세 목록',
postData :{audit_date:audit_date,user_id:user_id,is_search:'true'}
,loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('auditDetailList');
exsoft.util.grid.gridNoDataMsgInit('auditDetailList');
}
,loadComplete: function(data) {
if ($("#auditDetailList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('auditDetailList','no_data');
}else {
exsoft.util.grid.gridViewRecords('auditDetailList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#auditDetailPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
var headerData = '{"page_name":"파일명","page_size":"크기","action_date":"조회일"}';
exsoft.util.grid.gridColumHeader('auditDetailList',headerData,'center');
}
},
// 1. 팝업
open : {
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : {
},
//4. 화면 이벤트 처리
event : {
// 페이지이동 처리(공통)
gridPage : function(nPage) {
$("#auditGridList").setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
gridPageForDetail : function(nPage) {
$("#auditDetailList").setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
//상세화면 갱신처리
auditViewRefresh : function(gridIds,rowid) {
auditManager.gAudit_date = $("#"+gridIds).getRowData(rowid).audit_date;
auditManager.gUser_id = $("#"+gridIds).getRowData(rowid).user_id;
auditManager.gUser_name = $("#"+gridIds).getRowData(rowid).user_name;
auditManager.event.auditDetailCall(auditManager.gAudit_date,auditManager.gUser_id);
},
// 상세보기 요청
auditDetailCall : function(audit_date,user_id) {
if($('#auditDetailList')[0].grid != undefined) {
var postData = { audit_date:audit_date,user_id:user_id,is_search:'true'}; // 리스트 변경선택시 상세리스트 페이징 초기화 처리
exsoft.util.grid.gridPostDataRefresh('auditDetailList',exsoft.contextRoot + '/admin/auditDetailPage.do',postData);
}else {
auditManager.init.auditDetailListGrid(audit_date,user_id);
}
$("#detailTitle").html(auditManager.gUser_name + " :: " + auditManager.gAudit_date);
postData = null;
},
//검색처리
searchFunc : function() {
if($("#sdate").val().length == 0 || $("#edate").val().length == 0) {
jAlert("감사일을 입력하세요", "대량문서 열람 감사 관리", 0);
return false;
}
// 검색기간 유효성 체크 및 1년이내 검색만 가능함
if(exsoft.util.check.searchValid($("#sdate").val(),$("#edate").val()) ) {
var postData = {
sdate:$("#sdate").val(),
edate:$("#edate").val(),
is_search:'true'
} ;
exsoft.util.grid.gridPostDataRefresh('auditGridList',exsoft.contextRoot + '/admin/auditPage.do',postData);
}
postData = null;
}
},
//5. 화면 UI 변경 처리
ui : {
},
//6. callback 처리
callback : {
},
}<file_sep>/EDMS3/src/kr/co/exsoft/external/controller/ExternalPublicController.java
package kr.co.exsoft.external.controller;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springmodules.validation.commons.DefaultBeanValidator;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.ui.ModelMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.document.vo.PageVO;
import kr.co.exsoft.external.service.ExternalService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.library.DownloadView;
/**
 * External-integration controller: URL-link download plus sample RESTful
 * endpoints (GET/POST/HEAD, single/multi file upload).
 *
 * @author 패키지 개발팀
 * @since 2014.07.17
 * @version 3.0
 */
@Controller
@RequestMapping("/external")
public class ExternalPublicController {

	@Autowired
	private ExternalService externalService;

	@Autowired
	private CommonService commonService;

	@Autowired
	private MessageSource messageSource;

	@Autowired
	private DefaultBeanValidator beanValidator;

	protected static final Log logger = LogFactory.getLog(ExternalPublicController.class);

	/**
	 * Download handler for copied URL links: resolves the page referenced by
	 * the request parameters and streams it via DownloadView; renders the
	 * error/404 view when the lookup fails.
	 *
	 * @param model Spring model
	 * @param map   request parameters identifying the page
	 * @return DownloadView on success, error/404 view on failure
	 */
	@RequestMapping("/urlDownLoad.do")
	public ModelAndView urlDownLoad(Model model, @RequestParam HashMap<String,Object> map) {
		List<PageVO> pageList = new ArrayList<PageVO>();
		PageVO pageVO = new PageVO();
		try {
			// 1. Resolve the download target page
			pageVO = commonService.urlPageInfo(map);
			pageList.add(pageVO);
			// 2. Hand off to the download view (single file, no zip)
			model.addAttribute("isZip",Constant.F);
			model.addAttribute("pageList",pageList);
		}catch(Exception e) {
			logger.error(e.getMessage());
			model.addAttribute("message",e.getMessage());
			return new ModelAndView("error/404");
		}
		return new ModelAndView(new DownloadView());
	}

	/**
	 * Placeholder external interface endpoint.
	 * TODO: implement. map.get("key") carries the ARIA encryption key;
	 * the response is json or a plain string.
	 */
	@RequestMapping(value = "/interface", method = RequestMethod.POST)
	@ResponseBody
	public String restfulInterface(@RequestParam HashMap<String,Object> map, @ModelAttribute("uploadForm") ExternalMultiFileUpload uploadForm)
			throws IllegalStateException, IOException {
		return "";
	}

	/** Restful Test Start... */

	/** Sample: path-variable GET. */
	@RequestMapping(value = "/{name}", method = RequestMethod.GET)
	public String getMovie(@PathVariable String name, ModelMap model) {
		model.addAttribute("movie", name);
		return "user/restFul";
	}

	/** Sample: default GET. */
	@RequestMapping(value = "/", method = RequestMethod.GET)
	public String getDefaultMovie(ModelMap model) {
		model.addAttribute("movie", "this is default movie");
		return "/user/restFul";
	}

	/** Sample: GET returning a JSON list built from the path variable. */
	@RequestMapping(value = "/restful.jsonget/{key}", method = RequestMethod.GET)
	@ResponseBody
	public List<HashMap<String,Object>> getTest(Model model, @PathVariable String key, @RequestParam HashMap<String,Object> map) {
		logger.info("json return Sample");
		List<HashMap<String,Object>> list = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> data1 = new HashMap<String,Object>();
		data1.put("name", "a : " + key);
		HashMap<String,Object> data2 = new HashMap<String,Object>();
		data2.put("userid", "b : " + key);
		list.add(data1);
		list.add(data2);
		return list;
	}

	/** Sample: POST returning a JSON list built from request parameters. */
	@RequestMapping(value = "/restful.jsonpost", method = RequestMethod.POST)
	@ResponseBody
	public List<HashMap<String,Object>> postTest(Model model, @RequestParam HashMap<String,Object> map) {
		logger.info("json return Sample");
		List<HashMap<String,Object>> list = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> data1 = new HashMap<String,Object>();
		data1.put("name", "a : " + map.get("key"));
		HashMap<String,Object> data2 = new HashMap<String,Object>();
		data2.put("userid", "b : " + map.get("userid"));
		list.add(data1);
		list.add(data2);
		return list;
	}

	/** Sample: HEAD request reading a request header. */
	@RequestMapping(value = "/restful.jsonhead", method = RequestMethod.HEAD)
	@ResponseBody
	public List<HashMap<String,Object>> haedTest(Model model, @RequestHeader String key) {
		logger.info("json return Sample");
		List<HashMap<String,Object>> list = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> data1 = new HashMap<String,Object>();
		data1.put("name", "a : " + key);
		HashMap<String,Object> data2 = new HashMap<String,Object>();
		data2.put("userid", "b : " + key);
		list.add(data1);
		list.add(data2);
		return list;
	}

	/**
	 * Sample: single-file multipart upload.
	 * NOTE(review): file.getName() returns the form FIELD name ("uploadBox"),
	 * not the uploaded file's original name — confirm whether
	 * getOriginalFilename() was intended.
	 */
	@RequestMapping(value="/restful.singleUpload", method=RequestMethod.POST)
	@ResponseBody
	public String singleFileUploadTest(@RequestParam("uploadBox") MultipartFile file){ // <input type="file" name="uploadBox">
		String name = "File not found";
		if (!file.isEmpty()) {
			try {
				name = file.getName();
				byte[] bytes = file.getBytes();
				// BUGFIX: the stream was previously never closed when write() threw;
				// close it in a finally block so the descriptor is always released.
				BufferedOutputStream stream =
						new BufferedOutputStream(new FileOutputStream(new File(name + "-uploaded")));
				try {
					stream.write(bytes);
				} finally {
					stream.close();
				}
				return "You successfully uploaded " + name + " into " + name + "-uploaded !";
			} catch (Exception e) {
				return "You failed to upload " + name + " => " + e.getMessage();
			}
		} else {
			return "You failed to upload " + name + " because the file was empty.";
		}
	}

	/**
	 * Sample: multi-file multipart upload via a form-backing object;
	 * stores each file under saveDirectory and lists the saved names.
	 */
	@RequestMapping(value = "/restful.multiUpload", method = RequestMethod.POST)
	@ResponseBody
	public String multiFileUploadTest( @ModelAttribute("uploadForm") ExternalMultiFileUpload uploadForm, Model map)
			throws IllegalStateException, IOException {
		String saveDirectory = "d:/temp/";
		List<MultipartFile> crunchifyFiles = uploadForm.getFiles();
		List<String> fileNames = new ArrayList<String>();

		if (null != crunchifyFiles && crunchifyFiles.size() > 0) {
			for (MultipartFile multipartFile : crunchifyFiles) {
				String fileName = multipartFile.getOriginalFilename();
				if (!"".equalsIgnoreCase(fileName)) {
					// Handle file content - multipartFile.getInputStream()
					multipartFile.transferTo(new File(saveDirectory + fileName));
					fileNames.add(fileName);
				}
			}
		} else {
			return "File not found";
		}
		map.addAttribute("files", fileNames);
		return "uploadfilesuccess";
	}

	/**
	 * Sample: POST with files — echoes headers, request attributes and
	 * parameters back as a JSON list and stores the uploaded files.
	 */
	@RequestMapping(value = "/restful.postwithfile", method = RequestMethod.POST)
	@ResponseBody
	public List<HashMap<String,Object>> uploadMultipleFileHandler(
			@RequestHeader HashMap<String,Object> headMap,
			@RequestParam HashMap<String,Object> paraMap,
			@RequestParam("fileUpload") MultipartFile[] files,
			HttpServletRequest request)
			throws IllegalStateException, IOException {

		List<HashMap<String,Object>> list = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> resultMap = new HashMap<String,Object>();

		// Header info
		for( String key : headMap.keySet() ){
			HashMap<String,Object> headData = new HashMap<String,Object>();
			headData.put(key,headMap.get(key));
			list.add(headData);
		}

		// Request-attribute info
		Enumeration<String> attrEnume = request.getAttributeNames();
		while(attrEnume.hasMoreElements()){
			HashMap<String,Object> attrData = new HashMap<String,Object>();
			String key = attrEnume.nextElement();
			// BUGFIX: attribute values were previously read from headMap (the
			// header map), so every attribute came back null; read them from
			// the request they were enumerated from.
			attrData.put(key,request.getAttribute(key));
			list.add(attrData);
		}

		// Parameter info
		for( String key : paraMap.keySet() ){
			HashMap<String,Object> paramData = new HashMap<String,Object>();
			paramData.put(key,paraMap.get(key));
			list.add(paramData);
		}

		// NOTE(review): saveDirectory has no trailing separator, so files are
		// written as "d:/temp/00.test<name>" — confirm whether "00.test/" was
		// intended before changing the path.
		String saveDirectory = "d:/temp/00.test";
		List<String> fileNames = new ArrayList<String>();

		if (null != files && files.length > 0) {
			for (MultipartFile multipartFile : files) {
				String fileName = multipartFile.getOriginalFilename();
				if (!"".equalsIgnoreCase(fileName)) {
					// Handle file content - multipartFile.getInputStream()
					multipartFile.transferTo(new File(saveDirectory + fileName));
					fileNames.add(fileName);
				}
			}
		} else {
			resultMap.put("result", "File not found");
			list.add(resultMap);
			return list;
		}
		resultMap.put("result", "process success");
		list.add(resultMap);
		return list;
	}
	/** Restful Test End... */
}
<file_sep>/EDMS3/src/kr/co/exsoft/user/service/GroupServiceImpl.java
package kr.co.exsoft.user.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.dao.DocumentDao;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.ConfigData;
import kr.co.exsoft.eframework.util.PatternUtil;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.eframework.util.XlsUtil;
import kr.co.exsoft.folder.dao.FolderDao;
import kr.co.exsoft.folder.vo.FolderVO;
import kr.co.exsoft.permission.dao.AclDao;
import kr.co.exsoft.permission.service.AclService;
import kr.co.exsoft.permission.vo.AclItemVO;
import kr.co.exsoft.permission.vo.AclVO;
import kr.co.exsoft.user.dao.GroupDao;
import kr.co.exsoft.user.dao.UserDao;
import kr.co.exsoft.user.vo.GroupVO;
import kr.co.exsoft.user.vo.GroupedVO;
import kr.co.exsoft.user.vo.UserVO;
/**
* Group 서비스 구현 부분
* @author <NAME>
* @since 2014.07.17
* @version 3.0
*
*/
@Service("groupService")
public class GroupServiceImpl extends ExsoftAbstractServiceImpl implements GroupService {
@Autowired
@Qualifier("sqlSession")
private SqlSession sqlSession;
@Autowired
@Qualifier("sqlSessionBatch")
private SqlSession sqlSessionBatch;
@Autowired
private CommonService commonService;
@Autowired
private AclService aclService;
@Override
public int groupedWrite(HashMap<String,Object> map) {
	// Insert a single group-membership row via the Group DAO and return
	// the affected row count.
	GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
	return groupDao.groupedWrite(map);
}
@Override
public void batchUserWrite(List<HashMap<String,Object>> userList) throws Exception {
	// Runs every membership insert through the batch-mode SqlSession.
	GroupDao batchDao = sqlSessionBatch.getMapper(GroupDao.class);
	for (int idx = 0; idx < userList.size(); idx++) {
		batchDao.groupedWrite(userList.get(idx));
	}
}
@Override
public List<GroupVO> rootGroupList(HashMap<String,Object> params) throws Exception {
	// Fetch the top-level (root) groups matching the given parameters.
	return sqlSession.getMapper(GroupDao.class).rootGroupList(params);
}
@Override
public List<GroupVO> childGroupList(HashMap<String,Object> params) throws Exception {
	// Fetch the direct child groups for the parent given in params.
	return sqlSession.getMapper(GroupDao.class).childGroupList(params);
}
@Override
public GroupVO groupDetail(String groupId) throws Exception {
	// Look up a single group's detail record by its id.
	return sqlSession.getMapper(GroupDao.class).groupDetail(groupId);
}
@Override
public Map<String, Object> groupWrite(GroupVO groupVO, SessionVO sessionVO) throws Exception {
Map<String, Object> resultMap = new HashMap<String, Object>();
GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
AclDao aclDao = sqlSession.getMapper(AclDao.class);
// ----------------------------------
// 1. 그룹 ID 생성.
// ----------------------------------
groupVO.setGroup_id(String.format("%s%012d", Constant.ID_PREFIX_GROUP, commonService.commonNextVal(Constant.COUNTER_ID_FILE)));
groupVO.setDept_cd(groupVO.getGroup_id());
// ----------------------------------
// 2. 그룹 등록
// ----------------------------------
groupDao.groupWrite(groupVO);
// ----------------------------------
// 3. 부서맵일 경우 (기본 ACL, 부서 폴더 등록)
// ----------------------------------
if (groupVO.getMap_id().equals(Constant.MAP_ID_DEPT)) {
List<String> groupIdList = new ArrayList<String>();
List<AclItemVO> aclItemList = new ArrayList<AclItemVO>();
groupIdList.add(sessionVO.getSessGroup_id());
// ----------------------------------
// 3-1. 부서 기본 ACL 생성
// ----------------------------------
// 3-1-1.ACL
AclVO aclVO = new AclVO();
aclVO.setAcl_id(CommonUtil.getStringID(Constant.ID_PREFIX_ACL, commonService.commonNextVal(Constant.COUNTER_ID_ACL)));
aclVO.setAcl_name(String.format("%s 부서 기본 권한", groupVO.getGroup_name_ko()));
aclVO.setAcl_type(Constant.ACL_ACL_TYPE_TEAM);
aclVO.setOpen_id(groupVO.getGroup_id());
aclVO.setOpen_name(groupVO.getGroup_name_ko());
aclVO.setOpen_isgroup(Constant.T);
aclVO.setCreator_id(sessionVO.getSessId());
aclVO.setSort_index("0");
// 3-1-2.ACL Item
AclItemVO ownerFolderAclItem = new AclItemVO(aclVO.getAcl_id(), "F", "OWNER", "F", "T", "T", "T", "T", "T", "T", "T", "T");
AclItemVO ownerDocAclItem = new AclItemVO(aclVO.getAcl_id(), "D", "OWNER", "F", "T", "T", "T", "T", "T", "T", "T", "T");
AclItemVO worldFolderAclItem = new AclItemVO(aclVO.getAcl_id(), "F", "WORLD", "F", "T", "T", "T", "F", "F", "F", "F", "F");
AclItemVO worldDocAclItem = new AclItemVO(aclVO.getAcl_id(), "D", "WORLD", "F", "T", "T", "T", "F", "F", "F", "F", "F");
AclItemVO groupFolderAclItem = new AclItemVO(aclVO.getAcl_id(), "F", groupVO.getGroup_id(), "T", "F", "T", "T", "F", "F", "F", "F", "F");
AclItemVO groupDocAclItem = new AclItemVO(aclVO.getAcl_id(), "D", groupVO.getGroup_id(), "T", "F", "T", "T", "T", "F", "T", "F", "F");
aclItemList.add(ownerFolderAclItem);
aclItemList.add(ownerDocAclItem);
aclItemList.add(worldFolderAclItem);
aclItemList.add(worldDocAclItem);
aclItemList.add(groupFolderAclItem);
aclItemList.add(groupDocAclItem);
// 3-1-3. 동일한 이름의 권한이 있는지 체크
HashMap<String, Object> validation = new HashMap<String, Object>();
validation.put("acl_name", aclVO.getAcl_name());
if (aclDao.aclCountByAclName(validation) > 0) {
throw processException("acl.fail.acl.name.duplication");
}
// 3-1-4. ACL 생성
if (aclDao.aclWrite(aclVO) == 0) {
throw processException("common.system.error");
}
// 3-1-5. ACL Item 생성
for (AclItemVO aclItemVO : aclItemList) {
if (aclDao.aclItemWrite(aclItemVO) == 0) {
throw processException("common.system.error");
}
}
// ----------------------------------
// 3-2. 부서 폴더 등록
// ----------------------------------
// 부서 폴더 ID 얻기
String folder_id = CommonUtil.getChangedResourceIDByPrefix(groupVO.getGroup_id(), Constant.ID_PREFIX_FOLDER);
// 상위 부서 폴더 ID 얻기
String parent_id = CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER);
// .a 부서 폴더 VO를 생성한다
FolderVO folderVO = new FolderVO();
folderVO.setFolder_id(folder_id);
folderVO.setFolder_name_ko(groupVO.getGroup_name_ko());
folderVO.setFolder_name_en(groupVO.getGroup_name_en());
// folderVO.setFolder_name_zh(groupVO.getGroup_name_zh());
folderVO.setParent_id(parent_id);
folderVO.setFolder_type(Constant.FOLDER_TYPE_DEPT);
folderVO.setMap_id(Constant.MAP_ID_DEPT);
folderVO.setSort_index(groupVO.getSort_index());
folderVO.setFolder_status(Constant.FOLDER_STATUS_CREATE);
folderVO.setAcl_id(aclVO.getAcl_id());
folderVO.setCreator_id(sessionVO.getSessId());
folderVO.setCreator_name(sessionVO.getSessName());
folderVO.setIs_type(Constant.FOLDER_TYPE_ALL_TYPE);
folderVO.setIs_save(Constant.FOLDER_SAVE_NO);
// .b 폴더를 등록한다
folderDao.folderWrite(folderVO);
// ----------------------------------
// 3-3. rGate 폴더 등록
// ----------------------------------
// 추후 구현 /applecode
}
// ----------------------------------
// 4. 히스토리 등록
// ----------------------------------
groupVO.setGroup_seq(commonService.commonNextVal(Constant.COUNTER_ID_GROUP_HT));
groupVO.setStatus(Constant.HISTORY_STATUS_CREATE);
groupDao.groupHistoryWrite(groupVO);
resultMap.put("result", Constant.RESULT_SUCCESS);
return resultMap;
}
@Override
public Map<String, Object> groupUpdate(GroupVO groupVO, SessionVO sessionVO) throws Exception {
    Map<String, Object> resultMap = new HashMap<String, Object>();
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    AclDao aclDao = sqlSession.getMapper(AclDao.class);
    // ------------------------------------
    // 1. Update the group record itself.
    // ------------------------------------
    groupDao.groupUpdate(groupVO);
    // ------------------------------------
    // 2. For the department map, also sync the department folder and its default ACL.
    // ------------------------------------
    if (groupVO.getMap_id().equals(Constant.MAP_ID_DEPT)) {
        // ------------------------------------
        // 2-1. Update the department folder.
        // ------------------------------------
        // Department folder ID derived from the group ID.
        String folder_id = CommonUtil.getChangedResourceIDByPrefix(groupVO.getGroup_id(), Constant.ID_PREFIX_FOLDER);
        // Parent department folder ID derived from the parent group ID.
        String parent_id = CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER);
        HashMap<String, Object> folderParam = new HashMap<String, Object>();
        folderParam.put("folder_id", folder_id);
        // Load the existing folder; may be null if the department folder was never created.
        FolderVO folderVO = folderDao.folderDetail(folderParam);
        if (folderVO != null) {
            // Propagate the new group names / position to the folder.
            folderVO.setFolder_name_ko(groupVO.getGroup_name_ko());
            folderVO.setFolder_name_en(groupVO.getGroup_name_en());
            // folderVO.setFolder_name_zh(groupVO.getGroup_name_zh());
            folderVO.setFolder_name_ja(groupVO.getGroup_name_ja());
            folderVO.setParent_id(parent_id);
            folderVO.setSort_index(groupVO.getSort_index());
            folderVO.setFolder_status(groupVO.getStatus());
            folderVO.setUpdate_action(Constant.ACTION_UPDATE);
            folderDao.folderUpdate(folderVO);
            // ------------------------------------
            // 2-2. Rename the department default ACL to follow the new group name.
            // BUGFIX: this block previously ran outside the null check and threw a
            // NullPointerException when the department folder did not exist.
            // ------------------------------------
            AclVO aclVO = new AclVO();
            aclVO.setAcl_id(folderVO.getAcl_id());
            // Keep blank so the VO's default value 'F' does not unintentionally
            // overwrite the stored open_isgroup flag.
            aclVO.setOpen_isgroup("");
            aclVO.setAcl_name(String.format("%s 부서 기본 권한", groupVO.getGroup_name_ko()));
            aclDao.aclUpdate(aclVO);
        }
        // ------------------------------------
        // 2-3. rGate folder update: to be implemented later.
        // ------------------------------------
    }
    // ------------------------------------
    // 3. Update group membership (group-user mapping).
    // ------------------------------------
    // 3-1. Department map: move users between departments.
    if (groupVO.getMap_id().equals(Constant.MAP_ID_DEPT)) {
        HashMap<String, Object> selectedUserListInfo = new HashMap<String, Object>();
        List<String> removeUserList = new ArrayList<String>();
        List<String> appendUserList = new ArrayList<String>();
        // 3-1-2. Current members of this group.
        List<GroupedVO> groupUserList = groupDao.groupedList(groupVO.getGroup_id());
        // 3-1-3. Members no longer present in the submitted user list.
        for (GroupedVO groupUser : groupUserList) {
            if (!groupVO.getUser_id_list().contains(groupUser.getUser_id())) {
                removeUserList.add(groupUser.getUser_id());
            }
        }
        // 3-1-4. Move removed members to the "unassigned" department,
        // but only when the temporary/unassigned group feature is enabled.
        if (removeUserList.size() > 0 && ConfigData.getBoolean("USE_TEMP_GROUP")) {
            GroupVO tempGroup = groupDao.independentGroupDetail();
            if (tempGroup == null) {
                throw processException("group.fail.independent.null");
            }
            selectedUserListInfo.put("group_id", tempGroup.getGroup_id());
            for (String removeUserId : removeUserList) {
                selectedUserListInfo.put("user_id", removeUserId);
                groupDao.groupedUpdate(selectedUserListInfo);
            }
        }
        // 3-1-5. Users selected now but not previously members are new joiners.
        for (String user : groupVO.getUser_id_list()) {
            boolean isMember = false;
            for (GroupedVO groupUser : groupUserList) {
                if (groupUser.getUser_id().equals(user)) {
                    isMember = true;
                }
            }
            if (!isMember)
                appendUserList.add(user);
        }
        // 3-1-6. Move new joiners into this department.
        if (appendUserList.size() > 0) {
            selectedUserListInfo.put("group_id", groupVO.getGroup_id());
            for (String user : appendUserList) {
                selectedUserListInfo.put("user_id", user);
                groupDao.groupedUpdate(selectedUserListInfo);
            }
        }
    }
    // 3-2. Other maps: replace the membership with the submitted list.
    else {
        // 3-2-1. Remove all current members of the group.
        HashMap<String, Object> groupedOptions = new HashMap<String, Object>();
        groupedOptions.put("group_id", groupVO.getGroup_id());
        groupDao.groupedDelete(groupedOptions);
        for (String userId : groupVO.getUser_id_list()) {
            // BUGFIX: was `userId != ""`, which compares references and is
            // almost always true; use isEmpty() for value comparison.
            if (userId != null && !userId.isEmpty()) {
                HashMap<String, Object> userInfo = new HashMap<String, Object>();
                userInfo.put("group_id", groupVO.getGroup_id());
                userInfo.put("user_id", userId);
                userInfo.put("is_default", Constant.F);
                groupDao.groupedWrite(userInfo);
            }
        }
    }
    // ------------------------------------
    // 4. Write a history row for the group update.
    // ------------------------------------
    groupVO.setGroup_seq(commonService.commonNextVal(Constant.COUNTER_ID_GROUP_HT));
    groupVO.setStatus(Constant.HISTORY_STATUS_UPDATE);
    groupVO.setDept_cd(groupVO.getGroup_id());
    groupDao.groupHistoryWrite(groupVO);
    resultMap.put("result", Constant.RESULT_SUCCESS);
    return resultMap;
}
@Override
public Map<String, Object> groupDelete(GroupVO groupVO, SessionVO sessionVO) throws Exception {
    Map<String, Object> resultMap = new HashMap<String, Object>();
    HashMap<String, Object> folderDeleteParam = new HashMap<String, Object>();
    List<String> aclIds = new ArrayList<String>();
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    // ------------------------------------
    // 1. Delete the group record.
    // ------------------------------------
    groupDao.groupDelete(groupVO);
    // For the department map, also remove the department folder and its ACL.
    if (groupVO.getMap_id().equals(Constant.MAP_ID_DEPT)) {
        // ------------------------------------
        // 2. Delete the department folder.
        // ------------------------------------
        // Department folder ID derived from the group ID.
        folderDeleteParam.put("folder_id", CommonUtil.getChangedResourceIDByPrefix(groupVO.getGroup_id(), Constant.ID_PREFIX_FOLDER));
        // Load the folder first so we know which ACL to remove.
        FolderVO groupFolder = folderDao.folderDetail(folderDeleteParam);
        // BUGFIX: guard against a missing folder record — the previous code
        // dereferenced groupFolder unconditionally and threw a NullPointerException
        // when the department folder had never been created.
        if (groupFolder != null) {
            folderDao.folderDelete(folderDeleteParam);
            // ------------------------------------
            // 3. rGate folder deletion: to be implemented later.
            // ------------------------------------
            // ------------------------------------
            // 4. Delete the folder's ACL.
            // ------------------------------------
            aclIds.add(groupFolder.getAcl_id());
            aclService.aclDelete(null, aclIds, sessionVO);
        }
    }
    // ------------------------------------
    // 5. Write a history row for the deletion.
    // ------------------------------------
    groupVO.setGroup_seq(commonService.commonNextVal(Constant.COUNTER_ID_GROUP_HT));
    groupVO.setStatus(Constant.HISTORY_STATUS_DELETE);
    groupVO.setDept_cd(groupVO.getGroup_id());
    groupDao.groupHistoryWrite(groupVO);
    resultMap.put("result", Constant.RESULT_SUCCESS);
    return resultMap;
}
@Override
public Map<String, Object> groupMove(GroupVO groupVO, SessionVO sessionVO) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    // 1. Move the group node itself.
    groupDao.groupMove(groupVO);
    // 2. For the department map, re-parent the matching department folder as well.
    if (groupVO.getMap_id().equals(Constant.MAP_ID_DEPT)) {
        // Folder IDs are derived from the group / parent-group IDs.
        String deptFolderId = CommonUtil.getChangedResourceIDByPrefix(groupVO.getGroup_id(), Constant.ID_PREFIX_FOLDER);
        String newParentFolderId = CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER);
        HashMap<String, Object> lookup = new HashMap<String, Object>();
        lookup.put("folder_id", deptFolderId);
        // Only update when the department folder actually exists.
        FolderVO deptFolder = folderDao.folderDetail(lookup);
        if (deptFolder != null) {
            deptFolder.setParent_id(newParentFolderId);
            deptFolder.setUpdate_action(Constant.ACTION_MOVE);
            folderDao.folderUpdate(deptFolder);
        }
    }
    // 3. Record a history row for the move.
    groupVO.setGroup_seq(commonService.commonNextVal(Constant.COUNTER_ID_GROUP_HT));
    groupVO.setStatus(Constant.HISTORY_STATUS_UPDATE);
    groupVO.setDept_cd(groupVO.getGroup_id());
    groupDao.groupHistoryWrite(groupVO);
    Map<String, Object> response = new HashMap<String, Object>();
    response.put("result", Constant.RESULT_SUCCESS);
    return response;
}
@Override
public GroupVO groupWriteValid(HashMap<String, Object> map) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    // ----------------------------------
    // 1. Convert the request map into a GroupVO.
    // Use StringUtil accessors (as groupUpdateValid does) so a missing key
    // yields an empty value instead of a NullPointerException.
    // ----------------------------------
    GroupVO groupVO = new GroupVO();
    groupVO.setGroup_name_ko(StringUtil.getMapString(map, "group_name_ko"));
    groupVO.setGroup_name_en(StringUtil.getMapString(map, "group_name_en"));
    groupVO.setGroup_status(StringUtil.getMapString(map, "group_status"));
    groupVO.setParent_id(StringUtil.getMapString(map, "parent_id"));
    groupVO.setSort_index(StringUtil.getMapInteger(map, "sort_index"));
    groupVO.setMap_id(StringUtil.getMapString(map, "map_id"));
    // ----------------------------------
    // 2. Required-field validation.
    // BUGFIX: the original condition tested map_id twice; the duplicate is removed.
    // ----------------------------------
    if (groupVO.getMap_id().equals("") || groupVO.getGroup_name_ko().equals("") || groupVO.getGroup_status().equals("")
            || groupVO.getParent_id().equals("")) {
        throw processException("common.required.error");
    }
    // ----------------------------------
    // 3. Reject a duplicate group name under the same parent group.
    // ----------------------------------
    if (groupDao.groupIsExistByGroupNameAndParentIdList(groupVO) > 0) {
        throw processException("groupName.duplication.error");
    }
    // ----------------------------------
    // 4. Reject a duplicate folder name under the same parent folder.
    // ----------------------------------
    FolderVO folderVO = new FolderVO();
    folderVO.setFolder_name_ko(groupVO.getGroup_name_ko());
    folderVO.setParent_id(CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER));
    if (folderDao.folderIsExistByFolderNameAndParentID(folderVO) > 0) {
        throw processException("folderName.duplication.error");
    }
    return groupVO;
}
@Override
public GroupVO groupUpdateValid(HashMap<String, Object> map) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    // ----------------------------------
    // 1. Convert the request map into a GroupVO.
    // ----------------------------------
    GroupVO groupVO = new GroupVO();
    groupVO.setGroup_id(StringUtil.getMapString(map, "group_id"));
    groupVO.setGroup_name_ko(StringUtil.getMapString(map, "group_name_ko"));
    groupVO.setGroup_name_en(StringUtil.getMapString(map, "group_name_en"));
    groupVO.setGroup_status(StringUtil.getMapString(map, "group_status"));
    groupVO.setParent_id(StringUtil.getMapString(map, "parent_id"));
    groupVO.setSort_index(StringUtil.getMapInteger(map, "sort_index"));
    groupVO.setMap_id(StringUtil.getMapString(map, "map_id"));
    // BUGFIX: "".split(",") yields [""], so an empty submission previously put a
    // blank user id into user_id_list. Filter blank entries; the (possibly empty)
    // list is always set so downstream iteration never sees the blank sentinel.
    List<String> userIds = new ArrayList<String>();
    for (String userId : StringUtil.getMapString(map, "user_id_list").split(",")) {
        if (userId != null && !userId.isEmpty()) {
            userIds.add(userId);
        }
    }
    groupVO.setUser_id_list(userIds);
    // ----------------------------------
    // 2. Required-field validation.
    // ----------------------------------
    if (groupVO.getGroup_id().equals("") || groupVO.getMap_id().equals("") || groupVO.getGroup_name_ko().equals("")
            || groupVO.getGroup_status().equals("") || groupVO.getParent_id().equals("")) {
        throw processException("common.required.error");
    }
    // ----------------------------------
    // 3. Extra validation when the group is being re-parented.
    // ----------------------------------
    if (StringUtil.getMapString(map, "is_changed_parent").equals(Constant.TRUE)) {
        // 3-1. Reject a duplicate group name under the new parent group.
        if (groupDao.groupIsExistByGroupNameAndParentIdList(groupVO) > 0) {
            throw processException("groupName.duplication.error");
        }
        // 3-2. Reject a duplicate folder name under the new parent folder.
        FolderVO folderVO = new FolderVO();
        folderVO.setFolder_name_ko(groupVO.getGroup_name_ko());
        folderVO.setParent_id(CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER));
        if (folderDao.folderIsExistByFolderNameAndParentID(folderVO) > 0) {
            throw processException("folderName.duplication.error");
        }
    }
    return groupVO;
}
@Override
public GroupVO groupDeleteValid(HashMap<String, Object> map) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    UserDao userDao = sqlSession.getMapper(UserDao.class);
    DocumentDao documentDao = sqlSession.getMapper(DocumentDao.class);
    AclDao aclDao = sqlSession.getMapper(AclDao.class);
    HashMap<String, Object> groupInfo = new HashMap<String, Object>();
    groupInfo.put("groupId", map.get("group_id"));
    // The group ID doubles as parentId (child-group lookup) and accessor_id (ACL item lookup).
    groupInfo.put("parentId", map.get("group_id"));
    groupInfo.put("accessor_id", map.get("group_id"));
    groupInfo.put("folder_id", CommonUtil.getChangedResourceIDByPrefix(StringUtil.getMapString(map, "group_id"), Constant.ID_PREFIX_FOLDER));
    // ---------------------------------------------------
    // 1. The group must have no members.
    // ---------------------------------------------------
    List<UserVO> userList = userDao.groupUserList(groupInfo);
    if (userList.size() > 0) {
        throw processException("group.fail.user.exists");
    }
    // ---------------------------------------------------
    // 2. The group must have no direct child groups.
    // ---------------------------------------------------
    List<GroupVO> groupList = groupDao.childGroupList(groupInfo);
    if (groupList.size() > 0) {
        throw processException("group.fail.child.group.exists");
    }
    // ---------------------------------------------------
    // 3. The group must not be referenced as an accessor in any ACL item.
    // BUGFIX: folderDetail may return null (no department folder); the previous
    // code dereferenced it unconditionally and threw a NullPointerException.
    // ---------------------------------------------------
    FolderVO groupFolder = folderDao.folderDetail(groupInfo);
    if (groupFolder != null) {
        groupInfo.put("acl_id", groupFolder.getAcl_id());
    }
    // NOTE(review): when no folder exists the lookup runs without acl_id —
    // assumed the query then matches on accessor_id alone; confirm the mapper.
    if (aclDao.aclItemCountByAccessorId(groupInfo) > 0) {
        throw processException("group.fail.bind.accessors");
    }
    // ---------------------------------------------------
    // 4. The department folder must have no child folders.
    // ---------------------------------------------------
    if (folderDao.existChildFolder(groupInfo) > 0) {
        throw processException("group.fail.child.exists");
    }
    // ---------------------------------------------------
    // 5. The department folder must contain no documents.
    // ---------------------------------------------------
    if (documentDao.getDocumentCountByFolderId(groupInfo) > 0) {
        throw processException("group.fail.document.exist");
    }
    // ---------------------------------------------------
    // 6. No user may have this group set as their managing department.
    // ---------------------------------------------------
    if (groupDao.groupManagerCnt(groupInfo) > 0) {
        throw processException("group.fail.manager.exist");
    }
    // All checks passed: return the group so the caller can delete it.
    GroupVO groupVO = groupDao.groupDetail(StringUtil.getMapString(map, "group_id"));
    return groupVO;
}
@Override
public GroupVO groupMoveValid(HashMap<String, Object> map) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    // Load the group being moved; when it does not exist there is nothing to
    // validate and the (null) value is handed back to the caller unchanged.
    GroupVO groupVO = groupDao.groupDetail(StringUtil.getMapString(map, "group_id"));
    if (groupVO == null) {
        return groupVO;
    }
    // Apply the requested target location to the loaded group.
    groupVO.setParent_id(StringUtil.getMapString(map, "parent_id"));
    groupVO.setMap_id(StringUtil.getMapString(map, "map_id"));
    // A group with the same name must not already exist under the new parent.
    if (groupDao.groupIsExistByGroupNameAndParentIdList(groupVO) > 0) {
        throw processException("groupName.duplication.error");
    }
    // A folder with the same name must not already exist under the new parent folder.
    FolderVO folderCheck = new FolderVO();
    folderCheck.setFolder_name_ko(groupVO.getGroup_name_ko());
    folderCheck.setParent_id(CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER));
    if (folderDao.folderIsExistByFolderNameAndParentID(folderCheck) > 0) {
        throw processException("folderName.duplication.error");
    }
    return groupVO;
}
@Override
public List<GroupVO> groupExcelList(String fileName) throws Exception {
    GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
    FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
    HashMap<String, Object> param = new HashMap<String, Object>();
    List<GroupVO> ret = new ArrayList<GroupVO>();
    XlsUtil excelUtil = new XlsUtil();
    @SuppressWarnings("rawtypes")
    List rowList = excelUtil.getExcelList(fileName);
    for (int i = 0; i < rowList.size(); i++) {
        // Normalize the row dump "[name, enName, parentName]" into "name,enName,parentName".
        String tmpLine = rowList.get(i).toString().replaceAll("\\[", "").replaceAll("\\]", "").replaceAll("[ ]", "");
        String[] grpList = tmpLine.split(",");
        // A valid row has exactly three columns.
        if (grpList.length != 3) { continue; }
        String grpName = grpList[0];
        String grpEnName = grpList[1];
        String grpSuName = grpList[2];
        // Group name and parent-group name are mandatory.
        if (StringUtil.isEmpty(grpName) || StringUtil.isEmpty(grpSuName)) {
            continue;
        }
        // 0. Skip group names that are not usable as folder names.
        if (PatternUtil.webfolderCheck(grpName)) {
            continue;
        }
        // 1. The parent group must exist and be unique by name (skip ambiguous parents).
        param.put("map_id", Constant.MAP_ID_DEPT);
        param.put("grpNm", grpSuName);
        int result = groupDao.chkGroupCnt(param);
        if (result != 1) { continue; }
        GroupVO suGroup = groupDao.chkGroupName(param);
        if (suGroup == null) { continue; }
        // 2. Skip rows whose group name already exists.
        param.put("grpNm", grpName);
        GroupVO cuGroup = groupDao.chkGroupName(param);
        if (cuGroup != null) { continue; }
        // 3. Build a candidate VO for the new group.
        GroupVO groupVO = new GroupVO();
        groupVO.setGroup_name_ko(grpName);
        groupVO.setGroup_name_en(grpEnName);
        groupVO.setParent_id(suGroup.getGroup_id());
        groupVO.setMap_id(Constant.MAP_ID_DEPT);
        // 4. Reject a duplicate group name under the same parent group.
        if (groupDao.groupIsExistByGroupNameAndParentIdList(groupVO) > 0) {
            continue;
        }
        // 5. Reject a duplicate folder name under the same parent folder.
        FolderVO folderVO = new FolderVO();
        folderVO.setFolder_name_ko(groupVO.getGroup_name_ko());
        folderVO.setParent_id(CommonUtil.getChangedResourceIDByPrefix(groupVO.getParent_id(), Constant.ID_PREFIX_FOLDER));
        if (folderDao.folderIsExistByFolderNameAndParentID(folderVO) > 0) {
            continue;
        }
        // 6. De-duplicate within this import batch.
        // BUGFIX: isDup used to be declared outside the loop and was never reset,
        // so once one duplicate was found every following row was dropped too.
        boolean isDup = false;
        for (GroupVO vo : ret) {
            if (vo.getGroup_name_ko().equals(groupVO.getGroup_name_ko())) {
                isDup = true;
                break;
            }
        }
        // 7. Keep the row when it passed every check.
        if (!isDup) { ret.add(groupVO); }
    }
    return ret;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/vo/LangCodeVO.java
package kr.co.exsoft.common.vo;
/**
* 코드언어 VO
* @author 패키지 개발팀
* @since 2014.07.29
* @version 3.0
*
*/
public class LangCodeVO {

    // CODE_ID of the XR_CODE row this translation belongs to.
    private String code_id = "";
    // Language code: KO (Korean), JA (Japanese), ZH (Chinese), EN (English).
    private String lang_cd = "";
    // Localized code name.
    private String code_nm = "";
    // Group code value.
    private String gcode_id = "";

    /** All fields start as empty strings (never null). */
    public LangCodeVO() {
    }

    public String getCode_id() {
        return this.code_id;
    }

    public void setCode_id(String code_id) {
        this.code_id = code_id;
    }

    public String getLang_cd() {
        return this.lang_cd;
    }

    public void setLang_cd(String lang_cd) {
        this.lang_cd = lang_cd;
    }

    public String getCode_nm() {
        return this.code_nm;
    }

    public void setCode_nm(String code_nm) {
        this.code_nm = code_nm;
    }

    public String getGcode_id() {
        return this.gcode_id;
    }

    public void setGcode_id(String gcode_id) {
        this.gcode_id = gcode_id;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/exception/BizException.java
package kr.co.exsoft.eframework.exception;
import java.text.MessageFormat;
import java.util.Locale;
import org.springframework.context.MessageSource;
/***
 * Business exception raised from the service layer; subclass of BaseException.
 * All constructors delegate to one of two terminal forms: a plain-message form
 * (optionally formatted with MessageFormat) or a MessageSource form that
 * resolves a message key against a locale.
 * @author <NAME>
 * @since 2014.07.15
 * @version 3.0
 *
 */
public class BizException extends BaseException {

    private static final long serialVersionUID = 1L;

    /** No-arg form: placeholder message, no parameters, no cause. */
    public BizException() {
        this("BaseException without message", null, null);
    }

    /** Plain message, no parameters, no cause. */
    public BizException(String defaultMessage) {
        this(defaultMessage, null, null);
    }

    /** Plain message wrapping an underlying cause. */
    public BizException(String defaultMessage, Exception wrappedException) {
        this(defaultMessage, null, wrappedException);
    }

    /**
     * Terminal plain-message form. When messageParameters is non-null the
     * message is treated as a MessageFormat pattern and formatted with them;
     * the result and the cause are stored on the inherited BaseException fields.
     */
    public BizException(String defaultMessage, Object[] messageParameters,Exception wrappedException) {
        String userMessage = defaultMessage;
        if (messageParameters != null) {
            userMessage = MessageFormat.format(defaultMessage,messageParameters);
        }
        this.message = userMessage;
        this.wrappedException = wrappedException;
    }

    /** Message-key form: resolved with the JVM default locale, no cause. */
    public BizException(MessageSource messageSource, String messageKey) {
        this(messageSource, messageKey, null, null, Locale.getDefault(), null);
    }

    /** Message-key form with a cause, resolved with the JVM default locale. */
    public BizException(MessageSource messageSource, String messageKey,Exception wrappedException) {
        this(messageSource, messageKey, null, null, Locale.getDefault(),wrappedException);
    }

    /** Message-key form with an explicit locale and a cause. */
    public BizException(MessageSource messageSource, String messageKey,Locale locale,
            Exception wrappedException) {
        this(messageSource, messageKey, null, null, locale, wrappedException);
    }

    /** Message-key form with parameters, explicit locale, and a cause. */
    public BizException(MessageSource messageSource, String messageKey,
            Object[] messageParameters, Locale locale,Exception wrappedException) {
        this(messageSource, messageKey, messageParameters, null, locale,wrappedException);
    }

    /** Message-key form with parameters and a cause, JVM default locale. */
    public BizException(MessageSource messageSource, String messageKey,Object[] messageParameters,
            Exception wrappedException) {
        this(messageSource, messageKey, messageParameters, null, Locale.getDefault(), wrappedException);
    }

    /** Message-key form with parameters, fallback message, and a cause. */
    public BizException(MessageSource messageSource, String messageKey,Object[] messageParameters,
            String defaultMessage,Exception wrappedException) {
        this(messageSource, messageKey, messageParameters, defaultMessage,Locale.getDefault(), wrappedException);
    }

    /**
     * Terminal MessageSource form: resolves messageKey (with parameters)
     * against the given locale, falling back to defaultMessage when the key
     * is not found, and stores key/parameters/message/cause on the inherited
     * BaseException fields.
     */
    public BizException(MessageSource messageSource, String messageKey,Object[] messageParameters,
            String defaultMessage, Locale locale,Exception wrappedException) {
        this.messageKey = messageKey;
        this.messageParameters = messageParameters;
        this.message = messageSource.getMessage(messageKey, messageParameters,defaultMessage, locale);
        this.wrappedException = wrappedException;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/service/CommonCustomServiceImpl.java
package kr.co.exsoft.common.service;
import java.util.HashMap;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
/**
* 메뉴/코드/세션 서비스 사이트 커스터마이징 구현 Sample
* @author 패키지 개발팀
* @since 2014.08.01
* @version 3.0
*
*/
@Service("commonCustomService")
public class CommonCustomServiceImpl extends CommonServiceImpl {

    @Autowired
    @Qualifier("sqlSession")
    private SqlSession sqlSession;

    /**
     * Site-customization sample: overrides the editor detail lookup and
     * returns a fixed demo record instead of querying the database.
     */
    @Override
    public CaseInsensitiveMap editorDetailInfo(HashMap<String,Object> map) throws Exception {
        // The real DB lookup is intentionally disabled in this sample:
        //CommonDao commonDao = sqlSession.getMapper(CommonDao.class);
        //detail = commonDao.editorDetailInfo(map);
        CaseInsensitiveMap detail = new CaseInsensitiveMap();
        detail.put("title","사이트커스터마이징");
        detail.put("create_dt","2014-08-01 14:00:05");
        detail.put("content","ttttttttttttttttttttttttttttttttttttttttt<br>ddddddddddddddddddd<br>");
        return detail;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/dao/ConfDao.java
package kr.co.exsoft.common.dao;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.vo.ConfVO;
import org.springframework.stereotype.Repository;
import org.apache.commons.collections.map.CaseInsensitiveMap;
/**
 * DAO for system configuration (XR_CONF-style settings) management.
 *
 * @author Package team
 * @since 2014. 9. 10.
 * @version 1.0
 *
 */
@Repository(value = "confDao")
public interface ConfDao {

    /**
     * Reads the system configuration entries matching the given criteria.
     *
     * @param map lookup criteria
     * @return list of matching configuration rows
     */
    public List<ConfVO> sysConfigDetail(HashMap<String,Object> map);

    /**
     * Updates the system configuration.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int sysConfigUpdate(HashMap<String,Object> map);

    /**
     * Updates the audit configuration.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int auditConfigUpdate(HashMap<String,Object> map);

    /**
     * Updates the trash (recycle-bin) management policy.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int trashConfigUpdate(HashMap<String,Object> map);

    /**
     * Reads the version management policy.
     *
     * @return list of policy rows as case-insensitive maps
     */
    public List<CaseInsensitiveMap> versionConfigDetail();

    /**
     * Updates the version management policy.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int versionConfigUpdate(HashMap<String,Object> map);

    /**
     * Updates the attachment-file policy.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int fileConfigUpdate(HashMap<String,Object> map);

    /**
     * Reads the URL validity-period setting.
     *
     * @return setting row as a case-insensitive map
     */
    public CaseInsensitiveMap urlConfigDetail();

    /**
     * Updates the URL validity-period setting.
     *
     * @param map settings to apply
     * @return number of affected rows
     */
    public int urlConfigUpdate(HashMap<String,Object> map);
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/handler/SessionManager.java
package kr.co.exsoft.eframework.handler;
import java.io.Reader;
import java.util.HashMap;
import java.util.List;
import java.util.Date;
import java.util.ArrayList;
import java.text.SimpleDateFormat;
import java.util.Locale;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionBindingListener;
import javax.servlet.http.HttpSessionBindingEvent;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import kr.co.exsoft.user.dao.UserDao;
import kr.co.exsoft.user.vo.LoginLogVO;
import kr.co.exsoft.common.vo.SessionVO;
/***
* 세션 생성/소멸 리스너
* @author 패키지 개발팀
* @since 2014.07.15
* @version 3.0
*
*/
public class SessionManager implements HttpSessionBindingListener {

    protected static final Log logger = LogFactory.getLog(SessionManager.class);

    // NOTE(review): shared across all instances, re-created by every constructor
    // call, and accessed without synchronization — assumes a single listener
    // instance is ever registered; confirm against the web.xml / session setup.
    private static List<HttpSession> sessionList;

    // Timestamp formatter for session-creation logging.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.KOREA);

    public SessionManager() {
        sessionList = new ArrayList<HttpSession>();
    }

    /**
     * Called when this listener is bound into a session: registers the session
     * in the tracking list and logs its creation details.
     */
    @Override
    public void valueBound(HttpSessionBindingEvent arg0) {
        HttpSession session = arg0.getSession();
        sessionList.add(session);
        String creationTime = sdf.format(new Date(session.getCreationTime()));
        logger.info("HttpSession is created.");
        logger.info("Session ID: " + session.getId());
        logger.info("Creation Time: " + creationTime);
        logger.info("Timeout: " + session.getMaxInactiveInterval() + " seconds");
    }

    /**
     * Called when this listener is unbound (session invalidated): removes the
     * session from the tracking list and deletes its XR_LOGIN_LOG row.
     */
    @Override
    public void valueUnbound(HttpSessionBindingEvent arg0) {
        logger.info("SessionManager : session destroy");
        HttpSession session = arg0.getSession();
        if (session == null) {
            return;
        }
        // Drop the session from the tracking list.
        sessionList.remove(session);
        // Build a MyBatis session factory from the application config.
        SqlSessionFactory sqlSessionFactory = null;
        Reader reader = null;
        try {
            reader = Resources.getResourceAsReader("../config/mybatis-application.xml");
            sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
        } catch (Exception e) {
            logger.error("Failed to build SqlSessionFactory", e);
        } finally {
            // BUGFIX: the reader used to leak when build() threw.
            if (reader != null) {
                try {
                    reader.close();
                } catch (Exception ignore) {
                    // best-effort close
                }
            }
        }
        // BUGFIX: previously dereferenced a null factory (NPE) when the
        // configuration could not be loaded.
        if (sqlSessionFactory == null) {
            return;
        }
        SqlSession sqlSession = sqlSessionFactory.openSession(true);
        try {
            UserDao userDao = sqlSession.getMapper(UserDao.class);
            HashMap<String,Object> map = new HashMap<String,Object>();
            map.put("session_id",session.getId());
            LoginLogVO loginLogVO = userDao.loginLogDetail(map);
            if (loginLogVO != null) {
                userDao.loginLogDelete(map);
            }
        } finally {
            // BUGFIX: close the SqlSession even when a DAO call throws.
            sqlSession.close();
        }
    }

    /**
     *
     * <pre>
     * 1. Purpose: collect the user IDs of all currently tracked sessions.
     * 2. Notes: duplicates are skipped; sessions without a sessionVO are ignored.
     * </pre>
     * @Method Name : getSessionUserIdList
     * @return distinct list of logged-in user IDs
     */
    public static List<String> getSessionUserIdList() {
        List<String> user_id_list = new ArrayList<String>();
        try {
            for (HttpSession currentSession : sessionList) {
                SessionVO sessionVO = (SessionVO)currentSession.getAttribute("sessionVO");
                if(sessionVO != null && !user_id_list.contains(sessionVO.getSessId())) {
                    user_id_list.add(sessionVO.getSessId());
                }
            }
        }catch(Exception e) {
            logger.error(e.getMessage());
        }
        return user_id_list;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/document/controller/TypeAuthController.java
package kr.co.exsoft.document.controller;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.service.TypeService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.library.LocaleLibrary;
import kr.co.exsoft.eframework.util.ConfigData;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.ui.Model;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springmodules.validation.commons.DefaultBeanValidator;
/**
* 문서유형 관련 클래스
*
* @author 패키지팀
* @since 2014. 10. 14.
* @version 1.0
*
*/
@Controller
@SessionAttributes("sessionVO")
@RequestMapping("/type")
public class TypeAuthController {

    @Autowired
    private TypeService typeService;
    @Autowired
    private CommonService commonService;
    @Autowired
    private MessageSource messageSource;
    @Autowired
    private DefaultBeanValidator beanValidator;

    protected static final Log logger = LogFactory.getLog(TypeAuthController.class);

    /**
     * Returns the attribute list of a document type.
     * Expects the type identifier in the request parameter map ("type_id");
     * on failure the response carries result=false and a localized message.
     *
     * @param model Spring MVC model (unused)
     * @param sessionVO current user session
     * @param map request parameters (type_id)
     * @param request servlet request (unused)
     * @return JSON-serialized result map
     */
    @RequestMapping(value="/attrList.do", method=RequestMethod.POST)
    @ResponseBody
    public Map<String,Object> attrList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
            HttpServletRequest request) {
        // Prefer the session language; fall back to the configured default.
        String language = sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE");
        Locale locale = LocaleLibrary.setLocale(language);
        Map<String, Object> response = new HashMap<String, Object>();
        try {
            response = typeService.attrList(map);
        } catch (BizException e) {
            // Business failure: propagate the service's own message.
            response.put("result", Constant.RESULT_FALSE);
            response.put("message", e.getMessage());
        } catch (Exception e) {
            // Unexpected failure: return the generic localized system error.
            response.put("result", Constant.RESULT_FALSE);
            response.put("message", messageSource.getMessage("common.system.error", new Object[0], locale));
        }
        return response;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/controller/QuartzJob.java
package kr.co.exsoft.quartz.controller;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.context.ApplicationContext;
import org.springframework.scheduling.quartz.QuartzJobBean;
/**
 * Abstract base class for Quartz jobs.
 * Captures the Spring ApplicationContext stored in the JobDataMap (under the
 * key "applicationContext") before delegating to executeJob, so subclasses
 * can resolve Spring beans via getBean.
 *
 * @author Package team
 * @since 2014. 9. 30.
 * @version 1.0
 *
 */
public abstract class QuartzJob extends QuartzJobBean {
	// Spring context pulled from the JobDataMap on each execution.
	private ApplicationContext ctx;
	protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
		ctx = (ApplicationContext)context.getJobDetail().getJobDataMap().get("applicationContext");
		executeJob(context);
	}
	// Resolves a Spring bean by id from the captured context.
	protected Object getBean(String beanId) {
		return ctx.getBean(beanId);
	}
	// Subclasses implement the actual job logic here.
	protected abstract void executeJob(JobExecutionContext jobexecutioncontext);
}
<file_sep>/EDMS3/src/kr/co/exsoft/external/dao/ExternalDao.java
package kr.co.exsoft.external.dao;
import java.util.HashMap;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.springframework.stereotype.Repository;
/**
 * External-user mapper interface.
 * @author <NAME>
 * @since 2014.07.21
 * @version 3.0
 *
 */
@Repository(value = "externalDao")
public interface ExternalDao {
	/**
	 *
	 * <pre>
	 * 1. Purpose : look up external user details (sample)
	 * 2. Details :
	 * </pre>
	 * @Method Name : externalUserDetail
	 * @param map
	 * @return case-insensitive map of the user's columns
	 */
	public CaseInsensitiveMap externalUserDetail(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : register an external user (sample)
	 * 2. Details :
	 * </pre>
	 * @Method Name : externalUserWrite
	 * @param map
	 * @return number of affected rows
	 */
	public int externalUserWrite(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : register an external user's department membership (sample)
	 * 2. Details :
	 * </pre>
	 * @Method Name : externalGroupedWrite
	 * @param map
	 * @return number of affected rows
	 */
	public int externalGroupedWrite(HashMap<String,Object> map);
}
<file_sep>/EDMS3/src/kr/co/exsoft/user/vo/GroupedVO.java
package kr.co.exsoft.user.vo;
/**
 * Group (department) membership VO.
 * @author Package development team
 * @since 2014.09.10
 * @version 1.0
 *
 */
public class GroupedVO {
	private String group_id;	// group (department) id
	private String user_id;		// member user id
	private String is_default;	// whether this group is the user's default
	public String getGroup_id() {
		return group_id;
	}
	public void setGroup_id(String group_id) {
		this.group_id = group_id;
	}
	public String getUser_id() {
		return user_id;
	}
	public void setUser_id(String user_id) {
		this.user_id = user_id;
	}
	public String getIs_default() {
		return is_default;
	}
	public void setIs_default(String is_default) {
		this.is_default = is_default;
	}
}
<file_sep>/EDMS3/WebContent/js/common/tree.js
/******************************************************************************************************
* @since 2014/07
* @author 패키지 개발팀
* @version 1.1.0.0
*
* ver 1.1
* - parameter전달 방식을 json으로 변경
* - contextAction 추가
*****************************************************************************************************/
//--------------------------------
// eXsoft Framework Tree Object
//--------------------------------
//var XFTree = function(divId, context, url, mapId, workType, manageGroupId){
var XFTree = function(opt){
	// Legacy signature: var XFTree = function(divId, context, url, mapId, workType, manageGroupId)
	//----------------------------------------------------------------
	// 1.Default variable
	//----------------------------------------------------------------
	// divId : ID of the DIV the tree is rendered into
	// url : server URL the tree requests its data from
	// selectedNode : array of every node checked via checkbox
	// rootId : set when a specific group/folder should act as the tree root
	// isBasicTree : whether this is a basic-style tree; non-basic trees skip the forced selection on first load
	//----------------------------------------------------------------
	this.divId = opt.divId;
	this.url = opt.context + opt.url;
	this.contextRoot = opt.context;
	this.selectedNode = new Array();
	this.selectedNodeData = new Array();
	this.rootId = opt.manageGroupId == null ? null : opt.manageGroupId;
	this.mapId = opt.mapId == null ? "MYDEPT" : opt.mapId;
	this.workType = opt.workType == null ? "WORK_MYDEPT" : opt.workType; // WORK_MYDEPT (department), WORK_ALLDEPT (company-wide), WORK_PROJECT (project)
	this.isBasicTree = true;
	this.refreshNodeId = "";
	this.refreshNodeParentId = "";
	this.isFavoriteFolder = false; // switches the context menu to the favorites variant
	this.onlyVirtual = opt.onlyVirtual == null ? null : opt.onlyVirtual;
	this.contextAction = opt.contextAction;
	this.isSelectHiddenRoot = opt.isSelectHiddenRoot == null ? false : opt.isSelectHiddenRoot;
	this.treeType = opt.treeType != "" ? opt.treeType : ""; // group admin > used to show unassigned groups when adding members
	//----------------------------------------------------------------
	// 2.Overloading functions & options
	//----------------------------------------------------------------
	// plugins : array of plugins applied to the tree
	// isMultiplecheck : whether multiple checks are allowed when the checkbox plugin is used
	//----------------------------------------------------------------
	this.plugins = ["wholerow","types"];
	this.isMultipleCheck = false;
	//----------------------------------------------------------------
	// 3.Overloading default functions
	//----------------------------------------------------------------
	// dataFunction : override point for core::data::data
	// filterFunction : override point for core::data::dataFilter
	// contextMenuFunction : override point for contextmenu
	// checkBoxOptions : override point for checkbox
	// callbackOpenNode : override point for open_node.jstree
	// callbackSelectNode : override point for select_node.jstree
	//----------------------------------------------------------------
	this.dataFunction = null;
	// Converts the server response (exactly one of groupList / folderList /
	// favoriteList is non-null) into the flat node array jsTree expects.
	this.filterFunction = function(jsonString) {
		var items = [];
		var msg = JSON.parse(jsonString);
		var groupList = msg["groupList"];
		var folderList = msg["folderList"]; // list of FolderVO objects
		var favoriteList = msg["favoriteList"];
		if (groupList != null) {
			$.each(groupList, function(i) {
				var node = groupList[i];
				var item = {
					id : node.group_id,
					parent : node.parent_id == "" ? "#" : node.parent_id,
					text : node.group_name_ko,
					children : node.children_count > 0 ? true : false,
					childrenCnt : node.children_count,
					type : "group",
					contextRoot : opt.context
				};
				items.push(item);
			});
		} else if (folderList != null) {
			$.each(folderList, function(i) {
				var node = folderList[i];
				var folderType = "default";
				// Department folders get their own icon; deleted folders render inactive.
				if (node.folder_type == "MYDEPT") {
					folderType = "group_folder";
				} else {
					if (node.folder_status == "D") {
						folderType = "inactive";
					} else {
						folderType = "default";
					}
				}
				var item = {
					id : node.folder_id,
					parent : node.parent_id == "" ? "#" : node.parent_id,
					text : node.folder_name_ko,
					children : node.children_count > 0 ? true : false,
					acl_id : node.acl_id,
					acl_level : node.acl_level,
					acl_create : node.acl_create,
					acl_changePermission : node.acl_changePermission,
					acl_document_create : node.acl_document_create,
					is_save : node.is_save,
					is_type : node.is_type,
					childrenCnt : node.children_count,
					map_id : node.map_id,
					contextRoot : opt.context,
					type : folderType,
					is_groupFolder : node.folder_type == "MYDEPT" ? true : false
				};
				items.push(item);
			});
		} else if (favoriteList != null) {
			$.each(favoriteList, function(i) {
				var node = favoriteList[i];
				var item = {
					id : node.folder_id,
					// A favorite whose parent is itself is treated as a root node.
					parent : node.parent_folder_id == node.folder_id ? "#" : node.parent_folder_id,
					text : node.favorite_nm,
					children : node.children_count > 0 ? true : false,
					childrenCnt : node.children_count,
					is_virtual : node.is_virtual,
					type : node.is_virtual == "Y" ? "virtual" : "link",
					contextRoot : opt.context,
					sorts : node.sorts
				};
				items.push(item);
			});
		}
		return JSON.stringify(items);
	};
this.contextMenuFunction = function(node) {
// 권한 레벨 계산하는 function 만든다.
var isAuthCheck = function(acl_level, type) {
if( acl_level == 'DELETE' && type == 'delete') {
return true;
} else if( (acl_level == 'DELETE' || acl_level == 'UPDATE') && (type == 'update' || type == 'move')) {
return true;
}
return false;
};
node.full_path = this.get_path(node.id);
return {
createFolder : {
separator_before : false,
separator_after : false,
_disabled : node.original.acl_create == 'F' ? false : false, // 주의 :: false가 사용할 수 있음
label : "폴더 생성",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_write.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.createFolder !== undefined) {
opt.contextAction.createFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("폴더 생성 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
modifyFolder : {
separator_before : false,
separator_after : false,
_disabled : !isAuthCheck(node.original.acl_level, 'update'), // true : 메뉴비활성화, false : 메뉴 활성화 하여 !조건 추가
label : "폴더 수정",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_modify.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.modifyFolder !== undefined) {
opt.contextAction.modifyFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("폴더 수정 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
moveFolder : {
separator_before : false,
separator_after : false,
_disabled : !isAuthCheck(node.original.acl_level, 'move'), // true : 메뉴비활성화, false : 메뉴 활성화 하여 !조건 추가
label : "폴더 이동",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_move.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.moveFolder !== undefined) {
opt.contextAction.moveFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("폴더 이동 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
deleteFolder : {
separator_before : false,
separator_after : false,
_disabled : !isAuthCheck(node.original.acl_level, 'delete'), // true : 메뉴비활성화, false : 메뉴 활성화 하여 !조건 추가
label : "폴더 삭제",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_delete.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.deleteFolder !== undefined) {
opt.contextAction.deleteFolder(node);
} else {
jAlert("폴더 삭제 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
addFavoriteFolder : {
separator_before : true,
separator_after : false,
_disabled : false,
label : "폴더 즐겨찾기 추가",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_favorite_add.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.addFavoriteFolder !== undefined) {
opt.contextAction.addFavoriteFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("폴더 즐겨찾기 추가 콜백함수가 구현되있지 않습니다.");
return;
}
}
}
};
};
this.contextMenuForFavorite = function(node) {
// 권한 레벨 계산하는 function 만든다.
var isAuthCheck = function(acl_level, type) {
if( acl_level == 'DELETE' && type == 'delete') {
return true;
} else if( (acl_level == 'DELETE' || acl_level == 'UPDATE') && (type == 'update' || type == 'move')) {
return true;
}
return false;
};
node.full_path = this.get_path(node.id);
return {
createFavoriteFolder : {
separator_before : false,
separator_after : false,
_disabled : false,
label : "등록",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_write.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.createFavoriteFolder !== undefined) {
opt.contextAction.createFavoriteFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("즐겨찾기 생성 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
modifyFavoriteFolder : {
separator_before : false,
separator_after : false,
_disabled : false,
label : "이름 변경",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_modify.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.modifyFavoriteFolder !== undefined) {
opt.contextAction.modifyFavoriteFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("즐겨찾기 이름 변경 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
moveFavoriteFolder : {
separator_before : false,
separator_after : false,
_disabled : false,
label : "폴더 이동",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_move.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.moveFavoriteFolder !== undefined) {
opt.contextAction.moveFavoriteFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("즐겨찾기 폴더 이동 콜백함수가 구현되있지 않습니다.");
return;
}
}
},
deleteFavoriteFolder : {
separator_before : false,
separator_after : false,
_disabled : false,
label : "폴더 삭제",
icon : "{0}/js/plugins/jstree/img/tree/context_folder_delete.png".format(opt.context),
action : function(data) {
if (opt.contextAction !== undefined && opt.contextAction.deleteFavoriteFolder !== undefined) {
opt.contextAction.deleteFavoriteFolder(node); // 호출한 페이지에서 구현 되어 있어야 함
} else {
jAlert("즐겨찾기 폴더 삭제 콜백함수가 구현되있지 않습니다.");
return;
}
}
}
};
};
this.checkBoxOptions = {
visible : true, // 체크박스 숨김여부 [true : 보임, false : 숨김]
three_state : false, // 하위 폴더 자동 체크 [true : 자동체크, false : 개별체크]
whole_node : false, // 토글시 체크칸을 정확히 클릭해야 해제 가능
keep_selected_style : false,
cascade : "up+down+undetermined",
two_state : true,
tie_selection : true,
};
this.callbackAllSelectNode = function(selectedNode) {};
this.callbackAllSelectNodeData = function(selectedNodeData) {};
this.callbackLoadNode = function(e, data) {};
this.callbackBeforeOpenNode = function(e, data) {};
this.callbackOpenNode = function(e, data) {};
this.callbackCloseNode = function(e, data) {}
this.callbackSelectNode = function(e, data) {};
//==================================================================================================================
// 4.Template Methods
//==================================================================================================================
// template_context : 우클릭 메뉴를 가진 트리 옵션을 추가한다
// template_singleCheck : 단일 체크가 가능한 트리 옵션을 추가한다
// template_multiCheck(checkSubFolder) : 다중 선택이 가능한 트리 옵션을 추가한다
// checkSubFolder : true = 부모 폴더 체크 시 하위 폴더 전체를 체크한다
// false = 부모 폴더 체크 시 하위 폴더에 영향을 주지 않는다
//==================================================================================================================
this.template_context = function() {
// 1. plugin 추가
this.plugins.push("contextmenu");
};
this.template_singleCheck = function() {
this.plugins.push("checkbox");
this.isMultipleCheck = false;
this.isBasicTree = false;
this.checkBoxOptions.cascade = "up";
};
this.template_multiCheck = function(checkSubFolder) {
this.plugins.push("checkbox");
this.isMultipleCheck = true;
this.isBasicTree = false;
this.checkBoxOptions.three_state = checkSubFolder;
this.checkBoxOptions.whole_node = true;
this.checkBoxOptions.cascade = "up"; /* jsTree 3.0.0.9 버전에 맞춰서 추가*/
};
//==================================================================================================================
// 5.Util Methods
//==================================================================================================================
// refresh : 트리 객체를 갱신한다
// destroy : 트리 객체를 제거한다
// refreshNode(nodeId) : 특정 노드를 갱신한다
// - nodeId : 갱신할 nodeId, null일 경우 현재 선택된 Node를 갱신함
//==================================================================================================================
this.reset = function() {
$(exsoft.util.common.getIdFormat(this.divId)).jstree().refresh();
$(exsoft.util.common.getIdFormat(this.divId)).jstree("deselect_all");
this.selectedNode = new Array();
this.selectedNodeData = new Array();
}
this.refresh = function() {
$(this.divId).jstree().refresh();
return true;
}
this.destroy = function() {
$(this.divId).jstree("destroy");
}
this.expandNode = function(nodeId) {
$(this.divId).jstree().open_node(nodeId);
return nodeId;
}
this.expandNodeCallBack = function(nodeId, callback) {
$(this.divId).jstree().open_node(nodeId, callback);
}
this.selectNode = function(nodeId) {
$(this.divId).jstree("select_node", nodeId);
}
this.selectNodeForInvoke = function(nodeId) {
$(this.divId).jstree("deselect_node", nodeId);
$(this.divId).jstree("select_node", nodeId);
}
this.refreshNode = function(nodeId) {
if(nodeId == undefined) {
nodeId = $(this.divId).jstree("get_selected");
}
// 갱신하려는 노드의 정보를 가져온다
var nodeObj = $(this.divId).jstree("get_node", nodeId);
// 갱신하려는 노드의 부모폴더 정보를 가져온다
var parentId = nodeObj.parent == "#" ? nodeId : nodeObj.parent;
var parentObj = $(this.divId).jstree("get_node", parentId);
// 부모 노드가 Root일 경우 refresh()만 한다.
if (parentObj.parent == "#") {
$(this.divId).jstree().refresh();
} else {
$(this.divId).jstree("refresh_node", parentId);
}
// 갱신하려는 아이디를 기억해둔다
this.refreshNodeId = nodeId;
this.refreshNodeParentId = parentId;
}
this.refreshNodeForAddChildren = function(nodeId) {
// param이 null일 경우는 없으나 혹시나 하는 마음에 처리함.
if (nodeId == undefined) {
nodeId = this.getCurrentNodeParentId();
}
var node = $(this.divId).jstree("get_node", nodeId);
// nodeId가 refresh하려는 Tree Object에 아직 load되지 않았을 경우 로직 종료
if (node == false) {
return;
}
// 자식 Node가 없을 경우만 임시 폴더를 추가하여 expend 가능상태로 만든다
if (node.original.childrenCnt == 0) {
$(this.divId).jstree().create_node(nodeId);
node.original.childrenCnt++;
}
// 트리가 expend있는지 체크해서 collapsed일 경우만 토글하여 expend 한다
if (!node.state.opened) {
$(this.divId).jstree().toggle_node(nodeId);
}
$(this.divId).jstree("refresh_node", nodeId);
}
this.refreshCurrentNode = function(nodeId) {
if(nodeId == undefined) {
return true;
}
// 부모 노드가 Root일 경우 refresh()만 한다.
if (nodeId == "#") {
$(this.divId).jstree().refresh();
} else {
$(this.divId).jstree("refresh_node", nodeId);
}
}
this.getCurrentNode = function(isDOM) {
// isDOM is true : return DOM Object
// isDOM is false or undefined : return jsTree node object
isDOM = isDOM == undefined ? false : isDOM;
var current_id = $(this.divId).jstree("get_selected");
var current_node_object = $(this.divId).jstree("get_node", current_id, isDOM);
return current_node_object;
}
this.getCurrentNodeById = function(current_id) {
var current_node_object = $(this.divId).jstree("get_node", current_id);
return current_node_object;
}
this.getCurrentNodeIds = function() {
return $(this.divId).jstree("get_selected");
}
this.getCurrentNodeId = function() {
var cId = $(this.divId).jstree("get_selected");
var cNode = $(this.divId).jstree("get_node", cId); // 첫번째 배열 값 id 리턴
return cNode.id;
}
this.getCurrentNodeName = function() {
var cId = $(this.divId).jstree("get_selected");
var cNode = $(this.divId).jstree("get_node", cId);
return cNode.text;
}
this.getCurrentNodeFullPath = function() {
var currentId = $(this.divId).jstree("get_selected");
return $(this.divId).jstree("get_path", currentId);
}
this.getCurrentNodeFullPathIds = function() {
var currentId = $(this.divId).jstree("get_selected");
return $(this.divId).jstree("get_path", currentId, "", true);
}
this.getCurrentNodeParentId = function() {
var currentNodeId = this.getCurrentNodeId();
var parentId;
try {
// 현재 선택된 노드 ID가 있을경우 진행함
if (currentNodeId != undefined) {
parentId = $(this.divId).jstree("get_node", currentNodeId).parent;
return parentId;
}
} finally {
currentNodeId = null;
parentId = null;
}
}
this.setInitSelectById = function(nodeId) {
$(this.divId).jstree("deselect_all");
$(this.divId).jstree("select_node", nodeId);
}
this.unChecked = function(nodeId) {
}
this.is_loaded = function(obj){
return $(this.divId).jstree("is_loaded", obj);
}
this.is_open = function(obj){
return $(this.divId).jstree("is_open", obj);
}
// 이동할 수 있는 형제 노드가 있는지 체크
this.SiblingMovable = function() {
var curNodeDOM = this.getCurrentNode(true);
return {
prevSibling : $(this.divId).jstree()._previousSibling(curNodeDOM[0]),
nextSibling : $(this.divId).jstree()._nextSibling(curNodeDOM[0])
};
}
// 현재 노드의 이동 가능한 범위를 구한다 (형제 노드의 Start ~ End index를 구함)
this.getMoveRange = function() {
var info = {
range : new Array(), // 선택한 노드의 형제노드 index 목록
pointer : -1 // 선택한 노드의 Index
}
var curNode = this.getCurrentNode();
var curAllNode = this.getAllNodeToJstree();
$(curAllNode).each(function(i) {
// 형제 노드일 경우 인덱스 정보 추가
if (curNode.parent == this.parent)
info.range.push({node : this, index : i});
// 선택한 노드의 인덱스
if (curNode.id == this.id)
info.pointer = i;
})
return info;
}
// 형제 노드의 순서중 현재 노드의 POS(index)를 구한다
this.getNodeIndex = function() {
var index = -1;
var curNode = this.getCurrentNode();
var curAllNode = this.getAllNodeToJstree();
$(curAllNode).each(function(i) {
// 1. '즐겨찾기' 폴더가 아님폴더 중 형제 폴더들을 카운팅함
if (this.parent != this.id && this.parent == curNode.parent) {
index++;
// 2. 자기 자신이 나올경우 인덱스를 반환
if (this.id == curNode.id) {
return false;
}
}
})
return index;
}
// 모든 Node를 구한다 (DOM)
this.getAllNodeToDOM = function() {
var curContainer = $(this.divId).jstree().get_container();
return curContainer.find("li");
}
// 모든 Node를 구한다 (Tree Nodes)
this.getAllNodeToJstree = function() {
var treeObj = this;
var nodeArrays = new Array();
var curContainer = $(this.divId).jstree().get_container();
$(curContainer.find("li")).each(function(index) {
nodeArrays.push(treeObj.getCurrentNodeById(this.id));
})
return nodeArrays;
}
// 선택한 노드부터 최상위 노드까지 폴더 ID 목록
this.getParentIdList = function(nodeId) {
return $(this.divId).jstree("get_path", nodeId, "", true);
}
// 노드의 Text를 변경한다
this.setNodeText = function(nodeId, str) {
$(this.divId).jstree("set_text", nodeId, str);
}
this.moveToPrev = function(callback) {
// 1. 이동이 가능한지 체크한다
var siblings = this.SiblingMovable();
if (siblings.prevSibling == null) {
jAlert("현재 폴더를 위로 이동할 수 없습니다.");
return;
}
var tempDivId = this.divId;
var curNode = this.getCurrentNode();
var curNodeIndex = this.getNodeIndex();
var prevNode = $(this.divId).jstree("get_node", siblings.prevSibling);
var prevNodeIndex = curNodeIndex-1;
var jsonObject = {
sourceFolderId : curNode.id,
sourceFolderIndex : prevNodeIndex,
targetFolderId : prevNode.id,
targetFolderIndex : curNodeIndex,
type : "SWAP_INDEX"
}
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject, this.contextRoot + "/folder/favoriteControl.do" , "SWAP_INDEX", function(param, data) {
$(tempDivId).jstree().move_node(curNode.id, curNode.parent, curNodeIndex-1);
});
}
this.moveToNext = function() {
// 1. 이동이 가능한지 체크한다
var siblings = this.SiblingMovable();
if (siblings.nextSibling == null) {
jAlert("현재 폴더를 아래로 이동할 수 없습니다.");
}
var tempDivId = this.divId;
var curNode = this.getCurrentNode();
var curNodeIndex = this.getNodeIndex();
var nextNode = $(this.divId).jstree("get_node", siblings.nextSibling);
var nextNodeIndex = curNodeIndex+1;
var jsonObject = {
sourceFolderId : curNode.id,
sourceFolderIndex : nextNodeIndex,
targetFolderId : nextNode.id,
targetFolderIndex : curNodeIndex,
type : "SWAP_INDEX"
}
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject, this.contextRoot + "/folder/favoriteControl.do" , "SWAP_INDEX", function(param, data) {
$(tempDivId).jstree().move_node(curNode.id, curNode.parent, curNodeIndex+2);
});
}
// Node.original.childCnt를 -1 한다
this.removeChildCnt = function(nodeId) {
var node = $(this.divId).jstree("get_node", nodeId);
node.original.childrenCnt--;
// 자식이 없을경우 state.opened를 false로 변경
if (node.original.childrenCnt == 0) {
node.state.opened = false;
}
}
// 노드 아이콘 갱신
this.updateNodeIcon = function(nodeId) {
var defaultType = $(this.divId).jstree("get_type", nodeId).replace("_open", "");
var openState = $(this.divId).jstree("get_node", nodeId).state.opened;
if (openState) {
$(this.divId).jstree("set_type", nodeId, "{0}_open".format(defaultType));
} else {
$(this.divId).jstree("set_type", nodeId, "{0}".format(defaultType));
}
}
// 선택한 폴더의 소속 부서를 조회한다
this.getFolderGroupId = function(nodeId) {
var returnObj = null;
var div = this.divId;
// nodeId가 null일경우 현재 선택된 Node의 ID를 설정함.
if (nodeId == null) {
nodeId = this.getCurrentNodeId();
}
// 전체 경로(id)를 역순으로 가져온다
var parentList = $(div).jstree("get_path", nodeId, "", true).reverse();
// 트리 조회
$(parentList).each(function(index) {
var node = $(div).jstree("get_node", this);
// [업무문서함] || [프로젝트함] 케이스에 따라 소속부서(프로젝트)를 가져오는 로직을 분기함
if (node.original.map_id == "MYDEPT") {
if (node.original.is_groupFolder) {
returnObj = node;
return false; // Break
}
} else if (node.original.map_id == "PROJECT") {
if (parentList.length == index+2) {
returnObj = node;
return false;
}
}
});
return returnObj;
}
//==================================================================================================================
// 6.Initialize JS Tree
//==================================================================================================================
this.init = function() {
//--------------------------------
// 6-1.localize tree object
//--------------------------------
var currentTree = this;
//--------------------------------
// 6-2.Validation functions & options
//--------------------------------
// **. div의 ID에 '#'이 없을 경우 앞에 붙여준다. (없을 경우 Jquery의 Object 접근 방식에 맞지 않아서 오류남)
if (this.divId.indexOf("#") == -1) {
this.divId = "#" + this.divId;
}
// 1. Node정보를 가져올 url
if (this.url == null) {
jAlert("대상 Url은 꼭 필요합니다.");
return;
}
//----------------------------------------------------------------
// 6-3.Create JS Tree
//----------------------------------------------------------------
// core::multiple : 여러개의 Node를 체크할수 있는지 여부 (true : 다중 체크, false : 단일체크)
// core::data::url : 요청을 보낼 url
// core::data::type : 전송 방식 GET, POST
// core::data::data : 전송 파라메터 구성 부분
// core::data::dataFilter : 응답받은 Response를 조작하여 트리 구조에 맞게 변경하는 부분
// contextmenu::items : 컨텍스트 메뉴의 아이템 객체
// checkbox : 체크박스 옵션 객체
// plugins : 플러그인 옵션 배열 객체
//----------------------------------------------------------------
$(function () {
$(currentTree.divId).jstree({
core : {
multiple : currentTree.isMultipleCheck,
data : {
url : currentTree.url,
type : "POST",
data : currentTree.dataFunction != null ?
currentTree.dataFunction :
function(currentNode) {
return {
map_id : currentTree.mapId,
root_id : currentTree.rootId,
parent_id : currentNode.id == "#" ? null : currentNode.id,
work_Type : currentTree.workType,
only_virtual : currentTree.onlyVirtual,
treeType : currentTree.treeType
};
},
dataFilter : currentTree.filterFunction
},
check_callback : true
},
contextmenu : {
items : currentTree.isFavoriteFolder ? currentTree.contextMenuForFavorite : currentTree.contextMenuFunction
},
checkbox : currentTree.checkBoxOptions,
plugins : currentTree.plugins,
types : {
"default" : { // 기본폴더
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_normal.png"
},
"default_open" : { // 기본폴더 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_normal_open.png"
},
"link" : { // 바로가기 폴더
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_link.png"
},
"link_open" : { // 바로가기 폴더 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_link_open.png"
},
"virtual" : { // 가상폴더
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_imagine.png"
},
"virtual_open" : { // 가상폴더 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_imagine_open.png"
},
"group" : { // 그룹 폴더
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_group.png"
},
"group_open" : { // 그룹 폴더 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_group_open.png"
},
"inactive" : { // 비활성
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_inactive.png"
},
"inactive_open" : { // 비활성 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_inactive_open.png"
},
"group_folder" : { // 그룹 폴더
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_division.png"
},
"group_folder_open" : { // 그룹 폴더 (확장됨)
icon : currentTree.contextRoot + "/js/plugins/jstree/img/folder_division_open.png"
}
}
});
//----------------------------------------------------------------
// 6-3-1.Binding JS Tree event
//----------------------------------------------------------------
// changed.jstree : 체크박스의 상태가 변화할때 발생하는 이벤트
// open_node.jstree : Tree를 확장할 경우 발생하는 이벤트
// select_node.jstree : Tree의 Node를 선택할 경우 발생하는 이벤트
// load_node.jstree : 해당 Node의 조회가 끝났을때 발생하는 이벤트
//----------------------------------------------------------------
$(currentTree.divId)
.on('changed.jstree', function (e, data) {
currentTree.selectedNode = new Array();
currentTree.selectedNodeData = new Array();
$.each(data.selected, function(i) {
currentTree.selectedNode.push(data.instance.get_node(data.selected[i]).id);
currentTree.selectedNodeData.push(data.instance.get_node(data.selected[i]).id +"#" + data.instance.get_node(data.selected[i]).text);
});
currentTree.callbackAllSelectNode(currentTree.selectedNode);
currentTree.callbackAllSelectNodeData(currentTree.selectedNodeData);
})
.on('open_node.jstree', function(e, data) {
if (currentTree.refreshNodeParentId == data.node.id) {
$(currentTree.divId).jstree("deselect_all");
$(currentTree.divId).jstree("select_node", currentTree.refreshNodeId);
}
var curClass = $(currentTree.divId).jstree("get_type", data.node.id);
$(currentTree.divId).jstree("set_type", data.node.id, "{0}_open".format(curClass));
currentTree.callbackOpenNode(e, data);
})
.on('close_node.jstree', function(e, data) {
var curClass = $(currentTree.divId).jstree("get_type", data.node.id);
$(currentTree.divId).jstree("set_type", data.node.id, "{0}".format(curClass.replace("_open", "")));
currentTree.callbackCloseNode(e, data);
})
.on('before_open.jstree', function(e, data) {
currentTree.callbackBeforeOpenNode(e, data);
})
// 폴더트리 선택시 유효성 체크 로직을 추가함.
.on('select_node.jstree', function (e, data) {
data.node.full_path = currentTree.getCurrentNodeFullPath();
data.node.parentIdList = currentTree.getParentIdList(data.node.id);
data.node.parentGroup = currentTree.getFolderGroupId(data.node.id);
data.node.mapId = currentTree.mapId;
currentTree.callbackSelectNode(e, data);
})
.on('load_node.jstree', function (e, data) {
if(data.node.id === "#") {
// 1. 자식이 있을경우 : load한 node가 Root(#)일경우 첫번째 자식 Node를 확장한다
if (data.node.children.length > 0 && $(currentTree.divId).jstree("get_node", data.node.children[0]).original.childrenCnt > 0) {
$(exsoft.util.common.getIdFormat(currentTree.divId)).jstree("deselect_all");
$(currentTree.divId).jstree("toggle_node", data.node.children[0]);
// 2. Root("#")를 select event Trigging할 경우
} else if (currentTree.isSelectHiddenRoot && data.node.children.length == 0) {
// hidden root "#" select 이벤트 발생
var _node = {
id : "##",
text : "공유 폴더가 없습니다."
};
$(currentTree.divId).jstree().create_node("#", _node);
$(currentTree.divId).jstree("activate_node", _node.id, true);
// 3. 자식이 없을경우 (다중 선택 모드일 경우엔 사용하지 않음)
} else if (currentTree.isBasicTree){
$(currentTree.divId).jstree("activate_node", data.node.children[0], true);
}
} else if (data.node.parent === "#") {
// load한 node가 Root(#)의 자식일 경우 해당 노드를 선택한다.
// 단, 기본 형태의 트리에서만 사용한다
if (currentTree.isBasicTree) {
$(currentTree.divId).jstree("select_node", data.node.id, true);
}
}
})
.on('loaded.jstree', currentTree.callbackLoadNode)
.on('move_node.jstree', function (e, data) {
$.get('?operation=move_node', { 'id' : data.node.id, 'parent' : data.parent, 'position' : data.position })
.fail(function () {
data.instance.refresh();
});
})
});
};
};
<file_sep>/EDMS3/src/kr/co/exsoft/common/vo/DocumentHtVO.java
package kr.co.exsoft.common.vo;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.vo.VO;
/**
* 문서이력 VO
* @author <NAME>
* @since 2014.07.31
* @version 3.0
*
*/
public class DocumentHtVO extends VO {
	// Table columns (XR_DOCUMENT_HT)
	private long doc_seq; // document-history index
	private String root_id; // document root ID
	private String action_date; // action (processing) date
	private String actor_id; // actor ID
	private String actor_nm; // actor name
	private String group_id; // department ID
	private String group_nm; // department name
	private String action_id; // action type - CREATE/DELETE/MOVE/READ/CHEKCOUT/CHECKIN/TERMINDATE/CHANGE_PERMISSION
	private String target_id; // target of the action - XR_DOCUMENT DOC_ID
	private String type_id; // document type - XR_DOCUMENT/RGATE
	private String type_nm; // document type name - general document / file document
	private String connect_ip; // connecting client IP
	private String action_place; // log origin - RGATE/EDMS
	private String before_id; // document move: previous folder ID / ownership change: previous owner ID
	private String before_nm; // document move: previous folder name / ownership change: previous owner name
	private String after_id; // document move: new folder ID / ownership change: new owner ID
	private String after_nm; // document move: new folder name / ownership change: new owner name
	private String doc_name; // document title: per-user/department history detail view
	private String version_no; // document version info
	// Derived/lookup fields
	private String action_name; // action display name
	private String etc; // remarks (composed in getEtc)
public DocumentHtVO() {
this.doc_seq = 0;
this.root_id = "";
this.action_date = "";
this.actor_id = "";
this.actor_nm = "";
this.group_id = "";
this.group_nm = "";
this.action_id = "";
this.target_id = "";
this.type_id = "";
this.type_nm = "";
this.connect_ip = "";
this.action_place = "";
this.before_id = "";
this.before_nm = "";
this.after_id = "";
this.after_nm = "";
this.doc_name = "";
this.version_no = "1.0";
this.action_name = "";
this.etc = "";
}
public String getAction_name() {
return action_name;
}
public void setAction_name(String action_name) {
this.action_name = action_name;
}
public String getEtc() {
if(this.action_id.equals(Constant.ACTION_MOVE)) {
etc = this.before_nm + "폴더에서" + " " + this.after_nm + "폴더로 이동";
}else if(this.action_id.equals(Constant.ACTION_CHANGE_CREATOR)) {
etc = this.before_nm + "에서" + " " + this.after_nm + "소유권 이전";
}else {
etc = "";
}
return etc;
}
public void setEtc(String etc) {
this.etc = etc;
}
public String getVersion_no() {
return version_no;
}
public void setVersion_no(String version_no) {
this.version_no = version_no;
}
public String getDoc_name() {
return doc_name;
}
public void setDoc_name(String doc_name) {
this.doc_name = doc_name;
}
public String getBefore_id() {
return before_id;
}
public void setBefore_id(String before_id) {
this.before_id = before_id;
}
public String getBefore_nm() {
return before_nm;
}
public void setBefore_nm(String before_nm) {
this.before_nm = before_nm;
}
public String getAfter_id() {
return after_id;
}
public void setAfter_id(String after_id) {
this.after_id = after_id;
}
public String getAfter_nm() {
return after_nm;
}
public void setAfter_nm(String after_nm) {
this.after_nm = after_nm;
}
public long getDoc_seq() {
return doc_seq;
}
public void setDoc_seq(long doc_seq) {
this.doc_seq = doc_seq;
}
public String getRoot_id() {
return root_id;
}
public void setRoot_id(String root_id) {
this.root_id = root_id;
}
public String getAction_date() {
return action_date;
}
public void setAction_date(String action_date) {
this.action_date = action_date;
}
public String getActor_id() {
return actor_id;
}
public void setActor_id(String actor_id) {
this.actor_id = actor_id;
}
public String getActor_nm() {
return actor_nm;
}
public void setActor_nm(String actor_nm) {
this.actor_nm = actor_nm;
}
public String getGroup_id() {
return group_id;
}
public void setGroup_id(String group_id) {
this.group_id = group_id;
}
public String getGroup_nm() {
return group_nm;
}
public void setGroup_nm(String group_nm) {
this.group_nm = group_nm;
}
public String getAction_id() {
return action_id;
}
public void setAction_id(String action_id) {
this.action_id = action_id;
}
public String getTarget_id() {
return target_id;
}
public void setTarget_id(String target_id) {
this.target_id = target_id;
}
public String getType_id() {
return type_id;
}
public void setType_id(String type_id) {
this.type_id = type_id;
}
public String getType_nm() {
return type_nm;
}
public void setType_nm(String type_nm) {
this.type_nm = type_nm;
}
public String getConnect_ip() {
return connect_ip;
}
public void setConnect_ip(String connect_ip) {
this.connect_ip = connect_ip;
}
public String getAction_place() {
return action_place;
}
public void setAction_place(String action_place) {
this.action_place = action_place;
}
}
<file_sep>/EDMS3/WebContent/js/popup/selectSingleUserWindow.js
/**
 * Single-user picker popup (used e.g. by "my documents > transfer ownership").
 * Shows a group tree on the left and a user grid on the right; the chosen
 * user's row data is handed to the caller-supplied callback.
 */
var selectSingleUserWindow = {
	callbackFunction : null,  // invoked with the selected user's row data
	treeObject : null,        // lazily created group tree instance
	// 0. Initialisation
	init : {
		initSingleUserWindow : function(callback) {
			// My documents > transfer ownership - close button
			$('.grant_transfer_close').bind("click", function(e){
				e.preventDefault();
				$(this).parents('.grant_transfer').addClass('hide');
				$('.grant_transfer_wrapper').addClass('hide');
			});
			// Clicking the dimmed overlay also closes the window
			$('.grant_transfer_wrapper').bind("click", function(){
				$(this).addClass('hide');
				$('.grant_transfer').addClass('hide');
			});
			// Remember the caller's callback
			selectSingleUserWindow.callbackFunction = callback;
			// Open the popup layer
			selectSingleUserWindow.open.layerOpen();
			// 1. Initialise the group tree (create once, refresh afterwards)
			if (selectSingleUserWindow.treeObject == undefined) {
				var treeOption = {
					divId : "#transfer_left_tree",
					context : exsoft.contextRoot,
					url : "/group/groupList.do"
				};
				selectSingleUserWindow.treeObject = new XFTree(treeOption);
				selectSingleUserWindow.treeObject.callbackSelectNode = function(e, data) {
					// Reset the search fields
					$("#pop_sg_groupName").val("");
					$("#pop_sg_userName").val("");
					var param = {
						groupName : "",
						userName : "",
						groupId : data.node.id
					}
					// Load the user list of the selected group
					exsoft.util.grid.gridPostDataRefresh('#pop_searchUserList',exsoft.contextRoot + '/user/searchUserList.do', param);
				}
				selectSingleUserWindow.treeObject.init();
			} else {
				selectSingleUserWindow.treeObject.refresh();
			}
			// 2. Initialise the user-list grid (only once)
			if ($("#pop_searchUserList")[0].grid == undefined) {
				$('#pop_searchUserList').jqGrid({
					url:exsoft.contextRoot + '/user/searchUserList.do',
					mtype:"post",
					datatype:'json',
					jsonReader:{
						page:'page',total:'total',root:'list'
					},
					colNames:['','group_nm','user_name_ko','position_nm','role_nm','email','user_status_nm'],
					colModel:[
						{name:'user_id',index:'user_id',width:5, editable:false,sortable:false,resizable:true,align:'center',key:true,edittype:'radio',
							formatter:function(cellValue, option) {
								return '<input type="radio" name="radio_'+option.gid+'" value="'+cellValue
									+'" onclick="javascript:selectSingleUserWindow.event.selectUser(\''+cellValue+'\')"/>';
							},hidden:false
						},
						{name:'group_nm',index:'group_nm',width:60, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
						{name:'user_name_ko',index:'user_name_ko',width:60, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
						{name:'position_nm',index:'position_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
						{name:'role_nm',index:'role_nm',width:10, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
						{name:'email',index:'email',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'},
						{name:'user_status_nm',index:'user_status_nm',width:30, editable:false,sortable:true,resizable:true,hidden:true,align:'center'}
					],
					autowidth:true,
					height:200,
					viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "user_name_ko",
					sortorder:"asc",
					scroll:true, // virtual scrolling
					scrollOffset : 0,
					rowNum : 10,
					rowList : exsoft.util.grid.listArraySize(),
					emptyDataText: "데이터가 없습니다.",
					caption:'사용자 목록',
					loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridTitleBarHide('pop_searchUserList');
					}
					,loadComplete: function() {
						exsoft.util.grid.gridInputInit(false);
					}
					,beforeSelectRow: function(rowid, e)
					{
						var $radio = $(e.target).closest('tr').find('input[type="radio"]');
						$radio.prop('checked', 'checked');
						// Clicking only the radio button does not set rowid; force the selection
						$("#pop_searchUserList").jqGrid('setSelection',rowid);
						return true; // allow row selection
					}
				});
				// Localised / aligned column headers
				var headerData = '{"group_nm":"그룹명","user_name_ko":"사용자명"}';
				exsoft.util.grid.gridColumHeader('pop_searchUserList',headerData,'center');
				headerData = null;
				currentGrid = null;
			}
		},
	},
	// 1. Popup open
	open : {
		layerOpen : function() {
			exsoft.util.layout.divLayerOpen("grant_transfer_wrapper", "grant_transfer");
		},
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : {
		layerClose : function() {
			exsoft.util.layout.divLayerClose("grant_transfer_wrapper", "grant_transfer");
		},
	},
	// 4. Screen events
	event : {
		// OK button clicked
		okButtonClick : function() {
			var rowId = $("#pop_searchUserList").getGridParam("selrow");
			// Validate the selection BEFORE reading row data; previously
			// getRowData(null) was called and mutated before this check.
			if (rowId == null) {
				jAlert("사용자를 선택해주세요");
				return;
			}
			var rowData = $("#pop_searchUserList").getRowData(rowId);
			rowData.user_id = rowId;
			selectSingleUserWindow.event.cancelButtonClick();
			selectSingleUserWindow.callbackFunction(rowData);
		},
		// Cancel button clicked
		cancelButtonClick : function() {
			selectSingleUserWindow.close.layerClose();
		},
		// Search users by group/user name
		searchGroupUser : function() {
			var param = {
				groupName : $("#pop_sg_groupName").val(),
				userName : $("#pop_sg_userName").val(),
				groupId : ''
			}
			exsoft.util.grid.gridPostDataRefresh('#pop_searchUserList',exsoft.contextRoot + '/user/searchUserList.do', param);
		},
		// Radio button in the grid clicked
		selectUser : function(rowid) {
			// Select the clicked row
			$("#pop_searchUserList").jqGrid('setSelection',rowid);
		},
		// Enter key triggers the search
		enterKeyPress : function(e) {
			if (e.keyCode == 13) {
				selectSingleUserWindow.event.searchGroupUser();
				return false;
			}
		}
	},
	// 5. UI updates
	ui : {
	},
	// 6. Callbacks
	callback : {
	},
}
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/dao/QuartzDao.java
package kr.co.exsoft.quartz.dao;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.document.vo.PageVO;
import kr.co.exsoft.quartz.vo.BatchWorkVO;
import kr.co.exsoft.quartz.vo.FileQueueDeleteVO;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.springframework.stereotype.Repository;
/**
 * Quartz batch-job mapper interface (MyBatis).
 * Provides the queries used by the scheduled jobs: batch-work logging,
 * trash/expiration candidate lookups, document-status aggregation and
 * file-deletion queue handling.
 *
 * @author <NAME>
 * @since 2014.07.21
 * @version 3.0
 */
@Repository(value = "quartzDao")
public interface QuartzDao {
	/**
	 * Inserts a batch-work log row.
	 *
	 * @param batchWorkVO batch-work log data
	 * @return mapper result count
	 */
	public int batchWorkWrite(BatchWorkVO batchWorkVO);
	/**
	 * Marks a batch-work log row as completed.
	 *
	 * @param map update parameters
	 * @return mapper result count
	 */
	public int batchWorkUpdate(HashMap<String,Object> map);
	/**
	 * Checks whether the batch job has already been performed.
	 *
	 * @param map lookup parameters
	 * @return match count (0 means not yet performed)
	 */
	public int isBatchWork(HashMap<String,Object> map);
	/**
	 * Lists users who exceeded the configured read-count threshold.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> auditExceedList(HashMap<String,Object> map);
	/**
	 * Lists documents due for expiration processing.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> expiredDocList(HashMap<String,Object> map);
	/**
	 * Lists personal-trash documents targeted for processing.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> privateTrashDocList(HashMap<String,Object> map);
	/**
	 * Lists system-trash documents targeted for processing.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> systemTrashDocList(HashMap<String,Object> map);
	/**
	 * Fetches the EDMS system administrator's user information.
	 *
	 * @param map lookup parameters
	 * @return single case-insensitive result row
	 */
	public CaseInsensitiveMap systemUserInfo(HashMap<String,Object> map);
	/**
	 * Aggregates document status per user.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> userDocStatus(HashMap<String,Object> map);
	/**
	 * Aggregates document status per department.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> groupDocStatus(HashMap<String,Object> map);
	/**
	 * Inserts the per-user document-status aggregate.
	 *
	 * @param map insert parameters
	 * @return mapper result count
	 */
	public int userDocHtWrite(HashMap<String,Object> map);
	/**
	 * Inserts the per-department document-status aggregate.
	 *
	 * @param map insert parameters
	 * @return mapper result count
	 */
	public int groupDocHtWrite(HashMap<String,Object> map);
	/**
	 * Lists attachment files of purged documents that should be deleted.
	 *
	 * @param map lookup parameters
	 * @return list of page (attachment) rows
	 */
	public List<PageVO> delPageList(HashMap<String,Object> map);
	/**
	 * Queries the DELETEFILE_QUEUE table.
	 *
	 * @param map lookup parameters
	 * @return list of file-deletion queue rows
	 */
	public List<FileQueueDeleteVO> fileQueueDeleteList(HashMap<String,Object> map);
	/**
	 * Deletes rows from the DELETEFILE_QUEUE table.
	 *
	 * @param map delete parameters
	 * @return mapper result count
	 */
	public int deleteQueue(HashMap<String,Object> map);
	/***
	 * Lists temporary work-box documents targeted for deletion.
	 *
	 * @param map lookup parameters
	 * @return list of case-insensitive result rows
	 */
	public List<CaseInsensitiveMap> tempDelDocList(HashMap<String,Object> map);
	/**
	 * Deletes temporary work-box documents.
	 *
	 * @param map delete parameters
	 * @return mapper result count
	 */
	public int tempDocDelete(HashMap<String,Object> map);
}
<file_sep>/EDMS3/WebContent/js/mydoc/myDocList.js
/**
 * myDocList.js
 * "My documents" list page: left-hand mypage folder tree, a jqGrid document
 * list, and context-menu actions (update / delete / move / copy / favorites /
 * work-cart / checkout-cancel).
 */
var myDocList = {
	/**
	 * member variables
	 */
	workType : Constant.WORK_MYPAGE,  // work type of this page (mypage)
	folderId : null,                  // currently selected folder id
	tree : {
		mypageTree : null             // XFTree instance for the mypage tree
	},
	// 0. Initialisation
	init : {
		isInitialized : false,
		// Initialise the page (tree, UI widgets, grid)
		initPage : function(pageSize){
			// Initialise the tree
			myDocList.treeFunctions.initTree();
			// Initialise the UI
			myDocList.init.initUi(pageSize);
			// Initialise the grid
			myDocList.grid.initGrid();
		},
		// Initialise UI widgets (ddslick dropdowns, tabs, navigation, ...)
		initUi : function(pageSize) {
			if (!myDocList.init.isInitialized) {
				// Search-field select box
				exsoft.util.common.ddslick('#myDoc_select', 'srch_type1', '', 79, function(divId, selectedData){
				});
				// Rows-per-page select box; changing it reloads page 1
				exsoft.util.common.ddslick('#myDocListRowCount', 'srch_type1', '', 68, function(divId, selectedData){
					$("#myDocList").setGridParam({page:1, rowNum:selectedData.selectedData.value}).trigger("reloadGrid");
				});
				// Depth navigation: show/hide the full folder path on hover
				$('.depth_navi > span').mouseover(function(){
					var path = $(this).parent().find(".depth_navi_path");
					if(!path.is(":visible")) {
						path.removeClass('hide');
					}
				}).mouseout(function(){
					var path = $(this).parent().find(".depth_navi_path");
					if(path.is(":visible")) {
						path.addClass('hide');
					}
				});
				myDocList.grid.pageSize = pageSize;
				// Hook up the tree refresh button
				$("#treeRefresh").bind("click", function() {
					myDocList.treeFunctions.refresh();
				});
				// Hide the workspace tab / show the mypage tab
				$("[data-group=treeTabWorkspace]").each(function(idx) {
					$(this).addClass("hide").removeClass("focus");
				});
				$("[data-group=treeTabMypage]").each(function(idx) {
					$(this).removeClass("hide").addClass("focus");
				});
				// Hide the workspace tree / show the mypage tree
				$("[data-group=workspaceTree]").each(function(idx) {
					$(this).addClass("hide");
				});
				$("[data-group=mypageTree]").each(function(idx) {
					$(this).removeClass("hide");
				});
				// Apply the configured page size to the select box
				exsoft.util.layout.setSelectBox('myDocListRowCount',myDocList.grid.pageSize);
				// Restore the pane-split layout stored in cookies
				exsoft.common.bind.doFunction.layoutViewCookie();
				// NOTE(review): assigns an implicit global; probably meant
				// myDocList.init.isInitialized — confirm before changing.
				isInitialized = true;
			}
		}
	},
	treeContextAction : {
		// Tree context menu: create-folder callback
		createFolder : function(node) {
			exsoft.util.layout.divLayerOpen(folderWindow.wrapperClass, folderWindow.layerClass);
			folderWindow.callback = myDocList.callback.refreshTree;
			folderWindow.initForm(node)
		},
		// Tree context menu: modify-folder callback
		modifyFolder : function(node) {
			exsoft.util.layout.divLayerOpen(folderWindow.wrapperClass, folderWindow.layerClass);
			folderWindow.callback = myDocList.callback.refreshTree;
			folderWindow.initForm(node, node.id)
		},
		// Tree context menu: move-folder callback
		moveFolder : function(node) {
			selectSingleFolderWindow.init(myDocList.callback.moveFolder, Constant.MAP_MYPAGE, Constant.WORK_MYPAGE, true, "ALL_TYPE");
		},
		// Tree context menu: delete-folder callback (asks for confirmation)
		deleteFolder : function(node) {
			selectSingleFolderWindow.callback = myDocList.callback.refreshTree;
			var jo = {
				folder_id : node.id,
				folder_name_ko : node.text,
				type : "DELETE"
			}
			jConfirm("폴더를 삭제 하시겠습니까?", "폴더 삭제", 0, function(r) {
				if (r) {
					exsoft.util.ajax.ajaxDataFunctionWithCallback(jo, exsoft.contextRoot+"/folder/folderControl.do", "", function(data) {
						if (data.result == "true") {
							myDocList.treeFunctions.refresh();
						} else {
							jAlert(data.message);
						}
					});
				}
			});
		},
		// Tree context menu: add-to-favorites callback
		addFavoriteFolder : function(node) {
			// 1. Check whether the folder is already registered as a favorite
			exsoft.util.ajax.ajaxDataFunctionWithCallback({folder_id : node.id, type : "CHECK_EXISTS"}, exsoft.contextRoot+"/folder/favoriteControl.do", "", function(data, param) {
				if (data.result == "true") {
					var jsonObject = {
						folder_id : node.id,
						folder_nm : node.text,
						mode : "ADD_FAVORITE",
						only_virtual : "Y"
					}
					// Popup to pick the favorite parent folder (requires selectFavoriteFolderWindow.jsp)
					selectFavoriteFolderWindow.init(jsonObject, false, function(returnObject) {
						returnObject.type = "ADD_TO_FAVORITES";
						exsoft.util.ajax.ajaxDataFunctionWithCallback(returnObject, exsoft.contextRoot+"/folder/favoriteControl.do", "", function(data, param) {
							if (data.result == "true") {
								jAlert("즐겨찾기 폴더 등록 완료");
							} else {
								jAlert(data.message);
							}
						});
					});
				} else {
					jAlert("이미 즐겨찾기 폴더로 등록 됐습니다.");
					return;
				}
			});
		}
	},
	grid : {
		pageSize : 10,
		// Configure the document-list jqGrid (columns, context menu, events)
		initGrid : function() {
			// (disabled) re-initialisation guard
			// if ($("#myDocList")[0].grid != undefined) {
			//	 $('#myDocList').jqGrid('GridUnload');
			// }
			var _postData = {
				folder_id : myDocList.folderId,
				strIndex : exsoft.util.layout.getSelectBox('myDoc_select','option'),
				strKeyword1 : $("#strKeyword1").val(),
			}
			// Grid setup
			$('#myDocList').jqGrid({
				url: exsoft.contextRoot + '/document/workDocumentList.do',
				mtype:"post",
				datatype:'json',
				jsonReader:{
					page:'page',total:'total',root:'list'
				},
				colNames : ['doc_id','page_cnt','relation_doc','is_locked','doc_name','type_name','creator_name','create_date',
					'acl_create','acl_changePermission','acl_checkoutCancel','root_id','doc_type','lock_date','lock_owner','is_inherit_acl','lock_status', 'folder_id','acl_level'],
				colModel : [
					{name:'doc_id', index:'doc_id', width:1, editable:false, sortable:false, key:true, align:'center', hidden:true},
					{name:'page_cnt', index:'page_cnt', width:10, editable:false, sortable:false, resizable:true, align:'center',
						formatter : function(cellValue, option, rowObject) {
							return cellValue > 0 ? "<li class='icon' id='file_"+rowObject.doc_id+"'><img src='"+ exsoft.contextRoot +"/img/icon/attach.png' class='attach_file'></li>" : "";
						}
					},
					{name:'relation_doc', index:'relation_doc', width:10, editable:false, sortable:false, resizable:true, align:'center',
						formatter : function(cellValue, option, rowObject) {
							return cellValue > 0 ? "<li class='icon' id='relation _"+rowObject.doc_id+"'><img src='"+ exsoft.contextRoot +"/img/icon/link.png' class='relative_docs'></li>" : "";
						}
					},
					{name:'is_locked', index:'is_locked', width:10, editable:false, sortable:false, resizable:true, align:'center',
						formatter : function(cellValue, option) {
							return cellValue == 'T' ? "<li class='icon'><img src='"+ exsoft.contextRoot +"/img/icon/lock1.png' alt='' class='doc_lock'></li>" : "";
						},
						cellattr : function(rowId, cellValue, rowObject) {
							var tooltip = '반출자 : '+rowObject.lock_owner+'\n';
							tooltip += '반출일시 : '+rowObject.lock_date+'\n';
							return rowObject.is_locked == 'T' ? ' title="'+tooltip+'"' : "";
						}
					},
					{name:'doc_name', index:'doc_name', width:150, editable:false, sortable:true, resizable:true, title:true,
						formatter : function(cellValue, option, rowObject){
							return "<img src='{0}{1}' class='extension'>".format(exsoft.contextRoot, rowObject.page_extension_img) +
								"<a href='#' onclick='exsoft.preview.event.getPreview(\"{0}\")'>{1}</a>".format(rowObject.doc_id, cellValue) +
								"<a href='#' onclick='myDocList.event.popDocDetail(\"{0}\")'><img src='{1}/img/icon/new_window.png'></a>".format(rowObject.doc_id, exsoft.contextRoot);
						},
						cellattr : function(rowId, cellValue, rowObject) {
							return ' title="'+rowObject.doc_name+'"';
						}
					},
					{name:'type_name', index:'type_name', width:20, editable:false, sortable:true, resizable:true, align:'center'},
					{name:'creator_name', index:'creator_name', width:30, editable:false, sortable:true, resizable:true, align:'center'},
					{name:'create_date', index:'create_date', width:30, editable:false, sortable:true, resizable:true, align:'center'},
					{name:'acl_level', index:'acl_level', width:20, editable:false, sortable:false, resizable:true, align:'center',
						formatter : function(cellValue, option) {
							return "<li class='previlege'><img src='"+ exsoft.contextRoot +"/img/icon/prev_"+ (cellValue.toLowerCase()).substring(0,1) +".png' class='previlege_grade'><label class='hide'>" + exsoft.util.grid.getAclItemTitle(cellValue) + "</label</li>";
						},
						cellattr: function (rowId, cellValue, rowObject) {
							var tooltip = '소유자 : '+rowObject.owner_name+'\n';
							tooltip += '기본권한 : '+ exsoft.util.grid.getAclItemTitle(rowObject.acl_level) + '\n';
							tooltip += '반출취소 : '+(rowObject.acl_checkoutCancel == 'T' ? "가능" : "없음")+'\n';
							tooltip += '권한변경 : '+(rowObject.acl_changePermission == 'T' ? "가능" : "없음");
							return ' title="'+tooltip+'"';
						}
					},
					{name:'acl_create', index:'acl_create', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'acl_changePermission', index:'acl_changePermission', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'acl_checkoutCancel', index:'acl_checkoutCancel', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'root_id', index:'root_id', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'doc_type', index:'doc_type', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'lock_date', index:'lock_date', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'lock_owner', index:'lock_owner', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'is_inherit_acl', index:'is_inherit_acl', width:1, editable:false, sortable:false, align:'center', hidden:true},
					{name:'lock_status',index:'lock_status',width:1,editable:false,sortable:false,align:'center',hidden:true},
					{name:'folder_id',index:'folder_id',width:1, editable:false,sortable:false,align:'center',hidden:true},
				],
				autowidth:true,viewrecords: true,multikey: "ctrlKey",multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
				height:"auto",
				sortname : "create_date",
				sortorder:"desc",
				scrollOffset: 0,
				viewsortcols:'vertical',
				rowNum : myDocList.grid.pageSize,
				emptyDataText: "데이터가 없습니다.",
				caption:'문서목록',
				postData : _postData,
				// Dispatch clicks on the attachment / relation / lock icons
				onCellSelect : function(rowid, iCol, cellcontent, e) {
					var setCol = "";
					var preview = 'doc_preview';
					var file = 'attach_file';
					var relation = 'relative_docs';
					var lock = 'doc_lock';
					if(~cellcontent.indexOf(preview)){
						setCol = preview;
					} else if(~cellcontent.indexOf(file)) {
						setCol = file;
					} else if (~cellcontent.indexOf(relation)) {
						setCol = relation;
					} else if (~cellcontent.indexOf(lock)) {
						setCol = lock;
					}
					if(iCol == 0){
						// Checkbox column: toggle the row selection (select sets rowid, uncheck removes it)
						$("#myDocList").jqGrid('setSelection',rowid);
					} else if(setCol == preview){
					} else if(setCol == file && cellcontent != ''){
						var row = $("#myDocList").getRowData(rowid);
						documentListLayerWindow.open.openAttachWindow(row);
					} else if(setCol == relation && cellcontent != ''){
						var row = $("#myDocList").getRowData(rowid);
						documentListLayerWindow.open.openRelationWindow(row);
					} else if(setCol == lock && cellcontent != ''){
					}
				},
				loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('myDocList');
					exsoft.util.grid.gridNoDataMsgInit('myDocList');
				}
				,loadComplete: function(data) {
					if ($("#myDocList").getGridParam("records")==0) {
						exsoft.util.grid.gridNoDataMsg("myDocList","nolayer_data");
					}else {
						exsoft.util.grid.gridViewRecords('myDocList');
					}
					exsoft.util.grid.gridInputInit(false);
					exsoft.util.grid.gridPager("#myDocPager",data);
					// Per-row right-click context menu
					$("tr.jqgrow", this).contextMenu('documentListLayer_context_menu', {
						bindings: {
							// Update
							'documentListLayer_update' : function(trigger) {
								var row = $("#myDocList").getRowData(trigger.id);
								var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
								if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
									jAlert("문서 수정 권한이 없습니다.", "수정", 0);
									return false;
								}
								documentUpdate(trigger.id, fRefreshDocumentList);
							},
							// Delete
							'documentListLayer_delete': function(trigger) {
								var row = $("#myDocList").getRowData(trigger.id);
								var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
								var jsonArr = [{
									doc_id : row.doc_id
									, root_id : row.root_id
									, is_locked : row.lock_status
									, doc_type : row.doc_type
								}];
								if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("DELETE")) {
									jAlert("문서 삭제 권한이 없습니다.", "삭제", 0);
									return false;
								}
								documentListLayerWindow.gObjectID = "myDocList";
								documentListLayerWindow.event.documentDeleteSend(jsonArr, "ONLY");
							},
							// Move
							'documentListLayer_move': function(trigger) {
								var row = $("#myDocList").getRowData(trigger.id);
								var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
								var jsonArr = [{
									doc_id : row.doc_id
									, doc_name : exsoft.util.common.stripHtml(row.doc_name)
									, is_locked : row.lock_status
									, root_id : row.root_id
									, doc_type : row.doc_type
									, is_inherit_acl : row.is_inherit_acl
									, folder_id : myDocList.folderId
								}];
								if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
									jAlert("문서 이동 권한이 없습니다.", "이동", 0);
									return false;
								}
								documentListLayerWindow.gObjectID = "myDocList";
								documentListLayerWindow.gWorkType = null;
								documentListLayerWindow.event.documentMove("ONLY", jsonArr);
							},
							// Copy
							'documentListLayer_copy': function(trigger) {
								var row = $("#myDocList").getRowData(trigger.id);
								var aclLevel = exsoft.util.common.getAclItemTitleEn(exsoft.util.common.stripHtml(row.acl_level));
								var jsonArr = [{
									doc_id : row.doc_id
									, doc_name : exsoft.util.common.stripHtml(row.doc_name)
									, is_locked : row.lock_status
									, root_id : row.root_id
									, doc_type : row.doc_type
									, is_inherit_acl : row.is_inherit_acl
									, folder_id : myDocList.folderId
								}];
								if(exsoft.util.common.getAclLevel(aclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
									jAlert("문서 복사 권한이 없습니다.", "복사", 0);
									return false;
								}
								documentListLayerWindow.gObjectID = "myDocList";
								documentListLayerWindow.gWorkType = null;
								documentListLayerWindow.event.documentCopy("ONLY", jsonArr);
							} ,
							// Add to favorites
							'documentListLayer_favorite_add' : function(trigger) {
								var row = $('#myDocList').getRowData(trigger.id);
								var jsonArr = [{
									doc_id : row.doc_id
									,root_id : row.root_id
								}];
								documentListLayerWindow.event.documentAddFavoriteSend(jsonArr);
							},
							// Add to work cart
							'documentListLayer_work_add': function(trigger) {
								var row = $("#myDocList").getRowData(trigger.id);
								var jsonArr = [{
									doc_id : row.doc_id
									, root_id : row.root_id
									, is_locked : row.lock_status
								}];
								documentListLayerWindow.event.documentTempworkSend(jsonArr);
							} ,
							// Cancel checkout
							'documentListLayer_checkout_cancel':function(trigger) {
								var row = $('#myDocList').getRowData(trigger.id);
								var jsonArr = [{
									doc_id : row.doc_id
									, root_id : row.root_id
									, is_locked : row.lock_status
									, doc_type : row.doc_type
								}];
								documentListLayerWindow.gObjectID = "myDocList";
								documentListLayerWindow.event.documentCancelCheckoutSend(jsonArr, "ONLY");
							},
						},
						// Show/hide menu entries before the menu opens
						onContextMenu: function(event) {
							var row = $('#myDocList').getRowData(event.currentTarget.id);
							$("#documentListLayer_update").removeClass('hide');
							$("#documentListLayer_delete").removeClass('hide');
							$("#documentListLayer_move").removeClass('hide');
							$("#documentListLayer_copy").removeClass('hide');
							$("#documentListLayer_favorite_add").removeClass('hide');
							if (row.lock_status == "T")
								$("#documentListLayer_checkout_cancel").removeClass('hide');
							else
								$("#documentListLayer_checkout_cancel").addClass("hide");
							return true;
						}
					});
				}
				,loadError:function(xhr, status, error) {
					exsoft.util.error.isErrorChk(xhr);
				}
			});
			// Align column headers and apply localised labels
			var headerData = '{"doc_id":"doc_id","page_cnt":"<img src=\'{0}/img/icon/attach.png\' class=\'attach_file\'>","relation_doc":"<img src=\'{0}/img/icon/link.png\' class=\'relative_docs\'>","is_locked":"<img src=\'{0}/img/icon/lock.png\' class=\'doc_lock\'>","doc_name":"제목","type_name":"문서유형","creator_name":"등록자","create_date":"등록일","acl_level":"권한"}'.format(exsoft.contextRoot);
			exsoft.util.grid.gridColumHeader('myDocList',headerData,'center');
			headerData = null;
		},
		// Reload the grid at the given page number
		refresh : function(page) {
			$("#myDocList").setGridParam({page:page}).trigger("reloadGrid");
		}
	},
	treeFunctions : {
		// Create the mypage tree once; refresh it on subsequent calls
		initTree : function() {
			var treeOption = {
				context : exsoft.contextRoot,
				contextAction : myDocList.treeContextAction,
				url : "/folder/folderList.do",
			};
			if (myDocList.tree.mypageTree === null) {
				treeOption.divId = "#mypageTree";
				treeOption.mapId = Constant.MAP_MYPAGE;
				treeOption.workType = Constant.WORK_MYPAGE;
				myDocList.tree.mypageTree = new XFTree(treeOption);
				myDocList.tree.mypageTree.template_context(); // right-click context menu
				myDocList.tree.mypageTree.callbackSelectNode = myDocList.callback.selectTreeNode;
				myDocList.tree.mypageTree.init(); // department rootId is resolved server-side
			} else {
				// Refresh the document list of the current folder
				myDocList.tree.mypageTree.refresh();
			}
		},
		refresh : function() {
			myDocList.tree.mypageTree.refresh();
		}
	},
	ui : {
		// Update the breadcrumb title and full path labels
		setNavigationText : function(nodeTitle, path) {
			$("#nav_title").text(nodeTitle);
			$("#nav_fullpath").text(path);
		}
	},
	event : {
		// Search documents in the current folder
		searchDocument : function() {
			var _post = {
				strIndex:exsoft.util.common.getDdslick("#myDoc_select"),
				strKeyword1:$("#strKeyword1").val(),
				folder_id:myDocList.folderId,
				is_search:'true'
			};
			exsoft.util.grid.gridPostDataInitRefresh("myDocList", exsoft.contextRoot + "/document/workDocumentList.do", _post);
		},
		// Open the document-detail layer
		popDocDetail : function(docId) {
			exsoft.document.layer.docCommonFrm('doc_detail_wrapper', 'doc_detail', docId);
		}
	},
	callback : {
		// Callback invoked by registFolderWindow.js after create/modify
		refreshTree : function (e, data) {
			myDocList.treeFunctions.refresh();
		},
		// Callback invoked after a destination folder has been picked
		moveFolder : function (parentFolder) {
			var _tree = myDocList.tree.mypageTree;
			// 1. Reject moving to the same location
			if (parentFolder.original.parentId == _tree.getCurrentNodeParentId()) {
				jAlert("동일한 위치로 이동할 수 없습니다.");
				return;
			}
			// 2. Reject moving into the current folder or one of its descendants
			for (var i = 0; i < parentFolder.parentIdList.length; i++) {
				if (parentFolder.parentIdList[i] == _tree.getCurrentNodeId() || parentFolder.id == _tree.getCurrentNodeParentId()) {
					jAlert("현재 폴더 및 현재 폴더 하위로 이동할 수 없습니다.");
					return;
				}
			}
			var targetRootFolder = parentFolder.mapId == "PROJECT" && parentFolder.parentGroup == null ? parentFolder.id : parentFolder.parentGroup.id;
			var changeRootFolder = _tree.getFolderGroupId(_tree.selectedNode[0]).id != targetRootFolder ? "T" : "F";
			var jsonObject = {
				type : "MOVE",
				folder_id : _tree.getCurrentNodeId(),
				folder_name_ko : _tree.getCurrentNodeName(),
				parent_id : parentFolder.id,
				map_id : parentFolder.mapId,
				parentGroup_id : targetRootFolder,
				root_folder_change : changeRootFolder
			};
			exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject, exsoft.contextRoot+"/folder/folderControl.do", "", function(data, param) {
				if (data.result == "true") {
					myDocList.treeFunctions.refresh();
				}
			});
		},
		// Tree node selected: reload list and breadcrumb for that folder
		selectTreeNode : function (e, data) {
			myDocList.folderId = data.node.id;
			myDocList.grid.page = 1;
			documentListLayerWindow.gCurrentFolderId = data.node.id;
			docDetailSearch.functions.changeFolderId(data.node.id);
			// 1. Load the document list
			myDocList.event.searchDocument();
			// 2. Update the navigation labels
			myDocList.ui.setNavigationText(data.node.text, data.node.full_path.join(" < "));
		},
	}
}
<file_sep>/EDMS3/WebContent/js/config/userConfig.js
/**
* 사용자 환경설정 js
*/
$(function(){
	// Apply the maxlength input filter.
	exsoft.util.filter.maxNumber();
	// Initialise the dropdowns (language / page size / document search period).
	exsoft.util.common.ddslick('#language', 'language', '', 123, function(){});
	exsoft.util.common.ddslick('#pg_size', 'pg_size', '', 70, function(){});
	exsoft.util.common.ddslick('#doc_search', 'doc_search', '', 70, function(){});
	// Skin buttons: highlight the clicked one, clear the previous selection.
	$('.default_setting').find('a[class^="skin_set"]').bind("click", function(e){
		e.preventDefault();
		var skinButtons = $('.default_setting').find('a[class^="skin_set"]');
		skinButtons.removeClass('pushed');
		$(e.currentTarget).addClass('pushed');
	});
});
/**
 * User preference/configuration popup controller.
 * Manages the three tabs (my info / password / my config): activates a tab,
 * loads its current values from the server into a DataBinder-bound form and
 * posts updates back to userConfigUrl.
 */
var exsoftUserConfigFunc = {
	currentTab : "",
	tabClass : ['myinfo','passwdConf','myconfig'],
	tabForms : ['myinfoFrm','passwdConfFrm','myconfigFrm'],
	// Popup window height (px) per tab.
	tabHeight : {
		'myinfo' : '572',
		'passwdConf' : '592',
		'myconfig' : '552',
	},
	notyId : "#noty",
	userConfigUrl : "/user/userConfigProc.do",
	userConfigBinder : null,
	init : {
		// Clear the notification area.
		notyEmpty : function(notyId) {
			$(notyId).html('');
		},
		// Reset tab selection: remove "selected" from every tab header.
		tabSelectInit : function() {
			for (var n in exsoftUserConfigFunc.tabClass) {
				$("#"+exsoftUserConfigFunc.tabClass[n]).removeClass("selected");
			}
		},
		// Hide every tab form.
		tabFormInit : function() {
			for (var n in exsoftUserConfigFunc.tabForms) {
				$("#"+exsoftUserConfigFunc.tabForms[n]).addClass("hide");
			}
		},
		// First entry point when this page is opened in a new window.
		pageInit : function(tabType,width,height){
			exsoftUserConfigFunc.currentTab = tabType;
			exsoftUserConfigFunc.init.tabSelectInit();
			exsoftUserConfigFunc.ui.tabActiveStatus(tabType); // activate the selected tab and its content
			exsoftUserConfigFunc.event.configDetail(tabType); // server call for current values
			window.resizeTo(width,height); // resize the popup window
		},
	},
	open : {
	},
	layer : {
	},
	close : {
	},
	event : {
		// Tab selection handler.
		tabSelectFunc : function(tabType) {
			if(exsoftUserConfigFunc.currentTab != tabType) {
				exsoftUserConfigFunc.currentTab = tabType;
				exsoftUserConfigFunc.init.tabSelectInit();
				exsoftUserConfigFunc.init.tabFormInit();
				exsoftUserConfigFunc.ui.tabActiveStatus(tabType); // activate the selected tab and its content
				exsoftUserConfigFunc.event.configDetail(tabType); // server call for current values
				window.resizeTo(750,exsoftUserConfigFunc.tabHeight[tabType]); // resize per-tab
				window.focus();
			}
			// Clicking the already-active tab is a no-op.
		},
		// Load the stored configuration values for the given tab.
		configDetail : function(tabType) {
			//var userConfigBinder = new DataBinder("#"+tabType+"Frm");
			exsoftUserConfigFunc.userConfigBinder = new DataBinder("#"+tabType+"Frm");
			exsoft.util.ajax.ajaxPopDataFunctionWithCallback({type:'view',updateType:tabType}, exsoft.contextRoot+exsoftUserConfigFunc.userConfigUrl,function(data, e) {
				if(data.result == "false") {
					jAlert('데이터를 로드하는데 실패했습니다.','확인',0);
					exsoft.util.layout.windowClose();
				}else {
					if(tabType == "myconfig") {
						// Skin is applied separately later. :: addClass("pushed");
						exsoft.util.layout.setSelectBox("language",data.userVO.language); // language
						exsoft.util.layout.setSelectBox("pg_size",data.userVO.page_size); // rows per page
						exsoft.util.layout.setSelectBox("doc_search",data.userVO.doc_search); // "my documents" display period
						exsoft.util.layout.setRadioVal("view_type",data.userVO.view_type); // preview setting
					}else {
						// myinfo / passwdConf tabs: bind the whole VO into the form.
						exsoftUserConfigFunc.userConfigBinder.binding(data.userVO);
					}
				}
			});
		},
		// Validate (where applicable) and post the current tab's values.
		updateConfigProc : function() {
			// Form validation applies to the user-info and password tabs only.
			var jsonObject = null;
			var scopeFrm = "";
			if(exsoftUserConfigFunc.currentTab != 'myconfig') {
				scopeFrm = exsoftUserConfigFunc.currentTab + "Frm";
				var isValid = $("#"+scopeFrm).validation({
					options : {
						debug : false,alert : false,effect : false,
						guideMessage : true,
						notyId : 'noty',
					}
				});
				if(!isValid) {
					return false;
				}
				if(exsoftUserConfigFunc.currentTab == "passwdConf") {
					// Copy the confirmed password field into the field the server expects.
					exsoftUserConfigFunc.userConfigBinder.set("user_pass",exsoftUserConfigFunc.userConfigBinder.get("passwd1"));
				}
			}else {
				exsoftUserConfigFunc.userConfigBinder.set("language",exsoft.util.layout.getSelectBox('language','option')); // language
				exsoftUserConfigFunc.userConfigBinder.set("theme","themeGray"); // skin (currently fixed)
				exsoftUserConfigFunc.userConfigBinder.set("page_size",exsoft.util.layout.getSelectBox('pg_size','option')); // rows per page
				exsoftUserConfigFunc.userConfigBinder.set("doc_search",exsoft.util.layout.getSelectBox('doc_search','option')); // "my documents" display period
				exsoftUserConfigFunc.userConfigBinder.set("view_type",exsoft.util.layout.getRadioVal('view_type')); // body preview
				// Attachment-icon preview: not handled here.
			}
			// Per-tab discriminator values sent to the server.
			exsoftUserConfigFunc.userConfigBinder.set("type","update");
			exsoftUserConfigFunc.userConfigBinder.set("updateType",exsoftUserConfigFunc.currentTab);
			exsoft.util.ajax.ajaxPopDataFunctionWithCallback(exsoftUserConfigFunc.userConfigBinder.getDataToJson(), exsoft.contextRoot + exsoftUserConfigFunc.userConfigUrl,
				function(data, e){
					if(data.result == "true"){
						// Show the success message briefly, then clear it.
						$(exsoftUserConfigFunc.notyId).html(data.message);
						setTimeout("exsoftUserConfigFunc.init.notyEmpty('"+exsoftUserConfigFunc.notyId+"')",2000);
					}else {
						$(exsoftUserConfigFunc.notyId).html(data.message);
					}
				}
			);
		},
	},
	ui : {
		// Activate the selected tab header and show its form.
		tabActiveStatus : function(tabType){
			$("#"+tabType).addClass("selected");
			$("#"+tabType+"Frm").removeClass("hide");
		},
	},
	callback : {
	},
}
package kr.co.exsoft.process.controller;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.controller.DocumentAuthController;
import kr.co.exsoft.document.service.TypeService;
import kr.co.exsoft.document.vo.TypeVO;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.library.ExcelView;
import kr.co.exsoft.eframework.library.LocaleLibrary;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.ConfigData;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.process.service.ProcessService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springframework.web.servlet.ModelAndView;
/**
 * Collaboration-process controller.
 *
 * Serves the collaboration layout page and JSON endpoints for listing,
 * counting and acting on collaboration processes (approval request,
 * approve, reject, view, create).
 *
 * @author <NAME>
 * @since 2015.03.12
 * @version 3.0
 *
 */
@Controller
@SessionAttributes("sessionVO")
@RequestMapping("/process")
public class ProcessAuthController {
	@Autowired
	private ProcessService processService;
	@Autowired
	private MessageSource messageSource;
	@Autowired
	private TypeService typeService;
	protected static final Log logger = LogFactory.getLog(ProcessAuthController.class);
	/**
	 *
	 * <pre>
	 * 1. Purpose   : collaboration layout page
	 * 2. Behaviour : loads the document-type list and forwards to the list view
	 * </pre>
	 * @Method Name : processLayout
	 * @param sessionVO
	 * @param model
	 * @param map
	 * @param request
	 * @return String view name "process/processList"
	 */
	@RequestMapping("/processLayout.do")
	public String processLayout(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map,HttpServletRequest request) {
		CommonUtil.setSessionToModel(model, sessionVO); // call by reference
		// Document types: query filtered by is_doc.
		HashMap<String, Object> param = new HashMap<String, Object>();
		List<TypeVO> typeList = new ArrayList<TypeVO>();
		try{
			param.put("is_doc", Constant.T);
			param.put("is_hidden", Constant.T); // the query compares with !=, so T is passed
			typeList = typeService.typeList(param);
		}catch (BizException e){
			logger.info(StringUtil.getErrorTrace(e));
		} catch (Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
		}
		model.addAttribute("typeList",typeList);
		model.addAttribute("menuType",Constant.TOPMENU_WORKPROCESS);
		model.addAttribute("processType",Constant.PROCESS_WRITE_ING_MENU);
		return "process/processList";
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose   : fetch the collaboration list for each menu
	 * 2. Behaviour : builds search/paging parameters, optionally renders the
	 *                result as an Excel download when oper == EXCEL_FORMAT
	 * </pre>
	 * @Method Name : processList
	 * @param model
	 * @param sessionVO
	 * @param map
	 * @param request
	 * @return ModelAndView jsonView (or ExcelView for downloads)
	 */
	@RequestMapping(value="/processList.do")
	@ResponseBody
	public ModelAndView processList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		HashMap<String, Object> param = new HashMap<String, Object>();
		// User condition.
		param.put("user_id", sessionVO.getSessId());
		// Validate/normalize the input parameters.
		param.put("contextRoot",sessionVO.getSessContextRoot()); // for image handling in the page
		param.put("sdate",map.get("sdate") != null ? map.get("sdate").toString() : "");
		param.put("edate",map.get("edate") != null ? map.get("edate").toString() : "");
		param.put("type",map.get("type") != null ? map.get("type").toString() : Constant.PROCESS_WRITE_ING_MENU); // collaboration menu type
		// Map the search-index key to the actual DB column.
		String strIndex = StringUtil.getMapString(map, "strIndex");
		String strIndexData = "";
		switch (strIndex) {
			case "doc_name" :
				strIndexData = "P.NAME";
				break;
			case "doc_description" :
				strIndexData = "P.CONTENT";
				break;
			case "creator_name" :
				strIndexData = "P.CREATOR_NAME";
				break;
		}
		param.put("strIndexColumn", strIndexData);
		param.put("strKeyword1", StringUtil.getMapString(map, "strKeyword1"));
		// NOTE(review): "PRECESS_ID" looks like a typo for "PROCESS_ID" — confirm
		// against the actual DB column name before changing.
		param.put("orderCol", StringUtil.getMapString(map, "sidx", "PRECESS_ID"));
		param.put("orderType", StringUtil.getMapString(map, "sord", "ASC"));
		param.put("page_size", StringUtil.getMapString(map, "rows", sessionVO.getSessPage_size()));
		param.put("nPage",CommonUtil.getPage(map));
		// Detailed-search parameter check.
		param.put("page_name",map.get("page_name") != null ? map.get("page_name") : ""); // attachment file name
		// Excel download handling: parameter "oper".
		String oper = map.get("oper") != null ? map.get("oper").toString() : "";
		String[] members ;
		String[] cell_headers ;
		int[] cell_widths;
		try{
			// 1.1 For Excel downloads, re-encode the Korean search keyword.
			if(oper.equals(Constant.EXCEL_FORMAT)) {
				param.put("nPage",1);
				param.put("page_size",ConfigData.getInt("EXCEL_MAX_LIMIT")); // max rows saved to Excel
				param.put("strKeyword",map.get("strKeyword") != null ? new String(map.get("strKeyword").toString().getBytes("8859_1"),"utf-8") : "");
			}
			// 2. Fetch the collaboration list.
			resultMap = processService.processList(param);
			// Excel download handling :: list rendering.
			if(oper.equals(Constant.EXCEL_FORMAT)) {
				CommonUtil.getExcelList(resultMap,model);
				members = new String[]{ "name", "status_nm", "expect_date","complete_date","author_nm","approval_count","receiver_count"};
				cell_headers = new String[]{ "제목", "상태", "완료예정일","최종수정일","작성자","승인","열람"};
				cell_widths = new int[]{ 80,15,20,20,15,15,15};
				model.addAttribute("members",members);
				model.addAttribute("cell_headers",cell_headers);
				model.addAttribute("cell_widths",cell_widths);
				model.addAttribute("fileName","downLoad.xls");
				return new ModelAndView(new ExcelView());
			}
		}catch(BizException e){
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("process.list.error",new Object[0],locale));
		}
		ModelAndView mav = new ModelAndView("jsonView",resultMap);
		return mav;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose   : fetch the per-menu document counts shown above the left menu
	 * 2. Behaviour : counts collaboration-related documents for the session user
	 * </pre>
	 * @Method Name : processCount
	 * @param sessionVO
	 * @param model
	 * @param map
	 * @return Map<String,Object> (data holds -1000/-2000 sentinels on failure)
	 */
	@RequestMapping("/processCount.do")
	@ResponseBody
	public Map<String,Object> processCount(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map){
		@SuppressWarnings("unused")
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		try{
			map.put("user_id", sessionVO.getSessId());
			resultMap = processService.processCount(map);
		}catch(BizException e){
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("data",-1000);
		}catch(Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("data",-2000);
		}
		return resultMap;
	}
	// Fetch the most recently registered collaboration items for the session user.
	@RequestMapping(value="/processRecentlyList.do")
	@ResponseBody
	public ModelAndView processRecentlyList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		// NOTE(review): "unused" suppression is stale here — locale IS used in the
		// catch block below.
		@SuppressWarnings("unused")
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		HashMap<String, Object> param = new HashMap<String, Object>();
		// User condition.
		param.put("user_id", sessionVO.getSessId());
		try{
			// 1. Fetch the recently registered collaboration list.
			resultMap = processService.processRecentlyList(param);
		}catch(BizException e){
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("process.recently.list.error",new Object[0],locale));
		}
		ModelAndView mav = new ModelAndView("jsonView",resultMap);
		return mav;
	}
	// Fetch the detail of one recently registered collaboration item.
	@RequestMapping("/selectProcessRecently.do")
	@ResponseBody
	public Map<String,Object> selectProcessRecently(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map){
		// NOTE(review): "unused" suppression is stale here — locale IS used in the
		// catch block below.
		@SuppressWarnings("unused")
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		try{
			resultMap = processService.selectProcessRecently(map);
		}catch(BizException e){
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("process.recently.info.error",new Object[0],locale));
		}
		return resultMap;
	}
	// Dispatch collaboration actions (approval request/approve/reject/view/create)
	// to the matching service method, keyed by the "actionType" parameter.
	@RequestMapping("/processControl.do")
	@ResponseBody
	public Map<String,Object> processControl(@ModelAttribute SessionVO sessionVO, Model model, @RequestParam HashMap<String,Object> map, HttpServletRequest request){
		@SuppressWarnings("unused")
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		Object[] msgObject = null;
		try{
			// 1. Select the service call by action type.
			String strActionType = map.get("actionType") != null ? map.get("actionType").toString() : "";
			switch (strActionType) {
				case Constant.PROCESS_ACTION_APPROVEREQUEST:resultMap = processService.approveAction(map, sessionVO);msgObject = new String[]{"승인 요청이"};break;
				case Constant.PROCESS_ACTION_APPROVE:resultMap = processService.approveAction(map, sessionVO);msgObject = new String[]{"승인이"};break;
				case Constant.PROCESS_ACTION_APPROVEREJECT:resultMap = processService.approveAction(map, sessionVO);msgObject = new String[]{"반려가"};break;
				case Constant.ACTION_VIEW:resultMap = processService.processDetail(map);msgObject = new String[]{"협업 정보 가져오기에"};break;
				case Constant.ACTION_CREATE:resultMap = processService.processWrite(sessionVO, model, map, request); msgObject = new String[]{"신규 업무 등록이"}; break; // register collaboration
				default:
					break;
			}
			resultMap.put("message",messageSource.getMessage("process.control.sucess", msgObject,locale));
		}catch(BizException e){
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			logger.info(StringUtil.getErrorTrace(e));
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("process.control.error", new String[]{e.getMessage()},locale));
		}
		return resultMap;
	}
}
<file_sep>/EDMS3/WebContent/js/popup/selectAccessorWindow.js
/**
 * "Select accessor" (permission target) popup window.
 *
 * Lets the user pick users/groups from a department or project tree, a
 * group-user grid and a search grid, collect them into the selected list and
 * hand them back to the opener (via the callback) together with the chosen
 * default folder/document permissions.
 *
 * Fixes vs. the previous revision:
 *  - removed a duplicate "mapId" key in the tree options (dead first entry);
 *  - accessorParentList is kept as an array so the $.inArray() duplicate
 *    check against the parent grid actually works ($.inArray expects arrays);
 *  - addAccessor no longer splits an empty joined string (which produced
 *    [""] and silently added a bogus empty GROUP accessor) and tolerates a
 *    null accessorCheckedIds;
 *  - the initSearchGroupUser refresh branch now posts to its own
 *    searchGroupUser.do endpoint instead of groupUserList.do (copy-paste).
 */
var selectAccessorWindow = {
	callback : null,			// function that receives the selected accessor rows on submit
	accessorCheckedIds : null,	// accessor ids checked in the tree, as "id#name" strings
	accessorParentList : null,	// accessor ids already present in the parent grid (array)
	isInitialized : false,
	tree : {
		groupTree : null,		// department tree object
		projectTree : null,		// project tree object
		initTree : function (type) {
			var _treeName = type == "GROUP" ? "groupTree" : "projectTree";
			var _treeOpt = {
				divId : "accessorWindow_" + _treeName,
				context : exsoft.contextRoot,
				url : "/group/groupList.do",
				// FIX: the original literal declared "mapId" twice; only the last
				// entry took effect, so the dead first entry was removed.
				mapId : type != "GROUP" ? Constant.MAP_PROJECT : Constant.MAP_MYDEPT,
				workType : type != "GROUP" ? Constant.WORK_PROJECT : Constant.WORK_ALLDEPT
			};
			if (type == "GROUP") {
				if (selectAccessorWindow.tree.groupTree == null) {
					selectAccessorWindow.tree.groupTree = new XFTree(_treeOpt);
					selectAccessorWindow.tree.groupTree.template_multiCheck(false);
					selectAccessorWindow.tree.groupTree.callbackSelectNode = selectAccessorWindow.callbackFunctions.selectTreeNode;
					selectAccessorWindow.tree.groupTree.callbackAllSelectNodeData = selectAccessorWindow.callbackFunctions.checkedChangeTreeNode;
					selectAccessorWindow.tree.groupTree.init();
				} else {
					selectAccessorWindow.tree.groupTree.refresh();
				}
			} else {
				if (selectAccessorWindow.tree.projectTree == null) {
					selectAccessorWindow.tree.projectTree = new XFTree(_treeOpt);
					selectAccessorWindow.tree.projectTree.template_multiCheck(false);
					selectAccessorWindow.tree.projectTree.callbackSelectNode = selectAccessorWindow.callbackFunctions.selectTreeNode;
					selectAccessorWindow.tree.projectTree.callbackAllSelectNodeData = selectAccessorWindow.callbackFunctions.checkedChangeTreeNode;
					selectAccessorWindow.tree.projectTree.init();
				} else {
					selectAccessorWindow.tree.projectTree.refresh();
				}
			}
		}
	},
	// Entry point: remember the callback, snapshot the parent grid's accessors
	// and open the layer. "type" == "DOCUMENT" hides the folder-permission row.
	init : function(callback, parentGridId, type) {
		selectAccessorWindow.callback = callback;
		selectAccessorWindow.functions.getAccessorParentList(parentGridId);
		selectAccessorWindow.initOnce();
		if (type == "DOCUMENT") {
			$("#selectAccessorWindowFolderTr").hide();
		} else {
			$("#selectAccessorWindowFolderTr").show();
		}
		selectAccessorWindow.open();
	},
	// One-time initialization of grids and combo boxes.
	initOnce : function() {
		if (!selectAccessorWindow.isInitialized) {
			selectAccessorWindow.isInitialized = true;
			selectAccessorWindow.grid.initTreeGroupUser();
			selectAccessorWindow.grid.initSearchGroupUser();
			exsoft.util.common.ddslick("#selectAccessorWindowMapList", "selectAccessorWindowMapList", "", 262, function(divId,selectedData){
				// Combo-box change: switch between department and project tree.
				selectAccessorWindow.event.changeMap(selectedData.selectedData.value);
			});
			exsoft.util.common.ddslick("#selectAccessorWindowDefaultFolderAcl", "selectAccessorWindowDefaultFolderAcl", "", 80, function(divId,selectedData){
			});
			exsoft.util.common.ddslick("#selectAccessorWindowDefaultDocAcl", "selectAccessorWindowDefaultDocAcl", "", 80, function(divId,selectedData){
			});
		}
	},
	open : function() {
		exsoft.util.layout.divLayerOpen("doc_authSet_wrapper", "doc_authSet");
	},
	close : function() {
		exsoft.util.layout.divLayerClose("doc_authSet_wrapper", "doc_authSet");
	},
	grid : {
		// Users of the tree-selected group.
		initTreeGroupUser : function(postData) {
			if ($("#initTreeGroupUser")[0].grid == undefined) {
				$("#initTreeGroupUser").jqGrid({
					url: exsoft.contextRoot + "/user/groupUserList.do",
					mtype:"post",
					datatype:"json",
					postData : postData,
					list : "",
					jsonReader:{
						root:"list"
					},
					colNames:["user_id","사용자명"],
					colModel:[
						{name:"user_id",index:"user_id",width:10, editable:false,sortable:true,resizable:true,hidden:true},
						{name:"user_nm",index:"user_nm",width:70, editable:false,sortable:true,resizable:true,hidden:false,align:"left"}
					],
					autowidth:true,
					viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "user_nm",
					sortorder:"desc",
					scroll:true, // virtual scrolling
					scrollOffset : 0,
					rowNum : 15,
					rowList : exsoft.util.grid.listArraySize(),
					emptyDataText: "데이터가 없습니다.",
					caption:"사용자 목록",
					pagerpos: "center",
					pginput: true,
					loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridNoDataMsgInit("initTreeGroupUser");
						exsoft.util.grid.gridTitleBarHide("initTreeGroupUser");
					}
					,loadComplete: function() {
						if ($("#initTreeGroupUser").getGridParam("records")==0) {
							exsoft.util.grid.gridNoDataMsg("initTreeGroupUser","nolayer_data");
						}
						exsoft.util.grid.gridInputInit(false); // pager input: digits only
						exsoft.util.grid.gridResize('initTreeGroupUser','accessorWindowGroupUserGridTarget',20,0);
					}
				});
			} else {
				exsoft.util.grid.gridPostDataRefresh("initTreeGroupUser", exsoft.contextRoot + "/user/groupUserList.do", postData);
			}
		},
		// User/group keyword-search result grid.
		initSearchGroupUser : function(postData) {
			if ($("#initSearchGroupUser")[0].grid == undefined) {
				$("#initSearchGroupUser").jqGrid({
					url:exsoft.contextRoot + "/user/searchGroupUser.do",
					mtype:"post",
					postData : postData,
					datatype:"json",
					list : "",
					jsonReader:{
						root:"list"
					},
					colNames:["is_group","is_group_nm","성명/부서명","사용자/부서ID"],
					colModel:[
						{name:"is_group",index:"is_group",width:10, editable:false,sortable:true,resizable:true,hidden:true},
						{name:"is_group_nm",index:"is_group_nm",width:10, editable:false,sortable:true,resizable:true,hidden:true,align:"center"},
						{name:"unique_nm",index:"unique_nm",width:30, editable:false,sortable:false,resizable:true,hidden:false,align:"center"},
						{name:"unique_id",index:"unique_id",width:30, editable:false,sortable:true,resizable:true,hidden:false,align:"center"}
					],
					autowidth:true,
					height:"auto",
					viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "unique_nm",
					sortorder:"desc",
					rowNum : 15,
					rowList : exsoft.util.grid.listArraySize(),
					emptyDataText: "데이터가 없습니다.",
					caption:"확장자 목록",
					pagerpos: "center",
					pginput: true,
					loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridNoDataMsgInit("initSearchGroupUser");
						exsoft.util.grid.gridTitleBarHide("initSearchGroupUser");
					}
					,loadComplete: function() {
						if ($("#initSearchGroupUser").getGridParam("records")==0) {
							exsoft.util.grid.gridNoDataMsg("initSearchGroupUser","nolayer_data");
						}
						exsoft.util.grid.gridInputInit(false); // pager input: digits only
						exsoft.util.grid.gridResize('initSearchGroupUser','accessorWindowGroupUserGridTarget',20,0);
					}
				});
			} else {
				// FIX: refresh against this grid's own search endpoint (was
				// groupUserList.do, a copy-paste from the other grid).
				exsoft.util.grid.gridPostDataRefresh("initSearchGroupUser",exsoft.contextRoot + "/user/searchGroupUser.do", postData);
			}
		}
	},
	event : {
		changeMap : function(type) {
			selectAccessorWindow.tree.initTree(type);
		},
		// Keyword search over users/groups.
		searchAccessor : function() {
			var _post = {
				strIndex : "ALL",
				strKeyword : $("#searchAccessorKeyword").val()
			};
			if (_post.strKeyword.length == 0) {
				jAlert("검색어를 입력하세요.");
				return;
			}
			exsoft.util.grid.gridPostDataRefresh("initSearchGroupUser", exsoft.contextRoot + "/user/searchGroupUser.do", _post);
		},
		// Collect everything currently selected (tree groups, group users,
		// search results, "everyone" checkbox) into the selected-accessor list.
		addAccessor : function() {
			var _accessorList = [];
			// FIX: accessorCheckedIds may still be null before any tree checkbox
			// event has fired; default to an empty array instead of crashing.
			var _checkedGroups = selectAccessorWindow.accessorCheckedIds || [];
			var _select = {
				treeGroup : _checkedGroups,										// checked department tree ids ("id#name")
				treeGroupUser : $("#initTreeGroupUser").getGridParam("selarrrow"),	// selected group-user row ids
				searchGroupUser : $("#initSearchGroupUser").getGridParam("selarrrow"),	// selected search-result row ids
				checkAllUser : $("#checkAccessorAll").is(":checked") ? "T" : "F"	// "everyone" checkbox
			}
			if (_select.treeGroup.length == 0 && _select.treeGroupUser.length == 0 && _select.searchGroupUser.length == 0 && _select.checkAllUser == "F") {
				jAlert("권한을 적용할 사용자나 부서를 선택하세요.");
				return;
			}
			// 1-1. "Everyone" alias.
			if (_select.checkAllUser == "T") {
				_accessorList.push({type : "ALL", id : "WORLD", name : "전체"});
			}
			// 1-2. Department tree.
			// FIX: iterate the id array directly; the original split an empty
			// joined string, which produced [""] and silently added a bogus
			// empty GROUP accessor.
			$(_select.treeGroup).each(function() {
				var _tem = String(this).split("#");
				_accessorList.push({type : "GROUP", id : _tem[0], name : _tem[1]});
			});
			// 1-3. Group-user grid rows.
			$(_select.treeGroupUser).each(function() {
				var _row = $("#initTreeGroupUser").getRowData(this);
				_accessorList.push({type : "USER", id : _row.user_id, name : _row.user_nm});
			});
			// 1-4. Search-result grid rows.
			$(_select.searchGroupUser).each(function() {
				var _row = $("#initSearchGroupUser").getRowData(this);
				_accessorList.push({type : _row.is_group, id : _row.unique_id, name : _row.unique_nm});
			});
			// 2. Append each item unless it is already selected or already in
			//    the parent grid.
			$(_accessorList).each(function() {
				if (!selectAccessorWindow.functions.checkDuplicate(this.id)) {
					selectAccessorWindow.ui.addSelectedAccessor(this);
				}
			});
		},
		// Build the result rows (one per selected accessor, all sharing the
		// chosen default permissions) and hand them to the opener's callback.
		submit : function() {
			var _list = [];
			$("#accesorWindowSelectedList span").each(function() {
				var _row = {
					accessor_id : $(this).data("id"),
					accessor_isgroup : $(this).data("type") == "GROUP" ? "T" : "F",
					accessor_isalias : $(this).data("type") == "ALL" ? "T" : "F",
					accessor_name : $(this).data("name"),
					fol_default_acl : exsoft.util.common.getDdslick("#selectAccessorWindowDefaultFolderAcl"),
					fol_act_create : $("#accessorWindowFolActCreate").is(":checked") ? "T" : "F",
					fol_act_change_permission : $("#accessorWindowFolActChangePermission").is(":checked") ? "T" : "F",
					doc_default_acl : exsoft.util.common.getDdslick("#selectAccessorWindowDefaultDocAcl"),
					doc_act_create : $("#accessorWindowDocActCreate").is(":checked") ? "T" : "F",
					doc_act_cancel_checkout : $("#accessorWindowDocActCancelCheckout").is(":checked") ? "T" : "F",
					doc_act_change_permission : $("#accessorWindowDocActChangePermission").is(":checked") ? "T" : "F"
				};
				_list.push(_row);
			});
			selectAccessorWindow.callback(_list);
			selectAccessorWindow.close();
		},
	},
	ajax : {
	},
	functions : {
		/*
		 * Snapshot the accessor ids already present in the parent form's grid,
		 * so duplicates are filtered out when adding users/departments.
		 */
		getAccessorParentList : function(parentGridId) {
			var arrayId = [];
			var rowIDs = $("#"+parentGridId).jqGrid("getDataIDs");
			var _cnt = rowIDs.length;
			for (var i = 0; i < _cnt; i++) {
				var row = $("#"+parentGridId).getRowData(rowIDs[i]);
				arrayId[i] = row.accessor_id;
			}
			// FIX: keep the ids as an array; the original joined them into a
			// comma-separated string, which made the later $.inArray() duplicate
			// check silently fail ($.inArray expects an array).
			selectAccessorWindow.accessorParentList = arrayId;
		},
		// True when itemId is already in the selected list or the parent grid.
		checkDuplicate : function(itemId) {
			var _isDuplicate = false;
			// 1. Check the currently selected list.
			_isDuplicate = $("#accesorWindowSelectedList span").filter(function() {return $(this).data("id") == itemId ? true : false}).length > 0 ? true : false;
			// 2. Check the ids carried over from the parent window.
			if (_isDuplicate == false)
				_isDuplicate = $.inArray(itemId, selectAccessorWindow.accessorParentList || []) > -1 ? true : false;
			return _isDuplicate;
		}
	},
	ui : {
		resetSearchKeyword : function() {
			$("#searchAccessorKeyword").val("");
		},
		// Append one accessor chip to the selected list.
		addSelectedAccessor : function(obj) {
			var _str = "<li>";
			_str += "<span class='chosen_user' data-id='{0}' data-name='{1}' data-type='{2}'> {1}</span>".format(obj.id, obj.name, obj.type);
			_str += "<a href='#' class='remove' onclick='selectAccessorWindow.ui.removeSelectedAccessor(\"{0}\")'><img src='{1}/img/icon/window_close3.png'></a>".format(obj.id, exsoft.contextRoot);
			_str += "</li>";
			$("#accesorWindowSelectedList").append(_str);
		},
		// Remove one accessor chip from the selected list by id.
		removeSelectedAccessor : function(itemId) {
			$("#accesorWindowSelectedList span").filter(function() {
				return ($(this).data("id") == itemId) ? true : false;
			}).parent().remove();
		}
	},
	callbackFunctions : {
		// Tree node selected: load that group's users into the grid.
		selectTreeNode : function(e, data) {
			selectAccessorWindow.grid.initTreeGroupUser({groupId:data.node.id});
		},
		// Tree checkbox state changed: remember the checked ids.
		checkedChangeTreeNode : function(selectedNodedata) {
			selectAccessorWindow.accessorCheckedIds = selectedNodedata;
		}
	}
}
package kr.co.exsoft.permission.vo;
/**
 * Extended document permission (ACL) item VO.
 *
 * Holds one accessor (user / group / alias) and its per-action document
 * permission flags ("T"/"F"). The doc_act_* accessors are naming-convention
 * aliases for the plain act_* fields, kept so folder and document permission
 * payloads share the same item names.
 *
 * Fix vs. the previous revision: the setDoc_act_* setters wrote to separate,
 * never-read private fields while the matching getters returned the act_*
 * fields, so any value set through setDoc_act_*() was silently lost. The
 * setters now delegate to the act_* fields so set/get round-trips.
 *
 * @author 패키지팀
 * @since 2014. 9. 24.
 * @version 1.0
 *
 */
public class AclExItemVO {
	private String doc_id;					// document id
	private String accessor_id;				// accessor id - ALIAS/USER_ID/GROUP_ID
	private String accessor_isgroup;		// "T" when the accessor id is a GROUP_ID, otherwise "F"
	private String accessor_isalias;		// "T" when the accessor id is an alias (WORLD,OWNER,OGROUP)
	private String act_browse;				// BROWSE permission: "T" granted / "F" denied
	private String act_read;				// READ permission
	private String act_update;				// UPDATE permission
	private String act_delete;				// DELETE permission
	private String act_create;				// CREATE permission (excluded from document ACLs)
	private String act_cancel_checkout;		// CANCEL CHECKOUT permission
	private String act_change_permission;	// CHANGE PERMISSION permission
	// View-only columns
	private String accessor_name;
	private String doc_default_acl;			// default permission level
	public AclExItemVO() {
		this.doc_id = "";
		this.accessor_id = "";
		this.accessor_isgroup = "F";
		this.accessor_isalias = "F";
		this.act_browse = "F";
		this.act_read = "F";
		this.act_create = "F";
		this.act_update = "F";
		this.act_delete = "F";
		this.act_cancel_checkout = "F";
		this.act_change_permission = "F";
		this.accessor_name = "";
		this.doc_default_acl = "";
	}
	/** Alias for {@link #getAct_create()} (document/folder naming unification). */
	public String getDoc_act_create() {
		return act_create;
	}
	/** Alias setter: delegates to act_create so set/get round-trips. */
	public void setDoc_act_create(String doc_act_create) {
		this.act_create = doc_act_create;
	}
	/** Alias for {@link #getAct_cancel_checkout()}. */
	public String getDoc_act_cancel_checkout() {
		return act_cancel_checkout;
	}
	/** Alias setter: delegates to act_cancel_checkout. */
	public void setDoc_act_cancel_checkout(String doc_act_cancel_checkout) {
		this.act_cancel_checkout = doc_act_cancel_checkout;
	}
	/** Alias for {@link #getAct_change_permission()}. */
	public String getDoc_act_change_permission() {
		return act_change_permission;
	}
	/** Alias setter: delegates to act_change_permission. */
	public void setDoc_act_change_permission(String doc_act_change_permission) {
		this.act_change_permission = doc_act_change_permission;
	}
	public String getDoc_default_acl() {
		return doc_default_acl;
	}
	public void setDoc_default_acl(String doc_default_acl) {
		this.doc_default_acl = doc_default_acl;
	}
	public String getDoc_id() {
		return doc_id;
	}
	public void setDoc_id(String doc_id) {
		this.doc_id = doc_id;
	}
	public String getAccessor_id() {
		return accessor_id;
	}
	public void setAccessor_id(String accessor_id) {
		this.accessor_id = accessor_id;
	}
	public String getAccessor_isgroup() {
		return accessor_isgroup;
	}
	public void setAccessor_isgroup(String accessor_isgroup) {
		this.accessor_isgroup = accessor_isgroup;
	}
	public String getAccessor_isalias() {
		return accessor_isalias;
	}
	public void setAccessor_isalias(String accessor_isalias) {
		this.accessor_isalias = accessor_isalias;
	}
	public String getAct_browse() {
		return act_browse;
	}
	public void setAct_browse(String act_browse) {
		this.act_browse = act_browse;
	}
	public String getAct_read() {
		return act_read;
	}
	public void setAct_read(String act_read) {
		this.act_read = act_read;
	}
	public String getAct_update() {
		return act_update;
	}
	public void setAct_update(String act_update) {
		this.act_update = act_update;
	}
	public String getAct_delete() {
		return act_delete;
	}
	public void setAct_delete(String act_delete) {
		this.act_delete = act_delete;
	}
	public String getAct_create() {
		return act_create;
	}
	public void setAct_create(String act_create) {
		this.act_create = act_create;
	}
	public String getAct_cancel_checkout() {
		return act_cancel_checkout;
	}
	public void setAct_cancel_checkout(String act_cancel_checkout) {
		this.act_cancel_checkout = act_cancel_checkout;
	}
	public String getAct_change_permission() {
		return act_change_permission;
	}
	public void setAct_change_permission(String act_change_permission) {
		this.act_change_permission = act_change_permission;
	}
	public String getAccessor_name() {
		return accessor_name;
	}
	public void setAccessor_name(String accessor_name) {
		this.accessor_name = accessor_name;
	}
}
<file_sep>/EDMS3/WebContent/js/popup/selectMultiFolderWindow.js
var gMultiFolder_list = new Array(); // currently selected folder list
var gMultiFolder_isValidation = false; // whether validation has run
var gMultiFolder_currentTreeObject; // currently selected XFTree object
var gMultiFolder_currentTreeDivID; // currently selected XFTree div id
var gMultiFolder_map_id = "MYDEPT"; // currently selected map
/**
 * Popup for selecting MULTIPLE folders across the workspace trees
 * (personal / department / company-wide / project).  Picked folders are
 * accumulated in the global gMultiFolder_list and handed to `callback`
 * when the user presses OK.
 *
 * NOTE(review): the literal declares `callback : null` here AND a
 * `callback : {...}` member at the bottom — the later duplicate key wins,
 * so the null initialiser is dead; init() then overwrites `callback`
 * with the supplied function. Confirm nothing relies on the object form.
 */
var selectMultiFolderWindow = {
currentMapId : "", // currently selected map
currentWorkType : "", // currently selected workspace (work type)
currentFolderID : "", // currently selected folder ID
isValidation : false, // whether selected folders are validated
docType : "", // document type when used from register/move integrations
callback : null, // function that receives the result when OK is clicked
treeDiv : {
mypage : "#multiFolderMypageTree",
mydept : "#multiFolderMydeptTree",
alldept : "#multiFolderAlldeptTree",
project : "#multiFolderProjectTree"
},
// Show the popup layer.
open : function() {
exsoft.util.layout.divLayerOpen("multifolder_choose_wrapper", "doc_multifolder_choose");
},
// Hide the popup layer.
close : function() {
exsoft.util.layout.divLayerClose("multifolder_choose_wrapper", "doc_multifolder_choose");
},
tree : {
mypageTree : null, // personal document box
mydeptTree : null, // department document box
allDeptTree : null, // company-wide document box
projectTree : null // project box
},
treeFunctions : {
// Lazily build (or refresh) the XFTree for the given work type and make
// it the current tree/div (also mirrored into the gMultiFolder_* globals).
initTree : function(workType) {
var treeOption = {
context : exsoft.contextRoot,
url : "/folder/folderList.do",
};
switch(workType) {
case Constant.WORK_MYPAGE : // personal document box
treeOption.divId = selectMultiFolderWindow.treeDiv.mypage;// "#multiFolderMypageTree";
treeOption.mapId = Constant.MAP_MYPAGE;
treeOption.workType = Constant.WORK_MYPAGE;
selectMultiFolderWindow.ui.activeTreeDiv("mypage");
selectMultiFolderWindow.currentMapId = Constant.MAP_MYPAGE;
selectMultiFolderWindow.currentWorkType = Constant.WORK_MYPAGE;
if (selectMultiFolderWindow.tree.mypageTree == null) {
selectMultiFolderWindow.tree.mypageTree = new XFTree(treeOption);
// 3. configure multi-select option
selectMultiFolderWindow.tree.mypageTree.template_multiCheck(false);
selectMultiFolderWindow.tree.mypageTree.init();
} else {
selectMultiFolderWindow.tree.mypageTree.refresh();
}
gMultiFolder_currentTreeObject = selectMultiFolderWindow.tree.mypageTree;
gMultiFolder_currentTreeDivID = selectMultiFolderWindow.treeDiv.mypage; //"MULTI_FOLDER_WORK_MYDEPT";
break;
case Constant.WORK_MYDEPT : // department document box
treeOption.divId = selectMultiFolderWindow.treeDiv.mydept; //"#multiFolderMydeptTree";
treeOption.mapId = Constant.MAP_MYDEPT;
treeOption.workType = Constant.WORK_MYDEPT;
selectMultiFolderWindow.ui.activeTreeDiv("mydept");
selectMultiFolderWindow.currentMapId = Constant.MAP_MYDEPT;
selectMultiFolderWindow.currentWorkType = Constant.WORK_MYDEPT;
if (selectMultiFolderWindow.tree.mydeptTree == null) {
selectMultiFolderWindow.tree.mydeptTree = new XFTree(treeOption);
// 3. configure multi-select option
selectMultiFolderWindow.tree.mydeptTree.template_multiCheck(false);
selectMultiFolderWindow.tree.mydeptTree.init();
} else {
selectMultiFolderWindow.tree.mydeptTree.refresh();
}
gMultiFolder_currentTreeObject = selectMultiFolderWindow.tree.mydeptTree;
gMultiFolder_currentTreeDivID = selectMultiFolderWindow.treeDiv.mydept; //"MULTI_FOLDER_WORK_MYDEPT";
break;
case Constant.WORK_ALLDEPT : // company-wide document box
treeOption.divId = selectMultiFolderWindow.treeDiv.alldept;// "#multiFolderAlldeptTree";
treeOption.mapId = Constant.MAP_MYDEPT;
treeOption.workType = Constant.WORK_ALLDEPT;
selectMultiFolderWindow.ui.activeTreeDiv("alldept");
selectMultiFolderWindow.currentMapId = Constant.MAP_MYDEPT;
selectMultiFolderWindow.currentWorkType = Constant.WORK_ALLDEPT;
if (selectMultiFolderWindow.tree.allDeptTree == null) {
selectMultiFolderWindow.tree.allDeptTree = new XFTree(treeOption);
// 3. configure multi-select option
selectMultiFolderWindow.tree.allDeptTree.template_multiCheck(false);
selectMultiFolderWindow.tree.allDeptTree.init();
} else {
selectMultiFolderWindow.tree.allDeptTree.refresh();
}
gMultiFolder_currentTreeObject = selectMultiFolderWindow.tree.allDeptTree;
gMultiFolder_currentTreeDivID = selectMultiFolderWindow.treeDiv.alldept;
// 3. configure multi-select option
gMultiFolder_currentTreeObject.template_multiCheck(false);
break;
case Constant.WORK_PROJECT : // project box
treeOption.divId = selectMultiFolderWindow.treeDiv.project; //"#multiFolderProjectTree";
treeOption.mapId = Constant.MAP_PROJECT;
treeOption.workType = Constant.WORK_PROJECT;
selectMultiFolderWindow.ui.activeTreeDiv("project");
selectMultiFolderWindow.currentMapId = Constant.MAP_PROJECT;
selectMultiFolderWindow.currentWorkType = Constant.WORK_PROJECT;
if (selectMultiFolderWindow.tree.projectTree == null) {
selectMultiFolderWindow.tree.projectTree = new XFTree(treeOption);
// 3. configure multi-select option
selectMultiFolderWindow.tree.projectTree.template_multiCheck(false);
selectMultiFolderWindow.tree.projectTree.init();
} else {
selectMultiFolderWindow.tree.projectTree.refresh();
}
gMultiFolder_currentTreeObject = selectMultiFolderWindow.tree.projectTree;
gMultiFolder_currentTreeDivID = selectMultiFolderWindow.treeDiv.project;
break;
default :
console.error("[selectMultiFolderWindow] workType : {0} 이 올바르지 않습니다. ".format(workType));
break;
}
},
// Returns the XFTree instance for the currently selected work type.
getCurrentTree : function() {
switch(selectMultiFolderWindow.currentWorkType) {
case Constant.WORK_MYPAGE :
return selectMultiFolderWindow.tree.mypageTree;
case Constant.WORK_MYDEPT :
return selectMultiFolderWindow.tree.mydeptTree;
case Constant.WORK_ALLDEPT :
return selectMultiFolderWindow.tree.allDeptTree;
case Constant.WORK_PROJECT :
return selectMultiFolderWindow.tree.projectTree;
default :
console.error("[selectMultiFolderWindow] workType : {0} 이 올바르지 않습니다. ".format(selectMultiFolderWindow.currentWorkType));
}
}
},
ui : {
// Shows only the tree div named by activeDivId and adjusts the title /
// workspace select box (hidden for the personal box).
activeTreeDiv : function(activeDivId) {
// 1. Tree Div
var keys = Object.keys(selectMultiFolderWindow.treeDiv);
$(keys).each(function(idx) {
if (this == activeDivId) {
$(selectMultiFolderWindow.treeDiv[this]).removeClass("hide");
} else {
$(selectMultiFolderWindow.treeDiv[this]).addClass("hide");
}
});
var _title = $("#lb_multiFolderWorkspace");
var _selectOption = $("#doc_multifolder_list");
// 2. Titles
if (activeDivId == "mypage") {
_title.text("개인함");
_selectOption.hide();
} else {
_title.text("문서함");
_selectOption.show();
}
},
// Intentionally empty placeholder.
activeTitle : function(activeDivId) {
}
},
// Opens the popup, stores the callback/options and builds the tree.
// Defaults: mapId MYDEPT, workType MYDEPT, isValidation false, docType "".
init : function(callback, mapId, workType, isValidation, docType) {
selectMultiFolderWindow.open();
selectMultiFolderWindow.callback = callback;
selectMultiFolderWindow.currentMapId = mapId == undefined ? Constant.MAP_MYDEPT : mapId;
selectMultiFolderWindow.currentWorkType = (workType == undefined || workType == null || workType == "null") ? Constant.WORK_MYDEPT : workType;
selectMultiFolderWindow.isValidation = isValidation == undefined ? false : isValidation;
selectMultiFolderWindow.docType = docType == undefined ? "" : docType;
selectMultiFolderWindow.treeFunctions.initTree(selectMultiFolderWindow.currentWorkType);
// workspace select box
// NOTE(review): this wires '#doc_folder_list' while activeTreeDiv toggles
// '#doc_multifolder_list' — confirm the intended element id.
exsoft.util.common.ddslick('#doc_folder_list', 'doc_folder_list', '', 262, function(divId,selectedData){
// combo-box change event
selectMultiFolderWindow.event.changeMap(selectedData.selectedData.value);
});
// reset :: clear the selected-folder list and the transfer array
$('#multiFolderSelectedfolderList').empty();
gMultiFolder_list = [];
},
event : {
// OK button: requires at least one selected folder, then closes the
// popup and hands gMultiFolder_list to the stored callback.
submit : function() {
if (gMultiFolder_list.length == 0) {
jAlert('폴더를 선택하세요.', "확인", 0);
return false;
} else {
selectMultiFolderWindow.close();
selectMultiFolderWindow.callback(gMultiFolder_list);
//exsoft.util.common.popDivLayerClose("folder_choose_wrapper");
}/*
var currentNode = selectMultiFolderWindow.treeFunctions.getCurrentTree().getCurrentNode();
selectMultiFolderWindow.callback(currentNode);
selectMultiFolderWindow.close();*/
},
// Rebuild the tree for a newly selected workspace.
changeMap : function(workType) {
selectMultiFolderWindow.treeFunctions.initTree(workType);
},
// Returns true when folderId is already present in the picked list.
multiFolderIsExistFolder : function(folderId) {
var isExists = false;
$("#multiFolderSelectedfolderList input[type='checkbox']").each(function() {
if ($(this).val() == folderId) {
isExists = true;
return false; // break out of each
}
})
return isExists;
},
// Adds the folders currently checked in the tree to the picked list
// (UI list + gMultiFolder_list), after optional validation.
multiFolderAddFolder : function() {
var isValid = false;
var buffer="";
buffer = "<ul>";
// iterate over the nodes selected in the tree
$(exsoft.util.common.getReturnTreeObject(gMultiFolder_currentTreeObject, exsoft.util.common.getIdFormat(gMultiFolder_currentTreeDivID), selectMultiFolderWindow.currentMapId)).each(function(index) {
// add to the list only when it is not already there
if (!selectMultiFolderWindow.event.multiFolderIsExistFolder(this.id)) {
// 1. validation
if (selectMultiFolderWindow.isValidation) {
var nodeDetail = this.original;
// 1-1. check that documents may be saved in the folder (is_save)
if (nodeDetail.is_save != "Y") {
jAlert("\"{0}\" 폴더에는 문서를 등록할 수 없습니다.".format(this.text));
isValid = true;
}
// 1-2. check create permission on the folder (acl_document_create)
if (nodeDetail.map_id != "MYPAGE" && nodeDetail.acl_document_create != "T") {
jAlert("\"{0}\" 폴더에 권한이 없습니다".format(this.text));
isValid = true;
}
// 1-3. check the document type matches (ALL_TYPE folders accept any type)
if(selectMultiFolderWindow.docType != "ALL_TYPE") {
if ((selectMultiFolderWindow.docType != nodeDetail.is_type) && nodeDetail.is_type != "ALL_TYPE") {
jAlert("문서유형이 맞지 않습니다");
isValid = true;
}
}
// 1-#. when any validation check failed
if (isValid) {
// uncheck the node
$(gMultiFolder_currentTreeObject.divId).jstree("deselect_node", this.id);
// stop processing
return false;
}
}
// skip on validation failure
if (isValid) {
return;
}
// 1. append to the UI
buffer += "<li><input type=\"checkbox\" value=\"{0}\"/>".format(this.id)+"<div>"+this.fullPath.join(' / ')+"</div></li>";
// 2. push into the array
gMultiFolder_list.push(this);
}
});
buffer += "</ul>";
$("#multiFolderSelectedfolderList").append(buffer);
},
// Removes the checked folders from the picked list (UI + array).
// NOTE(review): calls the bare name multiFolderRemoveArrayItem — that
// function is defined below as event.multiFolderRemoveArrayItem, so this
// call only resolves if a global of the same name exists elsewhere; verify.
multiFolderRemoveFolder : function() {
$("#multiFolderSelectedfolderList input[type='checkbox']:checked").each(function() {
// 1. remove from the UI
$(this).closest("li").remove();
// 2. remove from the array
multiFolderRemoveArrayItem(gMultiFolder_list, "id", $(this).val());
});
},
/**
 * Finds and removes from the picked-folder list the entry whose
 * `key` property equals `value`.
 * NOTE(review): relies on a non-standard Array#remove extension.
 *
 * @param arrays - the list of picked folders
 * @param key - property name to match on
 * @param value - value to compare against
 **/
multiFolderRemoveArrayItem : function(arrays, key, value) {
$(arrays).each(function(index) {
if(this[key] == value) {
arrays.remove(this);
return false; // break out of each
}
});
},
},//event END
callback : {
// Fired when the workspace select box changes; rebuilds the tree.
changedDocFolderList : function(divId, selectObject, arrParams) {
selectMultiFolderWindow.event.changeMap(selectObject.selectedData.value);
}
}
}
<file_sep>/EDMS3/WebContent/js/common/validator.js
/**
 * jQuery validation plugin ($.fn.validation).  Scans descendants of the
 * selected element(s) for ex-valid / ex-length / ex-equalTo / ex-option
 * attributes, validates them, and reports the FIRST failure via alert,
 * box-shadow highlight and optional guide message.  Returns true when all
 * checks passed, false otherwise.
 *
 * NOTE(review): the helper functions below (run, getElementId,
 * getElementValue, getRangeValue, isEditable, getOptions, notyEmpty,
 * getEqualToValue, getCurrentLength) are assigned WITHOUT var and leak to
 * the global scope; setTimeout("notyEmpty()") and the validList min/max
 * callbacks depend on that leakage — confirm before refactoring.
 */
(function($) {
$.fn.validation = function(opt) {
/**
 * Members
 */
var ret = true;
var validList = $.extend({}, $.fn.validation.validList);
var messages = $.extend({}, $.fn.validation.messages);
var alert = $.extend({}, $.fn.validation.alert);
var effect = $.extend({}, $.fn.validation.effect);
var guide = $.extend({}, $.fn.validation.guide);
var error = $.extend({}, $.fn.validation.error);
var exception = $.extend({}, $.fn.validation.exception);
var options = {
debug : true,
alert : true,
effect : true,
guideMessage : false,
notyId : '', // notification message element ID
displayTime : 2000 // notification display time (original comment said seconds; value is ms)
}
if (opt !== undefined) $.extend(options, opt.options);
/**
 * private methods
 */
// Walks every [ex-valid]/[ex-length] element that is a descendant of
// `obj` and throws (caught below) on the first validation failure.
run = function(obj) {
try {
var tg = $(obj);
$("[ex-valid], [ex-length]").filter(function() {
var ch = $(this);
if(ch.attr('ex-display') === undefined)
ch.attr('ex-display','검사 항목');
$(this).parents().each(function() {
if ($(this).is(tg)) {
// 1. check option framework syntax
var hasAttr = {
valid : ch.is("[ex-valid]"),
range : ch.is("[ex-length]"),
equalTo : ch.is("[ex-equalTo]"),
optional : ch.is("[ex-option]")
}
if (hasAttr.valid) {
var valid = ch.attr("ex-valid");
var optional = ch.attr("ex-option");
var validValue = getElementValue(ch);
console.log("optional="+optional);
// ex-display is the human-readable field name used in messages
if(optional != undefined) {
// optional logic: validate only when a value was entered
if (validValue.length > 0) {
if (validList[valid] === undefined)
throw new exception.valid_confValueNotExist(ch, error.isNotExistMember.format(valid));
if (!validList[valid](getElementValue(ch))) {
throw new exception.defaultException(ch, "{0}는(은) {1}".format(ch.attr('ex-display'), messages[valid]));
}
}
}else {
// valid logic: value is required
if (validValue.length == 0) {
throw new exception.valueIsRequired(ch, "{0}는(은) {1}".format(ch.attr('ex-display'),error.valueIsEmpty));
}
if (validList[valid] === undefined)
throw new exception.valid_confValueNotExist(ch, error.isNotExistMember.format(valid));
if (!validList[valid](getElementValue(ch))) {
throw new exception.defaultException(ch, "{0}는(은) {1}".format(ch.attr('ex-display'), messages[valid]));
}
}
}
if (hasAttr.range) {
var range = getRangeValue(ch.attr("ex-length"));
var min = validList.min(ch, range[0]);
var max = validList.max(ch, range[1]);
var msg = '한글 {0}자, 영문/숫자 {1}';
if (!min) {
throw new exception.defaultException(ch, "{0}는(은) {1}{2}".format(ch.attr('ex-display'),msg.format(parseInt(range[0]/2), range[0]),messages.rangeless));
} else if (!max) {
throw new exception.defaultException(ch, "{0}는(은) {1}{2}".format(ch.attr('ex-display'),msg.format(parseInt(range[1]/2), range[1]),messages.rangeover));
}
}
if (hasAttr.equalTo) { // field comparison (user-preference password management)
var filedVal = getEqualToValue(ch.attr('ex-equalTo'));
if($("input[name="+filedVal[0]+"]").val() != $("input[name="+filedVal[1]+"]").val()) {
if(filedVal[0] == "current_pass" || filedVal[1] == "current_pass" ) {
throw new exception.defaultException(ch,error.equalToCurrentPassword);
}else {
throw new exception.defaultException(ch,error.equalToRePassword);
}
}
}
}
})
})
} catch (exception) {
if (getOptions().debug) console.warn("validator : " + exception.msg);
alert.info(exception.msg);
effect.warn(exception.targetObject);
guide.guideMessage(exception.msg);
ret = false;
}
}
getElementId = function(obj) {
return $(obj).attr("id");
}
// Form controls are read via val(), everything else via html().
getElementValue = function(obj) {
if (isEditable(obj)) {
return $(obj).val();
} else {
return $(obj).html();
}
}
// Parses "min,max" out of the ex-length attribute into [min, max].
getRangeValue = function(rangeVal) {
if (rangeVal === undefined || rangeVal.trim().length === 0)
throw new exception.number_confValueNotValid(error.configValueIsEmpty);
var ret = [];
var arr = rangeVal.split(",");
if (arr.length !== 2)
throw new exception.number_confValueNotValid(error.isNotRangeArray);
$(arr).each(function() {
var pVal = parseInt(this.trim());
if (isNaN(pVal)) {
throw new exception.number_confValueNotValid(error.isNotNumber);
}
ret.push(parseInt(this.trim()));
})
return ret;
}
isEditable = function(obj) {
return $(obj).is("input, textarea, select");
}
getOptions = function() {
return options;
}
// clear the notification message
notyEmpty = function() {
$("#"+getOptions().notyId).html('');
}
// field comparison (user-preference password management):
// parses "fieldA,fieldB" out of the ex-equalTo attribute.
getEqualToValue = function(equalTo) {
if (equalTo === undefined || equalTo.trim().length === 0)
throw new exception.equalToValid(error.equalToError);
var ret = [];
var arr = equalTo.split(",");
if (arr.length !== 2)
throw new exception.equalToValid(error.isNotRangeArray);
$(arr).each(function() {
ret.push(this.trim());
})
return ret;
}
// Byte-oriented length: multi-byte (e.g. Korean) characters count as 2.
getCurrentLength = function(content) {
var byteCount = 0;
// multi-byte characters are counted as 2 bytes
for (var k=0;k < content.length; k++)
{
onechar = content.charAt(k);
if (escape(onechar) =='%0D') {} else if (escape(onechar).length > 4) { byteCount += 2; } else { byteCount++; }
}
console.log('currentlength : ' + byteCount);
return byteCount;
}
/**
 * body / return
 */
this.each(function() {
$.fn.validation.clearShadowBox();
run(this);
})
return ret;
};
$.fn.validation.validList = {
email : function(val) {
return /^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/.test(val);
},
date : function(val) {
return !/Invalid|NaN/.test(new Date(val).toString());
},
digit : function(val) {
return /^\d+$/.test(val);
},
number : function(val) {
return /^-?(?:\d+|\d{1,3}(?:,\d{3})+)?(?:\.\d+)?$/.test(val);
},
min : function(element, val) {
return getCurrentLength(getElementValue(element)) >= parseInt(val);
},
max : function(element, val) {
return getCurrentLength(getElementValue(element)) <= parseInt(val);
},
phone : function(val) {
return /^\d{2,3}-\d{3,4}-\d{4}$/.test(val);
},
require : function(val) {
return val.length == 0 ? false : true;
},
passwd : function(val) { // user password policy (letters/digits/special characters)
return /([a-zA-Z0-9].*[!,@,#,$,%,^,&,*,?,_,~])|([!,@,#,$,%,^,&,*,?,_,~].*[a-zA-Z0-9])/.test(val);
},
};
$.fn.validation.messages = {
email : "email 형식에 맞지 않습니다.",
date : "날짜 형식에 맞지 않습니다.",
digit : "숫자 형식이 아닙니다",
number : "통화 형이 아닙니다",
rangeless : "자 이상 입력하세요.",
rangeover : "자를 넘을 수 없습니다.",
phone : "전화번호 형식이 아닙니다.",
require : "필수값은 비어있을수 없습니다.",
passwd : "비밀번호 형식에 맞지 않습니다."
};
$.fn.validation.alert = {
info : function(msg) {
if (!getOptions().alert) return;
jAlert(msg,"확인",0);
},
confirm : function(msg) {
if (!getOptions().effect) return;
jConfirm(msg, "title", function(ret){
console.info("선택한 값 : " + ret);
});
}
};
$.fn.validation.effect = {
info : function(obj) {
},
warn : function(obj) {
if (!getOptions().effect) return;
$(obj).css("box-shadow", "0px 0px 5px 0px rgba(252,138,138,1)");
$(obj).css("-moz-box-shadow", "0px 0px 5px 0px rgba(252,138,138,1)");
$(obj).css("-webkit-box-shadow", "0px 0px 5px 0px rgba(252,138,138,1)");
},
error : function(obj) {
}
};
$.fn.validation.guide = {
guideMessage : function(msg) {
/**
 * Shows the notification message, then clears it after displayTime.
 * NOTE(review): "lenth" below looks like a typo for "length", so this
 * guard may never behave as intended — confirm.
 */
if (!getOptions().guideMessage && getOptions().notyId.lenth == undefined ) { return; }
else {
$("#"+getOptions().notyId).html(msg);
setTimeout("notyEmpty()",getOptions().displayTime);
}
}
};
$.fn.validation.error = {
configValueIsEmpty : "설정 값이 비어있습니다.",
valueIsEmpty : "필수 항목입니다.",
isNotNumber : "최소, 최대값 중 숫자가 아닌 값이 있습니다.\n구문이 정상 동작하지 않을 수 있습니다.",
isNotRangeArray : "최소, 최대값 설정에 오류가 있습니다.\n값은 ','를 기준으로 min max 쌍이 있어야 합니다.",
isNotExistMember : "'{0}'은 유효성 검사 대상이 아닙니다",
equalToError : "비교하려는 대상이 없습니다.",
equalToRePassword : "입력하신 새 비밀번호와 새 비밀번호 확인이 일치하지 않습니다",
equalToCurrentPassword : "현재 비밀번호와 일치하지 않습니다"
};
$.fn.validation.exception = {
defaultException : function(obj, msg) {
this.msg = msg;
this.name = "default exception";
this.targetObject = obj;
},
number_confValueNotValid : function(obj, msg) {
this.msg = msg;
this.name = "ex-length configuration value is not valid.";
this.targetObject = obj;
},
valid_confValueNotExist : function(obj, msg) {
this.msg = msg;
this.name = "ex-valid configuration value is not exist in valid list";
this.targetObject = obj;
},
valueIsRequired : function(obj, msg) {
this.msg = msg;
this.name = "Value is required.";
this.targetObject = obj;
},
equalToValid : function(obj, msg) {
this.msg = msg;
this.name = "equalTo configuration value is not valid.";
this.targetObject = obj;
}
};
$.fn.validation.clearShadowBox = function() {
// reset the warning highlight on every validated element
$("[ex-valid], [ex-length]").filter(function() {
$(this).css("box-shadow", "");
$(this).css("-moz-box-shadow", "");
$(this).css("-webkit-box-shadow", "");
});
};
})(jQuery);
package kr.co.exsoft.user.vo;
import kr.co.exsoft.eframework.vo.VO;
/**
 * Value object for a user connection (login/logout) audit row, backed by
 * table XR_CONNECT_LOG.
 *
 * @author Jihee Kim
 * @since 2014.07.16
 * @version 3.0
 */
public class ConnectLogVO extends VO {

    // -- XR_CONNECT_LOG columns --------------------------------------------
    private long connect_log_seq;   // sequence from XR_COUNTER
    private String user_id;         // user ID
    private String user_nm;         // user name
    private String group_id;        // owning group ID
    private String group_nm;        // owning group name
    private String login_type;      // NORMAL = regular web login / SSO = single sign-on
    private String connect_ip;      // client IP (IPv6 form possible, e.g. fc00:db20:35b:7399::5)
    private String connect_type;    // LOGIN / LOGOUT
    private String cert_yn;         // Y = auth success, N = auth failure (password/license error, ...)
    private String error_cd;        // error code
    private String error_content;   // error description
    private String connect_time;    // LOGIN = connect time / LOGOUT = end time

    // -- query-only column -------------------------------------------------
    private String cert_nm;         // auth-code display value

    /** Initialises every string field to "" and the sequence to 0. */
    public ConnectLogVO() {
        this.connect_log_seq = 0;
        this.user_id = "";
        this.user_nm = "";
        this.group_id = "";
        this.group_nm = "";
        this.login_type = "";
        this.connect_ip = "";
        this.connect_type = "";
        this.cert_yn = "";
        this.error_cd = "";
        this.error_content = "";
        this.connect_time = "";
        this.cert_nm = "";
    }

    // -- getters -----------------------------------------------------------
    public long getConnect_log_seq() {
        return connect_log_seq;
    }
    public String getUser_id() {
        return user_id;
    }
    public String getUser_nm() {
        return user_nm;
    }
    public String getGroup_id() {
        return group_id;
    }
    public String getGroup_nm() {
        return group_nm;
    }
    public String getLogin_type() {
        return login_type;
    }
    public String getConnect_ip() {
        return connect_ip;
    }
    public String getConnect_type() {
        return connect_type;
    }
    public String getCert_yn() {
        return cert_yn;
    }
    public String getError_cd() {
        return error_cd;
    }
    public String getError_content() {
        return error_content;
    }
    public String getConnect_time() {
        return connect_time;
    }
    public String getCert_nm() {
        return cert_nm;
    }

    // -- setters -----------------------------------------------------------
    public void setConnect_log_seq(long connect_log_seq) {
        this.connect_log_seq = connect_log_seq;
    }
    public void setUser_id(String user_id) {
        this.user_id = user_id;
    }
    public void setUser_nm(String user_nm) {
        this.user_nm = user_nm;
    }
    public void setGroup_id(String group_id) {
        this.group_id = group_id;
    }
    public void setGroup_nm(String group_nm) {
        this.group_nm = group_nm;
    }
    public void setLogin_type(String login_type) {
        this.login_type = login_type;
    }
    public void setConnect_ip(String connect_ip) {
        this.connect_ip = connect_ip;
    }
    public void setConnect_type(String connect_type) {
        this.connect_type = connect_type;
    }
    public void setCert_yn(String cert_yn) {
        this.cert_yn = cert_yn;
    }
    public void setError_cd(String error_cd) {
        this.error_cd = error_cd;
    }
    public void setError_content(String error_content) {
        this.error_content = error_content;
    }
    public void setConnect_time(String connect_time) {
        this.connect_time = connect_time;
    }
    public void setCert_nm(String cert_nm) {
        this.cert_nm = cert_nm;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/UtilFileApp.java
package kr.co.exsoft.eframework.util;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URLEncoder;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/***
* 파일처리 관련 클래스
* @author <NAME>
* @since 2014.07.15
* @version 3.0
*
*/
public class UtilFileApp {
protected static final Log logger = LogFactory.getLog(UtilFileApp.class);
public UtilFileApp() {
}
/**
 * Creates directory {@code dir} (including missing parents) when it does
 * not already exist.
 *
 * @param dir absolute directory path
 * @throws IOException       when {@code mkdirs()} fails
 * @throws SecurityException propagated from {@code checkSecure(dir)}
 */
public static void createDir(String dir) throws IOException, SecurityException {
    checkSecure(dir);
    File target = new File(dir);
    if (target.exists()) {
        return; // already present — nothing to do
    }
    if (!target.mkdirs()) {
        throw new IOException();
    }
}
/**
 * Recursively creates every missing ancestor directory and then the
 * directory itself.
 *
 * @param dir absolute directory path
 * @throws IOException when mkdirs() fails
 * @throws SecurityException propagated from checkSecure(dir)
 */
public static void createDirAllPath(String dir) throws IOException,SecurityException {
// the > 2 guard stops the recursion near the path root (e.g. a Windows
// drive prefix like "C:") — presumably; confirm on non-Windows roots
if (dir.length() > 2) {
checkSecure(dir);
File f = new File(dir);
// when this level does not exist yet
if (!f.exists()) {
// first make sure the parent exists
createDirAllPath(dir.substring(0, dir.lastIndexOf(File.separator)));
if (f.mkdirs() != true) {
throw new IOException();
}
}
}
}
/**
 * Deletes a directory tree. On Windows the tree is walked with
 * {@code java.io.File}; on other platforms {@code rm -rf} is used
 * (with a file-walk fallback inside {@code deleteDirByRuntime}).
 *
 * @param dir directory to delete
 * @throws IOException       when deletion fails
 * @throws SecurityException propagated from the delegates
 */
public static void deleteDir(String dir) throws IOException, SecurityException {
    if (isWinOS()) {
        deleteDirByFile(dir);
    } else {
        deleteDirByRuntime(dir);
    }
}
/**
 * Recursively deletes {@code dir} and everything beneath it using
 * {@code java.io.File}. Failures deleting individual files are logged and
 * ignored; a failure deleting {@code dir} itself raises IOException.
 *
 * @param dir path to delete (directory or plain file)
 * @throws IOException       when the final delete of {@code dir} fails
 * @throws SecurityException propagated from {@code checkSecure(dir)}
 */
public static void deleteDirByFile(String dir) throws IOException,
        SecurityException {
    checkSecure(dir);
    File f = new File(dir);
    if (f.exists()) {
        String[] file = f.list();
        // BUG FIX: File.list() returns null when the path is a plain file
        // or unreadable; the original dereferenced it unconditionally and
        // threw NullPointerException.
        if (file != null) {
            for (int i = 0; i < file.length; i++) {
                File df = new File(dir + File.separator + file[i]);
                if (df.isDirectory()) {
                    deleteDirByFile(dir + File.separator + file[i]);
                } else {
                    try {
                        df.delete();
                    } catch (Exception e) {
                        logger.error(e.getMessage());
                    }
                }
            }
        }
        if (!f.delete()) {
            throw new IOException();
        }
    }
}
/**
 * Deletes a directory tree via an external "rm -rf" process; falls back to
 * the pure-Java walk (deleteDirByFile) when the process cannot be run.
 *
 * @param dir directory to delete
 * @throws IOException propagated from the fallback walk
 */
public static void deleteDirByRuntime(String dir) throws IOException {
checkSecure(dir);
try {
// rm -rf <dir> — destructive; dir has already passed checkSecure
String[] cmd = new String[3];
cmd[0] = "rm";
cmd[1] = "-rf";
cmd[2] = dir;
execRunTime(cmd);
} catch (Exception e) {
// any runtime failure (no shell, non-POSIX OS, ...) → Java fallback
deleteDirByFile(dir);
}
}
/***
 * Deletes every direct child of {@code dir} (files and empty
 * sub-directories; non-empty sub-directories survive because
 * {@code File.delete()} fails silently on them — original behaviour).
 *
 * @param dir directory whose contents are removed
 * @throws IOException       declared for API compatibility
 * @throws SecurityException propagated from {@code checkSecure(dir)}
 */
public static void deletAllFile(String dir) throws IOException,
        SecurityException {
    checkSecure(dir);
    File f = new File(dir);
    if (f.exists()) {
        String[] file = f.list();
        // BUG FIX: list() returns null for plain files / unreadable paths;
        // the original then threw NullPointerException.
        if (file != null) {
            for (int i = 0; i < file.length; i++) {
                File df = new File(dir + File.separator + file[i]);
                df.delete();
            }
        }
    }
}
/**
 * Deletes a single regular file. Missing paths and directories are
 * silently ignored (original behaviour).
 *
 * @param filename path of the file to delete
 * @throws IOException       when the delete call fails
 * @throws SecurityException propagated from {@code checkSecure(filename)}
 */
public static void deletFile(String filename) throws IOException,
        SecurityException {
    checkSecure(filename);
    File target = new File(filename);
    if (!target.exists() || !target.isFile()) {
        return; // nothing to delete / not a regular file
    }
    if (!target.delete()) {
        throw new IOException();
    }
}
/**
 * Deletes each regular file in {@code files}; a null array is a no-op.
 *
 * @param files paths to delete
 * @throws IOException       when a delete fails
 * @throws SecurityException propagated from {@code checkSecure}
 */
public static void deletFile(String[] files) throws IOException,
        SecurityException {
    if (files == null) {
        return;
    }
    for (String name : files) {
        // checkSecure runs again inside deletFile; the extra call is kept
        // to preserve the original behaviour exactly
        checkSecure(name);
        deletFile(name);
    }
}
/**
 * Returns the size of the file in bytes ({@code 0} when it does not exist,
 * per {@link File#length()}).
 *
 * @param filename path to measure
 * @return file length in bytes
 * @throws IOException declared for API compatibility
 */
public static long getFileSize(String filename) throws IOException {
    return new File(filename).length();
}
/**
 * Total size in bytes of a directory tree. Windows uses the pure-Java
 * walk; other platforms use "du -sk" (which itself falls back to the walk).
 *
 * @param dir directory (or file) to measure
 * @return total size in bytes
 * @throws IOException propagated from the delegates
 */
public static long getDirectorySize(String dir) throws IOException {
    return isWinOS() ? getDirectorySizeByFile(dir) : getDirectorySizeByRuntime(dir);
}
/**
 * Recursively sums the byte size of every file under dir; a plain file is
 * measured directly.
 *
 * @param dir directory or file to measure
 * @return total size in bytes
 * @throws IOException propagated from getFileSize
 */
public static long getDirectorySizeByFile(String dir) throws IOException {
long filesize = 0;
java.io.File filepath = new java.io.File(dir);
if (filepath.isDirectory()) {
// NOTE(review): listFiles() returns null for unreadable directories,
// which would NPE here — confirm callers only pass readable paths
File files[] = filepath.listFiles();
for (int i = 0; i < files.length; i++) {
if (files[i].isDirectory())
filesize += getDirectorySizeByFile(files[i]
.getAbsolutePath());
else
filesize += getFileSize(files[i].getAbsolutePath());
}
} else {
filesize = getFileSize(filepath.getAbsolutePath());
}
return filesize;
}
/***
 * Directory size via an external "du -sk" process; parses the kilobyte
 * count before the first tab of the output and converts to bytes. Any
 * failure (no shell, parse error, ...) falls back to the pure-Java walk.
 *
 * @param dir directory to measure
 * @return total size in bytes
 * @throws IOException propagated from the fallback walk
 */
public static long getDirectorySizeByRuntime(String dir) throws IOException {
long filesize = 0;
try {
String[] cmd = new String[3];
cmd[0] = "du";
cmd[1] = "-sk";
cmd[2] = dir;
String s = execRunTime(cmd);
// "du -sk" prints "<kilobytes>\t<path>"; collect digits up to the tab
StringBuffer buf = new StringBuffer();
char[] c = s.toCharArray();
for (int i = 0; i < c.length; i++) {
if (c[i] == '\t')
break;
else
buf.append(c[i]);
}
filesize = Long.parseLong(buf.toString()) * 1024;
} catch (Exception e) {
filesize = getDirectorySizeByFile(dir);
}
return filesize;
}
/**
 * Sums the sizes of all files in {@code files}; a null array yields 0.
 *
 * @param files paths to measure
 * @return total size in bytes
 * @throws IOException       propagated from {@code getFileSize}
 * @throws SecurityException declared for API compatibility
 */
public static long getFileSize(String[] files) throws IOException,
        SecurityException {
    long total = 0L;
    if (files != null) {
        for (String name : files) {
            total += getFileSize(name);
        }
    }
    return total;
}
/**
 * Lists the direct children of a directory.
 *
 * @param dir directory to list
 * @return child files, or {@code null} when {@code dir} is not a readable
 *         directory (per {@link File#listFiles()})
 * @throws IOException       declared for API compatibility
 * @throws SecurityException propagated from {@code checkSecure(dir)}
 */
public static File[] getAllFile(String dir) throws IOException,
        SecurityException {
    checkSecure(dir);
    return new File(dir).listFiles();
}
/**
 * Returns whether the directory contains at least one entry.
 *
 * @param dir directory to probe
 * @return {@code true} when {@code dir} is a readable directory with at
 *         least one child; {@code false} otherwise
 * @throws IOException       declared for API compatibility
 * @throws SecurityException declared for API compatibility
 */
public static boolean isExistFile(String dir) throws IOException,
        SecurityException {
    File[] children = new File(dir).listFiles();
    return children != null && children.length > 0;
}
/**
 * Extracts the simple names from an array of files.
 *
 * @param files files to read names from
 * @return array of {@code files[i].getName()} in the same order
 * @throws IOException       declared for API compatibility
 * @throws SecurityException propagated from {@code checkSecure}
 */
public static String[] getAllFileName(File[] files) throws IOException,
        SecurityException {
    // BUG FIX: the original initialised the result array to null and then
    // indexed into it, throwing NullPointerException on the first element
    // (masked with @SuppressWarnings("null")).
    String[] filename = new String[files.length];
    for (int i = 0; i < files.length; i++) {
        checkSecure(files[i].getName());
        filename[i] = files[i].getName();
    }
    return filename;
}
/**
 * Lists the direct children of {@code dir} as absolute-style paths
 * ({@code dir + separator + child}).
 *
 * @param dir directory to list
 * @return full child paths; empty array when {@code dir} is not a
 *         readable directory
 * @throws IOException       declared for API compatibility
 * @throws SecurityException propagated from {@code checkSecure(dir)}
 */
public static String[] getAllFullFileName(String dir) throws IOException,
        SecurityException {
    checkSecure(dir);
    java.io.File filepath = new java.io.File(dir);
    String[] filename = filepath.list();
    // BUG FIX: list() returns null for plain files / unreadable paths; the
    // original then threw NullPointerException on filename.length.
    if (filename == null) {
        return new String[0];
    }
    for (int i = 0; i < filename.length; i++) {
        filename[i] = dir + File.separator + filename[i];
    }
    return filename;
}
/**
 * Streams a stored file to the client as an attachment download.
 *
 * @param res              servlet response the bytes are written to
 * @param strFileName      path of the stored file on disk
 * @param strDownFileName  file name presented to the browser
 * @throws IOException on read/write failure
 */
public static void downloadFile(HttpServletResponse res,
        String strFileName, String strDownFileName) throws IOException {
    checkSecure(strFileName);
    OutputStream out = null;
    BufferedInputStream bis = null;
    try {
        strDownFileName = removeToken(strDownFileName);
        // FIX: the original opened an extra FileInputStream that was never
        // read and called res.getOutputStream() twice; both removed.
        // Open the source first so a missing file still fails before any
        // headers are written (as before).
        bis = new BufferedInputStream(new FileInputStream(strFileName));
        res.setHeader("Content-Disposition", "attachment; filename=" + CharConversion.K2E(strDownFileName) + ";");
        String fileType[] = UtilFileApp.getFileType(strDownFileName.substring(strDownFileName.lastIndexOf('.') + 1));
        res.setContentType(fileType[2]);
        out = res.getOutputStream();
        byte buffer[] = new byte[2048];
        int bytesRead;
        while ((bytesRead = bis.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
        out.flush();
    } finally {
        try { out.close(); } catch (Exception e) {}
        try { bis.close(); } catch (Exception e) {}
    }
}
/**
 * Streams a stored file to the client for inline display (no
 * "attachment" disposition, otherwise identical to downloadFile).
 *
 * @param res              servlet response the bytes are written to
 * @param strFileName      path of the stored file on disk
 * @param strDownFileName  file name presented to the browser
 * @throws IOException on read/write failure
 */
public static void showFile(HttpServletResponse res,
        String strFileName, String strDownFileName) throws IOException {
    checkSecure(strFileName);
    OutputStream out = null;
    BufferedInputStream bis = null;
    try {
        strDownFileName = removeToken(strDownFileName);
        // FIX: removed the never-read extra FileInputStream and the second
        // res.getOutputStream() call present in the original.
        bis = new BufferedInputStream(new FileInputStream(strFileName));
        res.setHeader("Content-Disposition", "filename=" + CharConversion.K2E(strDownFileName) + ";");
        String fileType[] = UtilFileApp.getFileType(strDownFileName.substring(strDownFileName.lastIndexOf('.') + 1));
        res.setContentType(fileType[2]);
        out = res.getOutputStream();
        byte buffer[] = new byte[2048];
        int bytesRead;
        while ((bytesRead = bis.read(buffer)) != -1) {
            out.write(buffer, 0, bytesRead);
        }
        out.flush();
    } finally {
        try { out.close(); } catch (Exception e) {}
        try { bis.close(); } catch (Exception e) {}
    }
}
/**
 * Classifies the requesting browser from the User-Agent header.
 *
 * @param request incoming request
 * @return one of "MSIE", "Chrome", "Opera", "Safari" or "Firefox"
 *         (Firefox is the fallback, including when the header is absent)
 */
private static String getUserAgent(HttpServletRequest request) {
    String header = request.getHeader("User-Agent");
    // BUG FIX: getHeader returns null when the header is missing; the
    // original then threw NullPointerException. Fall back to the default.
    if (header == null) {
        return "Firefox";
    }
    if (header.indexOf("MSIE") > -1) {
        return "MSIE";
    } else if (header.indexOf("Chrome") > -1) {
        return "Chrome";
    } else if (header.indexOf("Opera") > -1) {
        return "Opera";
    } else if (header.indexOf("Safari") > -1) {
        return "Safari";
    }
    return "Firefox";
}
/***
 * Builds the Content-Disposition header value with the file name encoded
 * per browser family (IE: percent-encoding; Firefox/Opera/Safari:
 * UTF-8 bytes as ISO-8859-1, quoted; Chrome: per-character encoding of
 * non-ASCII only).
 *
 * @param filename file name to present
 * @param browser  value produced by {@code getUserAgent}
 * @return header value "attachment;filename=..."
 * @throws Exception on unsupported encodings or unknown browser
 */
private static String getDisposition(String filename, String browser) throws Exception {
    final String prefix = "attachment;filename=";
    if (browser.equals("MSIE")) {
        return prefix + URLEncoder.encode(filename, "UTF-8").replaceAll("\\+", "%20");
    }
    if (browser.equals("Firefox") || browser.equals("Opera") || browser.equals("Safari")) {
        return prefix + "\"" + new String(filename.getBytes("UTF-8"), "ISO-8859-1") + "\"";
    }
    if (browser.equals("Chrome")) {
        StringBuffer encoded = new StringBuffer();
        for (int idx = 0; idx < filename.length(); idx++) {
            char c = filename.charAt(idx);
            if (c > '~') {
                encoded.append(URLEncoder.encode("" + c, "UTF-8"));
            } else {
                encoded.append(c);
            }
        }
        return prefix + encoded.toString();
    }
    throw new RuntimeException("Not supported browser");
}
/**
 * Streams a stored file to the client with browser-specific
 * Content-Disposition handling.
 *
 * @param request      incoming request (used for MIME lookup and UA sniffing)
 * @param response     response the bytes are written to
 * @param saveFilename path of the stored file (including file name)
 * @param realFilename original file name presented to the browser
 * @throws ServletException declared for API compatibility
 * @throws IOException when the file is missing/empty/a directory or on I/O failure
 */
public static void download(HttpServletRequest request, HttpServletResponse response,
        String saveFilename, String realFilename) throws ServletException, IOException {
    File saveFile = new File(saveFilename);
    if (saveFile == null || !saveFile.exists() || saveFile.length() <= 0 || saveFile.isDirectory()) {
        throw new IOException("파일 객체가 Null 혹은 존재하지 않거나 길이가 0, 혹은 파일이 아닌 디렉토리이다.");
    }
    realFilename = removeToken(realFilename);
    String mimetype = request.getSession().getServletContext().getMimeType(saveFile.getName());
    InputStream is = null;
    String mime = mimetype;
    try {
        is = new FileInputStream(saveFile);
        // BUG FIX: when the container resolved a MIME type the original
        // left `mime` null and sent Content-Type "null; charset=utf-8".
        // Use the resolved type, falling back to octet-stream.
        if (mime == null || mime.length() == 0) {
            mime = "application/octet-stream;";
        }
        byte[] buffer = new byte[2048];
        response.setContentType(mime + "; charset=utf-8");
        String browser = getUserAgent(request);
        try {
            response.setHeader("Content-Disposition", getDisposition(realFilename, browser));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // skip Content-Length entirely when the size is not reliable
        if (saveFile.length() > 0) {
            response.setHeader("Content-Length", "" + saveFile.length());
        }
        BufferedInputStream fin = null;
        BufferedOutputStream outs = null;
        try {
            fin = new BufferedInputStream(is);
            outs = new BufferedOutputStream(response.getOutputStream());
            int read = 0;
            while ((read = fin.read(buffer)) != -1) {
                outs.write(buffer, 0, read);
            }
        } finally {
            try {
                outs.close();
            } catch (Exception ex1) {
            }
            try {
                fin.close();
            } catch (Exception ex2) {
            }
        } // end of try/catch
    } finally {
        try {
            is.close();
        } catch (Exception ex) {
        }
    }
}
/**
 * Checks whether a file or directory exists at the given path.
 *
 * @param filename path to test
 * @return true when the path exists on disk
 * @throws IOException declared for interface compatibility; not thrown here
 */
public static boolean isExists(String filename) throws IOException {
    return new File(filename).exists();
}
/**
 * Renames (moves) a file, rejecting traversal sequences in the source path.
 *
 * @param strFilename    current path
 * @param strNewFileName new path
 * @return true when the underlying rename succeeded
 * @throws IOException when the source path contains a traversal sequence
 */
public static boolean renameTo(String strFilename, String strNewFileName)
        throws IOException {
    checkSecure(strFilename);
    File source = new File(strFilename);
    File target = new File(strNewFileName);
    return source.renameTo(target);
}
/**
 * Copies a regular file from src to dest. If dest already exists the copy is
 * written to dest + "_1" instead. Errors during the copy are logged and
 * swallowed (best-effort semantics, unchanged).
 *
 * @param src  source file path (validated against directory traversal)
 * @param dest destination file path
 * @throws IOException when src contains a traversal sequence
 */
public static void copyfile(String src, String dest) throws IOException {
    checkSecure(src);
    FileInputStream reader = null;
    FileOutputStream writer = null;
    try {
        File source = new File(src);
        if (source.isFile()) {
            File target = new File(dest);
            if (target.exists()) {
                // Never overwrite an existing destination file.
                target = new File(dest + "_1");
            }
            reader = new FileInputStream(source);
            writer = new FileOutputStream(target);
            byte[] chunk = new byte[1024 * 2];
            int count;
            while ((count = reader.read(chunk)) != -1) {
                writer.write(chunk, 0, count);
            }
            writer.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (reader != null)
                reader.close();
        } catch (Exception ignore) {
        }
        try {
            if (writer != null)
                writer.close();
        } catch (Exception ignore) {
        }
    }
}
/**
 * Returns the last-modified timestamp of a file (epoch millis; 0 when the
 * file does not exist, per File.lastModified()).
 *
 * @param filename path to inspect
 * @return last modification time in milliseconds since the epoch
 * @throws IOException declared for interface compatibility; not thrown here
 */
public static long getLastModified(String filename) throws IOException {
    return new File(filename).lastModified();
}
/**
 * Checks whether the given path refers to a regular file.
 *
 * @param filename path to test
 * @return true when the path exists and is a file
 * @throws IOException declared for interface compatibility; not thrown here
 */
public static boolean isFile(String filename) throws IOException {
    return new File(filename).isFile();
}
/**
 * Checks whether the given path refers to a directory.
 *
 * @param filename path to test
 * @return true when the path exists and is a directory
 * @throws IOException declared for interface compatibility; not thrown here
 */
public static boolean isDirectory(String filename) throws IOException {
    return new File(filename).isDirectory();
}
/**
 * Maps a file extension to display metadata: { display name, icon file name,
 * mime type }. Null, empty, and unknown extensions yield a generic binary
 * entry. All legacy display labels are preserved verbatim, including a few
 * that look copy-pasted (e.g. gif reports "JPEG Image", avi "TIF Image").
 *
 * @param strExtension file extension without the dot (case-insensitive)
 * @return 3-element array: [0]=type name, [1]=icon image, [2]=mime type
 */
public static String[] getFileType(String strExtension) {
    String ext = (strExtension == null) ? null : strExtension.toLowerCase();
    if (ext == null || ext.equals(""))
        return new String[] { "Unknown", "file.png", "application/octet-stream" };
    if (ext.equals("jpg") || ext.equals("jpeg") || ext.equals("jpe"))
        return new String[] { "JPEG Image", "jpg.png", "image/jpeg" };
    if (ext.equals("gif"))
        return new String[] { "JPEG Image", "gif.png", "image/png" };
    if (ext.equals("doc") || ext.equals("docx") || ext.equals("rtf"))
        return new String[] { "MS Word", "doc.png", "application/msword" };
    if (ext.equals("xls") || ext.equals("xlsx"))
        return new String[] { "MS Excel", "xls.png", "application/vnd.ms-excel" };
    if (ext.equals("ppt") || ext.equals("pptx"))
        return new String[] { "MS PowerPoint", "ppt.png", "application/vnd.ms-powerpoint" };
    if (ext.equals("pds"))
        return new String[] { "MS PowerPoint", "pds.png", "application/vnd.ms-powerpoint" };
    if (ext.equals("txt"))
        return new String[] { "텍스트 문서", "txt.png", "text/plain" };
    if (ext.equals("hwp"))
        return new String[] { "한글워드문서", "hwp.png", "application/octet-stream" };
    if (ext.equals("html") || ext.equals("htm"))
        return new String[] { "HTML", "html.png", "text/html" };
    if (ext.equals("pdf"))
        return new String[] { "Adobe Acrobat", "pdf.png", "application/pdf" };
    if (ext.equals("bmp"))
        return new String[] { "BMP Image", "bmp.png", "image/vnd.wap.wbmp" };
    if (ext.equals("exe"))
        return new String[] { "실행파일", "exe.png", "application/octet-stream" };
    if (ext.equals("zip") || ext.equals("rar") || ext.equals("tar")
            || ext.equals("gzip") || ext.equals("gz"))
        return new String[] { "압축파일", "zip.png", "application/zip" };
    if (ext.equals("mp3"))
        return new String[] { "MP3 Sound", "mp3.png", "application/octet-stream" };
    if (ext.equals("eml"))
        return new String[] { "E-mail", "eml.png", "application/octet-stream" };
    if (ext.equals("pic") || ext.equals("pict"))
        return new String[] { "PICT Image", "pict.png", "application/octet-stream" };
    if (ext.equals("tif") || ext.equals("tiff"))
        return new String[] { "TIF Image", "file.png", "image/tiff" };
    if (ext.equals("avi") || ext.equals("mpeg") || ext.equals("mpg") || ext.equals("mpe"))
        return new String[] { "TIF Image", "avi.png", "video/x-msvideo" };
    if (ext.equals("reg"))
        return new String[] { "등록항목", "reg.png", "application/octet-stream" };
    if (ext.equals("ico"))
        return new String[] { "PICT Image", "gif.png", "image/ico" };
    if (ext.equals("png"))
        return new String[] { "png Image", "gif.png", "image/png" };
    if (ext.equals("dwg"))
        return new String[] { "autocad Image", "dwg.png", "image/png" };
    if (ext.equals("gul"))
        return new String[] { "gul Image", "gul.png", "image/png" };
    return new String[] { "Unknown", "file.png", "application/octet-stream" };
}
/**
 * Returns the number of entries directly inside a directory
 * (0 when the path does not exist or is not listable).
 *
 * @param dir directory path
 * @return number of files/subdirectories in the directory
 * @throws IOException       declared for interface compatibility
 * @throws SecurityException when a security manager denies read access
 */
public static int getNumOfFile(String dir) throws IOException,
        SecurityException {
    String[] entries = new java.io.File(dir).list();
    return (entries == null) ? 0 : entries.length;
}
/**
 * Deletes every entry directly under {@code dir} whose last-modified time is
 * at or before {@code longDate} (epoch millis). Matching subdirectories are
 * handed to deleteDirByFile (defined elsewhere in this class; presumably a
 * recursive directory delete — verify).
 *
 * @param dir      directory to sweep; must not be null, empty, or the bare separator
 * @param longDate cutoff timestamp; entries modified at or before it are deleted
 * @throws IOException       when dir is invalid or contains a traversal sequence
 * @throws SecurityException when a security manager denies access
 */
public static void deletAllFile(String dir, long longDate)
        throws IOException, SecurityException {
    checkSecure(dir);
    File f = new File(dir);
    // Refuse obviously dangerous targets (null, filesystem root, empty path).
    if (dir == null || dir.equals(File.separator) || dir.equals(""))
        throw new IOException();
    if (f.exists()) {
        String[] file = f.list();
        File df;
        for (int i = 0; i < file.length; i++) {
            df = new File(dir + File.separator + file[i]);
            // Only delete entries old enough (modified at/before the cutoff).
            if (longDate >= df.lastModified()) {
                if (df.isDirectory())
                    deleteDirByFile(dir + File.separator + file[i]);
                else
                    df.delete();
            }
        }
    }
}
/**
 * Runs an external command and returns the first line it prints to stdout.
 *
 * @param cmd command and its arguments, one array element each
 * @return the first line of standard output, or null if nothing was printed
 * @throws Exception if the process cannot be started or reading fails
 */
private static String execRunTime(String[] cmd) throws Exception {
    Process p = Runtime.getRuntime().exec(cmd);
    BufferedReader br = new BufferedReader(new InputStreamReader(p
            .getInputStream()));
    try {
        // Only the first output line is consumed (unchanged behavior).
        return br.readLine();
    } finally {
        // BUGFIX: the reader (and the process' stdout pipe) was never
        // closed, leaking a file descriptor per invocation.
        try { br.close(); } catch (Exception ignore) { }
    }
}
/**
 * Distinguishes Windows from UNIX-like hosts based on the configured
 * "OS.NAME" setting (project ConfigData; "WINDOW" means Windows).
 *
 * @return true when the configured OS name is "WINDOW"
 */
private static boolean isWinOS() {
    return ConfigData.getString("OS.NAME").equals("WINDOW");
}
/**
 * Rejects paths that contain a parent-directory traversal sequence.
 *
 * @param str path to validate (null is accepted)
 * @throws IOException when the path contains "../" or "..\"
 */
public static void checkSecure(String str) throws IOException {
    // SECURITY FIX: previously only "../" was rejected, leaving
    // Windows-style "..\" traversal unchecked.
    if (str != null && (str.indexOf("../") != -1 || str.indexOf("..\\") != -1))
        throw new IOException();
}
/**
 * Strips characters Windows does not allow in file names (to prevent
 * broken/garbled download names), plus ';'.
 *
 * @param str file name to sanitise
 * @return the name with \ / : * ? " &lt; &gt; | ; removed
 */
public static String removeToken(String str) {
    // BUGFIX: '\' and '"' are also invalid in Windows file names but were
    // previously let through (the old class [\\/:*?<>|;] escaped only '/').
    return str.replaceAll("[\\\\/:*?\"<>|;]", "");
}
/**
 * Tells whether the given mime type is one of the displayable image types
 * (gif, wbmp, jpeg). Comparison is case-insensitive.
 *
 * @param fileExtName mime type string to test
 * @return true when it matches a known image mime type
 */
public static boolean isImage(String fileExtName){
    return java.util.Arrays
            .asList("image/gif", "image/vnd.wap.wbmp", "image/jpeg")
            .contains(fileExtName.toLowerCase());
}
/**
 * Sends the given text content to the browser as an Excel download.
 *
 * @param res             HTTP response to write to
 * @param strContent      spreadsheet body text (a format Excel can open)
 * @param strDownFileName file name offered in the download dialog
 * @throws IOException when the file name contains a traversal sequence
 */
public static void downloadContentExcel(HttpServletResponse res,
        String strContent, String strDownFileName) throws IOException {
    checkSecure(strDownFileName);
    ServletOutputStream out = null;
    try {
        // Strip characters Windows cannot use in file names.
        strDownFileName = removeToken(strDownFileName);
        out = res.getOutputStream();
        res.setHeader("Content-Disposition", "attachment; filename="+ strDownFileName + ";");
        res.setContentType("application/vnd.ms-excel");
        // CharConversion.K2E: project charset-conversion helper —
        // presumably re-encodes Korean text for the stream; verify.
        out.println(CharConversion.K2E(strContent));
    } finally {
        try {
            out.close();
        } catch (Exception e) {
        }
    }
}
}
<file_sep>/EDMS3/WebContent/js/popup/registFavoriteFolderWindow.js
/**
 * Popup layer for registering / renaming a favorite folder.
 * Depends on the global `exsoft` utility namespace and jQuery.
 */
var favoriteFolderWindow = {
    wrapperClass : "favorite_register_wrapper",
    layerClass : "favorite_register",
    callback : null,          // invoked after a successful submit
    target_folder_id : "",    // folder the new favorite points at (create mode)
    folder_nm : "",
    folder_id : "",           // existing favorite id (rename mode)
    type : "",
    is_virtual : "",
    // Initialise the form: create mode when folderId is omitted,
    // rename mode when it is supplied.
    initForm : function(node, folderId) {
        if (folderId === undefined) {
            // create: remember the target folder node
            this.target_folder_id = node.id;
            this.is_virtual = "Y";
        } else if (folderId !== undefined) {
            // rename: pre-fill the name field with the node's current text
            $("#fav_fol_name").val(node.text);
            this.folder_id = node.id;
        }
    },
    // UI event handlers
    event : {
        // create / rename submit: POST the form data, then close the layer,
        // clear the input, and notify the caller via `callback`.
        submit : function() {
            var postdata = {
                folder_id : favoriteFolderWindow.folder_id,
                target_folder_id : favoriteFolderWindow.target_folder_id,
                folder_nm : $("#fav_fol_name").val(),
                is_virtual : favoriteFolderWindow.is_virtual,
                type : favoriteFolderWindow.type
            }
            exsoft.util.ajax.ajaxDataFunctionWithCallback(postdata, exsoft.contextRoot+"/folder/favoriteControl.do", "", function(data) {
                exsoft.util.layout.divLayerClose(favoriteFolderWindow.wrapperClass, favoriteFolderWindow.layerClass);
                $("#fav_fol_name").val("");
                favoriteFolderWindow.callback();
            });
        }
    },
    // Cancel button: close the layer without saving.
    cancelButtonClick : function() {
        exsoft.util.layout.divLayerClose(favoriteFolderWindow.wrapperClass, favoriteFolderWindow.layerClass);
    }
}
package kr.co.exsoft.common.vo;
import kr.co.exsoft.eframework.configuration.Constant;
/**
 * System configuration value object (table XR_SYSCONFIG).
 * This table replaces the former XR_URL_CONFIG / XR_FILE_CONFIG /
 * XR_VERSION_CONFIG tables.
 *
 * @author <NAME>
 * @since 2015.02.24
 * @version 3.0
 *
 */
public class ConfVO {
    private String skey;   // configuration key
    private String sval;   // configuration value
    private String stype;  // category (FILE: file mgmt | VERSION: versioning | URL: URL-copy expiry)
    private String is_use; // in-use flag (defaults to Constant.YES)

    /** Creates an empty, enabled configuration entry. */
    public ConfVO() {
        this.skey = "";
        this.sval = "";
        this.stype = "";
        this.is_use = Constant.YES;
    }
    public String getSkey() {
        return skey;
    }
    public void setSkey(String skey) {
        this.skey = skey;
    }
    public String getSval() {
        return sval;
    }
    public void setSval(String sval) {
        this.sval = sval;
    }
    public String getStype() {
        return stype;
    }
    public void setStype(String stype) {
        this.stype = stype;
    }
    public String getIs_use() {
        return is_use;
    }
    public void setIs_use(String is_use) {
        this.is_use = is_use;
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/CommonUtil.java
package kr.co.exsoft.eframework.util;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.lang.reflect.Method;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.ui.Model;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import kr.co.exsoft.document.service.DocumentService;
import kr.co.exsoft.document.vo.AttrVO;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.vo.VO;
import kr.co.exsoft.folder.service.FolderService;
import kr.co.exsoft.permission.vo.AclExItemVO;
import kr.co.exsoft.permission.vo.AclItemListVO;
import kr.co.exsoft.permission.vo.AclItemVO;
import kr.co.exsoft.process.vo.ProcessExecutorVO;
import kr.co.exsoft.common.service.CacheService;
import kr.co.exsoft.common.vo.DocumentHtVO;
import kr.co.exsoft.common.vo.MenuAuthVO;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.common.vo.HistoryVO;
/**
* Common Utility
* @author <NAME>
* @since 2014.07.15
* @version 3.0
*
*/
public class CommonUtil {
protected static final Log logger = LogFactory.getLog(CommonUtil.class);
/**
 * Renders an exception's full stack trace as a String (for logging).
 *
 * @param e exception to format
 * @return the stack-trace text
 */
public static String getPrintStackTrace(Exception e) {
    // A StringWriter avoids the byte round-trip through the platform
    // default charset that ByteArrayOutputStream.toString() performed,
    // which could mangle non-encodable characters in exception messages.
    java.io.StringWriter out = new java.io.StringWriter();
    e.printStackTrace(new java.io.PrintWriter(out, true));
    return out.toString();
}
/**
 * Builds a date-based storage path under the given root:
 * root/yyyy/MM/dd/HH/ (zero-padded, using the current local time).
 *
 * @param rootFolder base folder the dated path is appended to
 * @return rootFolder + "/yyyy/MM/dd/HH/"
 */
public static String getContentPathByDate(String rootFolder) {
    Calendar now = Calendar.getInstance();
    String dirPath = String.format("%04d/%02d/%02d/%02d",
            now.get(Calendar.YEAR),
            now.get(Calendar.MONTH) + 1, // Calendar months are 0-based
            now.get(Calendar.DATE),
            now.get(Calendar.HOUR_OF_DAY));
    return rootFolder + "/" + dirPath + "/";
}
/**
 * Computes the next version string from an existing one. The version is
 * treated as dot-separated with the trailing two segments being
 * major.minor; only those two numbers are changed.
 *
 * @param oldVersion   current version string (must contain at least "major.minor")
 * @param version_type Constant.VERSION_MAJOR_VERSION or Constant.VERSION_MINOR_VERSION
 * @return the incremented version; empty string for an unrecognised version_type
 */
public static String getUpVersion(String oldVersion, String version_type) {
    logger.debug("oldVersion = " + oldVersion);
    logger.debug("version_type = " + version_type);
    String[] versions = StringUtil.split2Array(oldVersion, ".", false);
    // The last two segments are the numeric major/minor parts.
    String majorVersion = versions[versions.length - 2];
    String minorVersion = versions[versions.length - 1];
    // Everything preceding them (minus the separating dot) is kept as-is.
    String stemVersion = oldVersion.substring(0, oldVersion.length() - majorVersion.length() - minorVersion.length() - 1);
    String newVersion = "";
    if (version_type.equals(Constant.VERSION_MAJOR_VERSION)) {
        // A major bump resets the minor part to 0.
        newVersion = stemVersion + (Integer.parseInt(majorVersion) + Integer.parseInt(Constant.MAJOR_VERSION_UP)) + ".0";
    } else if (version_type.equals(Constant.VERSION_MINOR_VERSION)) {
        newVersion = stemVersion + majorVersion + "." + (Integer.parseInt(minorVersion) + Integer.parseInt(Constant.MINOR_VERSION_UP));
    }
    logger.debug("Return value=" + newVersion);
    return newVersion;
}
/**
 * Fills the search-condition map with a date range: start = today + term
 * (term is presumably negative, i.e. "the last N days" — verify at callers),
 * end = tomorrow, both formatted yyyy-MM-dd.
 *
 * @param term              day offset applied to today for the range start
 * @param search_conditions map to receive Constant.DATE_START / Constant.DATE_END
 * @return the same map with the two date keys set
 */
public static HashMap<String, Object> getRecentTerm(int term, HashMap<String, Object> search_conditions) {
    SimpleDateFormat sf = new SimpleDateFormat("yyyy-MM-dd");
    Calendar cal = Calendar.getInstance();
    // start date setting
    cal.add ( Calendar.DATE, term );
    Date currentDate = cal.getTime();
    // end date setting: tomorrow, so "today" is fully included in the range
    cal = Calendar.getInstance();
    cal.add ( Calendar.DATE, +1 );
    Date toDate = cal.getTime();
    search_conditions.put(Constant.DATE_START, sf.format(currentDate));
    search_conditions.put(Constant.DATE_END, sf.format(toDate));
    return search_conditions;
}
/**
 * Sets download response headers (Content-Type, Content-Disposition,
 * Content-Length) for the given file name and size, then flushes the buffer.
 *
 * @param fileName file name offered to the client
 * @param fileSize content length in bytes; header omitted when not positive
 * @param request  used only to look up the container's mimetype mapping
 * @param response response whose headers are set
 * @throws Exception
 */
public static void setResponseHeader(String fileName, long fileSize, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    // 1. Content-Type: container mapping, or generic binary fallback.
    String mimetype = request.getSession().getServletContext().getMimeType(fileName);
    if (mimetype == null || mimetype.length() == 0) {
        response.setContentType("application/octet-stream; charset=utf-8");
    }else {
        response.setContentType(mimetype + "; charset=utf-8");
    }
    // 2. Content-Disposition (CharConversion.K2E: project charset helper —
    //    presumably re-encodes the Korean file name; verify).
    response.setHeader("Content-Disposition", "attachment; filename="+ CharConversion.K2E(fileName) + ";");
    // 3. Content-Length, only when a valid size is known.
    if (fileSize > 0) {
        response.setHeader("Content-Length", "" + fileSize);
    }
    response.flushBuffer();
}
/**
 * Computes the total number of pages for a paged list.
 * An empty list still counts as one page.
 *
 * @param nTotLineNum  total number of rows
 * @param nMaxListLine maximum rows shown per page
 * @return total page count (minimum 1)
 */
public static int getTotPageSize(int nTotLineNum,int nMaxListLine) {
    if (nTotLineNum == 0) {
        // An empty list is still rendered as a single (empty) page.
        return 1;
    }
    int pages = nTotLineNum / nMaxListLine;
    if (nTotLineNum % nMaxListLine != 0) {
        pages++; // partial last page
    }
    return pages;
}
/**
 * Resolves the grid page number from the request parameter map.
 * A fresh search or an explicit page reset always returns page 1.
 *
 * @param map request parameters; reads "is_search", "page_init", "page"
 * @return 1 when searching/resetting, otherwise the "page" value (default 1)
 */
public static int getPage(HashMap<String,Object> map) {
    String is_search = "";
    String page_init = "";
    int ret = 0;
    is_search = map.get("is_search") != null ? map.get("is_search").toString() : Constant.RESULT_FALSE;
    page_init = map.get("page_init") != null ? map.get("page_init").toString() : Constant.RESULT_FALSE;
    if(is_search.equals(Constant.RESULT_TRUE) || page_init.equals(Constant.RESULT_TRUE)) {
        // new search / reset: always jump back to the first page
        ret = 1;
    }else {
        ret = map.get("page") != null ? Integer.parseInt(map.get("page").toString()) : 1 ;
    }
    return ret;
}
/**
 * Resolves the grid page size from the request and session.
 *
 * NOTE(review): when "rows" equals the session page size, "rows" IS that
 * value, so every branch effectively returns sessionVO.getSessPage_size();
 * the branching is kept as-is for intent only — confirm before simplifying.
 *
 * @param map       request parameters; reads "rows"
 * @param sessionVO current user session (authoritative page size)
 * @return the page size to use
 */
public static String getPageSize(HashMap<String,Object> map,SessionVO sessionVO) {
    String ret = "";
    if(map.get("rows") != null) {
        if(map.get("rows").toString().equals(sessionVO.getSessPage_size())) {
            // session unchanged
            ret = map.get("rows").toString();
        }else {
            // session page size was changed; it wins over the request value
            ret =sessionVO.getSessPage_size();
        }
    }else {
        ret = sessionVO.getSessPage_size();
    }
    return ret;
}
/**
 * Builds a resource primary key (document/file/folder): a type prefix
 * followed by the numeric id zero-padded to 12 digits.
 *
 * @param prefix type prefix (e.g. "DOC")
 * @param id     numeric identifier
 * @return prefix + 12-digit zero-padded id
 */
public static String getStringID(String prefix, int id) {
    return String.format("%s%012d", prefix, id);
}
/**
 * Builds a folder/permission/doc-type audit record from the current session
 * and the given action/target identifiers.
 *
 * @param history_seq sequence value for the record
 * @param target_id   id of the object acted upon
 * @param action_id   action code
 * @param target_type object type code
 * @param sessionVO   current user session (supplies actor/group fields)
 * @return a populated HistoryVO
 */
public static HistoryVO setHistoryVO(long history_seq,String target_id,String action_id,String target_type,SessionVO sessionVO) {
    HistoryVO historyVO = new HistoryVO();
    historyVO.setHistory_seq(history_seq);
    historyVO.setActor_id(sessionVO.getSessId());
    historyVO.setActor_nm(sessionVO.getSessName());
    historyVO.setGroup_id(sessionVO.getSessGroup_id());
    historyVO.setGroup_nm(sessionVO.getSessGroup_nm());
    historyVO.setAction_id(action_id);
    historyVO.setTarget_id(target_id);
    historyVO.setTarget_type(target_type);
    historyVO.setAction_place(Constant.ACTION_PLACE);
    return historyVO;
}
/**
 * Builds a document history record from the current user session.
 *
 * @param doc_seq    history sequence value
 * @param root_id    root document id
 * @param target_id  id of the object acted upon
 * @param action_id  action code
 * @param type_id    document type id
 * @param doc_name   document name
 * @param version_no version string at the time of the action
 * @param sessionVO  current user session (actor/group/IP fields)
 * @return a populated DocumentHtVO
 */
public static DocumentHtVO setDocumentHistory(long doc_seq,String root_id,String target_id,String action_id,
        String type_id,String doc_name,String version_no,SessionVO sessionVO) {
    DocumentHtVO vo = new DocumentHtVO();
    vo.setDoc_seq(doc_seq);
    vo.setRoot_id(root_id);
    vo.setAction_id(action_id);
    vo.setTarget_id(target_id);
    vo.setType_id(type_id);
    vo.setDoc_name(doc_name);
    vo.setVersion_no(version_no);
    vo.setActor_id(sessionVO.getSessId());
    vo.setActor_nm(sessionVO.getSessName());
    vo.setGroup_id(sessionVO.getSessGroup_id());
    vo.setGroup_nm(sessionVO.getSessGroup_nm());
    vo.setConnect_ip(sessionVO.getSessRemoteIp());
    vo.setAction_place(Constant.ACTION_PLACE);
    return vo;
}
/**
 * Builds a document history record on behalf of the system account
 * (batch-program variant; the connecting IP is the fixed batch IP).
 *
 * @param doc_seq    history sequence value
 * @param root_id    root document id
 * @param target_id  id of the object acted upon
 * @param action_id  action code
 * @param type_id    document type id
 * @param doc_name   document name
 * @param version_no version string at the time of the action
 * @param systemUser system administrator info; must contain the keys
 *                   "user_id", "user_name", "group_id", "group_name"
 * @return a populated DocumentHtVO
 */
public static DocumentHtVO setDocumentHistory(long doc_seq,String root_id,String target_id,String action_id,
        String type_id,String doc_name,String version_no,HashMap<String,Object> systemUser) {
    DocumentHtVO vo = new DocumentHtVO();
    vo.setDoc_seq(doc_seq);
    vo.setRoot_id(root_id);
    vo.setAction_id(action_id);
    vo.setTarget_id(target_id);
    vo.setType_id(type_id);
    vo.setDoc_name(doc_name);
    vo.setVersion_no(version_no);
    vo.setActor_id(systemUser.get("user_id").toString());
    vo.setActor_nm(systemUser.get("user_name").toString());
    vo.setGroup_id(systemUser.get("group_id").toString());
    vo.setGroup_nm(systemUser.get("group_name").toString());
    vo.setConnect_ip(Constant.BATCH_IP);
    vo.setAction_place(Constant.ACTION_PLACE);
    return vo;
}
/**
 * Builds a document history record for move / ownership-change actions.
 * For ACTION_MOVE and ACTION_CHANGE_CREATOR the before/after fields are
 * filled from targetMap.
 *
 * @param doc_seq    history sequence value
 * @param root_id    root document id
 * @param target_id  id of the object acted upon
 * @param action_id  action code
 * @param type_id    document type id
 * @param doc_name   document name
 * @param version_no version string at the time of the action
 * @param targetMap  before/after info; keys "before_id", "before_nm",
 *                   "after_id", "after_nm" (used only for move/owner change)
 * @param sessionVO  current user session (actor/group/IP fields)
 * @return a populated DocumentHtVO
 */
public static DocumentHtVO setDocumentHistory(long doc_seq,String root_id,String target_id,String action_id,
        String type_id,String doc_name,String version_no,HashMap<String,Object> targetMap,SessionVO sessionVO) {
    DocumentHtVO vo = new DocumentHtVO();
    vo.setDoc_seq(doc_seq);
    vo.setRoot_id(root_id);
    vo.setAction_id(action_id);
    vo.setTarget_id(target_id);
    vo.setType_id(type_id);
    vo.setDoc_name(doc_name);
    vo.setVersion_no(version_no);
    vo.setActor_id(sessionVO.getSessId());
    vo.setActor_nm(sessionVO.getSessName());
    vo.setGroup_id(sessionVO.getSessGroup_id());
    vo.setGroup_nm(sessionVO.getSessGroup_nm());
    vo.setConnect_ip(sessionVO.getSessRemoteIp());
    vo.setAction_place(Constant.ACTION_PLACE);
    // Only move / owner-change actions carry before/after information.
    if(targetMap != null && (
            action_id.equals(Constant.ACTION_MOVE) || action_id.equals(Constant.ACTION_CHANGE_CREATOR) )) {
        vo.setBefore_id(targetMap.get("before_id").toString());
        vo.setBefore_nm(targetMap.get("before_nm").toString());
        vo.setAfter_id(targetMap.get("after_id").toString());
        vo.setAfter_nm(targetMap.get("after_nm").toString());
    }
    return vo;
}
/**
 * Replaces a resource id's type prefix with the given one.
 * NOTE: assumes a fixed 3-character prefix (e.g. "DOC"/"FOL"), as produced
 * by getStringID().
 *
 * @param resource_id resource id whose prefix is replaced (may be null)
 * @param prefix      new 3-character prefix
 * @return the re-prefixed id, or null when resource_id is null
 */
public static String getChangedResourceIDByPrefix(String resource_id, String prefix) {
    return (resource_id == null) ? null : prefix + resource_id.substring(3);
}
/**
 * Merges a folder-scoped and a document-scoped AclItemVO (rows of XR_ACLITEM)
 * into a single combined AclItemListVO. Shared accessor fields are taken
 * from whichever VO has them populated; the per-scope permission flags are
 * copied side by side, and a "default acl" level is derived per scope as the
 * highest granted of delete > update > read > browse.
 *
 * @param folderACLItemVO   folder-scope permission row
 * @param documentAclItemVO document-scope permission row
 * @return the combined AclItemListVO
 */
public static AclItemListVO getAclItemListVOFromAclItemVO(AclItemVO folderACLItemVO, AclItemVO documentAclItemVO) {
    AclItemListVO aclItemListVO = new AclItemListVO();
    // Shared (accessor) fields: prefer the folder row, fall back to the document row.
    aclItemListVO.setAcl_id(!StringUtil.isEmpty(folderACLItemVO.getAcl_id()) ? folderACLItemVO.getAcl_id() : documentAclItemVO.getAcl_id());
    aclItemListVO.setAccessor_id(!StringUtil.isEmpty(folderACLItemVO.getAccessor_id()) ? folderACLItemVO.getAccessor_id() : documentAclItemVO.getAccessor_id());
    aclItemListVO.setAccessor_name(!StringUtil.isEmpty(folderACLItemVO.getAccessor_name()) ? folderACLItemVO.getAccessor_name() : documentAclItemVO.getAccessor_name());
    aclItemListVO.setAccessor_isgroup(!StringUtil.isEmpty(folderACLItemVO.getAccessor_isgroup()) ? folderACLItemVO.getAccessor_isgroup() : documentAclItemVO.getAccessor_isgroup() );
    aclItemListVO.setAccessor_isalias(!StringUtil.isEmpty(folderACLItemVO.getAccessor_isalias()) ? folderACLItemVO.getAccessor_isalias() : documentAclItemVO.getAccessor_isalias());
    // Folder-scope permission flags.
    aclItemListVO.setFol_act_browse(folderACLItemVO.getAct_browse());
    aclItemListVO.setFol_act_read(folderACLItemVO.getAct_read());
    aclItemListVO.setFol_act_update(folderACLItemVO.getAct_update());
    aclItemListVO.setFol_act_delete(folderACLItemVO.getAct_delete());
    aclItemListVO.setFol_act_create(folderACLItemVO.getAct_create());
    aclItemListVO.setFol_act_change_permission(folderACLItemVO.getAct_change_permission());
    // Document-scope permission flags.
    aclItemListVO.setDoc_act_browse(documentAclItemVO.getAct_browse());
    aclItemListVO.setDoc_act_read(documentAclItemVO.getAct_read());
    aclItemListVO.setDoc_act_update(documentAclItemVO.getAct_update());
    aclItemListVO.setDoc_act_delete(documentAclItemVO.getAct_delete());
    aclItemListVO.setDoc_act_create(documentAclItemVO.getAct_create());
    aclItemListVO.setDoc_act_cancel_checkout(documentAclItemVO.getAct_cancel_checkout());
    aclItemListVO.setDoc_act_change_permission(documentAclItemVO.getAct_change_permission());
    // Derive the per-scope base permission: highest granted level wins.
    // (Folder scope: when no flag is set, no default is assigned.)
    if(aclItemListVO.getFol_act_delete().equals(Constant.T))
        aclItemListVO.setFol_default_acl(Constant.ACL_DELETE);
    else if(aclItemListVO.getFol_act_update().equals(Constant.T))
        aclItemListVO.setFol_default_acl(Constant.ACL_UPDATE);
    else if(aclItemListVO.getFol_act_read().equals(Constant.T))
        aclItemListVO.setFol_default_acl(Constant.ACL_READ);
    else if(aclItemListVO.getFol_act_browse().equals(Constant.T))
        aclItemListVO.setFol_default_acl(Constant.ACL_BROWSE);
    if(aclItemListVO.getDoc_act_delete().equals(Constant.T))
        aclItemListVO.setDoc_default_acl(Constant.ACL_DELETE);
    else if(aclItemListVO.getDoc_act_update().equals(Constant.T))
        aclItemListVO.setDoc_default_acl(Constant.ACL_UPDATE);
    else if(aclItemListVO.getDoc_act_read().equals(Constant.T))
        aclItemListVO.setDoc_default_acl(Constant.ACL_READ);
    else if(aclItemListVO.getDoc_act_browse().equals(Constant.T))
        aclItemListVO.setDoc_default_acl(Constant.ACL_BROWSE);
    else
        aclItemListVO.setDoc_default_acl(Constant.ACL_NONE);
    return aclItemListVO;
}
/**
 * Splits combined folder/document permissions (a JSON array of
 * AclItemListVO-shaped objects) back into individual AclItemVO rows —
 * one folder-scope ("F") row and one document-scope ("D") row per entry.
 *
 * @param aclItemListArrayList JSON array string of combined permission entries
 * @param acl_id               ACL id the generated rows belong to
 * @return the flattened list of AclItemVO rows (two per input entry)
 * @throws Exception "common.required.error" when the input is empty
 */
public static List<AclItemVO> getAclItemVOFromAclItemListVO(String aclItemListArrayList, String acl_id) throws Exception{
    List<AclItemVO> aclItemList = new ArrayList<AclItemVO>();
    if(StringUtil.isEmpty(aclItemListArrayList)){
        throw new Exception("common.required.error");
    }
    // Parse the combined-permission JSON array.
    JSONArray jsonArray = JSONArray.fromObject(aclItemListArrayList);
    if(jsonArray.size() > 0 ) {
        for(int j=0; j<jsonArray.size(); j++){
            AclItemVO folderAclItemVo = new AclItemVO();
            AclItemVO documentAclItemVo = new AclItemVO();
            // Map the folder-scope permissions.
            folderAclItemVo.setAcl_id(acl_id); // PK
            folderAclItemVo.setIs_type("F"); // PK
            folderAclItemVo.setAccessor_id(jsonArray.getJSONObject(j).getString("accessor_id")); // PK
            folderAclItemVo.setAccessor_isgroup(jsonArray.getJSONObject(j).getString("accessor_isgroup"));
            folderAclItemVo.setAccessor_isalias(jsonArray.getJSONObject(j).getString("accessor_isalias"));
            folderAclItemVo.setAct_create(jsonArray.getJSONObject(j).getString("fol_act_create"));
            folderAclItemVo.setAct_cancel_checkout(Constant.F);
            folderAclItemVo.setAct_change_permission(jsonArray.getJSONObject(j).getString("fol_act_change_permission"));
            folderAclItemVo = setAclItemBRCD(folderAclItemVo, jsonArray.getJSONObject(j).getString("fol_default_acl"));
            // Map the document-scope permissions.
            documentAclItemVo.setAcl_id(acl_id); // PK
            documentAclItemVo.setIs_type("D"); // PK
            documentAclItemVo.setAccessor_id(jsonArray.getJSONObject(j).getString("accessor_id")); // PK
            documentAclItemVo.setAccessor_isgroup(jsonArray.getJSONObject(j).getString("accessor_isgroup"));
            documentAclItemVo.setAccessor_isalias(jsonArray.getJSONObject(j).getString("accessor_isalias"));
            documentAclItemVo.setAct_create(jsonArray.getJSONObject(j).getString("doc_act_create"));
            documentAclItemVo.setAct_cancel_checkout(jsonArray.getJSONObject(j).getString("doc_act_cancel_checkout"));
            documentAclItemVo.setAct_change_permission(jsonArray.getJSONObject(j).getString("doc_act_change_permission"));
            documentAclItemVo = setAclItemBRCD(documentAclItemVo, jsonArray.getJSONObject(j).getString("doc_default_acl"));
            aclItemList.add(folderAclItemVo);
            aclItemList.add(documentAclItemVo);
        }
    }
    return aclItemList;
}
/**
 * Applies a cumulative browse/read/update/delete flag set to the item from a
 * single base permission code — each level implies all lower levels
 * (delete ⊃ update ⊃ read ⊃ browse). An unrecognised code leaves the flags
 * untouched.
 *
 * @param aclItemVO item to mutate
 * @param acl_brcd  base permission code (ACL_DELETE/ACL_UPDATE/ACL_READ/ACL_BROWSE)
 * @return the same (mutated) aclItemVO
 */
public static AclItemVO setAclItemBRCD(AclItemVO aclItemVO, String acl_brcd){
    // String switch: only JDK1.7+
    switch (acl_brcd) {
    case Constant.ACL_DELETE:
        aclItemVO.setAct_browse(Constant.T);aclItemVO.setAct_read(Constant.T);
        aclItemVO.setAct_update(Constant.T);aclItemVO.setAct_delete(Constant.T);
        break;
    case Constant.ACL_UPDATE:
        aclItemVO.setAct_browse(Constant.T);aclItemVO.setAct_read(Constant.T);
        aclItemVO.setAct_update(Constant.T);aclItemVO.setAct_delete(Constant.F);
        break;
    case Constant.ACL_READ:
        aclItemVO.setAct_browse(Constant.T);aclItemVO.setAct_read(Constant.T);
        aclItemVO.setAct_update(Constant.F);aclItemVO.setAct_delete(Constant.F);
        break;
    case Constant.ACL_BROWSE:
        aclItemVO.setAct_browse(Constant.T);aclItemVO.setAct_read(Constant.F);
        aclItemVO.setAct_update(Constant.F);aclItemVO.setAct_delete(Constant.F);
        break;
    default:
        break;
    }
    return aclItemVO;
}
/**
*
* <pre>
* 1. 개용 : 감사정책 기준초과자 메일 본문 생성
* 2. 처리내용 :
* </pre>
* @Method Name : getAuditReportMessage
* @param auditDate
* @param auditConfig
* @param auditList
* @return
* @throws Exception StringBuffer
*/
public static StringBuffer getAuditReportMessage(String auditDate, Map<String,Object> auditConfig, List<HashMap<String,Object>> auditList) throws Exception {
StringBuffer message = new StringBuffer();
message.append("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">");
message.append("<html xmlns=\"http://www.w3.org/1999/xhtml\">");
message.append("<head>");
message.append("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\" />");
message.append("<title>Audit Report, " + auditDate + "</title>");
message.append("<style type=\"text/css\">");
message.append("html, body {");
message.append(" height:100%;");
message.append(" background-color:#ffffff;");
message.append("}");
message.append("body {");
message.append(" margin:0;");
message.append(" padding:0;");
message.append(" font-size: 12px;");
message.append(" color:#666666;");
message.append(" font-family:Gulim, GulimChe;");
message.append(" text-decoration: none;");
message.append(" scrollbar-highlight-color: #e7e7e7;");
message.append(" scrollbar-shadow-color: #e7e7e7;");
message.append(" scrollbar-arrow-color: #6591c2;");
message.append(" scrollbar-face-color: #FFFFFF;");
message.append(" scrollbar-3dlight-color: #FFFFFF;");
message.append(" scrollbar-darkshadow-color: #FFFFFF;");
message.append(" scrollbar-track-color: #FFFFFF;");
message.append("}");
message.append("h1, h2, h3 {");
message.append(" color:white;");
message.append("}");
message.append("h4 {");
message.append(" font-size:14px;");
message.append(" margin:10px 0 10px 0;");
message.append("}");
message.append("</style>");
message.append("</head>");
message.append("<body>");
message.append("<table width=\"500\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"border:1px solid #6C97C8;\">");
message.append(" <tr>");
message.append(" <td colspan=\"2\" style=\"background-color:#6C97C8; border-top:10px solid #333;\"><table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"margin-top:5px;\">");
message.append(" <tr>");
message.append(" <td> </td>");
message.append(" <td style=\"padding:10px\" <h3>[LG Chem EDMS] Audit Report</h3></td>");
message.append(" </tr>");
message.append(" </table></td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td style=\"padding:15px;background:#EBF4FF;\" colspan=\"2\" align=\"center\" valign=\"middle\"><table width=\"500\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\" style=\"background:#fff; border:5px solid #fff;\">");
message.append(" <tr>");
message.append(" <td> </td>");
message.append(" <td align=\"left\"><h4>Audit Trail</h4></td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td> </td>");
message.append(" <td align=\"left\">Audit date : " + auditDate + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td> </td>");
message.append(" <td align=\"left\">Audit condition : Read count (" + auditConfig.get("read_count_threshold") + " counts / 1 day)</td>");
message.append(" </tr>");
message.append(" </table></td>");
message.append(" </tr>");
int i = 0;
for (HashMap<String,Object> audit : auditList) {
// 새로운 행의 시작의 경우.
if (i % 2 == 0) {
message.append(" <tr>");
message.append(" <td colspan=\"2\" height=\"30\"> </td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" valign=\"top\" style=\"padding-left:15;\"><table border=\"1\" cellpadding=\"4\" cellspacing=\"0\" style=\"border-collapse: collapse; border:1px solid #b6c0df; margin-left:20px;\">");
message.append(" <tr>");
message.append(" <td width=\"15\" rowspan=\"4\" align=\"center\" style=\"background:#6C97C8; color:#fff;\" > " + (i + 1) + " </td>");
message.append(" <td width=\"70\" align=\"left\" style=\"border-right-style:dashed;\" >Name</td>");
message.append(" <td width=\"240\" align=\"left\">" + audit.get("user_name") + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >ID</td>");
message.append(" <td align=\"left\" >" + audit.get("user_id") + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >Department</td>");
message.append(" <td align=\"left\" >" + audit.get("group_name") + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >Read count</td>");
message.append(" <td align=\"left\" >" + audit.get("read_count") + "</td>");
message.append(" </tr>");
message.append(" </table></td>");
}
// 두번째 열의 경우.
else {
message.append(" <td align=\"left\" valign=\"top\" style=\"padding-left:30;\"><table border=\"1\" cellpadding=\"4\" cellspacing=\"0\" style=\"border-collapse:collapse; border:1px solid #b6c0df;\">");
message.append(" <tr>");
message.append(" <td width=\"15\" rowspan=\"4\" align=\"center\" style=\"background:#6C97C8; color:#fff;\" > " + (i + 1) + " </td>");
message.append(" <td width=\"70\" align=\"left\" style=\"border-right-style:dashed;\" >Name</td>");
message.append(" <td width=\"240\" align=\"left\">" + audit.get("user_name") + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >ID</td>");
message.append(" <td align=\"left\" >" + audit.get("user_id") + "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >Department</td>");
message.append(" <td align=\"left\" >" + audit.get("group_name")+ "</td>");
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td align=\"left\" style=\"border-right-style:dashed;\" >Read count</td>");
message.append(" <td align=\"left\" >" + audit.get("read_count") + "</td>");
message.append(" </tr>");
message.append(" </table></td>");
message.append(" </tr>");
}
// 마지막 아이템이면서 첫번째 열의 경우.
if (i + 1 == auditList.size() && i % 2 != 0) {
message.append(" <td></td>");
}
i++;
}
message.append(" </tr>");
message.append(" <tr>");
message.append(" <td colspan=\"2\" height=\"20\"> </td>");
message.append(" </tr>");
message.append("</table>");
message.append("</body>");
message.append("</html>");
return message;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 :
* </pre>
* @Method Name : jsonArrayToList
* @param map
* @param param1
* @param param2
* @return List<String>
*/
public static List<String> jsonArrayToList(HashMap<String,Object> map,String key,String value){
List<String> ret = new ArrayList<String>();
if(map.get(key) != null && !map.get(key).toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get(key));
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
ret.add(jsonArray.getJSONObject(j).getString(value).toString());
}
}
}
return ret;
}
/**
*
* <pre>
* 1. 개용 : 문서 등록/수정 공통 :: 신규등록 파일 목록 생성하기
* 2. 처리내용 :
* </pre>
* @Method Name : jsonArrayToFileList
* @param jsonArray
* @return List<HashMap<String,Object>>
*/
public static List<HashMap<String,Object>> jsonArrayToFileList(HashMap<String,Object> map){
List<HashMap<String,Object>> ret = new ArrayList<HashMap<String,Object>>();
if(map.get("fileList") != null && !map.get("fileList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("fileList") );
if(jsonArray.size() > 0 ) {
// try{
//
// }catch(Exception e){
// throw e;
// }
for(int j=0;j < jsonArray.size();j++) {
HashMap<String, Object> param = new HashMap<String, Object>();
param.put("orgFile",jsonArray.getJSONObject(j).getString("orgFile").toString());
param.put("contentPath",jsonArray.getJSONObject(j).getString("contentPath").toString());
param.put("fileSize",jsonArray.getJSONObject(j).getString("fileSize").toString());
param.put("volumeId",jsonArray.getJSONObject(j).getString("volumeId").toString());
ret.add(param);
}
}
}
return ret;
}
/**
*
* <pre>
* 1. 개용 : 문서수정시 삭제될 파일목록
* 2. 처리내용 :
* </pre>
* @Method Name : jsonArrayToDelFileList
* @param map
* @return List<HashMap<String,Object>>
*/
public static List<HashMap<String,Object>> jsonArrayToDelFileList(HashMap<String,Object> map){
List<HashMap<String,Object>> ret = new ArrayList<HashMap<String,Object>>();
if(map.get("dFileList") != null && !map.get("dFileList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("dFileList") );
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
HashMap<String, Object> param = new HashMap<String, Object>();
param.put("page_id",jsonArray.getJSONObject(j).getString("page_id").toString());
ret.add(param);
}
}
}
return ret;
}
public static List<ProcessExecutorVO> jsonArrayToProcessExecutorList(HashMap<String,Object> map){
List<ProcessExecutorVO> ret = new ArrayList<ProcessExecutorVO>();
// 대표 작성자
if(map.get("authorList") != null && !map.get("authorList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("authorList") );
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
ProcessExecutorVO author = new ProcessExecutorVO();
String sort_index = StringUtil.isEmpty(jsonArray.getJSONObject(j).getString("sort_index")) ? "0" : jsonArray.getJSONObject(j).getString("sort_index");
author.setType(Constant.PROCESS_TYPE_AUTHOR);
author.setExecutor_id(jsonArray.getJSONObject(j).getString("user_id"));
author.setExecutor_name(jsonArray.getJSONObject(j).getString("user_nm"));
author.setSort_index(Integer.parseInt(sort_index));
ret.add(author);
}
}
}
// 공동 작성자
if(map.get("coauthorList") != null && !map.get("coauthorList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("coauthorList") );
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
ProcessExecutorVO coAuthor = new ProcessExecutorVO();
String sort_index = StringUtil.isEmpty(jsonArray.getJSONObject(j).getString("sort_index")) ? "0" : jsonArray.getJSONObject(j).getString("sort_index");
coAuthor.setType(Constant.PROCESS_TYPE_COAUTHOR);
coAuthor.setExecutor_id(jsonArray.getJSONObject(j).getString("user_id"));
coAuthor.setExecutor_name(jsonArray.getJSONObject(j).getString("user_nm"));
coAuthor.setSort_index(Integer.parseInt(sort_index));
ret.add(coAuthor);
}
}
}
// 승인자
if(map.get("approverList") != null && !map.get("approverList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("approverList") );
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
ProcessExecutorVO applover = new ProcessExecutorVO();
String sort_index = StringUtil.isEmpty(jsonArray.getJSONObject(j).getString("sort_index")) ? "0" : jsonArray.getJSONObject(j).getString("sort_index");
applover.setType(Constant.PROCESS_TYPE_APPROVER);
applover.setExecutor_id(jsonArray.getJSONObject(j).getString("user_id"));
applover.setExecutor_name(jsonArray.getJSONObject(j).getString("user_nm"));
applover.setSort_index(Integer.parseInt(sort_index));
ret.add(applover);
}
}
}
// 수신자
if(map.get("receiverList") != null && !map.get("receiverList").toString().equals("")) {
JSONArray jsonArray = JSONArray.fromObject(map.get("receiverList") );
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
ProcessExecutorVO receiver = new ProcessExecutorVO();
String sort_index = StringUtil.isEmpty(jsonArray.getJSONObject(j).getString("sort_index")) ? "0" : jsonArray.getJSONObject(j).getString("sort_index");
receiver.setType(Constant.PROCESS_TYPE_RECEIVER);
receiver.setExecutor_id(jsonArray.getJSONObject(j).getString("user_id"));
receiver.setExecutor_name(jsonArray.getJSONObject(j).getString("user_nm"));
receiver.setSort_index(Integer.parseInt(sort_index));
ret.add(receiver);
}
}
}
return ret;
}
	/**
	 * Builds the extended ACL item list from the "aclExItem_list" JSON array
	 * in the request map. Each element's "doc_default_acl" level is expanded
	 * into cumulative browse/read/update/delete flags (DELETE implies UPDATE,
	 * READ and BROWSE, and so on down to BROWSE-only); the create,
	 * cancel-checkout and change-permission flags are copied through as-is.
	 *
	 * @param map request parameter map holding "aclExItem_list" as JSON text
	 * @return list of populated AclExItemVO objects (empty when absent)
	 */
	public static List<AclExItemVO> jsonArrayToExAclItemList(HashMap<String,Object> map){
		List<AclExItemVO> ret = new ArrayList<AclExItemVO>();
		if(map.get("aclExItem_list") != null && !map.get("aclExItem_list").toString().equals("")) {
			JSONArray jsonArray = JSONArray.fromObject(map.get("aclExItem_list") );
			if(jsonArray.size() > 0 ) {
				for(int j=0;j < jsonArray.size();j++) {
					AclExItemVO tempAclExIemVO = new AclExItemVO();
					tempAclExIemVO.setAccessor_id(jsonArray.getJSONObject(j).getString("accessor_id").toString());
					tempAclExIemVO.setAccessor_isgroup(jsonArray.getJSONObject(j).getString("accessor_isgroup").toString());
					tempAclExIemVO.setAccessor_isalias(jsonArray.getJSONObject(j).getString("accessor_isalias").toString());
					// String switch requires JDK 1.7+
					switch (jsonArray.getJSONObject(j).getString("doc_default_acl").toString()) {
					case Constant.ACL_DELETE:
						// delete level: all four actions allowed
						tempAclExIemVO.setAct_browse(Constant.T);tempAclExIemVO.setAct_read(Constant.T);
						tempAclExIemVO.setAct_update(Constant.T);tempAclExIemVO.setAct_delete(Constant.T);
						break;
					case Constant.ACL_UPDATE:
						// update level: browse/read/update, no delete
						tempAclExIemVO.setAct_browse(Constant.T);tempAclExIemVO.setAct_read(Constant.T);
						tempAclExIemVO.setAct_update(Constant.T);tempAclExIemVO.setAct_delete(Constant.F);
						break;
					case Constant.ACL_READ:
						// read level: browse/read only
						tempAclExIemVO.setAct_browse(Constant.T);tempAclExIemVO.setAct_read(Constant.T);
						tempAclExIemVO.setAct_update(Constant.F);tempAclExIemVO.setAct_delete(Constant.F);
						break;
					case Constant.ACL_BROWSE:
						// browse level: listing only
						tempAclExIemVO.setAct_browse(Constant.T);tempAclExIemVO.setAct_read(Constant.F);
						tempAclExIemVO.setAct_update(Constant.F);tempAclExIemVO.setAct_delete(Constant.F);
						break;
					default:
						// unknown/none: all actions denied
						tempAclExIemVO.setAct_browse(Constant.F);tempAclExIemVO.setAct_read(Constant.F);
						tempAclExIemVO.setAct_update(Constant.F);tempAclExIemVO.setAct_delete(Constant.F);
						break;
					}
					tempAclExIemVO.setAct_create(jsonArray.getJSONObject(j).getString("doc_act_create").toString());
					tempAclExIemVO.setAct_cancel_checkout(jsonArray.getJSONObject(j).getString("doc_act_cancel_checkout").toString());
					tempAclExIemVO.setAct_change_permission(jsonArray.getJSONObject(j).getString("doc_act_change_permission").toString());
					ret.add(tempAclExIemVO);
				}
			}
		}
		return ret;
	}
/**
*
* <pre>
* 1. 개용 : 파일명의 확장자 구하기
* 2. 처리내용 :
* </pre>
* @Method Name : getFileExtension
* @param file_name
* @return String
*/
public static String getFileExtension(String file_name) {
String ext = "";
int ext_index = file_name.lastIndexOf(".");
if (ext_index > 0) {
ext = file_name.substring(ext_index + 1, (file_name.length()));
if(ext.length() > 6){
ext = "";
}
}
return ext;
}
	/**
	 * Copies the common session attributes every page needs into the view
	 * model (user identity, group, theme, menu permission scope, file-upload
	 * defaults).
	 *
	 * @param model     view model to populate
	 * @param sessionVO current user session object
	 */
	public static void setSessionToModel(Model model, SessionVO sessionVO) {
		model.addAttribute("contextRoot",sessionVO.getSessContextRoot());
		model.addAttribute("user_id",sessionVO.getSessId());
		model.addAttribute("user_name",sessionVO.getSessName());
		model.addAttribute("group_id",sessionVO.getSessGroup_id());
		model.addAttribute("theme",sessionVO.getSessTheme()); // theme = UI skin
		model.addAttribute("pageSize",sessionVO.getSessPage_size());
		// Permission level check (system admin / company-wide / incl. sub-departments / department / creator)
		String aclMenuPart;
		if(sessionVO.getSessRole_id().equals(Constant.SYSTEM_ROLE)) {
			aclMenuPart = Constant.SYSTEM_ROLE;
		} else if(sessionVO.getSessRole_id().equals(Constant.USER_ROLE)) {
			aclMenuPart = Constant.USER_ROLE;
		} else {
			// Other roles: derive the scope from the user's ACL menu authority.
			aclMenuPart = CommonUtil.getMenuPart(sessionVO, Constant.USER_ACL_MENU_CODE);
		}
		model.addAttribute("acl_menu_part", aclMenuPart);
		model.addAttribute("manage_group_id", sessionVO.getSessManage_group());
		model.addAttribute("manage_group_nm", sessionVO.getSessManage_group_nm());
		model.addAttribute("user_email", sessionVO.getSessEmail());
		// File upload related
		// Attachment defaults :: applied when the "not used" option is set in preferences
		model.addAttribute("defaultFileCnt",ConfigData.getInt("DOC.DEFAULT.FILECNT"));
		model.addAttribute("defaultFileSize",ConfigData.getInt("DOC.DEFAULT.FILESIZE"));
		model.addAttribute("defaultFileTotal",ConfigData.getInt("DOC.DEFAULT.TOTAL"));
	}
/**
*
* <pre>
* 1. 개용 : 에러페이지에 넘겨줄 메세지 정의
* 2. 처리내용 :
* </pre>
* @Method Name : setErrorMsg
* @param model
* @param errorCode
* @param errorMessage void
*/
public static void setErrorMsg(Model model,String errorCode,String errorMessage,String contextRoot) {
model.addAttribute("errorCode",errorCode);
model.addAttribute("errorMessage",errorMessage);
model.addAttribute("contextRoot",contextRoot);
}
	/**
	 * Copies document-related service results plus session search defaults
	 * into the view model for document list/detail pages.
	 *
	 * @param model     view model to populate
	 * @param resultMap service-layer result map (file limits, type list, ...)
	 * @param sessionVO current user session object
	 */
	public static void docSessionToModel(Model model, Map<String, Object> resultMap,SessionVO sessionVO) {
		@SuppressWarnings("rawtypes")
		Set set = resultMap.keySet();
		Object[] argArray = set.toArray();
		//[position, FILETOTAL, FILESIZE, sercurity, FILECNT, typeList, EXT, preservation_year]
		for( int i = 0; i < argArray.length; i++ ){
			String key = (String)argArray[i];
			// For the file-extension entry, massage the raw value before exposing it:
			// drop the trailing separator and turn ';' delimiters into ','.
			if(key.equals(Constant.FILE_EXT)) {
				CaseInsensitiveMap caseMap = (CaseInsensitiveMap)resultMap.get(key);
				String value = caseMap.get("fval").toString();
				caseMap.put("fval",value.substring(0,value.length()-1).replaceAll(";",","));
				model.addAttribute(key,caseMap);
			}else {
				Object value = resultMap.get(key);
				model.addAttribute(key,value);
			}
		}
		// moved to setSessionToModel
		/* // Attachment defaults :: applied when the "not used" option is set in preferences
		model.addAttribute("defaultFileCnt",ConfigData.getInt("DOC.DEFAULT.FILECNT"));
		model.addAttribute("defaultFileSize",ConfigData.getInt("DOC.DEFAULT.FILESIZE"));
		model.addAttribute("defaultFileTotal",ConfigData.getInt("DOC.DEFAULT.TOTAL"));*/
		model.addAttribute("defaultRefDocCnt",ConfigData.getInt("DOC.REF.FILECNT"));
		// Default document search period
		model.addAttribute("startDt",sessionVO.getSessStartDt());
		model.addAttribute("endDt",sessionVO.getSessEndDt());
		model.addAttribute("pageSize",sessionVO.getSessPage_size()); // default list page size from preferences
		model.addAttribute("versionInfo",ConfigData.getString("VERSION_INFO")); // product type
	}
	/**
	 * Document advanced-search condition callback: resolves the folder scope,
	 * extended document-type attributes and permission-check flags into the
	 * query parameter map. Any exception is logged and swallowed — TODO
	 * confirm callers rely on this best-effort behavior.
	 *
	 * @param sessionVO       current user session
	 * @param req             current HTTP request
	 * @param map             raw search condition map from the client
	 * @param cacheService    cache lookup service (menu/folder authority)
	 * @param folderService   folder service (child folder resolution)
	 * @param documentService document service (extended type attributes)
	 * @param param           query parameter map populated by this method
	 */
	public static void docDetailSearch(SessionVO sessionVO,HttpServletRequest req,HashMap<String,Object> map,CacheService cacheService,
			FolderService folderService,DocumentService documentService,HashMap<String,Object> param) {
		List<String> folder_id_list = new ArrayList<String>(); // child folder list (when sub-folders are included)
		List<AttrVO> attrList = new ArrayList<AttrVO>(); // extended document-type attribute list
		String[] group_id_list = sessionVO.getSessProjectGroup().toArray(new String[sessionVO.getSessProjectGroup().size()+1]); // group + project-group IDs (extra slot for the user's own group)
		String folder_menu_part = CommonUtil.getMenuPart(sessionVO, Constant.USER_FOLDER_MENU_CODE); // folder admin scope (ALL/GROUP/TEAM) - when not CREATOR
		String document_menu_part = CommonUtil.getMenuPart(sessionVO, Constant.USER_DOC_MENU_CODE); // document admin scope (ALL/GROUP/TEAM) - when not CREATOR
		String folder_id = map.get("folder_id") != null ? map.get("folder_id").toString() : ""; // selected folder ID (not used by my-docs/checked-out/expired/trash views)
		String is_extended = map.get("is_extended") != null ? map.get("is_extended").toString() : ""; // extended document-type flag (T/F)
		String doc_type = map.get("doc_type") != null ?map.get("doc_type").toString() : ""; // selected document type name
		String child_include = map.get("child_include") != null ?map.get("child_include").toString() : ""; // include sub-folders flag ("on" when checked)
		String acl_check = Constant.RESULT_TRUE; // ACL permission check flag; when false the check is skipped
		boolean isFolderMenuPart = false; // whether the user has sub-folder admin authority
		try {
			// 0. groups (department groups / project groups)
			group_id_list[sessionVO.getSessProjectGroup().size()] = sessionVO.getSessGroup_id();
			param.put("group_id_list", group_id_list);
			// 1. document permission admin && "include sub-folders" checked
			if(!StringUtil.isEmpty(child_include) && child_include.equals("on")) {
				if(document_menu_part.equals(Constant.MENU_ALL)){
					// company-wide document admin: skip the permission check
					acl_check = Constant.RESULT_FALSE;
					isFolderMenuPart = true;
				}else if(document_menu_part.equals(Constant.MENU_GROUP) || document_menu_part.equals(Constant.MENU_TEAM)){
					// group/team admin: skip acl_check when the folder belongs to a managed group
					if(cacheService.menuAuthByFolderID(folder_id, sessionVO.getSessManage_group())){
						acl_check = Constant.RESULT_FALSE;
						isFolderMenuPart = true;
					}
				}
				folder_id_list = folderService.childFolderIdsByfolderId(folder_id, isFolderMenuPart ? folder_menu_part : "");
			}
			// 2. extended document-type detail search handling
			if(is_extended.equals(Constant.T) && !StringUtil.isEmpty(doc_type)) {
				attrList = documentService.extendedAttrListByDocType(req, doc_type);
				param.put("attrList", attrList); // extended document-type attribute list
				param.put("tbl_name", Constant.TABLE_PREFIX + doc_type); // extended document-type table name
			}
			// 3. selected folder ID (not used by my-docs/checked-out/expired/trash views)
			if(!StringUtil.isEmpty(folder_id)) {
				folder_id_list.add(folder_id);
			}
			param.put("folder_id_list", folder_id_list);
			param.put("document_menu_part",document_menu_part);
			param.put("manage_group_id", sessionVO.getSessManage_group());
			param.put("acl_check",acl_check);
			param.put("is_extended",is_extended);
		}catch(Exception e){
			logger.error(e);
		}
	}
public static String getMenuPart(SessionVO sessionVO, String menu_code) {
List<MenuAuthVO> menu_list = sessionVO.getSessMenuAuth();
for(MenuAuthVO menu_auth : menu_list){
if(!StringUtil.isEmpty(menu_auth.getMenu_cd()) && menu_auth.getMenu_cd().equals(menu_code)){
return menu_auth.getPart();
}
}
return "";
}
/**
*
* <pre>
* 1. 개용 : Excel Download 목록 리스트 가져오기.
* 2. 처리내용 :
* </pre>
* @Method Name : getExcelList
* @param resultMap
* @return Object
*/
public static void getExcelList(Map<String, Object> resultMap,Model model) {
Object value = null;
@SuppressWarnings("rawtypes")
Set set = resultMap.keySet();
Object[] argArray = set.toArray();
for( int i = 0; i < argArray.length; i++ ){
String key = (String)argArray[i];
if(key.equals(Constant.EXCEL_LIST)) {
value = resultMap.get(key);
model.addAttribute(key,value);
}
}
}
/**
*
* <pre>
* 1. 개용 : JfreeChart 그래프 목록 리스트 가져오기
* 2. 처리내용 :
* </pre>
* @Method Name : getChartList
* @param resultMap
* @param model void
*/
public static void getChartList(Map<String, Object> resultMap,HashMap<String,Object> model) {
Object value = null;
@SuppressWarnings("rawtypes")
Set set = resultMap.keySet();
Object[] argArray = set.toArray();
for( int i = 0; i < argArray.length; i++ ){
String key = (String)argArray[i];
if(key.equals(Constant.EXCEL_LIST)) {
value = resultMap.get(key);
model.put(key,value);
}
}
}
/**
*
* <pre>
* 1. 개용 : 주어진 VO 객체의 멤버들을 Key, Value의 형식의 HashMap으로 얻는다.
* 2. 처리내용 :
* </pre>
* @Method Name : getMemberFields
* @param vo
* @return
* @throws Exception HashMap<String,Object>
*/
public static HashMap<String, Object> getMemberFields(VO vo) throws Exception {
HashMap<String, Object> members = new HashMap<String, Object>();
if(vo != null) {
Class<? extends Object> c = vo.getClass();
Method [] methods = c.getMethods();
for (int i = 0; i < methods.length; i++) {
String methodName = methods[i].getName();
if (!methodName.equals("getClass") && methodName.subSequence(0, 3).equals("get")) {
String fieldName = methodName.substring(3);
Object fieldValue = methods[i].invoke(vo, new Object[]{});
members.put(fieldName.toLowerCase(), fieldValue);
}
}
}
return members;
}
/**
* 선택조건에 따른 검색대상 컬럼명값 설정처리
* @param inMap
* @param param
*/
public static void setColumNm(HashMap<String,Object> inMap,HashMap<String, Object> param) {
// strIndex에 따른 검색조건 추가 strKeyword1
switch(StringUtil.getMapString(inMap, "strIndex")) {
case "doc_name":
param.put("strIndexColumn","D.DOC_NAME");
break;
case "doc_description":
param.put("strIndexColumn","D.DOC_DESCRIPTION");
break;
case "creator_name":
param.put("strIndexColumn","D.CREATOR_NAME");
break;
case "author_list":
param.put("strIndexColumn","D.AUTHOR_LIST");
break;
case "keyword":
param.put("strIndexColumn","D.KEYWORD");
break;
}
}
	/**
	 * Configures the Excel-download format (member field names, column
	 * headers and column widths) for a document list of the given type and
	 * stores the settings plus the download file name in the view model.
	 * Header strings are Korean UI labels and are emitted into the Excel
	 * file as-is.
	 *
	 * @param model    view model receiving the format settings
	 * @param listType document list type constant
	 */
	public static void setExcelFormat(Model model,String listType) {
		String[] members = null;
		String[] cell_headers = null;
		int[] cell_widths = null;
		if(Constant.DOCUMENT_LIST_TYPE_EXPIRED.equals(listType)){
			// my expired documents
			members = new String[]{"doc_name", "type_name", "version_no", "owner_name", "create_date", "expired_date"};
			cell_headers = new String[]{"문서명", "문서유형", "버전", "소유자", "등록일", "만기일"};
			cell_widths = new int[]{50, 20, 20, 20, 30, 30};
		} else if(Constant.DOCUMENT_LIST_TYPE_TRASHCAN.equals(listType)) {
			// personal trash documents
			members = new String[]{"doc_name","type_name","version_no","creator_name","create_date","deleter_name","delete_date","owner_name"};
			cell_headers = new String[]{"문서명", "문서유형", "버전", "등록자", "등록일", "삭제자", "삭제일", "소유자"};
			cell_widths = new int[]{50, 20, 20, 20, 20, 20, 20, 20};
		} else {
			// other document lists (work folder / personal folder)
			members = new String[]{"doc_name","type_name","version_no","creator_name","create_date"};
			cell_headers = new String[]{"문서명", "문서유형", "버전", "등록자", "등록일"};
			cell_widths = new int[]{50, 20, 20, 20, 20};
		}
		model.addAttribute("members",members);
		model.addAttribute("cell_headers",cell_headers);
		model.addAttribute("cell_widths",cell_widths);
		model.addAttribute("fileName","downLoad.xls");
	}
	/**
	 * Sets the list-specific search condition values (date column, user
	 * column, ACL-check flag, document status, expiry/lock/share flags) for
	 * the "my documents" style list identified by {@code listType}.
	 *
	 * @param listType document list type constant
	 * @param param    query parameter map populated by this method
	 */
	public static void setSearchColumn(String listType,HashMap<String, Object> param) {
		switch(listType) {
		case Constant.DOCUMENT_LIST_TYPE_TRASHCAN : // personal trash (no permission check needed)
			param.put("dateColumn","D.DELETE_DATE");
			param.put("userColumn","D.DELETER_ID");
			param.put("acl_check",Constant.RESULT_FALSE);
			param.put("doc_status",Constant.DOC_STATUS_DELETE);
			param.put("is_expired",Constant.F);
			break;
		case Constant.DOCUMENT_LIST_TYPE_CHECKOUT : // my checked-out documents (no permission check needed)
			param.put("dateColumn","D.LOCK_DATE");
			param.put("userColumn","D.LOCK_OWNER");
			param.put("acl_check",Constant.RESULT_FALSE);
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			param.put("is_locked",Constant.T);
			break;
		case Constant.DOCUMENT_LIST_TYPE_EXPIRED : // my expired documents (no permission check needed)
			param.put("dateColumn","D.EXPIRED_DATE");
			param.put("userColumn","D.CREATOR_ID");
			param.put("acl_check",Constant.RESULT_FALSE);
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.T);
			break;
		case Constant.DOCUMENT_LIST_TYPE_OWNER : // my owned documents (no permission check needed)
			param.put("dateColumn","D.CREATE_DATE");
			param.put("userColumn","D.OWNER_ID");
			param.put("acl_check",Constant.RESULT_FALSE);
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			break;
		case Constant.DOCUMENT_LIST_TYPE_SHARE : // shared documents (permission check applies)
			param.put("dateColumn","D.CREATE_DATE");
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			param.put("is_share",Constant.T);
			break;
		case Constant.DOCUMENT_LIST_TYPE_TEMPDOC : // temporary documents (permission check applies)
			param.put("dateColumn","D.CREATE_DATE");
			param.put("userColumn","TD.USER_ID");
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			break;
		case Constant.DOCUMENT_LIST_TYPE_RECENTLYDOC : // recent documents (permission check applies)
			param.put("dateColumn","D.CREATE_DATE");
			//param.put("userColumn","D.OWNER_ID");
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			break;
		default :
			param.put("dateColumn","D.CREATE_DATE");
			param.put("doc_status",Constant.DOC_STATUS_CREATE);
			param.put("is_expired",Constant.F);
			break;
		}
	}
/**
*
* <pre>
* 1. 개용 : DB에 저장할 date type
* 2. 처리내용 : 현재 시간
* </pre>
* @Method Name : getCurruentTime
* @return java.sql.Date
*/
public static java.sql.Date getCurruentTime(){
return new java.sql.Date(new java.util.Date().getTime());
}
/**
*
* <pre>
* 1. 개용 : DB에 저장할 date type
* 2. 처리내용 : 입력된 시간
* </pre>
* @Method Name : getCurruentTimeByDate
* @param date
* @return
* @throws Exception java.sql.Date
*/
public static java.sql.Date getCurruentTimeByDate(String date) throws Exception{
DateFormat dateFormat =new SimpleDateFormat("yyyy-MM-dd");
Date currentDate = dateFormat.parse(date);
return new java.sql.Date(currentDate.getTime());
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/Thumbnail.java
package kr.co.exsoft.eframework.util;
import java.io.File;
import java.io.IOException;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.awt.image.renderable.ParameterBlock;
import javax.imageio.ImageIO;
import javax.media.jai.JAI;
import javax.media.jai.RenderedOp;
/**
 * Thumbnail generation helper (requires jai_core.jar on the classpath).
 *
 * @author Package development team
 * @since 2014.07.22
 * @version 3.0
 */
public class Thumbnail {
	/**
	 * Creates a JPG thumbnail. Convenience wrapper that delegates to
	 * {@link #makeThumbnail(String, String, String, String, int, int)} with
	 * a fixed "jpg" output format — the two methods previously duplicated
	 * the whole scaling routine.
	 *
	 * @param url      absolute directory path of the source image
	 * @param filename source image file name
	 * @param reFile   thumbnail file name to write
	 * @param width    target thumbnail width
	 * @param height   target thumbnail height
	 * @return true on success, false when an I/O error occurred
	 */
	public static boolean thumbnailMake(String url,String filename,String reFile,int width,int height) {
		return makeThumbnail(url, filename, reFile, "jpg", width, height);
	}
	/**
	 * Creates a thumbnail in the given image format.
	 *
	 * @param url       absolute directory path of the source image
	 * @param filename  source image file name
	 * @param reFile    thumbnail file name to write
	 * @param extension output image format (e.g. "jpg", "png")
	 * @param width     target thumbnail width
	 * @param height    target thumbnail height
	 * @return true on success, false when an I/O error occurred
	 */
	public static boolean makeThumbnail(String url, String filename, String reFile, String extension, int width, int height) {
		boolean check = true;
		try {
			ParameterBlock pb = new ParameterBlock();
			pb.add(url + File.separator + filename);
			RenderedOp rop = JAI.create("fileload", pb);
			// Never scale up: clamp the target size to the source size.
			if(rop.getWidth() < width) {
				width = rop.getWidth();
			}
			if(rop.getHeight() < height) {
				height = rop.getHeight();
			}
			BufferedImage bi = rop.getAsBufferedImage();
			BufferedImage thumb = new BufferedImage(width,height,BufferedImage.TYPE_INT_RGB);
			Graphics2D g = thumb.createGraphics();
			g.drawImage(bi,0,0,width,height,null);
			g.dispose();
			File file = new File(url+File.separator+reFile);
			ImageIO.write(thumb, extension, file);
		}catch(IOException e) {
			// Best-effort: signal failure via the return value.
			check = false;
		}
		return check;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/library/ExcelFileView.java
package kr.co.exsoft.eframework.library;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import kr.co.exsoft.eframework.util.CharConversion;
import org.springframework.util.FileCopyUtils;
import org.springframework.web.servlet.view.AbstractView;
/**
 * Serves the Excel bulk-upload sample file as a browser download.
 *
 * @author Package team
 * @since 2014. 12. 2.
 * @version 1.0
 */
public class ExcelFileView extends AbstractView {
	// Binary download; the file name is encoded separately in the header.
	public ExcelFileView() {
		setContentType("application/octet-stream; charset=UTF-8");
	}
	@Override
	protected void renderMergedOutputModel(@SuppressWarnings("rawtypes") Map model, HttpServletRequest request,
			HttpServletResponse response) throws Exception {
		// The controller is expected to place the absolute path under "filePath".
		String downloadFile = (String)model.get("filePath");
		File file = new File(downloadFile);
		response.setContentType(getContentType());
		response.setContentLength((int)file.length());
		// K2E re-encodes the (Korean) file name for the download header.
		response.setHeader("Content-Disposition", "attachment; fileName=\"" + CharConversion.K2E(file.getName()) + "\";");
		response.setHeader("Content-Transfer-Encoding", "binary");
		OutputStream out = response.getOutputStream();
		FileInputStream fis = null;
		try {
			fis = new FileInputStream(file);
			FileCopyUtils.copy(fis, out);
		}catch(Exception e){
			// Missing/unreadable file: replace the headers with a marker name
			// and an empty body instead of failing the request.
			response.setHeader("Content-Disposition", "attachment; fileName=\"" + "File Not Found" + "\";");
			response.setContentLength(0);
		}finally {
			if(fis != null) {
				try {
					fis.close();
				} catch(IOException ex) { }
			}
		}
		out.flush();
	}
}
<file_sep>/EDMS3/WebContent/js/layout/layout.js
$(function(){
	// Quick-menu popup: close button hides the popup and its dimmed overlay
	$('.quickMenu_close').bind("click", function(e){
		e.preventDefault();
		$(this).parents('.quickMenu').addClass('hide');
		$('.quickMenu_wrapper').addClass('hide');
	});
	// Preferences > quick-menu popup: clicking the dimmed overlay also closes the popup
	$('.quickMenu_wrapper').bind("click", function(){
		$(this).addClass('hide');
		$('.quickMenu').addClass('hide');
	});
});
/**
 * Layout-related script: popup URLs/targets and layout helpers.
 */
var exsoftLayoutFunc = {
	// user preferences popup
	userConfigUrl : "/user/userConfig.do",
	userConfigTarget : "popFrm",
	// note (message) management popup
	noteMainUrl : "/note/noteMain.do",
	noteMainTarget : "popNoteFrm",
	// quick-menu: maximum number of selectable entries
	quickMenuCnt : 5,
	// system administration (CREATOR role is denied access)
	userRole : "CREATOR",
	adminUrl : "/adminPage.do",
	adminTarget : "adminFrm",
	// preferences popup height per tab
	tabHeight : {
		'myinfo' : '572',
		'passwdConf' : '592',
		'myconfig' : '552',
	},
	init : {
		// Submit the given form to a popup/frame target, tagging it with a tab type
		formInit : function(formName,url,tabType,targetName){
			var frm = formName;
			frm.action = url;
			frm.method = "post";
			frm.target = targetName;
			frm.tabType.value = tabType;
			frm.submit();
		},
		// Reset the quick-menu checkbox selection
		quickMenuInit : function() {
			$("input[name=quickMenu]:checkbox").each(function() {
				$(this).prop("checked",false);
			});
		},
		// Initialize the quick menu shown in the user top menu (header)
		quickTopMenuInit : function() {
			exsoftLayoutFunc.event.quickMenuProc('top','header');
		},
// 메인문서목록 리스트 가져오기
mainDocList : function(tableId,actionType) {
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({"actionType":actionType}, exsoft.contextRoot + "/document/mainDocumentList.do" , "mainDocList",
function(data,e) {
if(data.result == "true"){
// 문서상세보기 링크 추가 TODD
/*******************************************************************************************
* 권한정보 : acl_level / acl_create / acl_checkoutCancel / acl_change_Permission
*******************************************************************************************/
exsoft.util.table.tablePrintMainList(tableId, data.list, false, true,true,exsoft.contextRoot);
exsoftLayoutFunc.ui.addTableNoData(tableId,3);;
}else {
exsoftLayoutFunc.ui.addTableNoData(tableId,3);
}
});
},
		// Main page: received-note status list
		mainNoteList : function(tableId) {
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({"note_name":"Receive"}, exsoft.contextRoot + "/note/noteReceiveSendList.do" , "noteList",
			function(data,e) {
				if(data.result == "true"){
					exsoft.util.table.tablePrintNoteList(tableId, data.list, false, true);
					exsoftLayoutFunc.ui.addTableNoData(tableId,2);
					// Clicking any row opens the note popup on the RECEIVE tab
					$('#'+tableId).delegate('tr', 'click', function (event) {
						exsoftLayoutFunc.open.noteMain('RECEIVE');
					});
				}else {
					exsoftLayoutFunc.ui.addTableNoData(tableId,2);
				}
			});
		}
},
open : {
// 새창 띄우기
openWindow : function(targetName,width,height,resizable) {
var win;
win = window.open("",targetName,"width="+width+", height="+height+", toolbar=no, menubar=no, scrollbars=no, resizable="+resizable );
win.focus(); // 새창의 경우 항상 맨위로
},
// 사용자 환경설정 새창 CALL
userConfig : function(tabType) {
exsoftLayoutFunc.open.openWindow(exsoftLayoutFunc.userConfigTarget,740,exsoftLayoutFunc.tabHeight[tabType],"no");
exsoftLayoutFunc.init.formInit(document.popFrm,exsoft.contextRoot+exsoftLayoutFunc.userConfigUrl,tabType,exsoftLayoutFunc.userConfigTarget);
},
// 쪽지관리 메인 새창 CALL
noteMain : function(tabType) {
exsoftLayoutFunc.open.openWindow(exsoftLayoutFunc.noteMainTarget,730,690,"no");
exsoftLayoutFunc.init.formInit(document.popNoteFrm,exsoft.contextRoot+exsoftLayoutFunc.noteMainUrl,tabType,exsoftLayoutFunc.noteMainTarget);
},
// 시스템관리 페이지 새창 CALL
adminUrl : function(roleId) {
if(exsoftLayoutFunc.userRole == roleId) {
jAlert('접근권한이 없습니다.','확인',0);
return false;
}
exsoftLayoutFunc.open.openWindow(exsoftLayoutFunc.adminTarget,1024,730,"yes");
exsoftLayoutFunc.init.formInit(document.adminFrm,exsoft.contextRoot+exsoftLayoutFunc.adminUrl,roleId,exsoftLayoutFunc.adminTarget);
},
		// Toggle the advanced (integrated) search panel.
		// When opening: initialise the doc-type dropdown, clear the search form
		// and attach the date pickers; when already open: simply hide it.
		searchDetail : function() {
			if($("#searchDetailView").hasClass('hide')) {
				$("#searchDetailView").removeClass('hide');
				exsoft.util.common.ddslick('#strDocType', 'srch_type6', '', 80, function(){});
				exsoft.util.common.formClear("#searchForm");
				$("#strSdate").datepicker({dateFormat:'yy-mm-dd'});
				$("#strEdate").datepicker({dateFormat:'yy-mm-dd'});
			} else {
				$("#searchDetailView").addClass('hide');
			}
		}
},
	layer : {
		// (reserved) layer-popup helpers — intentionally empty for now
	},
	close : {
		// Hide the advanced search panel.
		searchClose : function() {
			$("#searchDetailView").addClass('hide');
		}
	},
event : {
		// Quick-menu click: navigate the content area to the my-page document
		// list, with `mode` selecting the sub-menu (myMenuType).
		goContent : function(mode) {
			exsoft.util.layout.goUserContent(exsoft.contextRoot+'/mypage/myPageDocList.do?myMenuType='+mode);
		},
		// Navigate the content area to the integrated search page.
		goSearch : function() {
			exsoft.util.layout.goUserContent(exsoft.contextRoot+'/search/searchList.do');
		},
		// Open the quick-menu configuration layer and load the selectable
		// menus into it ('select' mode renders checkboxes in the footer layer).
		quickMenuConfig : function(wrapperClass,layerClass) {
			exsoft.util.layout.divLayerOpen(wrapperClass,layerClass);
			exsoftLayoutFunc.event.quickMenuProc('select','footer');
		},
// 퀵메뉴 저장처리
quickMenuUpdate : function() {
var jsonArrIndex = 0;
// Validation
var chkLength = exsoft.util.common.checkBoxCheckedLength('quickMenu');
if(chkLength > 0 && chkLength > exsoftLayoutFunc.quickMenuCnt) {
jAlert('퀵메뉴는 최대 5개까지 선택가능합니다.','확인',0);
return false;
}
var jsonArr = [];
$("input[name='quickMenu']:checked").each(function(index,val){
var rowData = {menu_cd:""};
rowData['menu_cd'] = val.defaultValue;
jsonArr[jsonArrIndex] = rowData;
jsonArrIndex++;
});
var jsonObject = {"type":"update","menu_cd":JSON.stringify(jsonArr)}; // type:update
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot + "/user/quickMenu.do" , "quickMenu",
function(data,e) {
if(data.result == "true"){
exsoftLayoutFunc.event.quickMenuProc('top','header'); // TOP 메뉴 변경처리 함수 CALL
}else {
jAlert('퀵메뉴 저장하는데 실패했습니다','확인',0);
exsoft.util.layout.divLayerClose('quickMenu_wrapper', 'quickMenu')
}
});
},
		// Fetch quick-menu data and render it.
		//   type     : request mode sent to the server ('select' or 'top')
		//   location : 'footer' renders checkboxes into the config layer,
		//              'header' rebuilds the header quick-menu link list.
		quickMenuProc : function(type,location) {
			var jsonObject = {"type":type};
			var buffer = "";
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot + "/user/quickMenu.do" , "quickMenu",
			function(data,e) {
				if(data.result == "true"){
					if(data.quickMenuListCnt > 0) {
						if(location == "footer") {
							$(".quickMenu_sub_wrapper").empty();
							for(var m in data.quickMenuList) {
								// one checkbox per selectable menu (default state: unchecked)
								buffer += "<label><input type='checkbox' name='quickMenu' value='"+data.quickMenuList[m].menu_cd+"'><span>"+data.quickMenuList[m].menu_nm+"</span></label>";
							}
							$(".quickMenu_sub_wrapper").append(buffer);
							if(data.userSelectMenuCnt > 0) {
								// re-check the boxes the user had previously selected
								for(var n in data.userSelectMenu) {
									$("input[name=quickMenu]:checkbox").each(function() {
										if($(this).val() == data.userSelectMenu[n].menu_cd ) {
											$(this).prop("checked",true);
										}
									});
								}
							}
						}else if(location == "header") {
							$(".quick_sub_menu").empty();
							for(var m in data.quickMenuList) {
								buffer += "<li><a href=\"javascript:exsoftLayoutFunc.event.goContent('"+data.quickMenuList[m].menu_cd+"')\">"+data.quickMenuList[m].menu_nm+"</a></li>"; // default entry
							}
							$(".quick_sub_menu").append(buffer);
						}
					}else {
						if(location == "header") {
							$(".quick_sub_menu").empty();
						}
					}
				}else {
					jAlert('퀵메뉴 로드하는데 실패했습니다','확인',0);
					exsoft.util.layout.divLayerClose('quickMenu_wrapper', 'quickMenu')
				}
			});
		},
},
ui : {
addTableNoData : function(tableId,colspan){
if($(exsoft.util.common.getIdFormat(tableId)+' tbody').children('tr').length == 0){
$(exsoft.util.common.getIdFormat(tableId)+' tbody').append('<tr id="'+tableId+'_noData"><td colspan='+colspan+' class="nodata">데이터가 없습니다.</td></tr>');
}
},
},
callback : {
// 새쪽지, 신규문서, 승인대상문서, 업무작성중문서
infoCount : function(data, param){
if(param == "#tempDocNewCnt" || param == "#newDocCnt") { // 작업카트,최신문서
$(param).text(data.records);
}else {
$(param).text(data.count);
}
},
		// Render the new-note count into `param` and a preview list of at most
		// five notes into #newNoteList; each entry opens the note popup.
		noteInfo : function(data,param) {
			$(param).text(data.count);
			$("#newNoteList").empty();
			var buffer = "";
			var content = "";
			if(data.count == 0) {
				buffer += "<li><a href=\"javascript:exsoftLayoutFunc.open.noteMain('RECEIVE');\"><span class='note_title'>새 쪽지가 없습니다.</span></a></li>";
			}else {
				for (var n in data.list) {
					// TODO :: open the clicked note directly instead of the generic popup
					// truncate the preview text to 10 characters
					if(data.list[n].content.length > 10) {
						content = data.list[n].content.substring(0,10) + "...";
					}else {
						content = data.list[n].content;
					}
					buffer += "<li><a href=\"javascript:exsoftLayoutFunc.open.noteMain('RECEIVE');\">";
					buffer += "<span class='sender_name'>["+data.list[n].rsender_name+"]</span><span class='note_title'>"+content+"</span></a></li>"
					if(n == 4) break; // preview list shows at most 5 notes
				}
			}
			$("#newNoteList").append(buffer);
		}
}
}<file_sep>/EDMS3/src/kr/co/exsoft/common/controller/CommonAdminController.java
package kr.co.exsoft.common.controller;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import javax.servlet.http.HttpServletRequest;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.ConfigData;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.library.LocaleLibrary;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springmodules.validation.commons.DefaultBeanValidator;
import org.springframework.validation.BindingResult;
import org.springframework.validation.ObjectError;
import org.springframework.validation.FieldError;
import kr.co.exsoft.common.vo.CodeVO;
import kr.co.exsoft.common.vo.MenuAuthVO;
/**
 * Administration controller for menu/code management: menu access-rights
 * pages, role-code CRUD and system-configuration maintenance.
 *
 * @author <NAME>
 * @since 2014.07.17
 * @version 3.0
 *
 */
@Controller
@RequestMapping("/admin")
@SessionAttributes("sessionVO")
public class CommonAdminController {
	@Autowired
	private MessageSource messageSource;
	@Autowired
	private CommonService commonService;
	@Autowired
	private DefaultBeanValidator beanValidator;
	protected static final Log logger = LogFactory.getLog(CommonAdminController.class);
	/**
	 *
	 * <pre>
	 * 1. Purpose : menu access-rights management page
	 * 2. Notes   : checks the admin role before rendering
	 * </pre>
	 * @Method Name : menuAuthManager
	 * @param sessionVO session information
	 * @param model view model
	 * @param map request parameters; expects "menu_cd"
	 * @return String view name ("sysadmin/menuAuthManager", or "error/message" on denial)
	 */
	@RequestMapping("menuAuthManager.do")
	public String menuAuthManager(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		HashMap<String,Object> param = new HashMap<String,Object>();
		Map<String, Object> menuInfo = new HashMap<String, Object>();
		param.put("menu_cd",map.get("menu_cd") != null ? map.get("menu_cd").toString() : "" );
		param.put("role_id",sessionVO.getSessRole_id());
		String part = ""; // ALL/GROUP/TEAM - applied differently per admin page
		try {
			// 1. administrator ROLE access check
			part = commonService.getMenuAuth(param);
			// 2. page navigation :: parent menu name / current menu name
			menuInfo = commonService.pageMenuInfo(param);
		}catch(BizException e){
			logger.error(e.getMessage());
		}catch(Exception e) {
			logger.error(e.getMessage());
		}
		// Redirect to the 403 error page when access is denied or menu_cd is missing.
		if(part.equals("") || param.get("menu_cd").toString().equals("")) {
			CommonUtil.setErrorMsg(model, Constant.ERROR_403, messageSource.getMessage("common.connect.error",new Object[0],locale),sessionVO.getSessContextRoot());
			// BUGFIX: previously fell through and still rendered the admin page
			// without permission; return the error view as confManager() does.
			return "error/message";
		}
		// call by reference
		CommonUtil.setSessionToModel(model, sessionVO);
		model.addAttribute("menuInfo",menuInfo);
		model.addAttribute("topSelect",Constant.TOPMENU_SYSTEM);
		model.addAttribute("subSelect",Constant.SYSTEM_MENUMANAGER);
		return "sysadmin/menuAuthManager";
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : paged code list (ROLE codes) for the admin grid
	 * 2. Notes   : normalises grid parameters (sidx/sord/rows/page) before querying
	 * </pre>
	 * @Method Name : codePageList
	 * @param model view model
	 * @param sessionVO session information
	 * @param map grid/search parameters
	 * @param request HTTP request
	 * @return Map result page, or result=false plus message on failure
	 */
	@RequestMapping(value="/codePage.do", method=RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> codePageList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		HashMap<String, Object> param = new HashMap<String, Object>();
		// validate/normalise input parameters (defaults applied when absent)
		param.put("strKeyword",map.get("strKeyword") != null ? map.get("strKeyword") : "" );
		param.put("orderCol",map.get("sidx") != null ? map.get("sidx") : "SORT_INDEX");
		param.put("orderType",map.get("sord") != null ? map.get("sord") : "ASC");
		param.put("page_size",map.get("rows") != null ? map.get("rows") : sessionVO.getSessPage_size());
		param.put("gcode_id",map.get("gcode_id") != null ? map.get("gcode_id") : Constant.CODE_ROLE);
		param.put("is_use",map.get("is_use") != null ? map.get("is_use") : Constant.YES );
		// page settings
		param.put("nPage",CommonUtil.getPage(map));
		try {
			// param keys : orderCol , orderType , page_size , nPage , gcode_id , is_use
			resultMap = commonService.codePageList(param);
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : code insert/delete/update processing
	 * 2. Notes   : bean validation first; on success delegates to codeManager
	 * </pre>
	 * @Method Name : codeWrite
	 * @param model view model
	 * @param sessionVO session information
	 * @param codeVO code bean populated from the request
	 * @param map raw request parameters (carries the operation type)
	 * @param bindingResult validation results for codeVO
	 * @param request HTTP request
	 * @return Map result flag plus message
	 */
	@RequestMapping(value = "/codeWrite.do", method = RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> codeWrite(Model model, @ModelAttribute SessionVO sessionVO, @ModelAttribute("codeVO") CodeVO codeVO,
			@RequestParam HashMap<String,Object> map,BindingResult bindingResult,HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		try {
			// server-side parameter validation
			beanValidator.validate(codeVO, bindingResult);
			if (bindingResult.hasErrors()) {
				// report only the first field error back to the client
				for(ObjectError error : bindingResult.getAllErrors()) {
					if(error instanceof FieldError) {
						FieldError fieldError = (FieldError)error;
						logger.info(error.getCode() + " : " + error.getDefaultMessage());
						logger.info(error.getCode() + " : " + error.getObjectName() );
						Object[] args = error.getArguments();
						if(error.getCode().equals("typeMismatch")) {
							resultMap.put("message",messageSource.getMessage("typeMismatch."+fieldError.getField(),args,locale));
						}else {
							resultMap.put("message",messageSource.getMessage(error.getDefaultMessage(),args,locale));
						}
					}
					break;
				}
				resultMap.put("result",Constant.RESULT_FALSE);
			}else {
				// duplicate check + insert/update/delete dispatch
				resultMap = commonService.codeManager(codeVO, map);
			}
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : role (code) deletion
	 * 2. Notes   : "inputStr" is a comma-separated list of code ids; each is
	 *              deleted individually via codeManager. resultMap reflects the
	 *              outcome of the LAST processed code.
	 * </pre>
	 * @Method Name : codeDelete
	 * @param model view model
	 * @param sessionVO session information
	 * @param map request parameters (expects "inputStr")
	 * @param request HTTP request
	 * @return Map result flag plus message
	 */
	@RequestMapping(value = "/codeDelete.do", method = RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> codeDelete(Model model, @ModelAttribute SessionVO sessionVO,
			@RequestParam HashMap<String,Object> map,HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		String[] arrData = map.get("inputStr") != null ? map.get("inputStr").toString().split(",") : null ;
		try {
			if(arrData != null) {
				for(String data : arrData) {
					CodeVO codeVO = new CodeVO();
					codeVO.setCode_id(data);
					codeVO.setGcode_id(Constant.CODE_ROLE);
					resultMap = commonService.codeManager(codeVO, map);
				}
			}
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : menu-authorization list in tree form
	 * 2. Notes   : appliance builds add an extra "is_appliance" filter
	 * </pre>
	 * @Method Name : menuAuthList
	 * @param model view model
	 * @param sessionVO session information
	 * @param map request parameters (expects "role_id")
	 * @param request HTTP request
	 * @return Map tree data, or result=false plus message on failure
	 */
	@RequestMapping("/menuAuth.do")
	@ResponseBody
	public Map<String,Object> menuAuthList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		HashMap<String, Object> param = new HashMap<String, Object>();
		param.put("role_id",map.get("role_id") != null ? map.get("role_id") : "" );
		try {
			// [APPLIANCE VERSION]
			if(ConfigData.getString("VERSION_INFO") != null &&
					ConfigData.getString("VERSION_INFO").equals(Constant.PRODUCT_EDMS_APPLIANCE)) {
				param.put("is_appliance",Constant.T);
			}
			resultMap = commonService.menuAuthList(param);
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : menu-authorization insert/update/delete processing
	 * 2. Notes   : "inputStr" is a comma-separated list of menu codes; for
	 *              inserts the extra map parameters are forwarded as well
	 * </pre>
	 * @Method Name : menuAuthManager
	 * @param model view model
	 * @param sessionVO session information
	 * @param map request parameters (expects "inputStr" and the operation type)
	 * @param bindingResult validation results (unused here)
	 * @param request HTTP request
	 * @return Map result flag plus message
	 */
	@RequestMapping(value = "/menuAuthManager.do", method = RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> menuAuthManager(Model model, @ModelAttribute SessionVO sessionVO,
			@RequestParam HashMap<String,Object> map,BindingResult bindingResult,HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		List<MenuAuthVO> menuAuthList = new ArrayList<MenuAuthVO>();
		Map<String, Object> resultMap = new HashMap<String, Object>();
		String[] arrData = map.get("inputStr") != null ? map.get("inputStr").toString().split(",") : null;
		String type = map.get(Constant.TYPE) != null ? map.get(Constant.TYPE).toString() : "";
		try {
			// build the list of menus to process
			if(type.equals(Constant.INSERT)) {
				menuAuthList = commonService.setMenuAuthParam(arrData,map);
			}else {
				menuAuthList = commonService.setMenuAuthParam(arrData) ;
			}
			// apply the menu-authorization change according to the type
			resultMap = commonService.menuAuthManager(menuAuthList, map);
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}
		catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : full menu list for the menu-authorization admin screen
	 * 2. Notes   : no search conditions / no parameters — returns every
	 *              registered menu in one response
	 * </pre>
	 * @Method Name : menuList
	 * @param model view model
	 * @param sessionVO session information
	 * @param map request parameters (passed through unmodified)
	 * @param request HTTP request
	 * @return Map menu list, or result=false plus message on failure
	 */
	@RequestMapping("/menuList.do")
	@ResponseBody
	public Map<String,Object> menuList(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		try {
			resultMap = commonService.menuList(map);
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : system administration - configuration management page
	 * 2. Notes   : loads version codes and product info in addition to the
	 *              access check / navigation data
	 * </pre>
	 * @Method Name : confManager
	 * @param sessionVO session information
	 * @param model view model
	 * @param map request parameters; expects "menu_cd"
	 * @return String view name ("sysadmin/confManager", or "error/message" on denial)
	 */
	@RequestMapping("confManager.do")
	public String confManager(@ModelAttribute SessionVO sessionVO,Model model,@RequestParam HashMap<String,Object> map) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		List<CodeVO> versionList = new ArrayList<CodeVO>();
		HashMap<String,Object> param = new HashMap<String,Object>();
		Map<String, Object> menuInfo = new HashMap<String, Object>();
		param.put("menu_cd",map.get("menu_cd") != null ? map.get("menu_cd").toString() : "" );
		param.put("role_id",sessionVO.getSessRole_id());
		String part = ""; // ALL/GROUP/TEAM - applied differently per admin page
		String productInfo = "";
		try {
			// 1. administrator ROLE access check
			part = commonService.getMenuAuth(param);
			// 2. page navigation :: parent menu name / current menu name
			menuInfo = commonService.pageMenuInfo(param);
			// 3. version-management code list :: parameter gcode_id
			param.put("gcode_id", Constant.CODE_VERSION);
			versionList = commonService.codeList(param);
			// 4. [APPLIANCE VERSION]
			productInfo = ConfigData.getString("VERSION_INFO");
		}catch(BizException e){
			logger.error(e.getMessage());
		}catch(Exception e) {
			logger.error(e.getMessage());
		}
		// Redirect to the 403 error page when access is denied or menu_cd is missing.
		if(part.equals("") || param.get("menu_cd").toString().equals("")) {
			CommonUtil.setErrorMsg(model, Constant.ERROR_403, messageSource.getMessage("common.connect.error",new Object[0],locale),sessionVO.getSessContextRoot());
			return "error/message";
		}
		// call by reference
		CommonUtil.setSessionToModel(model, sessionVO);
		model.addAttribute("defaultFileSize",ConfigData.getInt("DOC.DEFAULT.FILESIZE"));
		model.addAttribute("menuInfo",menuInfo);
		model.addAttribute("versionList",versionList);
		model.addAttribute("productInfo",productInfo);
		model.addAttribute("topSelect",Constant.TOPMENU_SYSTEM);
		model.addAttribute("subSelect",Constant.SYSTEM_CONFMANAGER);
		return "sysadmin/confManager";
	}
	/**
	 *
	 * <pre>
	 * 1. Purpose : read or update the system configuration
	 * 2. Notes   : type==UPDATE performs the update, anything else reads the
	 *              current configuration
	 * </pre>
	 * @Method Name : confControl
	 * @param model view model
	 * @param sessionVO session information
	 * @param map request parameters (carries the operation type)
	 * @param request HTTP request
	 * @return Map<String,Object> configuration data or result flag plus message
	 */
	@RequestMapping(value="/confControl.do", method=RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> confControl(Model model, @ModelAttribute SessionVO sessionVO, @RequestParam HashMap<String,Object> map,
			HttpServletRequest request) {
		Locale locale = LocaleLibrary.setLocale(sessionVO.getSessLanguage() != null ? sessionVO.getSessLanguage() : ConfigData.getString("LANGUAGE"));
		Map<String, Object> resultMap = new HashMap<String, Object>();
		// operation selector - read vs. update
		String type = map.get(Constant.TYPE) != null ? map.get(Constant.TYPE).toString() : "";
		try {
			if(type.equals(Constant.UPDATE) ) {
				// update the configuration
				resultMap = commonService.confProc(map);
			}else {
				// read the configuration
				resultMap = commonService.confDetail(map);
			}
		}catch(BizException e){
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",e.getMessage());
		}catch(Exception e) {
			resultMap.put("result",Constant.RESULT_FALSE);
			resultMap.put("message",messageSource.getMessage("common.system.error",new Object[0],locale));
		}
		return resultMap;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/dao/CommonDao.java
package kr.co.exsoft.common.dao;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.vo.RecentlyObjectVO;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.springframework.stereotype.Repository;
/**
 * Common mapper interface: sequence helpers, editor sample persistence,
 * recently-registered object tracking and the deferred file-deletion queue.
 *
 * @author package development team
 * @since 2014.07.28
 * @version 3.0
 *
 */
@Repository(value = "commonDao")
public interface CommonDao {
	/**
	 *
	 * <pre>
	 * 1. Purpose : NEXT_VAL function — fetch the next sequence value
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : commonNextVal
	 * @param map sequence selector parameters
	 * @return int next sequence value
	 */
	public int comNextVal(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : CURRENT_VAL function — fetch the current sequence value
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : commonCurrentVal
	 * @param map sequence selector parameters
	 * @return current sequence value
	 */
	public int comCurrentVal(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : increment a counter backed by a table
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : comNextValInc
	 * @param map counter selector parameters
	 * @return incremented value
	 */
	public int comNextValInc(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : read the current counter value from a table
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : comCurrvalTable
	 * @param map counter selector parameters
	 * @return current counter value
	 */
	public int comCurrvalTable(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : Daum editor registration sample — insert editor content
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : editorWrite
	 * @param map editor content parameters
	 * @return int affected row count
	 */
	public int editorWrite(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : fetch the single most recent Daum-editor entry
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : editorDetailInfo
	 * @param map selector parameters
	 * @return CaseInsensitiveMap row data with case-insensitive column keys
	 */
	public CaseInsensitiveMap editorDetailInfo(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : delete "recently registered" entries (document/folder/collaboration)
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : deleteRecently
	 * @param map selector parameters
	 * @return int affected row count
	 */
	public int deleteRecently(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : queue an attached file for deletion
	 * 2. Notes   : when document/collaboration registration fails, files already
	 *              stored in the exRep ECM are inserted into XR_DELETEFILE_QUEUE
	 *              and removed later by a batch job.
	 * </pre>
	 * @Method Name : insertDeleteFileQueue
	 * @param map file identification parameters
	 * @return int affected row count
	 */
	public int insertDeleteFileQueue(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : fetch the user's recent-registration status
	 * 2. Notes   : covers document, folder and collaboration registrations
	 * </pre>
	 * @Method Name : selectRecentlyObject
	 * @return List<RecentlyObjectVO> recent registrations
	 */
	public List<RecentlyObjectVO> selectRecentlyObject(RecentlyObjectVO recentlyVo);
	/**
	 *
	 * <pre>
	 * 1. Purpose : record a recent registration
	 * 2. Notes   : document, folder and collaboration registrations
	 * </pre>
	 * @Method Name : insertRecentlyObject
	 * @param recentlyVo registration entry to record
	 * @return int affected row count
	 */
	public int insertRecentlyObject(RecentlyObjectVO recentlyVo);
}
<file_sep>/EDMS3/src/kr/co/exsoft/document/dao/WorkDocumentDao.java
package kr.co.exsoft.document.dao;
import java.util.HashMap;
import java.util.List;
import org.springframework.stereotype.Repository;
import kr.co.exsoft.document.vo.DocumentVO;
/**
 *
 * Work/personal document mapper interface.
 * @author <NAME>
 * @since 2015.01.05
 * @version 3.0
 *
 */
@Repository(value = "workDocumentDao")
public interface WorkDocumentDao {
	/**
	 *
	 * <pre>
	 * 1. Purpose : count of work/personal documents
	 * 2. Notes   : ACL filtered
	 * </pre>
	 * @Method Name : workDocumentListCnt
	 * @param map search/paging parameters
	 * @return int matching document count
	 */
	public int workDocumentListCnt(HashMap<String, Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : work/personal document list
	 * 2. Notes   : ACL filtered
	 * </pre>
	 * @Method Name : workDocumentList
	 * @param map search/paging parameters
	 * @return List<DocumentVO> matching documents
	 */
	public List<DocumentVO> workDocumentList(HashMap<String, Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : document list for the main page
	 * 2. Notes   :
	 * </pre>
	 * @Method Name : mainDocumentList
	 * @param map selector parameters
	 * @return List<DocumentVO> documents for the main page
	 */
	public List<DocumentVO> mainDocumentList(HashMap<String, Object> map);
}
<file_sep>/EDMS3/WebContent/js/common/ecm.js
//우클릭 방지 관련 : 퍼블리싱 완료되면 주석 제거
//document.oncontextmenu=function(){return false;}
$(function(){
// //[수정완료]div 드롭다운(동적 사용을 위해 on 사용)
// $("body").on("click", 'a[class="dropDown_txt"]', function(e){
// e.preventDefault();
// var span = $(this).parent().find("span");
// var divLength = $(this).parent().find('div').children('div').length;
//
// if(span.hasClass("down")) {
// span.removeClass("down");
// span.addClass("up");
//
// $(this).parent().find('div').removeClass('hide');
//
// if(divLength == 0) {
// $(this).parent().find('div').removeClass('hide');
// } else {
// $(this).next().removeClass('hide');
// }
//
// } else {
// span.removeClass("up");
// span.addClass("down");
//
// $(this).parent().find('div').addClass('hide');
//
// if(divLength == 0) {
// $(this).parent().find('div').addClass('hide');
// } else {
// $(this).next().addClass('hide');
// }
//
// }
// });
/*
$('a[class="dropDown_img"]').bind("click", function(e){
e.preventDefault();
var div = $(this).next();
if(div.hasClass('hide')) {
div.removeClass('hide');
} else {
div.addClass('hide');
}
});
*/
//문서 상세조회 > 드롭다운 - 추가기능 선택, 메뉴 클릭 시
$('.doc_detail').find('.extFunction_dropDown_menu').find('li > a').bind("click", function(e){
e.preventDefault();
$(this).parents('.extFunction_dropDown_wrapper').prev().trigger('click');
var clsName = $(this).attr('class');
if(clsName == 'copy') {
//이벤트 기술
$('.doc_folder_choose2').removeClass('hide');
$('.doc_folder_choose2').find('input:hidden.context_choose').val(clsName);
} else if(clsName == 'move') {
//이벤트 기술
$('.doc_folder_choose2').removeClass('hide');
$('.doc_folder_choose2').find('input:hidden.context_choose').val(clsName);
} else if(clsName == 'favorite') {
//이벤트 기술
$('.doc_favorite_choose').removeClass('hide');
} else if(clsName == 'tempbox') {
//이벤트 기술
/*jConfirm("작업카트에 추가하시겠습니까?", "확인", 0, function(r){
if(r){
jAlert("작업카트에 추가 되었습니다.", "확인", 0);
//jAlert("이미 작업카트에 추가된<br>문서입니다.", "확인", 0);
//jAlert("작업카트에는 30개 이상의 문서를 추가할 수 없습니다.", "확인", 0);
}
});*/
} else if(clsName == 'email_send') {
$('.url_email').removeClass('hide');
}
});
//컨텍스트메뉴 - 메뉴 클릭 시
$('.tbl_context_menu > ul').find('li > a').bind("click", function(e){
e.preventDefault();
var clsName = $(this).attr('class');
//버튼을 누르면 컨텍스트메뉴가 닫힘
$('.tbl_context_menu').addClass('hide');
if(clsName == 'copy') {
var chkLength = $('.tbl_data_body > li.check').find('input:checkbox:checked').length;
if(chkLength != 0) {
//이벤트 기술
$('.doc_folder_choose2').removeClass('hide');
$('.doc_folder_choose2').find('input:hidden.context_choose').val(clsName);
} else {
jAlert("복사할 문서를 선택하세요!", "확인", 0);
}
} else if(clsName == 'move') {
var chkLength = $('.tbl_data_body > li.check').find('input:checkbox:checked').length;
if(chkLength != 0) {
//이벤트 기술
$('.doc_folder_choose2').removeClass('hide');
$('.doc_folder_choose2').find('input:hidden.context_choose').val(clsName);
} else {
jAlert("이동할 문서를 선택하세요!", "확인", 0);
}
} else if(clsName == 'delete') {
jConfirm("삭제 하시겠습니까?", "확인", 0, function(r){
if(r){
var isChkOut = true;
if(isChkOut){
jConfirm("체크아웃된 문서가 존재합니다.<br>체크아웃 취소 후 다시<br>작업하시기 바랍니다.", "확인", 0, function(r){
});
} else {
jAlert("삭제 확인", "확인", 0);
}
} else {
jAlert("삭제 취소", "취소", 0);
}
});
} else if(clsName == 'favorite'){
$('.doc_favorite_choose').removeClass('hide');
} else if(clsName == 'tempbox'){
jConfirm("작업카트에 추가하시겠습니까?", "확인", 0, function(r){
if(r){
jAlert("이미 작업카트에 추가된<br>문서입니다.", "확인", 0);
//jAlert("작업카트에는 30개 이상의 문서를 추가할 수 없습니다.", "확인", 0);
}
});
}
});
//통합 - 일자 달력 선택 활성화
//$("#datepicker3").datepicker({dateFormat:'yy-mm-dd'});
//$("#datepicker4").datepicker({dateFormat:'yy-mm-dd'});
//통계 로그인이력 검색
//$("#datepicker6").datepicker({dateFormat:'yy-mm-dd'});
//$("#datepicker7").datepicker({dateFormat:'yy-mm-dd'});
//통합검색 작성기간 검색
$("#datepicker8").datepicker({dateFormat:'yy-mm-dd'});
$("#datepicker9").datepicker({dateFormat:'yy-mm-dd'});
//관련문서 추가 - 일자 달력 선택 활성화
$("#relativeAdd_datepicker1").datepicker({dateFormat:'yy-mm-dd'});
$("#relativeAdd_datepicker2").datepicker({dateFormat:'yy-mm-dd'});
//풀스크린
$('a.full_screen').bind("click", function(e){
e.preventDefault();
var el = document.documentElement
, rfs = // for newer Webkit and Firefox
el.requestFullScreen
|| el.webkitRequestFullScreen
|| el.mozRequestFullScreen
|| el.msRequestFullscreen
;
if(typeof rfs!="undefined" && rfs){
rfs.call(el);
} else if(typeof window.ActiveXObject!="undefined"){
// for Internet Explorer
var wscript = new ActiveXObject("WScript.Shell");
if (wscript!=null) {
wscript.SendKeys("{F11}");
}
}
});
//depth navigation
$('.depth_navi > span').mouseover(function(){
var path = $(this).parent().find(".depth_navi_path");
if(!path.is(":visible")) {
path.removeClass('hide');
}
}).mouseout(function(){
var path = $(this).parent().find(".depth_navi_path");
if(path.is(":visible")) {
path.addClass('hide');
}
});
//문서등록 - 창 닫기
$('.doc_register_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.doc_register').addClass('hide');
$('.doc_register_wrapper').addClass('hide');
});
//문서등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.doc_register_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.doc_register').addClass('hide');
});
//문서수정 - 창 닫기
$('.doc_modify_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.doc_modify').addClass('hide');
$('.doc_modify_wrapper').addClass('hide');
});
//문서수정 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.doc_modify_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.doc_modify').addClass('hide');
});
//내 문서등록 - 창 닫기
$('.myDoc_register_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.myDoc_register').addClass('hide');
$('.myDoc_register_wrapper').addClass('hide');
});
//내 문서등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.myDoc_register_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.myDoc_register').addClass('hide');
});
//탭 요소 클릭 시 폼 변경
$('.tab_element').bind("click", function(){
var idx = $(this).index();
var targetFrm = $(this).parent().parent().parent().find('div[class^="tab_form"]');
targetFrm.addClass('hide');
targetFrm.eq(idx).removeClass('hide');
$('.tab_element').removeClass('selected');
$(this).addClass('selected');
});
/*
//문서 상세조회 - 창 닫기
$('.doc_detail_close').delegate("click", function(e){
e.preventDefault();
$(this).parents('.doc_detail').addClass('hide');
$('.doc_detail_wrapper').addClass('hide');
});
//문서 상세조회 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.doc_detail_wrapper').delegate("click", function(){
$(this).addClass('hide');
$('.doc_detail').addClass('hide');
});
*/
//문서 상세조회 > 폴더 선택 - 창 닫기
$('.doc_folder_close').bind("click", function(e){
e.preventDefault();
$(this).parents('div[id^="doc_folder_choose"]').addClass('hide');
$('.folder_choose_wrapper').addClass('hide');
});
//문서등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('div[class^="folder_choose_wrapper"]').bind("click", function(){
$(this).addClass('hide');
$('.doc_folder_choose').addClass('hide');
});
//컨텍스트메뉴 > 폴더 선택 - 확인버튼
$('.doc_folder_btnmenu2').find('button.confirm').bind("click", function(){
var r = true;
var contextChoose = $('.doc_folder_choose2').find('input:hidden.context_choose').val();
var chooseVal = "";
var removeWindow = function() {
$('div[id^="doc_folder_choose"]').addClass('hide');
$('div[id^="doc_folder_choose"]').prev().addClass('hide');
};
if(contextChoose == 'copy') chooseVal = "복사";
else if(contextChoose == 'move') chooseVal = "이동";
if(r){
jAlert("문서 " + chooseVal + "에 성공하였습니다.", "확인", 0, function(r){
removeWindow();
});
} else {
jAlert("문서 " + chooseVal + "에 실패하였습니다.", "확인", 0, function(r){
removeWindow();
});
}
});
//컨텍스트메뉴 > 폴더 선택 - 취소버튼
$('.doc_folder_btnmenu2').find('button.cancel').bind("click", function(){
$(this).parents('div[id^="doc_folder_choose"]').addClass('hide');
$(this).parents('div[id^="doc_folder_choose"]').prev().addClass('hide');
});
//컨텍스트메뉴 > 즐겨찾기 선택 - 창 닫기
$('.doc_favorite_close').bind("click", function(e){
e.preventDefault();
$(this).parents('div[id="doc_favorite_choose"]').addClass('hide');
});
//컨텍스트메뉴 > 즐겨찾기 선택 - 확인
$('.doc_favorite_btnmenu').find('button.confirm').bind("click", function(){
var r = true;
if(r){
jAlert("즐겨찾기 추가에 성공하였습니다.", "확인", 0);
} else {
jAlert("이미 즐겨찾기에 추가에 추가된<br>문서입니다.", "확인", 0);
}
});
//컨텍스트메뉴 > 즐겨찾기 선택 - 취소
$('.doc_favorite_btnmenu').find('button.cancel').bind("click", function(){
$(this).parents('div[id="doc_favorite_choose"]').addClass('hide');
});
//문서 상세조회 > 폴더 선택 - 창 열기
$('.doc_register_cnts').find('button.doc_folder_srch').bind("click", function(){
$('.doc_folder_choose').removeClass('hide');
});
//문서 상세조회 - 자물쇠 모양 over시 정보 표출
$('.cnts_locked').find('img').mouseover(function(){
$('.locked_info').removeClass('hide');
}).mouseout(function(){
$('.locked_info').addClass('hide');
});
//문서 상세조회 - 버튼메뉴
//삭제버튼
$('.doc_detail').find('button.delete').bind("click", function(){
var tabIdx = $('.doc_detail').find('span[class*="selected"]').index();
//0 : 기본, 1 : 버전, 2: 이력, 3 : 의견
if(tabIdx == 0) {
jConfirm("삭제하시겠습니까?", "확인", 0, function(r){
if(r){
jAlert('삭제 되었습니다.', "확인", 0, function(r){
$('.doc_detail').addClass('hide');
});
}
});
} else if(tabIdx == 1) {
jConfirm("해당 버전을<br>영구 삭제하시겠습니까?", "확인", 0, function(r){
});
} else if(tabIdx == 3) {
jConfirm("의견을 삭제하시겠습니까?", "확인", 0, function(r){
});
}
});
//체크아웃 취소
$('.doc_detail').find('button.cancel_checkout').bind("click", function(){
/*jConfirm("체크아웃을 취소하시겠습니까?", "확인", 0, function(r){
if(r){
jAlert("체크아웃이 취소 되었습니다.", "확인", 0);
}
});*/
});
//최근문서 리스트 지우기
$('.recent_del').bind("click", function(e){
e.preventDefault();
$(this).parent().remove();
});
//권한변경 - 창 닫기
$('.doc_authModify_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.doc_folder_authModify').addClass('hide');
$('.folder_authModify_wrapper').addClass('hide');
});
//권한변경 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.folder_authModify_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.doc_folder_authModify').addClass('hide');
});
//URL복사 - 창 열기
$('.btn_urlCopy').bind("click", function(){
$('.url_copy').removeClass('hide');
});
//URL복사 - 창 닫기
$('.url_copy_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_copy').addClass('hide');
});
//URL붙여넣기 - 창 닫기
$('.url_paste_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_paste').addClass('hide');
});
//URL메일발송 - 창 닫기
$('.url_email_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_email').addClass('hide');
});
//권한설정 닫기
$('.doc_authSet_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.doc_authSet').addClass('hide');
$('.doc_authSet_wrapper').addClass('hide');
});
//권한설정 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.doc_authSet_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.doc_authSet').addClass('hide');
});
//권한설정 - 선택대상 지우기
$('.chosen_user_list > li').find('a').bind("click", function(e){
e.preventDefault();
var idVal = $(this).parent().find('input:hidden').val();
$(this).parent().remove();
//사용자 체크 된 부분 체크 해제
$('.tbl_choose_list > tr').find('input:checkbox[id="' + idVal + '"]').attr('checked', false);
});
//하위폴더 추가 - 창 닫기
$('.subFolder_add_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.subFolder_add').addClass('hide');
$('.subFolder_add_wrapper').addClass('hide');
});
//하위폴더 추가 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.subFolder_add_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.subFolder_add').addClass('hide');
});
//하위폴더 수정 - 창 닫기
$('.subFolder_modify_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.subFolder_modify').addClass('hide');
$('.subFolder_modify_wrapper').addClass('hide');
});
//하위폴더 수정 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.subFolder_modify_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.subFolder_modify').addClass('hide');
});
//하위폴더 추가 > 권한변경 - 창 닫기
$('.subFolder_authModify_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.subFolder_authModify').addClass('hide');
$('.subFolder_authModify_wrapper').addClass('hide');
});
//하위폴더 추가 > 권한변경 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.subFolder_authModify_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.subFolder_authModify').addClass('hide');
});
//하위폴더 추가 > 권한변경 - 창 닫기
$('.subFolder_authModifyCopy_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.subFolder_authModifyCopy').addClass('hide');
$('.subFolder_authModifyCopy_wrapper').addClass('hide');
});
//하위폴더 추가 > 권한변경 > 권한등록 창 닫기 : 음영진 부분 클릭 시 닫기
$('.subFolder_authModifyCopy_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.subFolder_authModifyCopy').addClass('hide');
});
//하위폴더 추가 > 권한변경 - 창 닫기
$('.subFolder_authModifyUpdate_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.subFolder_authModifyUpdate').addClass('hide');
$('.subFolder_authModifyUpdate_wrapper').addClass('hide');
});
//하위폴더 추가 > 권한변경 > 권한등록 창 닫기 : 음영진 부분 클릭 시 닫기
$('.subFolder_authModifyUpdate_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.subFolder_authModifyUpdate').addClass('hide');
});
//보존기간 연장 - 창 닫기
$('.extend_preserve_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.extend_preserve').addClass('hide');
$('.extend_preserve_wrapper').addClass('hide');
});
//보존기간 연장 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.extend_preserve_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.extend_preserve').addClass('hide');
});
//즐겨찾기 등록 - 창 닫기
$('.favorite_choose_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.favorite_choose').addClass('hide');
$('.favorite_choose_wrapper').addClass('hide');
});
//즐겨찾기 등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.favorite_choose_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.favorite_choose').addClass('hide');
});
//즐겨찾기 폴더 등록 - 창 닫기
$('.favorite_register_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.favorite_register').addClass('hide');
$('.favorite_register_wrapper').addClass('hide');
});
//즐겨찾기 폴더 등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.favorite_register_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.favorite_register').addClass('hide');
});
//즐겨찾기 폴더 수정 - 창 닫기
$('.favorite_modify_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.favorite_modify').addClass('hide');
$('.favorite_modify_wrapper').addClass('hide');
});
//즐겨찾기 폴더 수정 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.favorite_modify_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.favorite_modify').addClass('hide');
});
//즐겨찾기 폴더 이동 - 창 닫기
$('.favorite_move_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.favorite_move').addClass('hide');
$('.favorite_move_wrapper').addClass('hide');
});
//즐겨찾기 폴더 이동 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.favorite_move_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.favorite_move').addClass('hide');
});
//작업카트 > 관련문서 - 창 닫기
$('.relativeDocs_choose_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.relativeDocs_choose').addClass('hide');
$('.relativeDocs_choose_wrapper').addClass('hide');
});
//작업카트 > 관련문서 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.relativeDocs_choose_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.relativeDocs_choose').addClass('hide');
});
//작업카트 > 관련문서 - 창 닫기
$('.url_emailSend_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_emailSend').addClass('hide');
$('.url_emailSend_wrapper').addClass('hide');
});
//작업카트 > 관련문서 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.url_emailSend_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.url_emailSend').addClass('hide');
});
//이메일 송부 > 사용자 선택 - 창 닫기
$('.user_choose_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.user_choose').addClass('hide');
$('.user_choose_wrapper').addClass('hide');
});
//이메일 송부 > 사용자 선택 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.user_choose_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.user_choose').addClass('hide');
});
//협업 등록 - 창 닫기
$('.coop_register_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.coop_register').addClass('hide');
$('.coop_register_wrapper').addClass('hide');
});
//협업 등록 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.coop_register_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.coop_register').addClass('hide');
});
//협업 등록 > 사용자 선택 - 창 닫기
$('.coopUser_choose_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.coopUser_choose').addClass('hide');
$('.coopUser_choose_wrapper').addClass('hide');
});
//협업 등록 > 사용자 선택 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.coopUser_choose_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.coopUser_choose').addClass('hide');
});
//협업 업무상세조회 - 창 닫기
$('.coopUser_detail_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.coopUser_detail').addClass('hide');
$('.coopUser_detail_wrapper').addClass('hide');
});
//협업 업무상세조회 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.coopUser_detail_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.coopUser_detail').addClass('hide');
});
/*
* 버튼 액션
*/
//문서 등록
$('.reg_doc').bind("click", function(){
var doc = $('.doc_register');
doc.removeClass('hide');
doc.prev().removeClass('hide');
lyrPopupWindowResize($('.doc_register'));
});
//문서 수정
$('.modify_docs').bind("click", function(){
});
//문서 수정 취소
$('.cancel_modify').bind("click", function(){
jConfirm("수정작업을 취소하시겠습니까?<br>"예"선택시 잠금상태가 해제됩니다.", "확인", 0, function(r){
});
});
//보존기간 연장 팝업 호출
$('.extend_preserve_btn').bind("click", function(){
var lyrWindow = $('.extend_preserve');
lyrWindow.removeClass('hide');
lyrWindow.prev().removeClass('hide');
//보존기간 연장
lyrPopupWindowResize($('.extend_preserve'));
});
//즐겨찾기
$('.favorite_confirm_btn').bind("click", function(){
var isDup = false;
if(isDup){
jAlert("이미 즐겨찾기에 추가된 폴더입니다.", "확인", 1);
} else {
jAlert("즐겨찾기에 추가 되었습니다.", "확인", 1, function(r){
$('.favorite_choose').addClass('hide');
$('.favorite_choose').prev().addClass('hide');
});
}
});
//즐겨찾기 제외
$('.exclude_favorite').bind("click", function(){
jConfirm("즐겨찾기를 제외하시겠습니까?", "확인", 3, function(r){
if(r){
jAlert("즐겨찾기가 제외되었습니다.", "확인", 3, function(r){
});
}
});
});
//즐겨찾기 폺더 구성
//저장버튼
$('.favorite_save_btn').bind("click", function(){
});
//작업카트 - 관련문서 설정
$('.set_relative_docs').bind("click", function(){
var length = $('.cnts_list').find('input:checked').length;
if(length < 2) {
jAlert("관련문서 설정 시 2개 이상의 문서를<br>선택해주세요!", "확인", 4);
} else {
$('.relativeDocs_choose').removeClass('hide');
$('.relativeDocs_choose').prev().removeClass('hide');
//작업카트 - 관련문서 선택
lyrPopupWindowResize($('.relativeDocs_choose'));
}
});
//작업카트 - 관련문서 등록
$('.relativeDocs_confirm_btn').bind("click", function(){
jConfirm("선택한 메인문서에 이미 관련문서가 있습니다.<br>나머지 문서를 추가하시겠습니까?", "확인", 4, function(r){
});
});
//작업카트 - URL메일송부
$('.url_email_send').bind("click", function(){
//작업카트 - URL 메일 송부
$('.url_emailSend').removeClass('hide');
$('.url_emailSend').prev().removeClass('hide');
lyrPopupWindowResize($('.url_emailSend'));
});
//작업카트 - 작업카트 제외
$('.exclude_tempbox').bind("click", function(){
var chkLength = $('.tbody').find("input:checkbox:checked").length;
if(chkLength > 0) {
jConfirm("선택한 문서를 작업카트에서<br>제외하시겠습니까?", "확인", 4, function(r){
});
} else {
jAlert("작업카트에서 제외할 문서를 선택해 주세요.", "확인", 4);
}
});
//작업카트 - 즐겨찾기 추가
$('.add_favorite').bind("click", function(){
});
//작업카트 - 다운로드
$('.docs_download').bind("click", function(){
var chkLength = $('.tbody').find("input:checkbox:checked").length;
if(chkLength > 10) {
jAlert("최대 10개까지 다운로드가 가능합니다.<br>확인 바랍니다.", "확인", 4);
} else if(chkLength > 0 && chkLength <= 10) {
jConfirm("선택한 문서를 일괄 다운로드<br>하시겠습니까?", "확인", 4, function(r){
});
} else {
jAlert("다운로드 할 문서를 선택해 주세요.", "확인", 4);
}
});
//협업 등록 버튼
$('.tbl_coop_reg').bind("click", function(){
$('.coop_register').removeClass('hide');
$('.coop_register').prev().removeClass('hide');
lyrPopupWindowResize($('.coop_register'));
});
//협업요청
$('.requestApproval_btn').bind("click", function(){
var cnts = $(this).next().val();
if(cnts != ""){
jConfirm("승인 하시겠습니까?", "확인", 7, function(r){
});
} else {
jAlert("승인요청 사항을 입력하시기 바랍니다.", "확인", 7);
}
});
//협업요청 삭제
$('.delete_requestApproval_btn').bind("click", function(){
jConfirm("삭제하시겠습니까?", "확인", 7, function(r){
});
});
//협업요청 승인
$('.requestApproval_accept').bind("click", function(){
jPrompt("승인 내용을 입력해주세요", "", "확인", 7, function(r){
});
});
//협업요청 반려
$('.requestApproval_reject').bind("click", function(){
jPrompt("반려 내용을 입력해주세요", "", "확인", 7, function(r){
});
});
//통계 그래프 레이어 팝업 보기
$('.tbl_thumbMenu').find('li.chart_view > a').bind("click", function(e){
e.preventDefault();
var div = $('.statics_view')
div.removeClass('hide');
div.prev().removeClass('hide');
//통계 그래프 보기
lyrPopupWindowResize($('.statics_view'));
});
//문서 상세조회 > 전체 선택
$('.relative_docs_checkAll').bind("click", function(){
var checkbox = $('.relative_docs_wrapper').find('.relative_docs_checkbox');
checkbox.prop("checked",true);
});
//문서 상세조회 > 전체 해제
$('.relative_docs_uncheckAll').bind("click", function(){
var checkbox = $('.relative_docs_wrapper').find('.relative_docs_checkbox');
checkbox.prop("checked",false);
});
//통합검색 버튼 - 토글
$('.integratedSrch_detail_btn').bind("click", function(){
var div = $(this).parent().find('.integratedCnts_dropDown_menu');
if(div.hasClass('hide')) {
div.removeClass('hide');
} else {
div.addClass('hide');
}
});
//휴지통 삭제버튼
$('.tbl_completeDel').bind("click", function(){
jConfirm("선택한 문서를 삭제하시겠습니까?", "삭제", 2, function(r){
});
});
//휴지통 복원 버튼
$('.tbl_restore').bind("click", function(){
jConfirm("선택한 문서를 복원하시겠습니까?", "복원", 2, function(r){
});
});
/*
* 메뉴관련
*/
//공통메뉴 리스트 설명 열기/닫기
$('.header_icon_menu').find('li').mouseover(function(){
var menu_tooltip = $(this).find('div');
if(menu_tooltip.hasClass('hide')){
menu_tooltip.removeClass('hide');
menu_tooltip.css({
left : (-1) * (menu_tooltip.outerWidth()-$(this).width())/2
});
$(this).find('a').addClass('selected');
}
}).mouseout(function(){
var menu_tooltip = $(this).find('div');
menu_tooltip.addClass('hide');
if(!$(this).hasClass('selected')){
$(this).find('a').removeClass('selected');
}
}).click(function(){
$(this).parent().find('li, li > a').removeClass('selected');
$(this).addClass('selected');
$(this).find('a').addClass('selected');
});
//탑 사용자 메뉴 드롭다운
$('.header_user_menu').find('li > a').bind('click', function(e){
//e.preventDefault();
var dropDown_menu = $(this).parent().find('div[class*="dropDown_menu"]');
if(dropDown_menu.hasClass('hide')){
$('.header_user_menu').find('li > div[class*="dropDown_menu"]').addClass('hide');
dropDown_menu.removeClass('hide');
} else {
dropDown_menu.addClass('hide');
}
});
//협업관리 승인/열람 툴팁 보기
$('.tbl_data_list li[class^="data_list"]').find('li[class^="coop_approval"], li[class^="coop_read"]').mouseover(function(){
var tooltip = $(this).find('div');
if(tooltip.hasClass('hide')){
tooltip.removeClass('hide');
}
}).mouseout(function(){
var tooltip = $(this).find('div');
tooltip.addClass('hide');
});
//문서 새창으로 열기
$('.tblRow > span.subject').find('a.open_doc_newWindow').bind("click", function(e){
window.open("about:blank", "", "width=500, height=500");
});
//문서리스트 - 마우스 오버 시 행 색 변경
$('.tblRow > span.subject').mouseover(function(){
$(this).parents('.overview').find('.tblRow').removeClass('current');
$(this).parents('.tblRow').addClass('current');
});
//문서리스트 - 우클릭 시 나오는 context 메뉴
$('.tblRow').find('span.subject').mousedown(function(e){
//var pgType = "mydoc";
//var pgType = "doclist";
//var pgType = "mydocmodify"
//var pgType = "bin";
//var pgType = "favorite";
var pgType;
var context_menu;
if(pgType == "mydoc") {
context_menu = $('.mydoc_context_menu');
} else if(pgType == "mydocmodify") {
context_menu = $('.mydocmodify_context_menu');
} else if(pgType == "bin"){
context_menu = $('.mydocbin_context_menu');
} else if(pgType == "favorite"){
return false;
} else {
context_menu = $('.tbl_context_menu');
}
if(e.which == 3) {
context_menu.css({
left:e.pageX,
top:e.pageY
});
$(this).parents('div.tbody').find('ul[class^="tbl_data_list"]').removeClass('current');
$(this).parents('ul[class^="tbl_data_list"]').addClass('current');
$(this).parent().find('input:checkbox').attr('checked', true);
if(context_menu.hasClass('hide')){
context_menu.removeClass('hide');
}
} else if(e.which == 1) {
context_menu.addClass('hide');
}
});
//첨부파일 열기
$('img[class="attach_file"]').bind("click", function(e){
e.preventDefault();
var lyr_popup = $('.attach_window');
if(lyr_popup.hasClass('hide')){
lyr_popup.removeClass('hide');
lyr_popup.css({
left:e.pageX,
top:e.pageY
});
}
});
//첨부파일 닫기
$('.attach_window').find('a[class="close"]').bind("click", function(e){
e.preventDefault();
$('.attach_window').addClass('hide');
});
//관련문서 열기
$('img[class="relative_docs"]').bind("click", function(e){
e.preventDefault();
var lyr_popup = $('.relative_docs_window');
if(lyr_popup.hasClass('hide')){
lyr_popup.removeClass('hide');
lyr_popup.css({
left:e.pageX,
top:e.pageY
});
}
});
//관련문서 닫기
$('.relative_docs_window').find('a[class="close"]').bind("click", function(e){
e.preventDefault();
$(this).parents('.relative_docs_window').addClass('hide');
});
//반출정보 열기/닫기
$('img[class="doc_lock"]').mouseover(function(e){
e.preventDefault();
var lyr_popup = $('div[class^="doc_lock_info"]');
if(lyr_popup.hasClass('hide')){
lyr_popup.removeClass('hide');
lyr_popup.css({
left:e.pageX+($(this).width()-e.offsetX)+5,
top:e.pageY
});
}
}).mouseout(function(){
$('div[class^="doc_lock_info').addClass('hide');
});
//권한조회 열기/닫기
$('img[class="previlege_grade"]').mouseover(function(e){
e.preventDefault();
var lyr_popup = $('.previlege_inquiry');
if(lyr_popup.hasClass('hide')){
lyr_popup.removeClass('hide');
lyr_popup.css({
left:e.pageX + ((-1) * (lyr_popup.width()+25+e.offsetX)),
top:e.pageY
});
}
}).mouseout(function(){
$('.previlege_inquiry').addClass('hide');
});
//권한정보도움말 열기/닫기
$('.previlege_listImg').mouseover(function(){
var lyr_popup = $('.previlege_list');
if(lyr_popup.hasClass('hide')){
lyr_popup.removeClass('hide');
lyr_popup.css({
left:(-1) * (lyr_popup.width()+30)/2
});
}
}).mouseout(function(){
$('.previlege_list').addClass('hide');
});
//문서 상세조회 - 의견 컨텍스트 메뉴(우클릭 시 나오는 context)
$('.opinion_wrapper > table').find('tbody > tr > td').mousedown(function(e){
var context_menu = $('.opinion_contextMenu');
if(e.which == 3) {
var offsetX = e.pageX - $('.opinion_wrapper').offset().left;
var offsetY = e.pageY - $('.opinion_wrapper').offset().top;
context_menu.css({
left:offsetX,
top:offsetY
});
context_menu.removeClass('hide');
$(this).parents('table').find('tr').removeClass('current');
$(this).parent().addClass('current');
} else if(e.which == 1) {
context_menu.addClass('hide');
}
});
//트리 컨텍스트 메뉴
$('.lnb_tree_list > ul').find('li[class^="jqtree_common"]').bind("mousedown", function(e){
var context_menu = $('.doc_tree_context');
if(e.which == 3) {
context_menu.css({
left:e.pageX+10,
top:e.pageY+10
});
if(context_menu.hasClass('hide')){
context_menu.removeClass('hide');
}
} else if(e.which == 1) {
context_menu.addClass('hide');
}
});
//트리 컨텍스트 메뉴요소 클릭 시
$('.doc_tree_context > ul').find('li > a').bind("click", function(e){
e.preventDefault();
var clsNm = $(this).attr('class');
if(clsNm == 'modify') {
} else if(clsNm == 'delete') {
var disabledFolder = false;
var hasSubFolderFile = false;
if(disabledFolder) {
jAlert("비활성화된 폴더가 존재합니다.<br>확인하시기 바랍니다.", "확인", 1);
}
if(hasSubFolderFile){
jAlert("폴더삭제시 하위 폴더 또는<br>문서 존재 시 삭제할 수 없습니다.<br>하위 폴더 및 문서를 확인하시기 바랍니다.", "확인", 1);
}
if(!disabledFolder && !hasSubFolderFile) {
jConfirm("삭제하시겠습니까?", "확인", 1, function(r){
});
}
} else if(clsNm == 'move') {
} else if(clsNm == 'copy') {
} else if(clsNm == 'favorite') {
} else if(clsNm == 'tempbox') {
}
});
//휴지통 컨텍스트 메뉴 클릭
$('.mydocbin_context_menu > ul').find('li > a').bind("click", function(e){
e.preventDefault();
var clsNm = $(this).attr('class');
if(clsNm == 'delete') {
jConfirm("휴지통의 모든 문서를 영구삭제 하시겠습니까?", "확인", 2, function(r){
$('.mydocbin_context_menu').addClass('hide');
});
} else if(clsNm == 'restore') {
jConfirm("선택한 문서를 복원 하시겠습니까?", "확인", 2, function(r){
$('.mydocbin_context_menu').addClass('hide');
$('.doc_folder_choose').removeClass('hide');
$('.doc_folder_choose').prev().removeClass('hide');
});
}
});
//즐겨찾기 트리 우클릭 - 컨텍스트 메뉴 호출
$('.favorite_folder_tree').mousedown(function(e){
var context_menu = $('.favorite_context_menu');
if(e.which == 3) {
var offsetX = e.pageX - $('.favorite_choose_cnts').offset().left;
var offsetY = e.pageY - $('.favorite_choose_cnts').offset().top;
context_menu.css({
left:offsetX,
top:offsetY
});
context_menu.removeClass('hide');
} else if(e.which == 1) {
context_menu.addClass('hide');
}
});
//즐겨찾기 컨텍스트 메뉴 > 메뉴 항목 클릭
$('.favorite_context_menu > ul').find('li > a').bind("click", function(e){
e.preventDefault();
var clsNm = $(this).attr('class');
if(clsNm == 'register') {
//즐겨찾기 등록 열기
$('.favorite_register').removeClass('hide');
$('.favorite_register').prev().removeClass('hide');
lyrPopupWindowResize($('.favorite_register'));
} else if(clsNm == 'modify') {
//즐겨찾기 수정 열기
$('.favorite_modify').removeClass('hide');
$('.favorite_modify').prev().removeClass('hide');
lyrPopupWindowResize($('.favorite_modify'));
} else if(clsNm == 'delete') {
jConfirm("삭제하시겠습니까?", "확인", 1, function(r){
if(r){
} else {
}
});
} else if(clsNm == 'move') {
//즐겨찾기 이동 열기
$('.favorite_choose_close').trigger('click');
$('.favorite_move').removeClass('hide');
$('.favorite_move').prev().removeClass('hide');
lyrPopupWindowResize($('.favorite_move'));
}
$('.favorite_context_menu').addClass('hide');
});
//협업상세 - 수신자 툴팁
// $('.approvalResult_receiver').mouseover(function(){
// var div = $(this).find('div');
//
// if(div.hasClass('hide')){
// div.removeClass('hide');
// }
// }).mouseout(function(){
// $(this).find('div').addClass('hide');
// });
//메시지창, 컨텍스트 메뉴 다른 곳 클릭 시 없어지게 하기
$('body').on("click", '.wrap', function(e){
var headerTargetDiv = $(e.target).parents(".header_user_menu").attr('class');
var extFunctionTargetDiv = $(e.target).parents(".tbl_extFunction").attr('class');
var integratedSearchTargetDiv = $(e.target).parents('.search_detail').find('.integrated_dropDown_menu').attr('class');
if(headerTargetDiv == undefined) {
//헤더 사용자 메뉴 hide
var headerUserMenu = $('.header_user_menu').find('div[class*="dropDown_menu"]');
$.each(headerUserMenu, function(i){
headerUserMenu.eq(i).addClass('hide');
});
}
if(extFunctionTargetDiv == undefined) {
//추가기능 드롭다운 메뉴 hide
var dd_txt = $('.tbl_extFunction > a.dropDown_txt');
var extFuncDDMenu = $('.extFunction_dropDown_wrapper');
if(!extFuncDDMenu.hasClass('hide')){
dd_txt.trigger("click");
}
}
// 문서상세보기 FocusOut - 소스정리대상
var detailSearchDiv = $(e.target).parents(".srch_detail_dropDown").attr('class');
if(detailSearchDiv == undefined) {
if(!$(".detail_dropDown_menu").hasClass('hide')) {
$(".detail_dropDown_menu").addClass('hide');
$("#docDetailDropDown").removeClass('up');
$("#docDetailDropDown").addClass('down');
}
}
if(integratedSearchTargetDiv == undefined) {
//통합검색 메뉴 hide
if(!$('#searchDetailView').hasClass('hide')){
$('#searchDetailView').addClass('hide');
}
}
});
});
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/exception/BaseException.java
package kr.co.exsoft.eframework.exception;
import java.text.MessageFormat;
import java.util.Locale;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.MessageSource;
/**
 * Root of the application's exception inheritance hierarchy.
 * @author <NAME>
 * @since 2014.07.15
 * @version 3.0
 *
 */
public class BaseException extends Exception {

	protected static final Log logger = LogFactory.getLog(BaseException.class);

	private static final long serialVersionUID = 1L;

	/** Fully resolved, human-readable message returned by {@link #getMessage()}. */
	protected String message = null;
	/** Message-bundle key the message was resolved from (null when not used). */
	protected String messageKey = null;
	/** Parameters substituted into the message pattern (null when not used). */
	protected Object[] messageParameters = null;
	/** Original cause, exposed through {@link #getWrappedException()}. */
	protected Exception wrappedException = null;

	public String getMessage() {
		return this.message;
	}

	public void setMessage(String message) {
		this.message = message;
	}

	public String getMessageKey() {
		return this.messageKey;
	}

	public void setMessageKey(String messageKey) {
		this.messageKey = messageKey;
	}

	public Object[] getMessageParameters() {
		return this.messageParameters;
	}

	public void setMessageParameters(Object[] messageParameters) {
		this.messageParameters = messageParameters;
	}

	public Throwable getWrappedException() {
		return this.wrappedException;
	}

	public void setWrappedException(Exception wrappedException) {
		this.wrappedException = wrappedException;
	}

	public BaseException() {
		this("BaseException without message", null, null);
	}

	public BaseException(String defaultMessage) {
		this(defaultMessage, null, null);
	}

	public BaseException(Throwable wrappedException) {
		this("BaseException without message", null, wrappedException);
	}

	public BaseException(String defaultMessage, Throwable wrappedException) {
		this(defaultMessage, null, wrappedException);
	}

	/**
	 * Root constructor of the non-MessageSource variants.
	 *
	 * @param defaultMessage    message text, optionally a {@link MessageFormat} pattern
	 * @param messageParameters values substituted into the pattern (may be null)
	 * @param wrappedException  original cause (may be null)
	 */
	public BaseException(String defaultMessage, Object[] messageParameters,Throwable wrappedException) {
		super(wrappedException);
		// BUG FIX: the wrappedException field was never assigned by any constructor,
		// so getWrappedException() always returned null even when a cause was given.
		if (wrappedException instanceof Exception) {
			this.wrappedException = (Exception) wrappedException;
		}
		String userMessage = defaultMessage;
		if (messageParameters != null) {
			userMessage = MessageFormat.format(defaultMessage,messageParameters);
		}
		this.message = userMessage;
	}

	public BaseException(MessageSource messageSource, String messageKey) {
		this(messageSource, messageKey, null, null, Locale.getDefault(), null);
	}

	public BaseException(MessageSource messageSource, String messageKey,
			Throwable wrappedException) {
		this(messageSource, messageKey, null, null, Locale.getDefault(),wrappedException);
	}

	public BaseException(MessageSource messageSource, String messageKey,
			Locale locale, Throwable wrappedException) {
		this(messageSource, messageKey, null, null, locale, wrappedException);
	}

	public BaseException(MessageSource messageSource, String messageKey,
			Object[] messageParameters, Locale locale,Throwable wrappedException) {
		this(messageSource, messageKey, messageParameters, null, locale,wrappedException);
	}

	public BaseException(MessageSource messageSource, String messageKey,
			Object[] messageParameters, Throwable wrappedException) {
		this(messageSource, messageKey, messageParameters, null, Locale.getDefault(), wrappedException);
	}

	public BaseException(MessageSource messageSource, String messageKey,
			Object[] messageParameters, String defaultMessage,Throwable wrappedException) {
		this(messageSource, messageKey, messageParameters, defaultMessage,Locale.getDefault(), wrappedException);
	}

	/**
	 * Root constructor of the MessageSource variants: resolves the message from
	 * the bundle by key/locale, falling back to defaultMessage when the key is
	 * missing.
	 */
	public BaseException(MessageSource messageSource, String messageKey,Object[] messageParameters,
			String defaultMessage, Locale locale,Throwable wrappedException) {
		super(wrappedException);
		// BUG FIX: keep the cause so getWrappedException() reports it (same fix as
		// the other root constructor in this edit).
		if (wrappedException instanceof Exception) {
			this.wrappedException = (Exception) wrappedException;
		}
		this.messageKey = messageKey;
		this.messageParameters = messageParameters;
		this.message = messageSource.getMessage(messageKey, messageParameters,defaultMessage, locale);
	}
}
<file_sep>/EDMS3/WebContent/js/popup/excelUploadWindow.js
var excelUploadWindow = {
// 0. 초기화
init : {
},
// 1. 팝업
open : {
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : function() {
},
//4. 화면 이벤트 처리
event : {
// 그룹일괄등록 메인
groupUploadView : function() {
// 부서 일괄업로드 - 창 닫기
$('.window_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.batch_upload').addClass('hide');
$('.batch_upload_wrapper').addClass('hide');
});
// 부서 일괄업로드 구성 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.batch_upload_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.batch_upload').addClass('hide');
});
$("#upFile").val(""); // input type=file 초기화
exsoft.util.layout.divLayerOpen("batch_upload_wrapper", "batch_upload");
},
// 그룹다운로드
groupDownload : function() {
$(location).attr("href", exsoft.contextRoot + "/common/downExcel.do?type=GROUP");
},
//사용자일괄등록 메인
userUploadView : function() {
//즐겨찾기 구성 - 창 닫기
$('.window_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.user_batchUpload').addClass('hide');
$('.user_batchUpload_wrapper').addClass('hide');
});
//즐겨찾기 구성 - 창 닫기 : 음영진 부분 클릭 시 닫기
$('.user_batchUpload_wrapper').bind("click", function(){
$(this).addClass('hide');
$('.user_batchUpload').addClass('hide');
});
$("#userUpFile").val(""); // input type=file 초기화
exsoft.util.layout.divLayerOpen("user_batchUpload_wrapper", "user_batchUpload");
},
// 사용자다운로드
userDownload : function() {
$(location).attr("href", exsoft.contextRoot + "/common/downExcel.do?type=USER");
}
},
//5. 화면 UI 변경 처리
ui : {
},
//6. callback 처리
callback : {
},
}<file_sep>/EDMS3/src/kr/co/exsoft/quartz/controller/ExpireDocCheckQuartz.java
package kr.co.exsoft.quartz.controller;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.document.service.DocumentService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.quartz.service.QuartzService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobExecutionContext;
/**
 * Batch job that expires documents whose retention period has passed.
 *
 * @author Package Team
 * @since 2014. 10. 01.
 * @version 1.0
 *
 */
public class ExpireDocCheckQuartz extends QuartzJob {

	protected static final Log logger = LogFactory.getLog(ExpireDocCheckQuartz.class);

	// BUG FIX: the pattern used lower-case "hh" (12-hour clock, 01-12) without an
	// am/pm marker, so logged timestamps were ambiguous (e.g. 01:00 for both 1am
	// and 1pm); "HH" is the unambiguous 24-hour clock.
	// NOTE(review): SimpleDateFormat is not thread-safe; this instance field is
	// safe only if Quartz does not run concurrent executions of this job — confirm.
	protected SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

	/**
	 * Expires every document whose retention period has passed.
	 * Steps: (1) write a batch-work log row, (2) fetch the expired-document
	 * list, (3) run the expiry processing, (4) record the result
	 * (state/message/end time) in the batch log — step 4 runs even on failure.
	 *
	 * @param context Quartz execution context (unused beyond the framework contract)
	 */
	@Override
	protected void executeJob(JobExecutionContext context) {
		CommonService commonService = (CommonService)super.getBean("commonService");
		DocumentService documentService = (DocumentService)super.getBean("documentService");
		QuartzService quartzService = (QuartzService)super.getBean("quartzService");

		List<HashMap<String,Object>> expiredDocList = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> resultMap = new HashMap<String,Object>();
		Map<String,Object> result = new HashMap<String,Object>();
		HashMap<String,Object> param = new HashMap<String,Object>();

		// Expiry cut-off: documents are expired relative to the current time.
		String expiredDate = StringUtil.getCurrentTime();

		long work_idx = 0;
		long sTime = System.currentTimeMillis();
		try {
			logger.info("ExpireDocCheckQuartz START ="+df.format(sTime));
			// 1. Register the batch-work log entry.
			work_idx = commonService.commonNextVal(Constant.COUNTER_ID_BATCH_WORK);
			quartzService.batchWorkWrite(work_idx,Constant.WORK_BATCH,Constant.BATCH_EXPIRED_DOC);
			// 2. Fetch the expired-document list (all documents are candidates).
			param.put("workType",Constant.EXPIRED);
			param.put("expiredDate", expiredDate);
			expiredDocList = quartzService.batchDocList(param);
			if(expiredDocList != null && expiredDocList.size() > 0 ) {
				// 3. Process the expired documents (param is passed but unused by the service).
				result = documentService.expiredDocProc(expiredDocList, param);
				resultMap.put("message","현재버전 문서수::"+expiredDocList.size()+"/" + "모든 버전 문서수::"+result.get("total"));
			}else {
				resultMap.put("message","만기문서가 존재하지 않습니다.");
			}
			// 4. Mark the batch-work log entry as successful.
			resultMap.put("work_state",Constant.T);
		}catch(BizException e){
			logger.error(e.getMessage());
			resultMap.put("work_state",Constant.F);
			resultMap.put("message","비지니스 로직 에러");
		}catch(Exception e) {
			logger.error(e.getMessage());
			resultMap.put("work_state",Constant.F);
			resultMap.put("message","EXCEPTION ERROR");
		}finally{
			Date now = new Date();
			resultMap.put("work_idx",work_idx);
			resultMap.put("work_edate",df.format(now));
			try {
				// Update the batch log with end time / state / message.
				quartzService.batchWorkUpdate(resultMap);
			}catch(Exception e){
				logger.error(e.getMessage());
			}
		}
		long eTime = System.currentTimeMillis();
		logger.info("ExpireDocCheckQuartz END ="+df.format(eTime));
	}
}
<file_sep>/EDMS3/WebContent/js/common/common.js
/**
* EDMS HTML5 Global Common
*/
// Create the global exsoft namespace object unless another script already did.
if( typeof exsoft == 'undefined') {
	window.exsoft = {};
}
// context path — contextRoot is a global injected by the hosting page; TODO confirm
exsoft.contextRoot = contextRoot;
// id of the currently displayed noty notification div, none yet at load time
exsoft.notyDivId = null;
$.extend(exsoft, {
	/**
	 * Define a global object (namespace pattern).
	 * Walks the dot-separated path, creating an empty object for any missing
	 * segment, e.g. namespace("common.bind") ensures exsoft.common.bind exists.
	 * A leading "exsoft." segment is skipped so it is not nested twice.
	 * @param name : namespace name
	 */
	namespace : function ( name ) {
		var names = name.split(".");
		var topClass = exsoft;
		var i=0;
		for ( i=( names[0]=='exsoft'? 1 : 0); i<names.length; i++ ) {
			topClass[ names[i]] = topClass[names[i]] || {}; // typeof short-hand idiom: reuse existing object or create one
			topClass = topClass[names[i]];
		}
	},
	/**
	 * Used when inheriting only from a prototype.
	 * Builds an intermediate constructor whose prototype deep-merges the
	 * parent prototype with the child's extra members, then returns a new
	 * instance carrying prototype/superClass references.
	 * @param parent : namespace (constructor) to inherit the prototype from
	 * @param child : extra members defined on the inheriting object
	 *
	 */
	prototypeExtend : function( parent, child) {
		var fn = function() {}
		fn.prototype = $.extend( true, {}, parent.prototype, child);
		var sub = function(){};
		sub.prototype = new fn();
		//sub.prototype.constructor = sub;
		//sub.superClass = parent.prototype;
		// Inherited members not defined via prototype are not reachable through the chain, so attach them to the instance directly.
		var instance = new sub();
		instance.prototype = sub.prototype;
		instance.prototype.superClass = parent.prototype;
		return instance;
	}
});
// Pre-create the namespaces used throughout the EDMS front-end scripts.
exsoft.namespace('user');
exsoft.namespace("document");
exsoft.namespace("process");
exsoft.namespace("common.bind");
exsoft.namespace("common.file");
/***********************************************
 * loginUser
 **********************************************/
/**
 * Logged-in user state holder; fields are null until populated after login —
 * presumably by server-rendered script; verify against the login page. Note
 * this assignment replaces the empty object created by namespace('user').
 *
 * @namespace : exsoft.user
 *
 */
exsoft.user = {
	user_id : null,
	user_name : null,
	acl_menu_part : null,
	manage_group_id : null,
	manage_group_nm : null,
	user_email : null,
}; // exsoft.user end (original comment said "exsoft.util.error" — stale copy/paste)
/***********************************************
* document
**********************************************/
/**
* 문서 common
* namespace로 관리
*/
exsoft.document = {
//binder : new DataBinder("#documentWrite"),
commDocBinder : null,
wFileUploadJsonArr : new Array(),
actionType : null,
init : {
		// 1. Document detail: fetch one document and render every detail-pane section.
		// @param docId - id of the document to display (also stored globally for later actions)
		docDetailInit : function(docId){
			exsoft.document.prototype.gDocId = docId;
			//exsoft.util.common.formClear("frm_docListLayer_detail_search");
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:docId}, exsoft.contextRoot+"/document/documentDetail.do", "select",
				function(data,e) {
				exsoft.document.event.printPageList(data);// attached files
				exsoft.document.event.printDocumentVO(data);// basic info
				// ACL (permission) section
				$("#docDetailAclName").html(data.aclDetail.acl_name); // ACL name
				exsoft.util.table.tableDocumentAclItemPrintList('detail_docAclItemList', data.aclItemList);
				exsoft.util.table.tableDocumentAclItemPrintList('detail_docExAclItemList', data.aclExItemList);
				exsoft.document.event.printRefDoc(data);// related documents
				// total comment count
				$(".opinion_cnt").text(data.commentCnt);
			});
		},
//1 문서 기본
docWriteInit : function(){
var folder_id = $("#documentWrite input[name=folder_id]").val();
var folder_path = $("#documentWrite input[name=folder_path]").val();
//메인에서 들어올 경우
if(folder_id == null || folder_id == ""){
exsoft.document.commDocBinder = new DataBinder("#documentWrite");
exsoft.document.commDocBinder.set("actionType", Constant.ACTION_CREATE);
exsoft.document.actionType = 'C';
exsoft.document.init.initDdslick();
$("#documentWrite").validation.clearShadowBox();
// 1.폴더에 문서등록 권한이 있는지 체크한다.
//exsoft.user.acl_menu_part = "${acl_menu_part}";
//alert(exsoft.user.acl_menu_part);
// 파일 관련
exsoft.common.file.init.initSettings('documentfileuploader', exsoft.document.callback.fileupload);
exsoft.document.commDocBinder.set("map_id","MYDEPT");
}else{
var jsonObject = { "folder_id":folder_id,"folder_path":folder_path};
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject,exsoft.contextRoot+"/document/docCommonRegist.do", "docRegistForm",
function(data,e) {
if(data.result == "true") {
// 2.문서등록 폼 초기화 처리
//docRegistFrm();
// 3. 공통처리 - 기본문서유형선택 및 map_id
// 기본폴더ID/MAP_ID :: 폴더선택 화면에서 변경시 폴더ID는 변경됨
exsoft.document.commDocBinder.set("folder_id",folder_id);
exsoft.document.commDocBinder.set("folder_path",folder_path);
exsoft.document.commDocBinder.set("map_id",data.folderVO.map_id);
// 4. 문서유형 선택유무 제어
wDocType = data.defaultType; // 폴더선택화면으로 넘겨줄 기본문서유형
exsoft.document.commDocBinder.set("doc_type",data.defaultType);
if(data.isChangeType == "FALSE") {
$("#documentWrite select[name=doc_type]").prop("disabled",true);
exsoft.document.commDocBinder.set("isChangeType","FALSE");
}else {
$("#documentWrite select[name=doc_type]").prop("disabled",false);
exsoft.document.commDocBinder.set("isChangeType","TRUE");
}
exsoft.document.commDocBinder.set("folderIsType",data.is_type);
// 개인함의 경우 HIDDEN처리
// sercurityView - 보안등급/조회등급 || aclLockView - 권한변경유무 ||
// keywordView - 키워드 || multiFolderView - 다차원분류 || workSpaceView - 권한/관련문서
//var mypageHide = new Array('#sercurityView', '#aclLockView', '#isShareView', '#keywordView', '#multiFolderView','#workSpaceView','#authorView');
if(data.folderVO.map_id == "MYPAGE") {
// 보존년한 영구 기본선택
$("#documentWrit select[name=preservation_year]").val(0);
// 확장문서유형
exsoft.documenet.event.setExtendTypeAttrItem(data.defaultType);
}else {
// 권한 :: 권한속성 아이템
$("#wAclName").html(data.aclDetail.acl_name);
//alert(exsoft.document.commDocBinder.set("acl_id",data.aclDetail.acl_name));
exsoft.util.table.tableDocumentAclItemPrintList('docmentWrite_acl',data.aclItemList);
exsoft.util.table.tableDocumentAclItemPrintList('docmentWrite_extAcl',data.aclExItemList);
// 확장문서유형
exsoft.documenet.event.setExtendTypeAttrItem(data.defaultType);
// 보존년한 5년 기본선택
$("#documentWrite select[name=preservation_year]").val(5);
}
// upload 상태값 초기화
exsoft.common.file.prototype.wUploadObj.fCounter = 0;
exsoft.common.file.prototype.wUploadObj.sCounter = 0;
exsoft.common.file.prototype.wUploadObj.tCounter = 0;
exsoft.common.file.prototype.wUploadObj.upCounter = 0;
}else {
jAlert(data.message);
}
});
}
},
docUpdateInit : function(){
exsoft.document.commDocBinder = new DataBinder("#documentUpdate");
exsoft.document.commDocBinder.set("actionType", Constant.ACTION_CREATE);
exsoft.document.actionType = 'C';
// 문서유형 select-box
//exsoft.document.init.initDdslick();
$("#documentUpdate").validation.clearShadowBox();
// 파일 관련
exsoft.common.file.init.initSettings('documentupdatefileuploader', exsoft.document.callback.fileupload);
},
		// Initialize the ddslick-styled select widgets on the registration form
		// (document type / retention years / access grade) and bind each selection
		// into the shared form data-binder.
		// @param type - currently unused; kept for call-site compatibility
		initDdslick : function(type){
			// document-type selectbox
			exsoft.util.common.ddslick('#register_docType', 'srch_type1', 'doc_type', 85, function(divId, selectedData){
				exsoft.document.commDocBinder.set("doc_type", selectedData.selectedData.value);
				// show the extended attributes matching the chosen document type
				// (folder registration permission is checked elsewhere)
				exsoft.document.event.setExtendTypeAttrItem(selectedData.selectedData.value);
			});
			// retention-years selectbox
			exsoft.util.common.ddslick('#register_preservationyear', 'srch_type1', 'preservation_year', 58, function(divId, selectedData){
				exsoft.document.commDocBinder.set("preservation_year", selectedData.selectedData.value);
			});
			// access-grade selectbox
			exsoft.util.common.ddslick('#register_accessgrade', 'srch_type1', 'access_grade', 58, function(divId, selectedData){
				exsoft.document.commDocBinder.set("access_grade", selectedData.selectedData.value);
			});
		},
	},
},
open : {
// 메일 수신자 선택
userSelectUrl : "/document/reciverUserSelect.do",
userSelectTarget : "mailReciverUserFrm",
// 쪽지보내기 사용자선택
docVersionDetailUrl : "/document/docVersionDetail.do",
docVersionDetailTarget : "docVersionDetailFrm",
// 메일수신자 선택 - 창열기
reciverDetail : function() {
this.contextRoot = exsoft.contextRoot;
this.openWindow(this.userSelectTarget,700,630);
this.formUserInit(document.mailReciverUserFrm,this.contextRoot+this.userSelectUrl,this.userSelectTarget);
},
// 사용자 환경설정 새창 CALL
versionDetail : function(docId) {
this.contextRoot = exsoft.contextRoot;
this.openWindow(this.docVersionDetailTarget,680,630);
this.formInit(document.docVersionDetailFrm,this.contextRoot+this.docVersionDetailUrl,docId,this.docVersionDetailTarget);
},
// 새창 띄우기
openWindow : function(targetName,width,height) {
var win= "";
win = window.open("",targetName,"width="+width+", height="+height+", toolbar=no, menubar=no, scrollbars=no, resizable=no" );
win.focus(); // 새창의 경우 항상 맨위로
},
formInit : function(formName,url,docId,targetName) {
var frm = formName;
frm.action = url;
frm.method = "post";
frm.target = targetName;
frm.docId.value = docId;
frm.submit();
},
formUserInit : function(formName,url,targetName) {
var frm = formName;
frm.action = url;
frm.method = "post";
frm.target = targetName;
frm.submit();
},
},
layer : {
// 문서상세조회 Layer OPEN
docCommonFrm : function(wrapperClass,layerClass,docId) {
exsoft.util.layout.divLayerOpen(wrapperClass,layerClass);
exsoft.document.init.docDetailInit(docId);
},
// 문서등록 Layer OPEN
docWriteCommonFrm : function(wrapperClass,layerClass) {
exsoft.util.layout.divLayerOpen(wrapperClass,layerClass);
exsoft.document.init.docWriteInit();
},
// 문서수정 Layer OPEN
docUpdateCommonFrm : function(wrapperClass,layerClass) {
exsoft.util.layout.divLayerOpen(wrapperClass,layerClass);
exsoft.document.init.docUpdateInit();
},
},
event : {
		// Open the single-folder picker (result delivered to the folderFind callback).
		selectFolderFind : function() {
			selectSingleFolderWindow.init(exsoft.document.callback.folderFind);
		},
		// Open the related-document picker window.
		selectRelDocWindow : function() {
			selectrelativeDocWindow.init(exsoft.document.callback.relDocWindow);
		},
		// Open the ACL picker to change the document's permissions.
		changeDocumentAcl : function() {
			selectAclWindow.initDocument(""/*AclId*/, Constant.ACL.TYPE_DOC, ""/*docId*/,exsoft.document.callback.selectAcl);
		},
		// Show the extended (custom) attributes for the selected document type. (click.ddslick)
		// @param selectValue - the selected type_id
		setExtendTypeAttrItem : function(selectValue){
			var jsonObject = {"type_id":selectValue,"is_extended":"T"};
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject,exsoft.contextRoot+'/type/attrList.do', '#documentWrite_docAttrView', function(data, param){
				exsoft.util.table.tableExtendTypeItemPrintList('documentWrite_docAttrView', data.list, exsoft.document.actionType);
				if(data.records != 0){
					$(param).removeClass('hide');
					exsoft.document.commDocBinder.set("is_extended", 'T');
					// if the table contains select boxes, apply ddslick to them
					// NOTE(review): this selector targets '#processWrite_docAttrView' although the
					// rows were rendered into 'documentWrite_docAttrView' just above - looks like a
					// copy/paste leftover; confirm which container is intended before changing.
					var $extendType = $('#processWrite_docAttrView tbody').find('input, select');
					$($extendType).each(function(idx){
						var name = $(this).attr('name');
						if($(this).is('select')){
							$(this).attr('id', name);
							$(this).attr('data-select', 'true');
							exsoft.util.common.ddslick(name,'srch_type1',name,80, function(divId, selectValue){
								exsoft.document.commDocBinder.set(name, selectValue);
							});
						}else{
							$(this).attr('data-bind', name);
						}
					});
					exsoft.document.commDocBinder.bindingElement(); // bind every data-bind element
				}else{
					$(param).addClass('hide');
					exsoft.document.commDocBinder.set("is_extended", 'F');
				}
			}); // extended-attribute set
		},
		// Open the multi-dimensional classification (multi-folder) picker for registration.
		registDocSelectMultiFolderFind : function() {
			var doc_type = exsoft.document.commDocBinder.get("folderIsType");
			var map_id = exsoft.document.commDocBinder.get("map_id");
			//var doc_type = document.documentWrite.folderIsType.value;
			//var map_id = document.documentWrite.map_id.value;
			selectMultiFolderWindow.init(exsoft.document.event.registDocMultiFolderFind, map_id, "WORK_MYDEPT", true, doc_type);
		},
		// Callback invoked with the folders chosen in the multi-folder picker.
		registDocMultiFolderFind : function(obj) {
			//$("#multiFolder").empty(); // reset previously selected multi-folders
			exsoft.document.event.multiFolderAdd(obj,'documentWrite',document.documentWrite.folder_id.value,'multiFolder');
		},
		/**
		 * Append the selected multi-dimensional classifications to the write/update form.
		 * The default folder, and folders already listed, are skipped.
		 * @param obj - array of selected folder objects ({id, fullPath})
		 * @param formId - id of the surrounding form
		 * @param defaultFolderValue - the document's default folder id
		 * @param divIds - id of the container receiving the list
		 */
		multiFolderAdd : function(obj,formId,defaultFolderValue,divIds) {
			var buffer = "";
			buffer += "<ul>";
			$.each(obj, function(index, result) {
				// add only folders that are neither the default folder nor already listed
				if(!exsoft.document.event.chkMultiFolderList(formId,'multi_folder',this.id) &&
					this.id != defaultFolderValue ) {
					var divNames = exsoft.util.common.uniqueId();
					buffer += "<li>";
					buffer += "<input type='hidden' class='' name='multi_folder' value='"+this.id+"'>";
					buffer += "<input type='text' name='multi_folder_path' readonly value='"+this.fullPath.join("/")+"'>";
					//buffer += "<span class='x_button' onclick=\"javascript:base.removeDivIds('"+divNames+"');\"></span><br />";
					buffer += "</li>";
				}
			});
			buffer += "</ul>";
			$(".doc_classification_list").removeClass("hide");
			$('#'+divIds).append(buffer);
		},
chkMultiFolderList : function(formId,inputName,value){
var ret = false;
$("#"+formId + " "+ " input[name='"+inputName+"']").each(function() {
if(this.value == value) {
ret = true;
return false; // break
}
});
return ret;
},
		// 1-1. Render the attached-file list for the detail view.
		printPageList : function(data) {
			// clear previous content
			$( "#detail_pageList").empty();
			var pageList = data.pageList;
			//var tableId = "#detail_pageList";
			var buffer = "";
			exsoft.document.prototype.gPageList = data.pageList; // kept for the URL-copy feature
			//base.tableRemoveAll(tableId); // remove the existing list
			//base.showHideInit('vAttachShowHide','vAttachView'); // attachments hidden by default
			$(".attach_cnt").html(pageList.length+"개"); // show the attachment count
			//$("#attach_cnt").html("({0})".format(pageList.length));
			buffer += "<ul>";
			$(pageList).each(function(index) { // loop over every attachment
				// pick the icon matching the file extension
				var imgext = exsoft.document.ui.imgExtension(pageList[index].page_name);
				buffer += "<li class='attach_docs_list'>";
				//buffer += "<input type='checkbox' name='downChk' value='{0}'></input>'.format("+pageList[index].page_id+")"; // number
				buffer += "<a href='#'><img src='"+exsoft.contextRoot+"/img/icon/"+imgext+"' alt='' title=''>"+pageList[index].page_name+"</a>";
				buffer += "<div class='download_detail'>";
				buffer += "<span class='download_filesize'>"+pageList[index].fsize+"</span>";
				buffer += "<a href='#' class='download'>";
				buffer += "<img src='"+exsoft.contextRoot+"/img/icon/attach_download1.png' alt='' title=''>";
				buffer += "</a></div>";
				buffer += "</li>";
			});
			buffer += "</ul>";
			$( "#detail_pageList").append(buffer);
		},
		// Increment and persist the document's read count (xr_document.read_cnt).
		// @param readcnt - current read count (string or number)
		updateReadCount : function(readcnt) {
			var cnt = parseInt(readcnt) + 1;
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:exsoft.document.prototype.gDocId,readcnt:cnt}, exsoft.contextRoot+"/document/doReadCountUpdate.do", "comment",
				function(data, e) {
					if(data.result == "true") {
					}else {
						jAlert(data.message, "확인", 0);
					}
			});
		},
//1-2 문서 기본정보 출력
printDocumentVO : function(data) {
//데이터 초기화
$('#docDetailBasicInfo').empty();
var docVO = data.documentVO;
var buffer = "";
// 다차원분류 초기화
$("#multiFolder").empty();
// 개인문서함 > 다차원분류 tr hide
/*var mypageHide = new Array('#detail_multiLink_view');
if(docVO.map_id == "MYPAGE"){
base.mypageHideShow(mypageHide,false);
} else {
base.mypageHideShow(mypageHide,true);
}
*/
//조회수 갱신
exsoft.document.event.updateReadCount(docVO.read_cnt);
//첨부파일 총량
$(".attach_size").html("("+exsoft.util.common.bytesToSize(docVO.page_total,1)+")");
// 전역변수 설정
exsoft.document.prototype.getDocVO = docVO;
exsoft.document.prototype.gRootId = docVO.root_id == "" ? docVO.doc_id : docVO.root_id;
exsoft.document.prototype.gAclLevel = docVO.acl_level == "" ? "NONE" : docVO.acl_level;
exsoft.document.prototype.gAcl_checkoutCancel = docVO.acl_checkoutCancel;
exsoft.document.prototype.gFolderPath = data.folderPath; // URL 복사
buffer += "<tr><th>문서명</th><td colspan='3'>"+docVO.doc_name+"</td></tr>";
buffer += "<tr><th>기본폴더</th><td colspan='3'>"+data.folderPath+"</td></tr>";
buffer += "<tr><th>문서유형</th><td>"+docVO.type_name+"</td>";
buffer += "<th>보존연한</th><td>"+docVO.preservation_year == "0" ? "영구" : docVO.preservation_year + "년</td></tr>";
buffer += "<tr><th>보안등급</th><td>"+exsoft.util.common.findCodeName(data.securityList,docVO.security_level)+"</td>";
buffer += "<th>조회등급</th><td>"+exsoft.util.common.findCodeName(data.positionList,docVO.access_grade)+ "</td></tr>";
//buffer += "<tr><th>보안등급</th><td>"+docVO.security_level+"</td>";
//buffer += "<th>조회등급</th><td>"+docVO.access_grade+ "</td></tr>";
buffer += "<tr><th>등록자(소유자)</th><td>"+docVO.creator_name + " [" + docVO.owner_name + "]</td>";
buffer += "<th>등록일</th><td>"+docVO.create_date+ "</td></tr>";
buffer += "<tr><th>수정자</th><td>"+docVO.updater_name + "</td>";
buffer += "<th>수정일</th><td>"+docVO.update_date+ "</td></tr>";
if(docVO.map_id != "MYPAGE") {
if(data.multiFolderList.length > 0){
buffer += "<tr><th>다차원 분류</th><td colspan='3'>"+docVO.doc_name+"</td></tr>";
for(var m=0; m < data.multiFolderList.length; m++) {
//var divNames = exsoft.util.common.uniqueId();
buffer += data.multiFolderList[m].folder_path;
}
buffer += "</td></tr>";
//$('#detail_multiLink').append(buffer);
}
}
buffer += "<tr><th>키워드</th><td colspan='3'>"+docVO.keyword+"</td></tr>";
//$("#detail_keyword").html(docVO.keyword);
// DaumEditor View mode
buffer += "<tr><th>설명</th><td colspan='3'>"+docVO.doc_description.replace('<','<').replace('& lt;','<').replace('>', '>').replace('& gt;', '>')+"</td></tr>";
$('#docDetailBasicInfo').append(buffer);
//$("#vtemp_content").html(docVO.doc_description.replace('<','<').replace('& lt;','<').replace('>', '>').replace('& gt;', '>'));
//$('#vIframe_editor').attr("src","${contextRoot}/editor_7.4.5/doc_view.jsp");
$("#detail_doc_version").html("Ver " + docVO.version_no);
if (docVO.is_locked != "T") {
$("#btn_detail_checkout, #detail_docLockIcon").hide();
} else {
$("#btn_detail_checkout, #detail_docLockIcon").show();
$("#lockerInfo").text("반출자 :" + docVO.lock_owner_name);
$("#lockerDate").text("반출일시 :" + docVO.lock_date );
}
},
//1-3. 관련 문서 출력
printRefDoc : function(data) {
//데이터 초기화
$('#docDetailRefInfo').empty();
var refDocList = data.refDocumentList;
//alert(refDocList.size());
var buffer = "";
// 데이터가 없을 경우 관련문서를 표시하지 않는다
if(refDocList.length == 0 ){
$(".doc_detail_relative").addClass("hide");
}else{
buffer += "<ul>";
//테스트 해보기
$(refDocList).each(function(index) { // 관련 문서 갯수만큼 루프
//$(tableId + " tr:last td:eq(0)").html("<a href=\"javascript:initDocumentViewWindow('{0}');\">{1}</a>".format(this.doc_id, this.doc_name)); // 제목
//$(tableId + " tr:last td:eq(1)").html(this.creator_name); // 등록자
//$(tableId + " tr:last td:eq(2)").html(this.create_date); // 등록일
buffer += "<li class='relative_docs_list'>";
buffer += "<input type='checkbox' name='' class='relative_docs_checkbox' value=''>";
//화장자별 아이콘 변경
var imgext = exsoft.document.ui.imgExtension(this.doc_name);
buffer += "<a href='#'><img src=" + exsoft.contextRoot +"/img/icon/"+imgext+"' alt='' title=''></a>";
buffer += "<div class='download_detail'>";
buffer += "<span class='download_filesize'></span>";
buffer += "<a href='#' class='download'>";
buffer += "<img src=" + exsoft.contextRoot +"/img/icon/attach_download1.png' alt='' title=''></a>";
buffer += "<a href='#' class='contextMenu'><img src=" + exsoft.contextRoot +"/img/icon/attach_context.png' alt='' title=''></a>";
buffer += "<div class='relative_download_context'>";
buffer += "<ul>";
buffer += "<li><a href='#' class='modify_file'><span class='left'></span><span class='right'>파일수정</span></a></li>";
buffer += "<li><a href='#' class='save_modified'><span class='left'></span><span class='right'>수정파일저장</span></a></li>";
buffer += "<li><a href='#' class='open_modified'><span class='left'></span><span class='right'>수정파일열기</span></a></li>";
buffer += "<li><a href='#' class='cancel_modify'><span class='left'></span><span class='right'>수정취소</span></a></li>";
buffer += "</ul>";
buffer += "</div>";
buffer += "</div>";
buffer += "</li>";
});
buffer += "</ul>";
$('#docRelativeTotal').html(refDocList.length);
$('#docDetailRefInfo').append(buffer);
}
},
		// 2. Fetch every version of the current document and render the list.
		getDocumentVersionList : function() {
			// request parameters
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:exsoft.document.prototype.gDocId,table_nm:"XR_DOCUMENT"}, exsoft.contextRoot+"/document/documentVersionList.do", "version",
				function(data, e) {
					exsoft.document.event.printVersionList(data);
			});
		},
		// 2-1. Render the version list.
		// NOTE(review): the inner wrapper is generated with id='docVerAttach' for every
		// version row, producing duplicate ids in the DOM - confirm whether a class was intended.
		printVersionList : function(data) {
			// clear previous content
			$("#detaildocVersionList").empty();
			$("#docVerAttach").empty();
			var versionList = data.docAllVersionList;
			var buffer = "";
			$(versionList).each(function(index) {
				// highlight the current version row
				if (versionList[index].is_current == "T") {
					buffer += "<tr class='current'>";
				}else{
					buffer += "<tr>";
				}
				//buffer += "<input type='hidden' id='docVersionid' value='"+versionList[index].doc_id+"'>";
				buffer += "<td>"+versionList[index].version_no+"버전</td>";// version
				buffer += "<td>"+versionList[index].doc_name+"</td>";// document name
				buffer += "<td>"+versionList[index].creator_name+"</td>";// creator
				buffer += "<td>"+versionList[index].create_date+"</td>";// created date
				buffer += "</tr>";
				buffer += "<tr><td colspan='4'>";
				buffer += "<div class='doc_ver_attach'>";
				buffer += "<div class='doc_ver_title'>";
				buffer += "<span>";
				buffer += "<strong>버전파일</strong> : <span class='ver_file_cnt'>"+versionList[index].pageList.length+"</span>";
				buffer += "</span>";
				buffer += "<div class='ver_btn_wrapper'>";
				buffer += "<button class='' onClick=\"javascript:exsoft.document.event.versionDelete('" +versionList[index].doc_id + "')\">버전삭제</button>";
				buffer += "<button class='' onClick=\"javascript:exsoft.document.open.versionDetail('" +versionList[index].doc_id + "')\" >상세조회</button>";
				buffer += "</div></div>";
				buffer += "<div class='doc_ver_wrapper' id='docVerAttach'>";
				buffer += "<ul>";
				// when the version has attachments
				if (versionList[index].pageList != undefined && versionList[index].pageList.length > 0) {
					$(versionList[index].pageList).each(function(i){
						buffer += "<li class='doc_ver_list'>";
						buffer += "<a href='#'>";
						var imgext = exsoft.document.ui.imgExtension(versionList[index].pageList[i].page_name);
						buffer += "<img src='"+exsoft.contextRoot+"/img/icon/"+imgext+"' alt='' title=''>"+versionList[index].pageList[i].page_name+"</a>";
						buffer += "<div class='download_detail'>";
						buffer += "<span class='download_filesize'>"+exsoft.util.common.bytesToSize(versionList[index].pageList[i].page_size,1)+"</span>";
						buffer += "<a href='#' class='download'>";
						buffer += "<img src='"+exsoft.contextRoot+"/img/icon/attach_download1.png' alt='' title=''></a>";
						buffer += "</div></li>";
					});
					buffer +="</ul></div></div></td></tr>";
					//$("#docVerAttach").append(str);
				}
			});
			$("#detaildocVersionList").append(buffer);
			//var imgext = exsoftLayoutFunc.ui.imgExtension(pageList[index].page_name);
		},
		// 2-2. Delete a specific version (the base/root version cannot be deleted).
		// NOTE(review): after deletion the detail view is re-initialized with the id of
		// the version that was just deleted - confirm gDocId was not intended instead.
		versionDelete : function(docId) {
			if (docId == exsoft.document.prototype.gRootId) {
				jAlert("기본 문서 버전은 삭제할 수 없습니다.");
				return;
			}
			jConfirm('선택한 버전을 삭제하시겠습니까?', 'Confirm', function(r) {
				if (r)
					exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:docId}, exsoft.contextRoot+"/document/deleteVersion.do", "deleteVersion",
						function(data, e) {
							exsoft.document.init.docDetailInit(docId);
					});
			});
		},
		// 3. Fetch and display the document's history.
		getDocumentHistoryList : function() {
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:exsoft.document.prototype.gDocId}, exsoft.contextRoot+"/document/docHistoryList.do", "version",
				function(data, e) {
					exsoft.document.event.historyGridList(data);
			});
		},
		// 3-1. Build the jqGrid showing the document history.
		// NOTE(review): the grid loads its own data via `url`, so the `data` argument
		// passed by getDocumentHistoryList above is ignored here.
		historyGridList : function() {
			//docHistoryList
			$('#detaildocHistoryList').jqGrid({
				url: exsoft.contextRoot+'/document/docHistoryList.do',
				mtype:"post",
				datatype:'json',
				jsonReader:{
					root:'list'
				},
				colNames:['action_date','action_name','actor_nm','version_no','etc'],
				colModel:[
					{name:'action_date',index:'action_date',width:110, editable:false,sortable:true,resizable:true,align:'center'},
					{name:'action_name',index:'action_id',width:80, editable:false,sortable:true,resizable:true,align:'center'},
					{name:'actor_nm',index:'actor_nm',width:70, editable:false,sortable:true,resizable:true,align:'center'},
					{name:'version_no',index:'version_no',width:50, editable:false,sortable:true,resizable:true,align:'center'},
					{name:'etc',index:'etc',width:150, editable:false,sortable:false,resizable:true,align:'center',
						// note column: describe move / ownership-transfer actions
						formatter : function (cellValue, option, rowObject) {
							var noteStr = "";
							if (rowObject.action_id == "MOVE") {
								noteStr = "[{0}]폴더에서 [{1}]폴더로 이동".format(rowObject.before_nm, rowObject.after_nm);
							} else if (rowObject.action_id == "CHANGE_CREATOR") {
								noteStr = "[{0}]에서 [{1}]로 소유권 이전".format(rowObject.before_nm, rowObject.after_nm);
							}
							return noteStr;
						}
					},
				],
				autowidth:true,
				height:"auto",
				viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
				sortname : "action_date",
				sortorder:"desc",
				//scroll: true,
				rowNum : 20,
				rowList : exsoft.util.grid.listArraySize(),
				emptyDataText: "조회된 결과가 없습니다.",
				// pager:'#historyGridPager',
				caption:'문서이력',
				rownumbers:true,
				rownumWidth:40,
				scroll : true, // enable virtual scroll
				postData : {doc_id:exsoft.document.prototype.gDocId}
				,loadBeforeSend: function() {
					exsoft.util.grid.gridNoDataMsgInit('detaildocHistoryList');
					exsoft.util.grid.gridTitleBarHide('detaildocHistoryList');
				}
				,loadComplete: function() {
					if ($("#detaildocHistoryList").getGridParam("records")==0) {
						exsoft.util.grid.gridPagerViewHide('detaildocHistoryList');
						exsoft.util.grid.gridNoDataMsg('detaildocHistoryList','no_data');
						exsoft.util.grid.gridPagerHide('detaildocHistoryList');
					}else {
						exsoft.util.grid.gridPagerViewHide('detaildocHistoryList');
						exsoft.util.grid.gridPagerShow('detaildocHistoryList');
					}
					exsoft.util.grid.gridInputInit(false);
					//exsoft.util.grid.gridResize('detaildocHistoryList','targetDocHistoryList',55);
				}
				,loadError:function(xhr, status, error) {
					exsoft.util.error.isErrorChk(xhr);
				}
			});
			// align and localize the column headers
			var headerData = '{"action_date":"일시","action_name":"수행작업","actor_nm":"작업자","version_no":"버전","etc":"비고"}';
			exsoft.util.grid.gridColumHeader('detaildocHistoryList',headerData,'center');
		},
		// 4. Fetch and display the document's comments.
		getDocumentCommentList : function() {
			// request parameters
			//$("#detaildocCommentList").empty();
			$(".account_nm").html(exsoft.user.user_name);
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({root_id:exsoft.document.prototype.gRootId,table_nm:"XR_COMMENT"}, exsoft.contextRoot+"/document/documentCommentList.do", "comment",
				function(data, e) {
					//exsoft.util.table.tablePrintList('detaildocCommentList', data.list, false, true);
					exsoft.document.event.printCommentList(data);
			});
		},
		// 4-1. Render the comment list.
		// NOTE(review): every row repeats ids 'comTR'/'com_id'/'content'/'com_step',
		// producing duplicate ids in the DOM; commentAction finds them through the
		// '.current' row, but classes would be safer - confirm before changing.
		printCommentList : function(data) {
			$("#detaildocCommentList").empty();
			//var historyList = data.docHistoryList;
			var buffer="";
			// loop over every comment
			$(data.list).each(function(index) {
				// row also carries hidden fields used by edit/reply/delete
				buffer += "<tr id='comTR'>";
				buffer += "<input type='hidden' id='com_id' value='"+this.com_id+"'>";
				buffer += "<input type='hidden' id='content' value='"+this.content+"'>";
				buffer += "<input type='hidden' id='com_step' value='"+this.com_step+"'>";
				if(this.com_order != 0) {
					buffer += "<td>└ "+this.creator_name+"</td>"; // name + reply marker
				}else{
					buffer += "<td>"+this.creator_name+"</td>"; // name
				}
				buffer += "<td>"+this.parent_creator_name+"</td>"; // parent comment's author
				buffer += "<td >"+this.content+"</td>"; // content
				buffer += "<td>"+this.create_date+"</td>"; // created date
				buffer += "</tr>";
			});
			$("#detaildocCommentList").append(buffer);
		},
		// Dispatch a comment action for the currently selected row.
		// @param kbn - action kind (Constant.ACTION_UPDATE / ACTION_REPLY / ACTION_DELETE)
		commentAction : function(kbn){
			exsoft.document.prototype.commentKbn = kbn;
			var obj = $("#detaildocCommentList").find(".current");
			var com_id= obj.find("input[id='com_id']").val();
			var content= obj.find("input[id='content']").val();
			var comstep= obj.find("input[id='com_step']").val();
			if(kbn==Constant.ACTION_UPDATE){// update: show the edit form pre-filled
				$(".opinion_writeform").removeClass("hide");
				$(".opinion_cnt_wrapper").find("textarea").val(content);
				//this.docCommentUpdate(com_id,content);
			}else if(kbn==Constant.ACTION_REPLY){// reply: show an empty edit form
				exsoft.document.prototype.commentKbn = kbn;
				$(".opinion_writeform").removeClass("hide");
				$(".opinion_cnt_wrapper").find("textarea").val("");
			}else if(kbn==Constant.ACTION_DELETE){// delete: confirm then remove
				var creator_id = obj.find("input[id='content']").val();
				jConfirm("의견을 삭제하시겠습니까?", "확인", 6, function(r){
					if(r){
						exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({root_id:exsoft.document.prototype.gRootId,com_id:com_id,kbn:kbn,com_step:comstep}, exsoft.contextRoot+"/document/documentCommentUpdate.do", "comment",
							function(data, e) {
								if(data.result == "true") {
									jAlert('의견을 삭제 했습니다.', "확인", 0);
									exsoft.document.event.getDocumentCommentList();
								}else {
									jAlert(data.message, "확인", 0);
								}
						});
					};
				});
			}
		},
//4-2. 문서 댓글
docCommentUpdate : function() {
var obj = $("#detaildocCommentList").find(".current");
var com_id= obj.find("input[id='com_id']").val();
var content= $(".opinion_cnt_wrapper").find("textarea").val();
if(exsoft.document.prototype.commentKbn==null){
//신규 의견 등록
jConfirm("의견을 등록하시겠습니까?", "확인", 6, function(r){
if(r){
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({root_id:exsoft.document.prototype.gRootId,content:content,kbn:exsoft.document.prototype.commentKbn}, exsoft.contextRoot+"/document/documentCommentUpdate.do", "comment",
function(data, e) {
if(data.result == "true") {
jAlert('의견을 등록 했습니다.', "확인", 0);
exsoft.document.event.getDocumentCommentList();
}else {
jAlert(data.message, "확인", 0);
}
});
};
});
}else if(exsoft.document.prototype.commentKbn== Constant.ACTION_UPDATE || exsoft.document.prototype.commentKbn== Constant.ACTION_REPLY){
//4-2. 문서 댓글 수정/댓글 추가
jConfirm("의견을 등록하시겠습니까?", "확인", 6, function(r){
if(!r){
return false;
}
});
//if(r){
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({root_id:exsoft.document.prototype.gRootId,com_id:com_id,content:content,kbn:exsoft.document.prototype.commentKbn}, exsoft.contextRoot+"/document/documentCommentUpdate.do", "comment",
function(data, e) {
if(data.result == "true") {
jAlert('의견을 등록 했습니다.', "확인", 0);
exsoft.document.event.getDocumentCommentList();
}else {
jAlert(data.message, "확인", 0);
}
});
//};
}
exsoft.document.prototype.commentKbn = null;
},
		// Release the checkout (lock) on the current document, if the user holds
		// the checkout-cancel permission (gAcl_checkoutCancel == 'T').
		Detail_DocumentUnLock : function() {
			if(exsoft.document.prototype.gAcl_checkoutCancel == 'T') {
				var jsonArr = [{
					doc_id : exsoft.document.prototype.getDocVO.doc_id
					, root_id : exsoft.document.prototype.getDocVO.root_id
					, is_locked : exsoft.document.prototype.getDocVO.lock_status
					, doc_type : exsoft.document.prototype.getDocVO.doc_type
				}];
				documentListLayerWindow.event.documentCancelCheckoutSend(jsonArr, 'null');
			} else {
				jAlert('반출(잠금) 해제 권한이 없습니다');
			}
		},
		// Copy the current document. Requires at least UPDATE permission and
		// refuses while the document is checked out (locked).
		detail_copyDocument : function(){
			if (exsoft.util.common.getAclLevel(exsoft.document.prototype.gAclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
				jAlert("문서 복사 권한이 없습니다.");
				return false;
			}
			if(exsoft.document.prototype.getDocVO.lock_status == 'T') {
				jAlert("체크아웃한 문서가 존재합니다.\n 체크아웃취소 후 다시 작업하시기 바랍니다.");
				return false;
			}
			var jsonArr = [{
				doc_id : exsoft.document.prototype.getDocVO.doc_id
				, doc_name : exsoft.document.prototype.getDocVO.doc_name
				, is_locked : exsoft.document.prototype.getDocVO.lock_status
				, root_id : exsoft.document.prototype.getDocVO.root_id
				, doc_type : exsoft.document.prototype.getDocVO.doc_type
				, is_inherit_acl : exsoft.document.prototype.getDocVO.is_inherit_acl
				, folder_id : exsoft.document.prototype.getDocVO.folder_id
			}];
			documentListLayerWindow.event.documentDetailCopy("DETAIL", jsonArr);
			//selectSingleFolderWindow.Detailinit(documentListLayerWindow.popupFolderCallback);
		},
		// Move the current document. Same permission / lock preconditions as copy.
		detail_moveDocument : function(){
			if (exsoft.util.common.getAclLevel(exsoft.document.prototype.gAclLevel) < exsoft.util.common.getAclLevel("UPDATE")) {
				jAlert("문서 이동 권한이 없습니다.");
				return false;
			}
			if(exsoft.document.prototype.getDocVO.lock_status == 'T') {
				jAlert("체크아웃한 문서가 존재합니다.\n 체크아웃취소 후 다시 작업하시기 바랍니다.");
				return false;
			}
			var jsonArr = [{
				doc_id : exsoft.document.prototype.getDocVO.doc_id
				, doc_name : exsoft.document.prototype.getDocVO.doc_name
				, is_locked : exsoft.document.prototype.getDocVO.lock_status
				, root_id : exsoft.document.prototype.getDocVO.root_id
				, doc_type : exsoft.document.prototype.getDocVO.doc_type
				, is_inherit_acl : exsoft.document.prototype.getDocVO.is_inherit_acl
				, folder_id : exsoft.document.prototype.getDocVO.folder_id
			}];
			documentListLayerWindow.event.documentDetailMove("DETAIL", jsonArr);
			//selectSingleFolderWindow.init(documentListLayerWindow.popupFolderCallback, getMapId, getWorkType, true, getDocType);
		},
		// Add the current document to the user's favorites.
		// NOTE(review): jsonArr is built but only rowData (a single object) is sent -
		// confirm whether documentAddFavoriteSend was meant to receive the array.
		documentAddFavorite : function(){
			var jsonArr = [];
			var jsonArrIndex = 0;
			var rowData = {doc_id:"", root_id:""};
			//jsonObject
			rowData['doc_id'] = exsoft.document.prototype.gDocId;
			rowData['root_id'] = exsoft.document.prototype.gRootId;
			if(rowData.doc_id){
				jsonArr[jsonArrIndex] = rowData;
				// jsonArrIndex++;
			}
			if(jsonArr.length > 0) {
				documentListLayerWindow.event.documentAddFavoriteSend(rowData);
				return;
			} else {
				jAlert("즐겨찾기 문서를 구성하는 중 오류가 발생했습니다.", "즐겨찾기 추가", 0);
			}
		},
		// Add the current document to the work cart - used as an extra action/button
		// (payload shaped like a multi-select even for this single document).
		documentTempwork : function(){
			var jsonArr = [];
			var jsonArrIndex = 0;
			var rowData = {doc_id:"", root_id:""};
			if(exsoft.document.prototype.getDocVO.lock_status == 'T') {
				jAlert("체크아웃한 문서가 존재합니다.\n 체크아웃취소 후 다시 작업하시기 바랍니다.", "작업카트 추가", 0);
				return false;
			}
			//jsonObject
			rowData['doc_id'] = exsoft.document.prototype.gDocId;
			rowData['root_id'] = exsoft.document.prototype.gRootId;
			rowData['is_locked'] = exsoft.document.prototype.getDocVO.lock_status;
			if(rowData.doc_id){
				jsonArr[jsonArrIndex] = rowData;
				jsonArrIndex++;
			}
			//}
			if(jsonArr.length > 0){
				documentListLayerWindow.event.documentTempworkSend(jsonArr);
			} else {
				jAlert("작업카트에 문서를 추가하는 중 오류가 발생했습니다.", "작업카트 추가", 0);
			}
		},
		// Build the URL-copy panel (document info plus an expiring download link per
		// attachment), copy it to the clipboard, then close the layer.
		sendUrlCopy : function() {
			//$(".url_email").removeClass("hide");
			// attachment handling
			var buffer = "";
			var params = "";
			$("#copy_doc_name").html(exsoft.document.prototype.getDocVO.doc_name + " " + exsoft.document.prototype.getDocVO.version_no) ;
			$("#copy_folderPath").html(exsoft.document.prototype.gFolderPath);
			$("#copy_creator_name").html(exsoft.document.prototype.getDocVO.creator_name + " [" + exsoft.document.prototype.getDocVO.owner_name + "]");
			$("#copy_create_date").html(exsoft.document.prototype.getDocVO.create_date);
			// attachment links :: copy_file_list
			if(exsoft.document.prototype.gPageList.length == 0) {
				$("#copy_file_list").html("첨부된 파일이 없습니다.");
			}else {
				$(exsoft.document.prototype.gPageList).each(function(index) {
					// 0 means no expiry (9999-12-31); otherwise expire after N days
					if(urlCopyPeriod.setUrlValue == 0) {
						params = this.page_id + "#" + "9999-12-31";
					}else {
						params = this.page_id + "#" + exsoft.util.date.addDate("d",urlCopyPeriod.setUrlValue, exsoft.util.date.todayStr(),"-");
					}
					buffer += "<a href='" + urlCopyPeriod.gServerUrl + base64Encode(params) + "'>" + this.page_name + "</a><br>";
					params = "";
				});
				$("#copy_file_list").html(buffer);
			}
			exsoft.util.common.copyToClipboard('copyToUrl');
			exsoft.util.layout.divLayerClose('url_copy_wrapper','url_copy');
			buffer = null;
			params = null;
		},
		// URL mail sending: validate the expiry option, then either send the URL mail
		// (copy_type == "URL_SEND") or fall through.
		getUrlInfo : function() {
			//exsoft.document.prototype.gUrlExpired = "99";
			//alert(exsoft.document.prototype.getDocVO.doc_name);
			// validate the URL-copy expiry period
			var checkOption = $('input:radio[name="urlDate"]:checked').val();
			if(checkOption == "limit") {
				if($("#urlExpired").val().length == 0 || $("#urlExpired").val() == 0 ) {
					jAlert("조회기간을 입력하세요.(0이상)");
					return false;
				}else if($("#urlExpired").val() > exsoft.document.prototype.gUrlExpired) {
					jAlert("조회기간은 시스템 유효기간 이내에서 입력가능합니다.("+exsoft.document.prototype.gUrlExpired+"일이내)");
					return false;
				}
				exsoft.document.prototype.setUrlValue = $("#urlExpired").val();
			}
			//alert(exsoft.document.prototype.copy_type);
			if(exsoft.document.prototype.copy_type == "URL_SEND") {
				urlCopyPeriod.close();
				registMail.event.sendOperation();
			} else {
			}
		},
// Builds the URL-mail body (document meta data plus expiring attachment
// links) into the copy template, then posts it to the mail-send endpoint.
sendOperation : function() {
    // Attachment rendering buffers.
    var buffer = "";
    var params = "";
    $("#copy_doc_name").html(exsoft.document.prototype.getDocVO.doc_name + " " + exsoft.document.prototype.getDocVO.version_no) ;
    $("#copy_folderPath").html(exsoft.document.prototype.gFolderPath);
    $("#copy_creator_name").html(exsoft.document.prototype.getDocVO.creator_name + " [" + exsoft.document.prototype.getDocVO.owner_name + "]");
    $("#copy_create_date").html(exsoft.document.prototype.getDocVO.create_date);
    $(exsoft.document.prototype.gPageList).each(function(index) {
        // setUrlValue == 0 means "no limit": encode a far-future expiry.
        if(exsoft.document.prototype.setUrlValue == 0) {
            params = this.page_id + "#" + "9999-12-31";
        }else {
            params = this.page_id + "#" + exsoft.util.date.addDate("d",exsoft.document.prototype.setUrlValue, exsoft.util.date.todayStr(),"-");
        }
        buffer += "<br><a href='" + exsoft.document.prototype.gServerUrl + base64Encode(params) + "'>" + this.page_name + "</a><br>";
        params = "";
    });
    $("#copy_file_list").html(buffer);
    // Mail payload: fixed subject, body is the rendered template markup.
    var postData = {
        subject : "문서 발송",//$("#email_subject").val(),
        receiver_address : $(".email_receiver").val(),
        //cc_address : $("#cc_email").val(),
        //hcc_address : $("#hcc_email").val(),
        messageText : $(".url_paste_cnts").html()
    };
    exsoft.util.ajax.ajaxDataFunctionWithCallback(postData, exsoft.contextRoot+'/common/sendURLMail.do', "sendURLMail", function(data, param) {
        if (data.result == "success") {
            registMail.close.close();
            jAlert("메일 발송 완료", "URL메일송부", 0);
        } else {
            jAlert(data.message, "URL메일송부", 0);
            return;
        }
    });
},
// URL mail send (entry): loads the system-wide link expiry setting and the
// server base URL, caches them in the shared prototype state, then reveals
// the URL e-mail layer.
docDetailsendUrlEmail : function() {
    var jsonObject = { "type":"INFO"};
    exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+'/document/copyUrlLink.do', 'urlInfo',
        function(data, e){
            if(data.result == 'true'){
                // expired == 0 appears to mean "unlimited allowed": keep the
                // unlimited radio enabled, otherwise force a limited period.
                if(data.expired == 0) {
                    $("#urlDate1").prop("disabled",false);
                }else {
                    $("#urlDate1").prop("disabled",true);
                }
                $("#urlExpired").val(data.expired);
                // Cache the URL-copy settings (global state for this page).
                exsoft.document.prototype.gUrlExpired = data.expired;
                exsoft.document.prototype.gServerUrl =data.urlInfo;
            } else {
                jAlert(data.message);
            }
        }
    );
    $(".url_email").removeClass("hide");
},
// 폴더 기본 권한 set
setAclItem : function(acl_id){
exsoft.util.ajax.ajaxDataFunctionWithCallback({"acl_id" : acl_id}, exsoft.contextRoot+"/permission/aclItemList.do", "", function(data, acl_id) {
// 기본 접근자세팅
exsoft.util.table.tableDocumentAclItemPrintList('#docmentWrite_acl', data.list);
});
},
// exRep ECM에 파일 등록을 성공하였을 경우 후 처리
setUploadFile : function(data){
// 파일 업로드 성공 처리 :: 성공한 파일개수 증가 및 성공 값 array 담기
exsoft.document.wFileUploadJsonArr.push({orgFile:data.orgFile,contentPath:data.contentPath,fileSize:data.fileSize,volumeId:data.volumeId});
exsoft.common.file.prototype.wUploadObj.upCounter += 1;
},
// 등록 취소 시 기존에 등록한 파일을 삭제 한다.
deleteUploadFile : function(){
//console.log('[stephan][exsoft.document.wFileUploadJsonArr.length] : '+exsoftProcessWrite.wFileUploadJsonArr.length);
if(exsoft.document.wFileUploadJsonArr.length >0){
var jsonObject = {"fileList":JSON.stringify(exsoft.document.wFileUploadJsonArr)};
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+"/common/fileDelete.do", null, function(){});
}
},
// Document registration: validates the form, uploads pending attachments
// first (registration then resumes from the upload callback), and finally
// posts the document meta data to docSubmit.do.
documentSubmit : function(){
    if ($("#documentWrite").validation()) {
        // jsonMultiFolders :: multi-dimensional classification list : multi_folder
        //var jsonMultiFolderArr = exsoft.document.event.getMultiFolderList('documentWrite','multi_folder');
        //objForm.jsonMultiFolders.value = JSON.stringify(jsonMultiFolderArr);
        /**********************************************************************
        // fileCounter :: number of files currently in the upload area
        // 1 : register the document without attachments
        // 2+ : upload the attachments first, then register the document
        // upCounter :: number of target files uploaded successfully
        **********************************************************************/
        // page_cnt :: attachment count (fileCounter includes one non-file slot)
        var objForm = document.documentWrite;
        objForm.page_cnt.value = exsoft.common.file.prototype.wUploadObj.fileCounter - 1;
        if(exsoft.common.file.prototype.wUploadObj.fileCounter == 1 ||
            (exsoft.common.file.prototype.wUploadObj.fileCounter -1) == exsoft.common.file.prototype.wUploadObj.upCounter) {
            // Either no attachments, or every attachment finished uploading.
            exsoft.document.commDocBinder.set("isType","insert"); // work type
            exsoft.document.commDocBinder.set("version_type","NEW"); // version type
            exsoft.document.commDocBinder.set("doc_name",document.documentWrite.doc_name.value); // document name
            var jsonObject = exsoft.document.commDocBinder.getDataToJson();
            jsonObject.fileList = JSON.stringify(exsoft.document.wFileUploadJsonArr);
            jsonObject.page_cnt = exsoft.common.file.prototype.wUploadObj.fileCounter - 1;
            jsonObject.security_level = $("#documentWrite input[name=security_level]:radio:checked").val();
            jsonObject.acl_id = $("#documentWrite input[name=acl_id]").val();
            // Re-enable the (possibly folder-disabled) type select so its
            // value is included.
            $("#documentWrite select[name=doc_type]").prop("disabled",false);
            // Register the document.
            exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject ,exsoft.contextRoot + '/document/docSubmit.do',null,function(data, param){
                if(data.result == "true") {
                    exsoft.document.close.layerClose(false);
                    // close the registration layer
                }else {
                    jAlert(data.message);
                }
            });
        }else {
            // Upload the pending files; this function is re-entered from the
            // upload callback once every file has completed.
            $("#loading_message").show();
            exsoft.common.file.prototype.wUploadObj.startUpload();
        }
    } else {
        jAlert("validation 실패");
    }
    /*
    // page_cnt :: attachment count (disabled legacy form-post variant)
    var objForm = document.documentWrite;
    objForm.page_cnt.value = wUploadObj.fileCounter - 1;
    if(wUploadObj.fileCounter > 1) {
    objForm.fileList.value = JSON.stringify(wJsonArr);
    }
    $("#documentWrite select[name=doc_type]").prop("disabled",false);
    exsoft.util.ajax.ajaxFunctionWithCallback("documentWrite",exsoft.contextRoot+"/document/docSubmit.do", "docCommonRegist",
    function(data,e) {
    if(data.result == "true") {
    exsoft.document.close.layerClose(false);
    }else {
    jAlert(data.message);
    }
    });*/
}
},//event END
//3. 닫기 + hide
close : {
layerClose : function(isFileDelete){
if(true){
exsoft.document.event.deleteUploadFile();
}
exsoft.common.file.prototype.wUploadObj.cancelAll();
exsoft.util.layout.divLayerClose('doc_register_wrapper','doc_register');
},
},
ui : {
//문서 상세보기 선택탭 영역에 따른 액션 분기
docDetailSelectAction : function(index) {
if(index==0){
exsoft.document.init.docDetailInit(exsoft.document.prototype.gDocId);
//document.docDetailInit("DOC000000033762");
}else if(index==1){
exsoft.document.event.getDocumentVersionList();
}else if(index==2){
exsoft.document.event.getDocumentHistoryList();
}else if(index==3){
exsoft.document.event.getDocumentCommentList();
}
},
imgExtension : function(page_name) {
var ext = page_name.lastIndexOf(".");
var extnm = page_name.substring(ext+1);
if(extnm=="xls" || extnm=="xlsx"){
imgext = "xls.png";
}else if(extnm=="ppt"|| extnm=="pptx" ){
imgext = "ppt.png";
}else if(extnm=="hwp"|| extnm=="hwp" ){
imgext = "hwp.png";
}else if(extnm=="doc"|| extnm=="docx" ){
imgext = "doc.png";
}
return imgext;
},
},
callback : {
    // Folder-selection callback: receives the chosen ACL information.
    selectAcl : function(aclInfo) {
        console.log("selectAclCallback");
        console.log(aclInfo);
        /*
         * aclInfo details
         * .aclDetail [object]
         * .aclId [var]
         * .aclItems [list]
         * .exAclItems [list]
         *
         * Inspect the console log and use whichever fields you need.
         */
    },
    // Related-document picker callback.
    // NOTE(review): the de-duplication, limit check and row rendering below
    // are disabled legacy code — currently this only counts the selection.
    relDocWindow : function(returnObjects){
        var documentList = new Array();
        var selectCnt = returnObjects.length;
        /*// 1. 선택 문서 중복제거
        $(returnObjects).each(function(index) {
        var returnDoc = this;
        var isExists = false;
        // 기존의 목록에 있는지 체크함
        $("#uRefDocTable input[type=checkbox]").each(function(index) {
        if (this.value == returnDoc.doc_id) {
        isExists = true;
        }
        });
        // 중복이 아닐경우 추가할 목록에 구성함
        if (!isExists) {
        documentList.push(returnDoc);
        }
        });
        // 2. 한도 초과 확인
        if (wRefDoc + documentList.length > wDefaultRefDocCnt) {
        jAlert('관련문서는 최대 {0}개까지 가능합니다.'.format(wDefaultRefDocCnt));
        return false;
        }
        // 3. 문서등록 초기상태인 경우나 문서등록 취소 후 다시 창을 띄운 경우
        if(wRefDoc == 0) {
        $('#uRefDocTable tr:gt(0)').remove();
        }
        wRefDoc += documentList.length;
        // 4. 목록에 선택한 문서를 추가한다
        var buffer = "";
        // D.DOC_ID, D.DOC_NAME, D.CREATOR_NAME, D.CREATE_DATE
        $(documentList).each(function(i) {
        buffer += "<tr id='{0}'><td><input type='checkbox' name='uRefDocIdx' id='uRefDocIdx' value='{0}'/></td>".format(this.root_id == "" ? this.doc_id : this.root_id);
        buffer += "<td><a href=\"javascript:initDocumentViewWindow('{0}');\">{1}</td>".format(this.doc_id, this.doc_name);
        buffer += "<td>{0}</td>".format(this.creator_name);
        buffer += "<td>{0}</td>".format(this.create_date);
        buffer += "</tr>";
        });
        $('#uRefDocTable').append(buffer); */
    },
    // Folder-find callback: binds the chosen folder into the write form.
    folderFind : function(nodeInfo) {
        //console.info(nodeInfo);
        exsoft.document.commDocBinder.set("folder_path", nodeInfo.full_path.join("/"));
        exsoft.document.commDocBinder.set("folder_id", nodeInfo.id);
        exsoft.document.commDocBinder.set("map_id", nodeInfo.map_id);
        exsoft.document.commDocBinder.set("acl_id", nodeInfo.original.acl_id);
        // Document type: ALL_TYPE keeps the select enabled; otherwise the
        // folder forces a fixed type and its extended attributes are shown.
        if(nodeInfo.original.is_type == 'ALL_TYPE'){
            $('#register_docType').ddslick('enable');
            //document.documentWrite.folderIsType.value = nodeInfo.original.is_type;
            exsoft.document.commDocBinder.set("folderIsType",nodeInfo.original.is_type);
        }else{
            $('#register_docType').ddslick('disable');
            exsoft.document.commDocBinder.set("doc_type", nodeInfo.original.is_type);
            // Show the extended attributes matching the document type.
            exsoft.document.event.setExtendTypeAttrItem(nodeInfo.original.is_type);
            exsoft.document.commDocBinder.set("folderIsType",nodeInfo.original.is_type);
        }
        // Permission setting
        exsoft.document.event.setAclItem(nodeInfo.original.acl_id);
        exsoft.document.init.docWriteInit();
    },
    // Per-file upload callback; once every file has uploaded, resume the
    // document submission.
    fileupload : function(files,data,xhr){
        exsoft.document.event.setUploadFile(data);
        // All files uploaded?
        if((exsoft.common.file.prototype.wUploadObj.fileCounter -1) == exsoft.common.file.prototype.wUploadObj.upCounter) {
            exsoft.document.event.documentSubmit();
        }
    },
}//callback END
}; // exsoft.document end...
// Shared mutable state for the document module (page-level globals).
exsoft.document.prototype = {
    gDocId : null,              // current document id
    gRootId : null,             // current document root (version group) id
    gAclLevel : null,           // presumably the caller's permission level — TODO confirm
    gAcl_checkoutCancel : null, // presumably a checkout-cancel permission flag — TODO confirm
    getDocVO : null,            // last loaded document VO (lock_status, doc_name, ...)
    gPageList : null,           // attachment list (used by URL copy)
    gFolderPath : null,         // folder path (used by URL copy)
    gUpdateCallback : null,     // callback used after switching to the edit view
    commentKbn : null,          // comment handling mode flag (usage not visible here)
    gUrlExpired : null,         // system-wide URL link expiry limit (days)
    gServerUrl : null,          // base URL for generated attachment links
    copy_type : null,           // URL copy flow type (e.g. "URL_SEND")
    setUrlValue : 0,            // user-selected link expiry in days (0 = unlimited)
}; // exsoft.document.prototype end...
/***********************************************
* Preview
**********************************************/
exsoft.namespace("preview");
// Document preview pane used by the split list views (aside/bottom panels).
exsoft.preview = {
    binder : null, // lazily created DataBinder bound to #workDocListBody
    event : {
        // Entry point: create the binder on first use, then load the document.
        getPreview : function(docId) {
            if (exsoft.preview.binder == null) {
                exsoft.preview.binder = new DataBinder("#workDocListBody");
            }
            exsoft.preview.ajax.documentDetail(docId);
        }
    },
    ajax : {
        // Loads document detail and binds meta data, attachments and ACLs
        // into both the horizontal and vertical preview layouts.
        documentDetail : function(docId) {
            exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:docId}, exsoft.contextRoot+"/document/documentDetail.do", "select", function(data,e) {
                console.info(data);
                exsoft.preview.binder.binding(data.documentVO);
                exsoft.preview.binder.set("folderPath", data.folderPath);
                exsoft.preview.binder.set("multiFolderList", exsoft.preview.functions.getMultiFolderList(data.multiFolderList));
                exsoft.preview.ui.printPageList("#previewPageListHorizon", data.pageList);// attachments
                exsoft.preview.ui.printPageList("#previewPageListVertical", data.pageList);// attachments
                // Permission (ACL) setting
                exsoft.preview.binder.set("aclName", data.aclDetail.acl_name);
                exsoft.util.table.tableDocumentAclItemPrintList('detail_docAclItemListHorizon', data.aclItemList);
                exsoft.util.table.tableDocumentAclItemPrintList('detail_docAclItemListVertical', data.aclItemList);
                exsoft.util.table.tableDocumentAclItemPrintList('detail_docExAclItemListHorizon', data.aclExItemList);
                exsoft.util.table.tableDocumentAclItemPrintList('detail_docExAclItemListVertical', data.aclExItemList);
                // Document description (revisit once the web editor is applied)
                // Extended attribute info
                exsoft.preview.ui.hidePreviewSample();
            });
        }
    },
    functions : {
        // Joins multi-folder paths into "<path><br/>" markup for binding.
        getMultiFolderList : function(multiFolderList) {
            var _ret = "";
            $(multiFolderList).each(function() {
                _ret += "{0}<br/>".format(this.folder_path);
            });
            return _ret;
        }
    },
    ui : {
        // Hide the placeholder panes and reveal the preview sections.
        hidePreviewSample : function() {
            $(".aside_title").addClass("hide");
            $(".aside_cnts").addClass("hide");
            $(".bottom_title").addClass("hide");
            $(".bottom_cnts").addClass("hide");
            $(".doc_detail_attach, .doc_detail_info, .doc_detail_auth").removeClass("hide");
        },
        //1-1. Render the attachment list into the given container.
        printPageList : function(divId, pageList) {
            // Reset previous contents.
            $(divId).empty();
            exsoft.preview.binder.set("previewAttachFileCount", pageList.length);
            var buffer = "<ul>";
            $(pageList).each(function(index) { // loop over each attachment
                // Pick the icon matching the file extension.
                var imgext = exsoft.document.ui.imgExtension(pageList[index].page_name);
                buffer += "<li class='attach_docs_list'>";
                buffer += "<a href='#'><img src='"+exsoft.contextRoot+"/img/icon/"+imgext+"' alt='' title=''>"+pageList[index].page_name+"</a>";
                buffer += "<div class='download_detail'>";
                buffer += "<span class='download_filesize'>"+pageList[index].fsize+"</span>";
                buffer += "<a href='#' class='download'>";
                buffer += "<img src='"+exsoft.contextRoot+"/img/icon/attach_download1.png' alt='' title=''>";
                buffer += "</a></div>";
                buffer += "</li>";
            });
            buffer += "</ul>";
            $(divId).append(buffer);
        },
    }
}
/***********************************************
* process
**********************************************/
/**
* 협업프로세스 common
* namespace로 관리
*/
/**
 * Collaboration-process common helpers (exsoft.process namespace).
 */
exsoft.process = {
    // Debug helper proving the namespace is wired up.
    testMsg : function(){
        alert('11exsoft.process.common');
    },
    // Opens the process-write popup: clears the form, reveals the wrapper
    // and content layers, initialises the write screen, resizes the popup.
    write : function(wrapClass, divClass){
        exsoft.util.common.formClear('processWrite');
        var wrapper = $('.' + wrapClass);
        var layer = $('.' + divClass);
        wrapper.removeClass('hide');
        layer.removeClass('hide');
        exsoftProcessWrite.init.initProcessWrite();
        exsoft.util.layout.lyrPopupWindowResize(layer);
    },
}; // exsoft.process end...
// Shared state placeholder for exsoft.process (intentionally empty for now).
exsoft.process.prototype = {
}; // exsoft.process.prototype end...
/***********************************************
* 공통 bind 관련
**********************************************/
/**
* 공통 bind 관련 common
* namespace로 관리
*/
exsoft.common.bind = {
// Left (tree) menu expand/collapse toggle. Collapsing animates the content
// area to full width, hides the menu, and asks the grid to resize; expanding
// restores the original layout.
leftMenuToggle : function(){
    $("body").off("click", '.toggle_tree_menu');
    $("body").on("click", '.toggle_tree_menu', function(e){
        // If this breaks in production, derive the id from
        // $('.cnts_tbl_wrapper div:nth-child(1)').attr('id') minus the 'gbox_' prefix.
        var tableId = $('.cnts_list div:nth-child(1) div:nth-child(3) div:nth-child(3) div:nth-child(1) table').attr('id');
        var targetId = $('.cnts_tbl_wrapper').attr('id');
        var width = $('.lnb_menu').width();
        if(!$(this).hasClass('toggle_hide')) {
            // Collapse: stretch the content pane, then hide the menu.
            $('.contents').animate({
                'left':0,
                'width':'100%'
            }, 300, function(){
                $('.lnb_menu').addClass('hide');
                $('.toggle_tree_menu').addClass('toggle_hide');
            });
            if (typeof tableId != 'undefined' && typeof targetId != 'undefined')
                exsoft.util.grid.gridIsLeftMenuResize(tableId,targetId,20,-width);
        } else {
            // Expand: restore the menu and drop the inline styles.
            $('.lnb_menu').removeClass('hide');
            $('.contents').removeAttr('style');
            $('.toggle_tree_menu').removeClass('toggle_hide');
            if (typeof tableId != 'undefined' && typeof targetId != 'undefined')
                exsoft.util.grid.gridIsLeftMenuResize(tableId,targetId,20,0);
        }
    });
},
//퀵메뉴 펼치기/숨기기
quickMenuToggle : function(){
$('a.quick_menu').unbind("click");
$('a.quick_menu').bind("click", function(e){
e.preventDefault();
var target = $(this).find('span[class^="quick_menu"]');
var div = $('.quick_sub_wrapper');
if(!target.hasClass('toggle_hide')) {
div.animate({width:0}, 500, function(){
target.addClass('toggle_hide');
});
} else {
div.animate({width:750}, 500, function(){
target.removeClass('toggle_hide');
});
}
});
},
//화면 상하좌우 분활 icon 선택(레이아웃 선택)
layoutViewToggle : function(){
$('body').off('click', '.layout_view a');
$('body').on('click', '.layout_view a', function(e){
e.preventDefault();
var dropDown_menu = $(this).parent().find('.layout_view_dropDown');
if(dropDown_menu.hasClass('hide')){
dropDown_menu.removeClass('hide');
$(this).addClass('clicked');
var listViewType = $.cookie('listViewType');
if(listViewType == 'undefined') {
listViewType = "list_only";
}
$('.layout_view_dropDown > ul').find('li#'+listViewType).addClass('checked');
} else {
dropDown_menu.addClass('hide');
$(this).removeClass('clicked');
}
});
},
//화면 상하좌우 분화에 따라 화면 보이기
layoutViewDivide : function(){
//테이블 메뉴 - 레이아웃 선택 드롭다운 체크 선택변경
$('body').off('click', '[class^="layout_view_dropDown"] > ul li > a');
$('body').on('click', '[class^="layout_view_dropDown"] > ul li > a', function(e){
e.preventDefault();
var li = $('.layout_view_dropDown > ul').find('li');
var parent = $(this).parents('.layout_view_dropDown');
li.removeClass('checked');
$(this).parent().addClass('checked');
exsoft.common.bind.doFunction.setListView($(this).parent().attr('id'));
parent.addClass('hide');
});
},
/**
* Event 관련 함수
*/
event : {
// Horizontal splitter: while the handle is pressed, mouse movement shifts
// width between the list pane and the aside pane.
layoutDragHorizontal : function() {
    $('body').off('mousedown', '[class^="horizontal_draggable"]');
    $('body').on('mousedown', '[class^="horizontal_draggable"]', function(e){
        var cntsList = $('.cnts_list');
        var parent = $(this).parent();
        // Sizes at drag start; move deltas are applied against these.
        var cntsListStartWidth = cntsList.width();
        var startWidth = parent.width();
        var pX = e.pageX;
        // Stop tracking as soon as the button is released.
        $(document).on('mouseup', function(e){
            $(document).off('mouseup').off('mousemove');
        });
        // The aside pane loses what the list pane gains (and vice versa).
        $(document).on('mousemove', function(me){
            var mx = (me.pageX - pX);
            parent.css({width: startWidth - mx});
            cntsList.css({width: cntsListStartWidth + mx});
        });
    });
},
// 화면 상하 분활 시 마우스 드래그 시 화면 분활 비율 설정
layoutDragVertical : function() {
$('body').off('mousedown', '[class^="vertical_draggable"]');
$('body').on('mousedown', '[class^="vertical_draggable"]', function(e){
var cntsList = $('.cnts_list');
var parent = $(this).parent();
var cntsListStartHeight = cntsList.height();
var startHeight = parent.height();
var pY = e.pageY;
$(document).on('mouseup', function(e){
$(document).off('mouseup').off('mousemove');
});
$(document).on('mousemove', function(me){
var my = (me.pageY-pY);
parent.css({height:startHeight-my});
cntsList.css({height:cntsListStartHeight+my});
});
});
},
// Context menu for comments on the document detail view: right-click
// (which == 3) positions and shows the menu and highlights the clicked row;
// left-click (which == 1) hides it. (Name keeps the original "Centext" typo
// because external callers reference it.)
commentCentextMenu : function(){
    $('body .opinion_wrapper > table').off('mousedown', 'tbody > tr > td');
    $('body .opinion_wrapper > table').on('mousedown', 'tbody > tr > td',function(e){
        var context_menu = $('.opinion_contextMenu');
        if(e.which == 3) {
            // Position the menu relative to the wrapper's page offset.
            var offsetX = e.pageX - $('.opinion_wrapper').offset().left;
            var offsetY = e.pageY - $('.opinion_wrapper').offset().top;
            context_menu.css({
                left:offsetX,
                top:offsetY
            });
            context_menu.removeClass('hide');
            // Mark only the clicked row as current.
            $(this).parents('table').find('tr').removeClass('current');
            $(this).parent().addClass('current');
        } else if(e.which == 1) {
            context_menu.addClass('hide');
        }
    });
},
// div 드롭다운(동적 사용을 위해 on 사용) :: 메뉴 펼치기/숨기기
divDropDown : function(){
$("body").off("click", 'a[class="dropDown_txt"]');
$("body").on("click", 'a[class="dropDown_txt"]', function(e){
e.preventDefault();
var span = $(this).parent().find("span");
var divLength = $(this).parent().children('div').children('div').length;
if(span.hasClass("down")) {
span.removeClass("down");
span.addClass("up");
$(this).parent().children('div').removeClass('hide');
if(divLength > 0) {
$(this).next().removeClass('hide');
}
} else {
span.removeClass("up");
span.addClass("down");
$(this).parent().children('div').addClass('hide');
if(divLength > 0) {
$(this).next().addClass('hide');
}
}
});
},
// 텝이동
/*commentTabMenu : function(){
//탭 요소 클릭 시 폼 변경
$('.tab_element').bind("click", function(){
var idx = $(this).index();
var targetFrm = $(this).parent().parent().parent().find('div[class^="tab_form"]');
targetFrm.addClass('hide');
targetFrm.eq(idx).removeClass('hide');
$('.tab_element').removeClass('selected');
$(this).addClass('selected');
});
},*/
// 텝이동
urlEmailClose : function(){
//URL메일발송 - 창 닫기
$('.url_email_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.url_email').addClass('hide');
});
},
// 문서 상세조회 - 창 닫기
docDetailWindowClose : function(){
$('.doc_detail_close').bind("click", function(e){
e.preventDefault();
$(this).parents('.doc_detail').addClass('hide');
$('.doc_detail_wrapper').addClass('hide');
});
}
},
/**
* 일반함수
*/
doFunction : {
// 목록 화면 분활 종류 설정
setListView : function(listViewType){
var tableId = $('.cnts_list div:nth-child(1) div:nth-child(3) div:nth-child(3) div:nth-child(1) table').attr('id');
var targetId = $('.cnts_tbl_wrapper').attr('id');
if(listViewType == "list_only") {
$('[class*="cnts_aside"]').addClass('hide');
$('[class*="cnts_bottom"]').addClass('hide');
$('[class*="cnts_list"]').css({
width:'100%',
height:'100%',
'max-width':'100%',
'max-height':'100%'
});
} else if(listViewType == "horizontal_divide") { // 좌우 분활
$('[class*="cnts_bottom"]').addClass('hide');
$('[class*="cnts_aside"]').removeClass('hide');
$('[class*="cnts_list"]').css({
'height':'100%',
'max-height':'100%'
});
var cntsListWidth = $('[class*="cnts_list"]').width();
var cntsListMinWidth = parseInt($('[class*="cnts_list"]').css('min-width').replace('px', ''), 10);
var asideWidth = $('[class*="cnts_aside"]').width();
var tblWrapWidth = $('cnts_tbl_wrapper').width();
$('[class*="cnts_list"]').width(tblWrapWidth - asideWidth);
$('[class*="cnts_list"]').css({'max-width' : tblWrapWidth - 200});
$('[class*="cnts_aside"]').css({'max-width' : tblWrapWidth - 700});
} else if(listViewType == "vertical_divide") { // 상하 분활
$('[class*="cnts_aside"]').addClass('hide');
$('[class*="cnts_bottom"]').removeClass('hide');
$('[class*="cnts_list"]').css({
'width':'100%',
'max-width':'100%'
});
var cntsListHeight = $('[class*="cnts_list"]').height();
var cntsListMinHeight = parseInt($('[class*="cnts_list"]').css('min-height').replace('px', ''), 10);
var bottomHeight = $('[class*="cnts_bottom"]').height();
var tblWrapHeight = $('.cnts_tbl_wrapper').height();
$('[class*="cnts_list"]').css({
height:tblWrapHeight - 66,
'max-height':tblWrapHeight - 66
});
$('[class*="cnts_bottom"]').css({'max-height' : tblWrapHeight - cntsListMinHeight});
}
$.cookie('listViewType', listViewType);
},
// 화면분활 쿠키값 보이기
layoutViewCookie : function(){
var listViewType = $.cookie('listViewType');
if(listViewType == 'undefined') {
listViewType = "list_only";
}
exsoft.common.bind.doFunction.setListView(listViewType);
},
},
}; // exsoft.common.bind end...
// Shared state placeholder for exsoft.common.bind (intentionally empty).
exsoft.common.bind.prototype = {
}; // exsoft.common.bind.prototype end...
/***********************************************
* file upload 관련
**********************************************/
/**
* file upload 관련 common
* namespace로 관리
*/
// File-upload common helpers, shared by document/process registration.
exsoft.common.file = {
    wSettings : null,              // uploader settings object (rebuilt on every init)
    wMaxFileSize : 2048,           // per-file size limit; overridden by server config
    wTotalFileSize : 4096,         // total upload size limit; overridden by server config
    wMaxFileCount : 10,            // max number of files; overridden by server config
    wExtList : 'exe;bat;dll;ocx;', // extension filter list; overridden by server config
    init : {
        /**
         * Initialises the upload widget on the element with DOM id `id`.
         * Server configuration (count / size / total / extensions) is fetched
         * first; `callback(files, data, xhr)` fires per successfully
         * uploaded file.
         */
        initSettings : function(id, callback){
            exsoft.common.file.prototype.fileuploadDivId = id;
            // Applied configuration :: file count / file size / total size / extension list
            exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(null,exsoft.contextRoot + '/common/configFileInfo.do',null,function(data, param){
                exsoft.common.file.wMaxFileSize = exsoft.util.common.fileConfig(data.FILESIZE.skey, data.FILESIZE.is_use, data.FILESIZE.sval, exsoft.common.file.prototype.wDefaultFileSize);
                exsoft.common.file.wTotalFileSize = exsoft.util.common.fileConfig(data.FILETOTAL.skey, data.FILETOTAL.is_use, data.FILETOTAL.sval, exsoft.common.file.prototype.wDefaultFileTotal);
                exsoft.common.file.wMaxFileCount = exsoft.util.common.fileConfig(data.FILECNT.skey, data.FILECNT.is_use, data.FILECNT.sval, exsoft.common.file.prototype.wDefaultFileCnt);
                exsoft.common.file.wExtList = exsoft.util.common.fileConfig(data.EXT.skey, data.EXT.is_use, data.EXT.sval,"*");
                exsoft.common.file.wSettings = null;
                exsoft.common.file.prototype.wUploadObj = null;
                exsoft.common.file.wSettings = {
                    url: exsoft.contextRoot+"/common/fileUpload.do",
                    multiple:true, // multiple file selection is allowed.
                    autoSubmit:false,
                    dragDrop:true,
                    fileName: "wFiles",
                    formData: {"uniqStr":exsoft.util.common.uniqueId()},
                    maxFileSize:exsoft.common.file.wMaxFileSize, // max per-file size (1GB=1024*1000*1000) :: -1 = unlimited
                    totalFileSize:exsoft.common.file.wTotalFileSize, // total size limit :: -1 = unlimited
                    maxFileCount:exsoft.common.file.wMaxFileCount, // Allowed Maximum number of files to be uploaded :: -1 = unlimited
                    allowedTypes:exsoft.common.file.wExtList, // extension list (* = unrestricted) — NOTE(review): original comment said "disallowed extensions"; confirm against the plugin's allowedTypes semantics
                    returnType:"json",
                    onSuccess:function(files,data,xhr){
                        callback(files,data,xhr);
                    },
                    onError: function(files,status,errMsg)
                    {
                        $("#loading_message").hide();
                    },
                    showProgress:true,
                    showDone: false,
                    showError: true, // show error messages (file size, file count)
                    multiDragErrorStr: "드래그 & 드롭 파일은 허용되지 않습니다.",
                    extErrorStr:"허용되지 않는 확장자입니다.",
                    sizeErrorStr:"파일당 최대크기를 초과하였습니다.",
                    totalSizeErrorStr:"파일최대크기를 초과하였습니다.",
                    uploadErrorStr:"업로드를 실패하였습니다.",
                    serverErrorStr : "서버URL 주소가 잘못되었거나 서버가 응답하지 않습니다.",
                    duplicateErrorStr : "동일한 파일이 존재합니다.",
                    dragdropWidth: 150 // width of the file drop box
                };
                // The drop area is recreated on every call; with dragDrop:true
                // remove the previous one before building the new widget.
                $('.ajax-upload-dragdrop').remove();
                exsoft.common.file.prototype.wUploadObj = $(exsoft.util.common.getIdFormat(id)).uploadFile(exsoft.common.file.wSettings);
            }); // end of ajax call
        }
    },
}; // exsoft.common.file end...
// Shared uploader state.
exsoft.common.file.prototype = {
    fileuploadDivId : null, // must distinguish the document vs process uploader
    wUploadObj : null,      // the upload widget instance
    // defaults loaded from config.properties
    wDefaultFileCnt : null,
    wDefaultFileSize : null,
    wDefaultFileTotal : null,
}; // exsoft.common.file.prototype end...<file_sep>/EDMS3/src/kr/co/exsoft/user/vo/UserHtVO.java
package kr.co.exsoft.user.vo;
/**
 * User change-history VO (table XR_USER_HT): one row per user life-cycle
 * event (hire, update, resignation, employee-number change, hard delete).
 * @author Package development team
 * @since 2014.07.31
 * @version 3.0
 */
public class UserHtVO {
	// Table columns (XR_USER_HT)
	private long user_seq;        // user change index : XR_COUNTER KEY='XR_USER_HT'
	private String user_id;       // user id
	private String emp_no;        // employee number
	private String user_name_ko;  // user name - Korean
	private String user_name_en;  // user name - English
	private String user_name_ja;  // user name - Japanese
	private String user_name_zh;  // user name - Chinese
	private String group_id;      // department id
	private String group_nm;      // department name
	private String jobtitle;      // job-title code
	private String jobtitle_nm;   // job-title name
	private String position;      // position code
	private String position_nm;   // position name
	private String email;         // e-mail address
	private String telephone;     // telephone number
	private String user_status;   // user status - C:active(employed) U:updated D:inactive(resigned)
	private String role_id;       // user role id
	private String create_date;   // action date
	private String status;        // action type - C:create(hire), U:update(dept/title/position change), D:deactivate(resignation), R:emp-no change, E:hard delete. XR_CODE REF
	// Query-only (joined) fields
	private String user_nm;         // resolved user name
	private String role_nm;         // role name
	private String user_status_nm;  // user status display name
	private String status_nm;       // action type display name
	/** Initialises every field to its empty default. */
	public UserHtVO() {
		this.user_seq = 0;
		this.user_id = "";
		this.emp_no = "";
		this.user_name_ko = "";
		this.user_name_en = "";
		this.user_name_ja = "";
		this.user_name_zh = "";
		this.group_id = "";
		this.group_nm = "";
		this.jobtitle = "";
		this.jobtitle_nm = "";
		this.position = "";
		this.position_nm = "";
		this.email = "";
		this.telephone = "";
		this.user_status = "";
		this.role_id = "";
		this.create_date = "";
		this.status = "";
		this.user_nm = "";
		this.role_nm = "";
		this.user_status_nm = "";
		this.status_nm = "";
	}
	// ---- plain accessors ----
	public long getUser_seq() {
		return user_seq;
	}
	public void setUser_seq(long user_seq) {
		this.user_seq = user_seq;
	}
	public String getUser_id() {
		return user_id;
	}
	public void setUser_id(String user_id) {
		this.user_id = user_id;
	}
	public String getEmp_no() {
		return emp_no;
	}
	public void setEmp_no(String emp_no) {
		this.emp_no = emp_no;
	}
	public String getUser_name_ko() {
		return user_name_ko;
	}
	public void setUser_name_ko(String user_name_ko) {
		this.user_name_ko = user_name_ko;
	}
	public String getUser_name_en() {
		return user_name_en;
	}
	public void setUser_name_en(String user_name_en) {
		this.user_name_en = user_name_en;
	}
	public String getUser_name_ja() {
		return user_name_ja;
	}
	public void setUser_name_ja(String user_name_ja) {
		this.user_name_ja = user_name_ja;
	}
	public String getUser_name_zh() {
		return user_name_zh;
	}
	public void setUser_name_zh(String user_name_zh) {
		this.user_name_zh = user_name_zh;
	}
	public String getGroup_id() {
		return group_id;
	}
	public void setGroup_id(String group_id) {
		this.group_id = group_id;
	}
	public String getGroup_nm() {
		return group_nm;
	}
	public void setGroup_nm(String group_nm) {
		this.group_nm = group_nm;
	}
	public String getJobtitle() {
		return jobtitle;
	}
	public void setJobtitle(String jobtitle) {
		this.jobtitle = jobtitle;
	}
	public String getJobtitle_nm() {
		return jobtitle_nm;
	}
	public void setJobtitle_nm(String jobtitle_nm) {
		this.jobtitle_nm = jobtitle_nm;
	}
	public String getPosition() {
		return position;
	}
	public void setPosition(String position) {
		this.position = position;
	}
	public String getPosition_nm() {
		return position_nm;
	}
	public void setPosition_nm(String position_nm) {
		this.position_nm = position_nm;
	}
	public String getEmail() {
		return email;
	}
	public void setEmail(String email) {
		this.email = email;
	}
	public String getTelephone() {
		return telephone;
	}
	public void setTelephone(String telephone) {
		this.telephone = telephone;
	}
	public String getUser_status() {
		return user_status;
	}
	public void setUser_status(String user_status) {
		this.user_status = user_status;
	}
	public String getRole_id() {
		return role_id;
	}
	public void setRole_id(String role_id) {
		this.role_id = role_id;
	}
	public String getCreate_date() {
		return create_date;
	}
	public void setCreate_date(String create_date) {
		this.create_date = create_date;
	}
	public String getStatus() {
		return status;
	}
	public void setStatus(String status) {
		this.status = status;
	}
	public String getUser_nm() {
		return user_nm;
	}
	public void setUser_nm(String user_nm) {
		this.user_nm = user_nm;
	}
	public String getRole_nm() {
		return role_nm;
	}
	public void setRole_nm(String role_nm) {
		this.role_nm = role_nm;
	}
	public String getUser_status_nm() {
		return user_status_nm;
	}
	public void setUser_status_nm(String user_status_nm) {
		this.user_status_nm = user_status_nm;
	}
	public String getStatus_nm() {
		return status_nm;
	}
	public void setStatus_nm(String status_nm) {
		this.status_nm = status_nm;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/user/vo/XmlSampleVO.java
package kr.co.exsoft.user.vo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
 * Sample JAXB binding VO: (un)marshals as the {@code <samples>} root element
 * with {@code <userId>}, {@code <userNm>}, {@code <userPw>} child elements.
 * FIELD access means JAXB reads/writes the private fields directly.
 */
@XmlType
@XmlRootElement(name="samples")
@XmlAccessorType(XmlAccessType.FIELD)
public class XmlSampleVO {
	// A public no-argument constructor is required by JAXB.
	public XmlSampleVO() {
	}
	@XmlElement
	private String userId = "";
	@XmlElement
	private String userNm = "";
	@XmlElement
	private String userPw = "";
	public String getUserId() {
		return userId;
	}
	public void setUserId(String userId) {
		this.userId = userId;
	}
	public String getUserNm() {
		return userNm;
	}
	public void setUserNm(String userNm) {
		this.userNm = userNm;
	}
	public String getUserPw() {
		return userPw;
	}
	public void setUserPw(String userPw) {
		this.userPw = userPw;
	}
}
<file_sep>/EDMS3/WebContent/js/statistics/statistics.js
/**
* 통계 공통 스크립트
*/
var exsoftStatisticsFunc = {
pageTitle : "",
pageTitleId : "",
pageSize : "",
part : "",
gridId : "",
gridUrl : "",
subGrid : "",
workType : "",
statisticsMenuType : "",
chartInit : "true",
searchOptions : ['statisticsStrYear','statisticsDecade','statisticsSdate','statisticsEdate'],
// 통계메뉴
ConstPageName : {
//'myStatistics' : '내 문서 현황',
'myStatistics' : exsoft.message.statistic.myStatisticsTitle,
'loginStatistics' : '로그인 이력',
'userDocStatistics' : '사용자별 등록/활용 현황',
'groupDocStatistics' : '부서별 등록/활용 현황',
'decadeDocStatistics' : '기간별 등록/활용 현황',
'userFolderStatistics' : '사용자/문서함별 보유현항',
'folderDocStatistics' : '문서함/폴더별 보유현황',
'typeStatistics' : '문서유형별 보유 현황',
'securityLevelStatistics' : '보안등급별 보유현황',
'quotaStatistics' : '문서 Quota 현황'
},
SubMenu : ['loginStatistics','userDocStatistics','groupDocStatistics','decadeDocStatistics','userFolderStatistics','folderDocStatistics','typeStatistics','securityLevelStatistics','quotaStatistics'],
init : {
// 로그인 이력 초기 함수
initPage : function(menuType,statisticsMenuType,pageTitleId,pageSize,part,gridId,gridUrl,subGrid) {
// 로그인이력,사용자별등록/활용현황
// 상단 메뉴 선택 표시
exsoft.util.layout.topMenuSelect(menuType);
// 메뉴PATH
exsoftStatisticsFunc.pageTitle = exsoftStatisticsFunc.ConstPageName[statisticsMenuType];
exsoftStatisticsFunc.pageTitleId = pageTitleId;
exsoftStatisticsFunc.ui.pageNaviTitle(exsoftStatisticsFunc.pageTitle);
exsoftStatisticsFunc.pageSize = pageSize;
exsoftStatisticsFunc.part = part;
// 통계현황인 경우 선택된 메뉴 표시
exsoftStatisticsFunc.init.subMenuInit(statisticsMenuType);
// 사용자/문서함별 보유현황 & 문서함/폴더별 보유 현황 구분자(USER/FOLDER);
if(statisticsMenuType == "userFolderStatistics") {
exsoftStatisticsFunc.workType = "USER";
}else if(statisticsMenuType == "folderDocStatistics") {
exsoftStatisticsFunc.workType = "FOLDER";
}
// 메인 GRID
exsoftStatisticsFunc.gridId = gridId;
exsoftStatisticsFunc.gridUrl = gridUrl;
// 서브 GRID
exsoftStatisticsFunc.subGrid = subGrid;
// 검색조건 공통
exsoft.util.common.ddslick('#statisticsStrIndex', 'srch_type1', '', 79, function(divId){});
// 페이지목록 :: 사용안하는 경우 처리
if(statisticsMenuType == "decadeDocStatistics" || statisticsMenuType == "typeStatistics" || statisticsMenuType == "myStatistics" ) {
$("#statisticsRows").addClass("hide");
}else {
$("#statisticsRows").removeClass("hide");
exsoft.util.common.ddslick('#statisticsRows', 'tbl_rowCount', '', 68, exsoftStatisticsFunc.callback.statisticsRows);
}
exsoft.util.common.ddslick('#statisticsDecade', 'srch_type1', '', 83, exsoftStatisticsFunc.callback.statisticsDecade);
// 날짜 DatePicker
$("#statisticsSdate").datepicker({dateFormat:'yy-mm-dd'});
$("#statisticsEdate").datepicker({dateFormat:'yy-mm-dd'});
// 페이지목록 값 설정
exsoft.util.layout.setSelectBox('statisticsRows',exsoftStatisticsFunc.pageSize);
// 기간 1개월 기본 적용처리
exsoft.util.date.changeDate("one_month", "statisticsSdate", "statisticsEdate");
// 기간별 등록활용현황 예외처리
if(statisticsMenuType == "decadeDocStatistics") {
exsoft.util.date.selectYearBox(2010,2021,'statisticsStrYear');
// 년도 선택 비활성 및 초기값 설정
exsoft.util.common.ddslick('#statisticsStrYear', 'srch_type1', '', 79, function(divId){});
$('#statisticsStrYear').addClass("hide");
}
},
// 차트 SelectBox 동적 생성 처리
chatPageInit : function(statisticsMenuType) {
var chartType = "";
var colType = "";
// ChartType 초기데이터
exsoft.util.common.chartTypeInit();
exsoft.util.common.ddslick('#chartType', 'chart_type', '', 88, exsoftStatisticsFunc.callback.chatTypeChange);
// 파라미터정의
// 부서별 등록활용현황 - bar
// 기간별 등록활용현황,문서함/폴더별 보유현황 - line
// 문서유형별 보유현황,보안등급별 보유현황 - pie
if(statisticsMenuType == "decadeDocStatistics"|| statisticsMenuType == "folderDocStatistics") {
chartType = "line";
}else if(statisticsMenuType == "groupDocStatistics" ) {
chartType = "bar";
}else if(statisticsMenuType == "typeStatistics" || statisticsMenuType == "securityLevelStatistics") {
chartType = "pie";
}
exsoft.util.layout.setSelectBox("chartType",chartType);
// ColType 초기데이터 :
// 파라미터정의 => 1 - 부서별 등록활용현황/기간별 등록활용현황
// 파라미터정의 => 2 - 문서함/폴더별 보유현황,문서유형별 보유현황,보안등급별 보유현황
if(statisticsMenuType == "folderDocStatistics") {
exsoft.util.common.colTypeInit(2);
colType = "page_cnt";
}else if(statisticsMenuType == "typeStatistics" || statisticsMenuType == "securityLevelStatistics") {
exsoft.util.common.colTypeInit(2);
colType = "doc_cnt";
}else {
exsoft.util.common.colTypeInit(1);
colType = "create_cnt";
}
exsoft.util.common.ddslick('#colType', 'chart_count', '', 88,exsoftStatisticsFunc.callback.colTypeChange);
exsoft.util.layout.setSelectBox("colType",colType);
exsoftStatisticsFunc.statisticsMenuType = statisticsMenuType;
},
// 좌측메뉴초기화
subMenuInit : function(statisticsMenuType) {
for (var n in exsoftStatisticsFunc.SubMenu) {
$("#"+exsoftStatisticsFunc.SubMenu[n]).removeClass("selected");
}
$("#"+statisticsMenuType).addClass("selected");
},
},
open : {
// 차트 OPEN
chatViewer : function() {
exsoftStatisticsFunc.chartInit = "false";
if(exsoftStatisticsFunc.statisticsMenuType == "decadeDocStatistics") { // 기간별 등록활용 현황의 경우 조건 다름
if($("input[name='term']:checked").val() == "daily") {
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
exsoftStatisticsFunc.event.chartViewerProc();
}
}else {
exsoftStatisticsFunc.event.chartViewerProc();
}
}else {
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
exsoftStatisticsFunc.event.chartViewerProc();
}
}
}
},
layer : {
},
close : {
},
event : {
// 로그인 이력 GRID
loginLogGridList : function() {
// Login-history grid: POSTs the common search parameters and renders one
// row per login event, sorted by connect_time descending, with paging.
$('#loginLogGridList').jqGrid({
url: exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['connect_time','user_nm','user_id','group_nm','connect_ip','cert_nm'],
// NOTE(review): every entry repeats the duplicate key `align` — the second
// 'center' wins; harmless, but worth cleaning up.
colModel:[
{name:'connect_time',index:'connect_time',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
{name:'user_nm',index:'user_nm',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
{name:'user_id',index:'user_id',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
{name:'group_nm',index:'group_nm',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
{name:'connect_ip',index:'connect_ip',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
// NOTE(review): sort index 'code_nm' differs from column name 'cert_nm' —
// presumably the backing DB column; confirm against the server-side query.
{name:'cert_nm',index:'code_nm',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "connect_time",
sortorder:"desc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'로그인 이력',
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),strKeyword:$("#statisticsKeyword").val(),sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),part : exsoftStatisticsFunc.part},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('loginLogGridList');
exsoft.util.grid.gridNoDataMsgInit('loginLogGridList');
}
,loadComplete: function(data) {
// Toggle the "no data" overlay and refresh the pager on every load.
if ($("#loginLogGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('loginLogGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('loginLogGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#loginLogGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"connect_time":"접속일자","user_nm":"사용자명","user_id":"사용자ID","group_nm":"부서명","connect_ip":"IP주소","cert_nm":"인증여부"}';
exsoft.util.grid.gridColumHeader('loginLogGridList',headerData,'center');
}, /// END OF GRID
// 사용자별 등록/활용 현황 GRID
userDocGridList : function() {
// Per-user registration/usage grid: rows are grouped by user name with a
// per-group subtotal row and grand totals in the footer.
$('#userDocGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['user_nm','user_id','group_nm','type_name','create_cnt','read_cnt','update_cnt','delete_cnt'],
colModel:[
{name:'user_nm',index:'user_nm',width:40, align:'center',editable:false,sortable:false,key:true,align:'center'},
{name:'user_id',index:'user_id',width:40, editable:false,sortable:false,resizable:true,align:'center'},
{name:'group_nm',index:'group_nm',width:40, editable:false,sortable:false,resizable:true,align:'center'},
{name:'type_name',index:'type_name',width:30, editable:false,sortable:true,resizable:true,align:'center',summaryType:'count',summaryTpl : '소계'},
{name:'create_cnt',index:'create_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'read_cnt',index:'read_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'update_cnt',index:'update_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'delete_cnt',index:'delete_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "user_nm",
sortorder:"asc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'사용자별 등록/활용 현황',
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),strKeyword:$("#statisticsKeyword").val(),sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),part : exsoftStatisticsFunc.part},
// Group rows by user; subtotal per user, grand total in the footer row.
grouping: true,
groupingView : {
groupField : ['user_nm'],
groupColumnShow : [false],
groupText:['<b>{0}({1})</b>'],
groupCollapse : false,
groupOrder: ['asc'],
groupSummary : [true],
groupDataSorted : true
},
userDataOnFooter: true,
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('userDocGridList');
exsoft.util.grid.gridNoDataMsgInit('userDocGridList');
}
,loadComplete: function(data) {
if ($("#userDocGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('userDocGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('userDocGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#userDocGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"user_nm":"사용자명","user_id":"사용자ID","group_nm":"부서명","type_name":"문서유형","create_cnt":"등록","read_cnt":"조회","update_cnt":"수정","delete_cnt":"삭제"}';
exsoft.util.grid.gridColumHeader('userDocGridList',headerData,'center');
},
// 부서별 등록/활용 현황 GRID
groupDocGridList : function() {
// Per-department registration/usage grid: same shape as the per-user grid
// but grouped by department (group_nm) with subtotals and a footer total.
$('#groupDocGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['group_nm','group_id','type_name','create_cnt','read_cnt','update_cnt','delete_cnt'],
colModel:[
{name:'group_nm',index:'group_nm',width:50, align:'center',editable:false,sortable:false,key:true,align:'center'},
{name:'group_id',index:'group_id',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'type_name',index:'type_name',width:50, editable:false,sortable:true,resizable:true,align:'center',summaryType:'count',summaryTpl : '소계'},
{name:'create_cnt',index:'create_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'read_cnt',index:'read_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'update_cnt',index:'update_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'delete_cnt',index:'delete_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "group_nm",
sortorder:"asc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'부서별 등록/활용 현황',
// NOTE(review): unlike the sibling grids this postData omits strIndex —
// presumably the department screen has no index dropdown; confirm.
postData : {strKeyword:$("#statisticsKeyword").val(),sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),part : exsoftStatisticsFunc.part},
grouping: true,
groupingView : {
groupField : ['group_nm'],
groupColumnShow : [false],
groupText:['<b>{0}({1})</b>'],
groupCollapse : false,
groupOrder: ['asc'],
groupSummary : [true],
groupDataSorted : true
},
userDataOnFooter: true,
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('groupDocGridList');
exsoft.util.grid.gridNoDataMsgInit('groupDocGridList');
}
,loadComplete: function(data) {
if ($("#groupDocGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('groupDocGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('groupDocGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#groupDocGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"group_nm":"부서명","group_id":"부서ID","type_name":"문서유형","create_cnt":"등록","read_cnt":"조회","update_cnt":"수정","delete_cnt":"삭제"}';
exsoft.util.grid.gridColumHeader('groupDocGridList',headerData,'center');
},
//기간별 등록/활용 현황 GRID
decadeDocGridList : function() {
// Per-period registration/usage grid: one row per day or per month
// depending on the "term" radio (daily/monthly); no pager on this screen.
$('#decadeDocGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['dateStr','create_cnt','read_cnt','update_cnt','delete_cnt'],
colModel:[
{name:'dateStr',index:'order_str',width:50, align:'center',editable:false,sortable:true,key:true,align:'center'},
{name:'create_cnt',index:'create_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'read_cnt',index:'read_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'update_cnt',index:'update_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'delete_cnt',index:'delete_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "order_str",
sortorder:"desc",
scrollOffset: 0,
emptyDataText: "데이터가 없습니다.",
caption:'기간별 등록/활용 현황',
// Sends both the date range (daily mode) and the selected year (monthly mode).
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),strKeyword:$("#statisticsKeyword").val(),sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),term:$("input[name='term']:checked").val(),strYear:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),part : exsoftStatisticsFunc.part},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('decadeDocGridList');
exsoft.util.grid.gridNoDataMsgInit('decadeDocGridList');
}
,loadComplete: function(data) {
if ($("#decadeDocGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('decadeDocGridList','nolayer_data'); // changed: no_data => nolayer_data
}else {
exsoft.util.grid.gridViewRecords('decadeDocGridList');
}
exsoft.util.grid.gridInputInit(false);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized labels; the first column's label
// depends on the selected term (date vs. year-month).
var headerData = null;
if($("input[name='term']:checked").val() == "daily") {
headerData = '{"dateStr":"일자","create_cnt":"등록","read_cnt":"조회","update_cnt":"수정","delete_cnt":"삭제"}';
}else {
headerData = '{"dateStr":"년월","create_cnt":"등록","read_cnt":"조회","update_cnt":"수정","delete_cnt":"삭제"}';
}
exsoft.util.grid.gridColumHeader('decadeDocGridList',headerData,'center');
},
// 사용자/문서함별 보유 현황 GRID
userFolderGridList : function() {
$("#userFolderGridList").jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['user_nm','owner_id','group_nm','map_nm','doc_cnt','page_cnt','page_total'],
colModel:[
{name:'user_nm',index:'user_nm',width:50, align:'center',editable:false,sortable:false,key:true,align:'center'},
{name:'owner_id',index:'owner_id',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'group_nm',index:'group_nm',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'map_nm',index:'map_nm',width:30, editable:false,sortable:false,resizable:true,align:'center',summaryType:'count',summaryTpl : '소계'},
{name:'doc_cnt',index:'doc_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_cnt',index:'page_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_total',index:'page_total',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:exsoft.util.grid.bytes2Size,summaryType:'sum'},
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'사용자/문서함별 보유 현황',
postData : {
strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),strKeyword:$("#statisticsKeyword").val(),
sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),
part : exsoftStatisticsFunc.part,workType:exsoftStatisticsFunc.workType},
grouping: true,
groupingView : {
groupField : ['user_nm'],
groupColumnShow : [false],
groupText:['<b>{0}({1})</b>'],
groupCollapse : false,
groupOrder: ['ASC'],
groupSummary : [true],
groupDataSorted : true
},
userDataOnFooter: true,
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('userFolderGridList');
exsoft.util.grid.gridNoDataMsgInit('userFolderGridList');
}
,loadComplete: function(data) {
if ($("#userFolderGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('userFolderGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('userFolderGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#userFolderGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// 컬럼 헤더 정렬 및 다국어 변경 처리
var headerData = '{"user_nm":"사용자명","owner_id":"사용자ID","group_nm":"부서명","map_nm":"문서함","doc_cnt":"문서수","page_cnt":"파일수","page_total":"용량"}';
exsoft.util.grid.gridColumHeader('userFolderGridList',headerData,'center');
},
// 문서함/폴더별 보유 현황 GRID
folderDocGridList : function() {
// Per-cabinet/folder holdings grid: doc/file counts and size per folder.
// The selected folder index is sent as strFolderIdx (not strIndex).
$('#folderDocGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['group_nm','doc_cnt','page_cnt','fsize'],
colModel:[
{name:'group_nm',index:'group_nm',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'doc_cnt',index:'doc_cnt',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_cnt',index:'page_cnt',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
// NOTE(review): fsize is displayed raw (no bytes2Size formatter), unlike
// the sibling grids — presumably formatted server-side; confirm.
{name:'fsize',index:'page_total',width:40, editable:false,sortable:false,resizable:true,align:'center'}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "FOLDER_NAME_KO",
sortorder:"asc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
caption:'문서함/폴더별 보유 현황',
postData : {
strFolderIdx:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),
part : exsoftStatisticsFunc.part,workType:exsoftStatisticsFunc.workType},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('folderDocGridList');
exsoft.util.grid.gridNoDataMsgInit('folderDocGridList');
}
,loadComplete: function(data) {
if ($("#folderDocGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('folderDocGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('folderDocGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#folderDocGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"group_nm":"구분","doc_cnt":"문서수","page_cnt":"파일수","fsize":"용량"}';
exsoft.util.grid.gridColumHeader('folderDocGridList',headerData,'center');
},
// 문서유형별 보유 현황 GRID
typeStatGridList : function() {
// Per-document-type holdings grid: grouped by type with subtotals and a
// footer total; the cabinet cell gets a pointer cursor (clickable).
$('#typeStatGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['type_name','map_nm','doc_cnt','page_cnt','page_total'],
colModel:[
{name:'type_name',index:'type_name',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'map_nm',index:'map_nm',width:50, editable:false,sortable:false,resizable:true,align:'center',summaryType:'count',summaryTpl : '소계',
cellattr: function (rowId, cellValue, rowObject) {
// Pointer cursor to signal the cell is clickable.
return 'style="cursor: pointer;"';
}
},
{name:'doc_cnt',index:'doc_cnt',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_cnt',index:'page_cnt',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_total',index:'page_total',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:exsoft.util.grid.bytes2Size,summaryType:'sum'}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "type_name",
sortorder:"asc",
scrollOffset: 0,
emptyDataText: "데이터가 없습니다.",
caption:'문서유형별 보유 현황',
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),part : exsoftStatisticsFunc.part },
grouping: true,
groupingView : {
groupField : ['type_name'],
groupColumnShow : [false],
groupText:['<b>{0}({1})</b>'],
groupCollapse : false,
groupOrder: ['ASC'],
groupSummary : [true],
groupDataSorted : true
},
userDataOnFooter: true,
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('typeStatGridList');
exsoft.util.grid.gridNoDataMsgInit('typeStatGridList');
}
,loadComplete: function() {
// No pager on this screen (full result in one page).
if ($("#typeStatGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('typeStatGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('typeStatGridList');
}
exsoft.util.grid.gridInputInit(false);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"type_name":"문서유형","map_nm":"문서함","doc_cnt":"문서수","page_cnt":"파일수","page_total":"용량"}';
exsoft.util.grid.gridColumHeader('typeStatGridList',headerData,'center');
},
// 보안등급별 보유현황 GRID
securityLevelGridList : function() {
// Per-security-level holdings grid: grouped by level name (code_nm) with
// per-group subtotals and a footer total; sizes shown via bytes2Size.
$('#securityLevelGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['code_nm','user_nm','doc_cnt','page_cnt','page_total'],
colModel:[
{name:'code_nm',index:'code_nm',width:50, align:'center',editable:false,sortable:false,key:true,align:'center'},
{name:'user_nm',index:'user_nm',width:50, editable:false,sortable:false,resizable:true,align:'center',summaryType:'count',summaryTpl : '소계'},
{name:'doc_cnt',index:'doc_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_cnt',index:'page_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'page_total',index:'page_total',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:exsoft.util.grid.bytes2Size,summaryType:'sum'}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "security_level",
sortorder:"asc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'보안등급별보유 현황',
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),strKeyword:$("#statisticsKeyword").val(),sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val(),part : exsoftStatisticsFunc.part},
grouping: true,
groupingView : {
groupField : ['code_nm'],
groupColumnShow : [false],
groupText:['<b>{0}({1})</b>'],
groupCollapse : false,
groupOrder: ['asc'],
groupSummary : [true],
groupDataSorted : true
},
userDataOnFooter: true,
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('securityLevelGridList');
exsoft.util.grid.gridNoDataMsgInit('securityLevelGridList');
}
,loadComplete: function(data) {
if ($("#securityLevelGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('securityLevelGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('securityLevelGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#securityLevelGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"code_nm":"보안등급","user_nm":"사용자","doc_cnt":"문서수","page_cnt":"파일수","page_total":"용량"}';
exsoft.util.grid.gridColumHeader('securityLevelGridList',headerData,'center');
},
// 문서 Quota 현황 GRID
quotaGridList : function() {
// Document quota grid: allocated quota vs. current usage per partition.
// A quota of -1 means "unlimited"; over-quota usage is highlighted in red.
$('#quotaGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['part_nm','storage_quota','page_total'],
colModel:[
{name:'part_nm',index:'part_nm',width:50, editable:false,sortable:false,resizable:true,align:'center'},
{name:'storage_quota',index:'storage_quota',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',
formatter:function(cellValue, option, rowObject){
// -1 (and any negative) marks an unlimited quota.
if(cellValue > -1) {
return exsoft.util.grid.bytes2Size(rowObject.storage_quota);
} else {
return "<span>무제한</span>";
}
}
},
{name:'page_total',index:'page_total',width:40, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',
formatter: function (cellValue, option,rowObject) {
// Render current usage in red when it exceeds a finite quota.
if(rowObject.page_total > rowObject.storage_quota && rowObject.storage_quota != -1) {
return "<font color='red'>"+exsoft.util.grid.bytes2Size(rowObject.page_total)+"</font>";
}else {
return exsoft.util.grid.bytes2Size(rowObject.page_total);
}
}
}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "part_nm",
sortorder:"asc",
scrollOffset: 0, // keep scroll offset at 0; otherwise the table gets a right-hand gap
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'문서 Quota 현황',
postData : {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),part : exsoftStatisticsFunc.part},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('quotaGridList');
exsoft.util.grid.gridNoDataMsgInit('quotaGridList');
}
,loadComplete: function(data) {
if ($("#quotaGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('quotaGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('quotaGridList');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#quotaGridPager",data);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"part_nm":"구분","storage_quota":"할당량","page_total":"현사용량"}';
exsoft.util.grid.gridColumHeader('quotaGridList',headerData,'center');
},
// 내문서현황 GRID
myGridList : function() {
// "My documents" grid: the current user's registration/usage counts per
// document type; only the date range is posted, no paging.
// NOTE(review): function is named myGridList but targets #myStatisticsGridList.
$('#myStatisticsGridList').jqGrid({
url:exsoft.contextRoot+exsoftStatisticsFunc.gridUrl,
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['type_name','create_cnt','read_cnt','update_cnt','delete_cnt'],
colModel:[
{name:'type_name',index:'type_name',width:50, align:'center',editable:false,sortable:false,key:true,align:'center'},
{name:'create_cnt',index:'create_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'read_cnt',index:'read_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'update_cnt',index:'update_cnt',width:20, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}},
{name:'delete_cnt',index:'delete_cnt',width:30, editable:false,sortable:false,resizable:true,align:'center',
sorttype:'number',formatter:'number',summaryType:'sum', formatoptions:{thousandsSeparator:",", decimalPlaces: 0}}
],
autowidth:true,viewrecords: true,multiselect:false,sortable: true,shrinkToFit:true,gridview: true,
height:"auto",
sortname : "type_name",
sortorder:"asc",
scrollOffset: 0,
viewsortcols:'vertical',
rowNum : exsoftStatisticsFunc.pageSize,
emptyDataText: "데이터가 없습니다.",
caption:'내문서현황',
postData : {sdate:$("#statisticsSdate").val(),edate:$("#statisticsEdate").val()},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('myStatisticsGridList');
exsoft.util.grid.gridNoDataMsgInit('myStatisticsGridList');
}
,loadComplete: function(data) {
if ($("#myStatisticsGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('myStatisticsGridList','no_data');
}else {
exsoft.util.grid.gridViewRecords('myStatisticsGridList');
}
exsoft.util.grid.gridInputInit(false);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Re-align headers and apply localized header labels.
var headerData = '{"type_name":"문서유형","create_cnt":"등록","read_cnt":"조회","update_cnt":"수정","delete_cnt":"삭제"}';
exsoft.util.grid.gridColumHeader('myStatisticsGridList',headerData,'center');
},
//검색처리(공통)
searchFunc : function() {
// 검색기간 유효성 체크 추가 처리(전체옵션 제거)
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
var postData = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
strKeyword:exsoft.util.common.sqlInjectionReplace($("#statisticsKeyword").val()),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
part : exsoftStatisticsFunc.part,
workType:exsoftStatisticsFunc.workType,
is_search:'true'};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
}
},
//검색처리(내문서현황)
searchMyFunc : function() {
// 검색기간 유효성 체크 추가 처리(전체옵션 제거)
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
var postData = {
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
}
},
//검색처리(문서Quota현황)
searchQuotaFunc : function() {
var postData = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
part :exsoftStatisticsFunc.part,
is_search:'true'};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
},
//검색처리(문서함/폴더별 보유현황)
searchFolderFunc : function() {
// 검색기간 유효성 체크 추가 처리(전체옵션 제거)
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
var postData = {strFolderIdx:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
part : exsoftStatisticsFunc.part,
workType:exsoftStatisticsFunc.workType,
is_search:'true'};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
}
},
// 검색처리(기간별 등록/활용 현황)
searchDecadeFunc : function() {
var term = $("input[name='term']:checked").val();
if(term == "monthly") {
var postData = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
term:$("input[name='term']:checked").val(),
strKeyword:exsoft.util.common.sqlInjectionReplace($("#statisticsKeyword").val()),
strYea:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
part :exsoftStatisticsFunc.part,
is_search:'true'};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
}else {
// 검색기간 유효성 체크 추가 처리(전체옵션 제거)
if(exsoft.util.check.searchValid($("#statisticsSdate").val(),$("#statisticsEdate").val()) ) {
var postData = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
term:$("input[name='term']:checked").val(),
strKeyword:exsoft.util.common.sqlInjectionReplace($("#statisticsKeyword").val()),
strYear:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
part :exsoftStatisticsFunc.part,
is_search:'true'};
exsoft.util.grid.gridPostDataRefresh(exsoftStatisticsFunc.gridId,exsoft.contextRoot+exsoftStatisticsFunc.gridUrl, postData);
}
}
},
// 페이지이동 처리(공통)
gridPage : function(nPage) {
$(exsoftStatisticsFunc.gridId).setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
// 페이지목록 선택시(공통)
rowsPage : function(rowNum) {
$(exsoftStatisticsFunc.gridId).setGridParam({rowNum:rowNum}); // 페이지목록 설정값 변경처리
$(exsoftStatisticsFunc.gridId).setGridParam({page:1,postData:{is_search:'false',page_init:'true'}}).trigger("reloadGrid");
},
// 차트 그리기
chartViewerProc : function() {
exsoft.util.layout.divLayerOpen('statics_view_wrapper', 'statics_view');
exsoftStatisticsFunc.ui.clearCanvas();
if(exsoftStatisticsFunc.statisticsMenuType == "decadeDocStatistics") { // 기간별 등록활용 현황
exsoftStatisticsFunc.chart.decadeDocStatisticsChart();
}else if(exsoftStatisticsFunc.statisticsMenuType == "typeStatistics") { // 문서유형별 보유현황
exsoftStatisticsFunc.chart.typeStatisticsChart();
}else if(exsoftStatisticsFunc.statisticsMenuType == "groupDocStatistics") { // 부서별 등록/활용 현황
exsoftStatisticsFunc.chart.groupDocStatisticsChart();
}else if(exsoftStatisticsFunc.statisticsMenuType == "folderDocStatistics") { // 문서함/폴더별 보유현황
exsoftStatisticsFunc.chart.folderDocStatisticsChart();
}else if(exsoftStatisticsFunc.statisticsMenuType == "securityLevelStatistics") { // 보안 등급별 보유현황
exsoftStatisticsFunc.chart.securityLevelStatisticsChart();
}
},
},
chart : {
// 문서유형별 그래프 그리기
typeStatisticsChart : function() {
var jsonObject = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
strKeyword : $("#statisticsKeyword").val(),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
isChart:'chart',
part : exsoftStatisticsFunc.part,
page : $('#typeStatGridList').getGridParam('page'),
rows : $('#typeStatGridList').getGridParam('rowNum'),
chartType : exsoft.util.layout.getSelectBox('chartType','option'),
colType : exsoft.util.layout.getSelectBox('colType','option'),
yTitle : exsoft.util.layout.getSelectBox('colType','text'),
is_search:'false'};
exsoftStatisticsFunc.callback.charServerCall(jsonObject,'/statistics/typeStatisticsList.do');
},
// 기간별 등록/활용 현황 그래프 그리기
decadeDocStatisticsChart : function() {
var jsonObject = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
strKeyword : $("#statisticsKeyword").val(),term:$("input[name='term']:checked").val(),
sdate:$("#statisticsSdate").val(),strYear:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),
edate:$("#statisticsEdate").val(),
isChart:'chart',
part : exsoftStatisticsFunc.part,
page : $('#decadeDocGridList').getGridParam('page'),
rows : $('#decadeDocGridList').getGridParam('rowNum'),
chartType : exsoft.util.layout.getSelectBox('chartType','option'),
colType : exsoft.util.layout.getSelectBox('colType','option'),
yTitle : exsoft.util.layout.getSelectBox('colType','text'),
is_search:'false'};
exsoftStatisticsFunc.callback.charServerCall(jsonObject,'/statistics/decadeDocGridList.do');
},
// 부서별 등록/활용 현황
groupDocStatisticsChart : function() {
var jsonObject = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
strKeyword : $("#statisticsKeyword").val(),
sdate:$("#statisticsSdate").val(),strYear:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),
edate:$("#statisticsEdate").val(),
isChart:'chart',
part : exsoftStatisticsFunc.part,
page : $('#groupDocGridList').getGridParam('page'),
rows : $('#groupDocGridList').getGridParam('rowNum'),
chartType : exsoft.util.layout.getSelectBox('chartType','option'),
colType : exsoft.util.layout.getSelectBox('colType','option'),
yTitle : exsoft.util.layout.getSelectBox('colType','text'),
is_search:'false'};
exsoftStatisticsFunc.callback.charServerCall(jsonObject,'/statistics/groupDocGridList.do');
},
// 문서함/폴더별 보유현황
folderDocStatisticsChart : function() {
var jsonObject = {strFolderIdx:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
sdate:$("#statisticsSdate").val(),strYear:exsoft.util.layout.getSelectBox('statisticsStrYear','option'),
edate:$("#statisticsEdate").val(),
isChart:'chart',
workType:exsoftStatisticsFunc.workType,
part : exsoftStatisticsFunc.part,
page : $('#folderDocGridList').getGridParam('page'),
rows : $('#folderDocGridList').getGridParam('rowNum'),
chartType : exsoft.util.layout.getSelectBox('chartType','option'),
colType : exsoft.util.layout.getSelectBox('colType','option'),
yTitle : exsoft.util.layout.getSelectBox('colType','text'),
is_search:'false'};
exsoftStatisticsFunc.callback.charServerCall(jsonObject,'/statistics/userFolderStatisticsList.do');
},
// 보안 등급별 보유현황
securityLevelStatisticsChart : function() {
var jsonObject = {strIndex:exsoft.util.layout.getSelectBox('statisticsStrIndex','option'),
strKeyword : $("#statisticsKeyword").val(),
sdate:$("#statisticsSdate").val(),
edate:$("#statisticsEdate").val(),
isChart:'chart',
part : exsoftStatisticsFunc.part,
page : $('#securityLevelGridList').getGridParam('page'),
rows : $('#securityLevelGridList').getGridParam('rowNum'),
chartType : exsoft.util.layout.getSelectBox('chartType','option'),
colType : exsoft.util.layout.getSelectBox('colType','option'),
yTitle : exsoft.util.layout.getSelectBox('colType','text'),
is_search:'false'};
exsoftStatisticsFunc.callback.charServerCall(jsonObject,'/statistics/securityStatisticsList.do');
}
},
ui : {
// 통계 메뉴PATH
pageNaviTitle : function() {
$("#"+exsoftStatisticsFunc.pageTitleId).html(exsoftStatisticsFunc.pageTitle);
},
// 기간별 등록/활용 현황 기간표시 Show/Hide
termChange : function() {
// TO DO 년도/기간 비활성화 처리
if($("input[name='term']:checked").val() == "daily") {
for (var n in exsoftStatisticsFunc.searchOptions) {
if(exsoftStatisticsFunc.searchOptions[n] == "statisticsStrYear") {
$("#statisticsStrYear").addClass("hide");
$("#statisticsDecade").removeClass("hide");
$("#statisticsSdate").removeClass("readonly");
$("#statisticsEdate").removeClass("readonly");
}else {
$("#"+exsoftStatisticsFunc.searchOptions[n]).prop("disabled",false);
}
}
}else {
for (var n in exsoftStatisticsFunc.searchOptions) {
if(exsoftStatisticsFunc.searchOptions[n] == "statisticsStrYear") {
$("#statisticsStrYear").removeClass("hide");
$("#statisticsDecade").addClass("hide");
$("#statisticsSdate").addClass("readonly");
$("#statisticsEdate").addClass("readonly");
}else {
$("#"+exsoftStatisticsFunc.searchOptions[n]).prop("disabled",true);
}
}
}
},
// 캔버스 초기화 처리
clearCanvas : function() {
$('#compradores').remove();
$('.statics_chart_area').append('<canvas id="compradores" width="650" height="400"></canvas>');
}
},
callback : {
	// Fired when a new page-size entry is chosen from the rows selector.
	statisticsRows : function(divId, selectedData){
		exsoftStatisticsFunc.event.rowsPage(selectedData.selectedData.value);
	},
	// Fired when a preset date range is chosen; rewrites both date inputs.
	statisticsDecade : function(divId, selectedData){
		exsoft.util.date.changeDate(selectedData.selectedData.value, "statisticsSdate", "statisticsEdate");
	},
	// Redraw the chart when a chart attribute (column type) changes.
	// Skipped while the chart layer is still initialising (chartInit flag).
	colTypeChange : function(divId, selectedData,arrParam) {
		if(exsoftStatisticsFunc.chartInit != "true") {
			exsoftStatisticsFunc.open.chatViewer();
		}
	},
	// Same as colTypeChange, but for the chart-type selector.
	chatTypeChange : function(divId, selectedData,arrParam) {
		if(exsoftStatisticsFunc.chartInit != "true") {
			exsoftStatisticsFunc.open.chatViewer();
		}
	},
	// Request chart data from the server and render it with the legacy
	// Chart.js v1 API. For bar/line charts the server's {label,value} list
	// is flattened into parallel label/data arrays; pie data is handed
	// straight to Chart.Doughnut.
	charServerCall : function(jsonObject,urls) {
		exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+urls, 'chartView',
			function(data, e){
				if(data.result == 'true'){
					var chartData = null;
					var ctx = null;
					var labels = [],dataset=[];
					// Flatten server rows into parallel arrays for bar/line charts.
					if(data.chartType == "bar" || data.chartType == "line") {
						for(var item in data.chartData){
							labels.push(data.chartData[item].label);
							dataset.push(data.chartData[item].value);
						}
					}
					if(data.chartType == "bar") {
						chartData = {
							labels : labels,
							datasets : [{
								fillColor: "rgba(220,220,220,0.5)",
								strokeColor: "rgba(220,220,220,0.8)",
								highlightFill: "rgba(220,220,220,0.75)",
								highlightStroke: "rgba(220,220,220,1)",
								data : dataset
							}]
						}
						ctx = document.getElementById('compradores').getContext('2d');
						new Chart(ctx).Bar(chartData);
					}else if(data.chartType == "line") {
						chartData = {
							labels : labels,
							datasets : [{
								fillColor: "rgba(220,220,220,0.2)",
								strokeColor: "rgba(220,220,220,1)",
								pointColor: "rgba(220,220,220,1)",
								pointStrokeColor: "#fff",
								pointHighlightFill: "#fff",
								pointHighlightStroke: "rgba(220,220,220,1)",
								data : dataset
							}]
						}
						ctx = document.getElementById("compradores").getContext("2d");
						// Chart.js v1 line options (grid lines, bezier smoothing, points).
						new Chart(ctx).Line(chartData, {
							scaleShowGridLines : true,
							scaleGridLineColor : "rgba(0,0,0,0.05)",
							scaleGridLineWidth : 1,
							bezierCurve : true,
							bezierCurveTension : 0.4,
							pointDot : true,
							pointDotRadius : 4,
							pointDotStrokeWidth : 1,
							pointHitDetectionRadius : 20,
							datasetStroke : true,
							datasetStrokeWidth : 2,
							datasetFill : true,
							onAnimationProgress: function() {
								//console.log("onAnimationProgress");
							},
							onAnimationComplete: function() {
								//console.log("onAnimationComplete");
							}
						});
					}else if(data.chartType == "pie") {
						ctx = document.getElementById("compradores").getContext("2d");
						new Chart(ctx).Doughnut(data.chartData);
					}
				}else {
					jAlert('그래프를 생성하는데 실패하였습니다.','확인',0);
				}
			});
	}
},
}<file_sep>/EDMS3/WebContent/js/popup/documentVersionDetail.js
/**
* 상세검색 JavaScript
*/
var gViewDocId;
var gRootId ="";
// Document version detail popup: loads one document's detail record and
// renders it into the read-only "view_*" layer elements.
var exsoftDocVersionDetailFunc = {
	// 0. initialisation
	init : {
		// Remember the target document id, then load its detail view.
		initDocumentViewWindow : function(docid) {
			gViewDocId = docid;
			//this.gRootId = gRootId;
			// Fetch the document's base information
			exsoftDocVersionDetailFunc.event.getDocumentView();
		},
	},
	// 1. popups
	open : {
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : {
	},
	// 4. screen event handling
	event : {
		// 1-1. Fetch the document detail from the server and render it.
		getDocumentView : function() {
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:gViewDocId}, exsoft.contextRoot+"/document/documentDetail.do", "select",
				function(data,e) {
					exsoftDocVersionDetailFunc.event.printDocumentViewVO(data);
					//base.tableAclItemPrintList("view_docAclItemList", data.aclItemList,'document');
					//base.tableAclItemPrintList("view_docExAclItemList", data.aclExItemList,'document');
					//printRefDocView(data);
					//printPageListView(data);
				});
		},
		// 2-1. Render the document's base information into the view layer.
		printDocumentViewVO : function(data) {
			var docVO = data.documentVO;
			//gRootId = docVO.root_id == "" ? docVO.doc_id : docVO.root_id;
			$("#view_doc_name").html(docVO.doc_name);
			$("#view_doc_version").html("Ver " + docVO.version_no);
			$("#view_folderPath").html(data.folderPath);
			$("#view_type_name").html(docVO.type_name);
			$("#view_preservation_year").html(docVO.preservation_year + "년");
			$("#view_security_level_name").html(exsoft.util.common.findCodeName(data.securityList,docVO.security_level));
			$("#view_access_grade_name").html(exsoft.util.common.findCodeName(data.positionList,docVO.access_grade));
			$("#view_creator_name").html(docVO.creator_name + " [" + docVO.owner_name + "]");
			$("#view_create_date").html(docVO.create_date);
			// BUGFIX: .html() with no argument is a getter and did nothing;
			// pass "" to actually clear the multi-link area before rendering.
			$("#view_multiLink").html("");
			$("#view_keyword").html(docVO.keyword);
			// NOTE(review): String#replace with a string pattern only replaces the
			// first occurrence — confirm whether a global (regex) replace is intended.
			$("#vtemp_content").html(docVO.doc_description.replace('<','<').replace('& lt;','<').replace('>', '>').replace('& gt;', '>'));
			// BUGFIX: "${contextRoot}" is JSP EL and is never evaluated inside a
			// static .js file, so the iframe pointed at a literal "${contextRoot}/..."
			// URL. Build it with exsoft.contextRoot like the rest of this file.
			$('#vIframe_editor').attr("src", exsoft.contextRoot + "/editor_7.4.5/doc_view.jsp");
		}
	},
	// 5. UI changes
	ui : {
	},
	// 6. callbacks
	callback : {
	},
}
<file_sep>/EDMS3/WebContent/js/popup/registAclWindow.js
// ACL registration popup: create / modify / copy an access-control list and
// its per-accessor permission rows (jqGrid "registAclItemGridList").
var registAclWindow = {
	// Members
	callback : null,          // caller-supplied completion callback
	popType : null,           // create, modify, copy
	// Parameters handed to jqGrid saveRow for client-side (array) editing.
	gridParameters : {
		"successfunc" : null,
		"url" : 'clientArray',
		"extraparam" : {},
		"aftersavefunc" : function( response ) {},
		"errorfunc": null,
		"afterrestorefunc" : null,
		"restoreAfterError" : true,
		"mtype" : "POST"
	},
	// Payload posted to /permission/aclControl.do on submit.
	postData : {
		acl_id : null,
		acl_name : null,
		acl_type : null,
		src_acl_name : null,
		open_id : null,
		open_name : null,
		open_isgroup : null,
		sort_index : null,
		type : null,
		aclItemArrayList : null
	},
	// Clear all popup state and empty the accessor grid.
	reset : function() {
		registAclWindow.popType = null;
		registAclWindow.postData = {
			acl_id : null,
			acl_name : null,
			acl_type : null,
			src_acl_name : null,
			open_id : null,
			open_name : null,
			open_isgroup : null,
			sort_index : null,
			type : null,
			aclItemArrayList : null
		};
		registAclWindow.grid.clearAclItemGrid();
	},
	// Prepare and open the popup for the requested mode.
	init : function(callback, popType, aclDetail, aclItems) {
		registAclWindow.callback = callback; // store completion callback
		registAclWindow.popType = popType;   // create / modify / copy
		registAclWindow.ui.setTitle();       // title follows popType
		registAclWindow.grid.initAclItemGrid();  // (re)build the grid
		registAclWindow.ui.setOpenRangeRole();   // radios follow the user role
		switch (popType) {
		case "create" :
			registAclWindow.grid.addOwner();
			registAclWindow.ui.resetAclName();
			registAclWindow.ui.setOpenUser();
			break;
		case "modify" :
			registAclWindow.ui.setModifyAcl(aclDetail, aclItems);
			break;
		case "copy" :
			registAclWindow.ui.resetAclName();
			registAclWindow.ui.setCopyAcl(aclDetail, aclItems);
			break;
		default :
			jAlert("popup type이 올바르지 않습니다.");
			return;
		}
		registAclWindow.open();
	},
	open : function() {
		exsoft.util.layout.divLayerOpen("subFolder_authModifyCopy_wrapper", "subFolder_authModifyCopy");
	},
	close : function() {
		exsoft.util.layout.divLayerClose("subFolder_authModifyCopy_wrapper", "subFolder_authModifyCopy");
	},
	ajax : {
	},
	functions : {
	},
	grid : {
		// Seed the grid with the fixed, non-editable OWNER row.
		addOwner : function() {
			var rowData = {
				accessor_id : 'OWNER',
				accessor_isgroup : 'F',
				accessor_isalias : 'T',
				accessor_name : '소유자',
				fol_default_acl : 'DELETE',
				fol_act_create : 'T',
				fol_act_change_permission : 'T',
				doc_default_acl : 'DELETE',
				doc_act_create : 'T',
				doc_act_cancel_checkout : 'T',
				doc_act_change_permission : 'T'
			}
			$("#registAclItemGridList").jqGrid("addRowData", rowData.accessor_id, rowData);
		},
		// Append one accessor row; every row except OWNER opens in edit mode.
		addRowData : function(rowData) {
			$("#registAclItemGridList").jqGrid("addRowData", rowData.accessor_id, rowData);
			// BUGFIX: the guard read `this.accessor_id`, which is undefined here
			// (`this` is the grid object, not the row), so the OWNER row was also
			// switched into edit mode. Check the rowData argument instead.
			if (rowData.accessor_id != "OWNER") {
				$("#registAclItemGridList").editRow(rowData.accessor_id, false);
			}
		},
		// Collect every row of the grid (after saving pending edits) as an
		// array of plain permission objects for the submit payload.
		getSelectedItems : function(gridId) {
			var retArr = [];
			var rowIds = $("#" + gridId).jqGrid("getDataIDs");
			var _cnt = rowIds.length;
			for (var i = 0; i < _cnt; i++) {
				// save first so in-edit values are committed before reading
				$('#'+gridId).jqGrid('saveRow', rowIds[i], registAclWindow.gridParameters );
				var _row =$("#"+gridId).getRowData(rowIds[i]);
				retArr[i] = {
					accessor_id : _row.accessor_id,
					accessor_isgroup : _row.accessor_isgroup,
					accessor_isalias : _row.accessor_isalias,
					accessor_name : _row.accessor_name,
					fol_default_acl : _row.fol_default_acl,
					fol_act_create : _row.fol_act_create,
					fol_act_change_permission : _row.fol_act_change_permission,
					doc_default_acl : _row.doc_default_acl,
					doc_act_create : _row.doc_act_create,
					doc_act_cancel_checkout : _row.doc_act_cancel_checkout,
					doc_act_change_permission : _row.doc_act_change_permission
				}
			}
			return retArr;
		},
		clearAclItemGrid : function() {
			$("#registAclItemGridList").jqGrid("clearGridData");
		},
		// (Re)build the accessor grid with inline-editable permission columns.
		initAclItemGrid : function() {
			if ($("#registAclItemGridList")[0].grid != undefined) {
				$('#registAclItemGridList').jqGrid('GridUnload');
			}
			$('#registAclItemGridList').jqGrid({
				datatype:'json',
				colNames:['accessor_id', 'accessor_isgroup', 'accessor_isalias', '접근자','기본권한','폴더등록','권한변경','기본권한','문서등록','반출취소','권한변경'],
				colModel:[
					{name:'accessor_id',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
					{name:'accessor_isgroup',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
					{name:'accessor_isalias',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
					{name:'accessor_name',index:'accessor_name',width:80, editable:false,sortable:false,resizable:true,align:'center'},
					{name:'fol_default_acl',index:'fol_default_acl',width:65, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
						editoptions:{
							value:"DELETE:삭제;UPDATE:수정;READ:조회;BROWSE:목록"
						},formatter:'select' // formatter renders the option label, not the raw value
					},
					// BUGFIX (all checkbox columns below): the option key was
					// misspelled `fomatter`, so the checkbox formatter was never
					// applied and the raw 'T'/'F' text was displayed.
					{name:'fol_act_create',index:'fol_act_create',width:65, editable:true,sortable:false,resizable:true,align:'center',
						edittype:'checkbox',
						editoptions:{value:'T:F'},
						formatter:'checkbox'
					},
					{name:'fol_act_change_permission',index:'fol_act_change_permission',width:65, editable:true,sortable:false,resizable:true,align:'center',
						edittype:'checkbox',
						editoptions:{value:'T:F'},
						formatter:'checkbox'
					},
					{name:'doc_default_acl',index:'doc_default_acl',width:65, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
						editoptions:{
							value:"NONE:없음;DELETE:삭제;UPDATE:수정;READ:조회;BROWSE:목록"
						},formatter:'select'
					},
					{name:'doc_act_create',index:'doc_act_create',width:65, editable:true,sortable:false,resizable:true,align:'center',
						edittype:'checkbox',
						editoptions:{value:'T:F'},
						formatter:'checkbox'
					},
					{name:'doc_act_cancel_checkout',index:'doc_act_cancel_checkout',width:65, editable:true,sortable:false,resizable:true,align:'center',
						edittype:'checkbox',
						editoptions:{value:'T:F'},
						formatter:'checkbox'
					},
					{name:'doc_act_change_permission',index:'doc_act_change_permission',width:65, editable:true,sortable:false,resizable:true,align:'center',
						edittype:'checkbox',
						editoptions:{value:'T:F'},
						formatter:'checkbox'
					},
				],
				autowidth:true,
				// width:"500px",
				height:"auto",
				viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
				caption:'접근자 목록'
				,loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('registAclItemGridList');
				}
				,gridComplete : function() {
					// loadComplete never fires for client-array grids, so any
					// post-load work would belong here.
					// exsoft.util.grid.gridInputInit(false);
					//
					// $("span.ui-jqgrid-resize.ui-jqgrid-resize-ltr").each(function(){
					// 	if($(this).css("height") == '48px'){
					// 		$(this).css("height", "");
					// 	}
					// });
				}
				,onCellSelect: function(rowid, iCol,cellcontent,e){
					// OWNER permissions are fixed; everything else is editable inline.
					if(exsoft.util.grid.gridIsRowDataExist('registAclItemGridList', rowid, 'accessor_id', 'OWNER')) {
						jAlert('소유자는 수정할 수 없습니다.');
						$('#registAclItemGridList').jqGrid('setSelection',rowid,false); // release the checkbox
					} else {
						$('#registAclItemGridList').editRow(rowid,false);
					}
				}
				,onSelectRow: function(rowid,status,e){
					if(!exsoft.util.grid.gridIsRowDataExist('registAclItemGridList', rowid, 'accessor_id', 'OWNER')) {
						// check whether the row is already in edit mode
						var edited = exsoft.util.grid.gridEditMode('registAclItemGridList',rowid);
						// unchecking the row commits its pending edits
						if(!status) {
							$('#registAclItemGridList').jqGrid('saveRow', rowid, registAclWindow.gridParameters);
						}else {
							if(edited == "0") {
								$('#registAclItemGridList').editRow(rowid,false);
							}
						}
					}
				}
			});
			// two-level header: group folder vs document permission columns
			$("#registAclItemGridList").jqGrid('destroyGroupHeader');
			$("#registAclItemGridList").jqGrid('setGroupHeaders', {
				useColSpanStyle: true,
				groupHeaders:[
					{startColumnName: 'fol_default_acl', numberOfColumns: 3, titleText: '폴더권한'},
					{startColumnName: 'doc_default_acl', numberOfColumns: 4, titleText: '문서권한'}
				]
			});
		}
	},
	event : {
		// Share-range radio clicked: derive open target fields from the range.
		selectOpenRange : function(obj) {
			registAclWindow.postData.acl_type = $(obj).val();
			var _vars = []; // 0:open_id 1:open_name 2:open_isgroup 3:sort_index
			switch ($(obj).val()) {
			case Constant.ACL.TYPE_ALL : // company-wide
				_vars = ["ALL", "전사", "F", "3"]
				break;
			case Constant.ACL.TYPE_DEPT : // department incl. sub-departments
				_vars = ["", "찾기 버튼을 이용하여 공유대상을 선택하세요", "T", "1"]
				break;
			case Constant.ACL.TYPE_TEAM : // single department
				if (exsoft.user.acl_menu_part == Constant.MENU.MENU_TEAM) {
					_vars = [exsoft.user.manage_group_id, exsoft.user.manage_group_nm, "T", "2"];
				} else {
					_vars = ["", "찾기 버튼을 이용하여 공유대상을 선택하세요", "T", "2"];
				}
				break;
			case Constant.ACL.TYPE_PRIVATE : // not shared
				_vars = [exsoft.user.user_id, exsoft.user.user_name, "F", "4"]
				break;
			}
			registAclWindow.postData.open_id = _vars[0];
			registAclWindow.postData.open_name = _vars[1];
			registAclWindow.postData.open_isgroup = _vars[2];
			registAclWindow.postData.sort_index = _vars[3];
			$("#registAclWindowOpenName").val(_vars[1]);
		},
		// Open the share-target picker matching the chosen share range.
		selectShareTargetWindow : function() {
			// BUGFIX: the cases referenced `Constant.Acl.*` (wrong case), which is
			// undefined and threw a TypeError; use `Constant.ACL.*` as everywhere
			// else in this file.
			switch (registAclWindow.postData.acl_type) {
			case Constant.ACL.TYPE_ALL :
				jAlert("공유범위가 '전사'는 공유대상이 전사로 지정됩니다");
				break;
			case Constant.ACL.TYPE_DEPT :
			case Constant.ACL.TYPE_TEAM :
				if (exsoft.user.acl_menu_part == Constant.MENU.MENU_GROUP) {
					// group picker scoped to the user's managed department
					selectSingleFolderWindow.init(function(){}, "GROUP", exsoft.user.manage_group_id);
				} else {
					selectSingleFolderWindow.init(function(){}, "GROUP");
				}
				break;
			case Constant.ACL.TYPE_PRIVATE :
				selectSingleUserWindow.init.initSingleUserWindow()
				break;
			}
		},
		// Add accessor rows via the accessor picker popup.
		addAclItemRow : function() {
			selectAccessorWindow.init(registAclWindow.callbackFunctions.selectAccessorCallback, "registAclItemGridList");
		},
		// Remove the checked accessor rows (OWNER cannot be removed).
		deleteAclItemRow : function() {
			exsoft.util.grid.gridDeleteRow('registAclItemGridList', 'OWNER', '소유자는 삭제할 수 없습니다.', false);
			exsoft.util.grid.gridCheckBoxInit('registAclItemGridList');
		},
		// Validate and post the ACL (insert for create/copy, update for modify).
		submit : function() {
			if ($("#registAclWindowAclName").val().length == 0) {
				jAlert("권한명을 입력하세요.");
				return;
			}
			// BUGFIX: the original compared open_id/open_name with `== 0`, which is
			// false for null/undefined, so an unset share target passed validation.
			if (!registAclWindow.postData.open_id || !registAclWindow.postData.open_name) {
				jAlert("공개범위를 선택하세요");
				return;
			}
			registAclWindow.postData.acl_name = $("#registAclWindowAclName").val();
			registAclWindow.postData.aclItemArrayList = JSON.stringify(registAclWindow.grid.getSelectedItems("registAclItemGridList"));
			switch (registAclWindow.popType) {
			case "create" :
			case "copy" :
				registAclWindow.postData.type = "insert";
				exsoft.util.ajax.ajaxDataFunctionWithCallback(registAclWindow.postData, exsoft.contextRoot+"/permission/aclControl.do", "create", registAclWindow.callback);
				break;
			case "modify" :
				registAclWindow.postData.type = "update";
				exsoft.util.ajax.ajaxDataFunctionWithCallback(registAclWindow.postData, exsoft.contextRoot+"/permission/aclControl.do", "modify", registAclWindow.callback);
				break;
			}
			registAclWindow.reset();
			registAclWindow.close();
		},
		cancel : function() {
			registAclWindow.reset();
			registAclWindow.close();
		}
	},
	ui : {
		resetAclName : function() {
			$("#registAclWindowAclName").val("");
		},
		setTitle : function() {
			if (registAclWindow.popType == "create" || registAclWindow.popType == "copy") {
				$("#registAclWindowTitle").text("[권한] 등록");
			} else {
				$("#registAclWindowTitle").text("[권한] 수정");
			}
		},
		// Default open target for a new ACL: the current user, not shared.
		setOpenUser : function() {
			registAclWindow.postData.acl_type = "PRIVATE";
			registAclWindow.postData.open_id = exsoft.user.user_id;
			registAclWindow.postData.open_name = exsoft.user.user_name;
			registAclWindow.postData.open_isgroup = "F";
			registAclWindow.postData.sort_index = "4";
			$("#registAclWindowOpenName").val(exsoft.user.user_name);
		},
		// Pre-fill the popup from an existing ACL for editing.
		setModifyAcl : function(aclDetail, aclItems) {
			registAclWindow.postData.acl_id = aclDetail.acl_id;
			registAclWindow.postData.acl_type = aclDetail.acl_type;
			registAclWindow.postData.acl_name = aclDetail.acl_name;
			registAclWindow.postData.src_acl_name = aclDetail.acl_name;
			registAclWindow.postData.open_id = aclDetail.open_id;
			registAclWindow.postData.open_name = aclDetail.open_name;
			registAclWindow.postData.open_isgroup = aclDetail.open_isgroup;
			registAclWindow.postData.sort_index = aclDetail.sort_index;
			// BUGFIX: the attribute selector was missing its closing ']', which is
			// a malformed selector (jQuery throws a syntax error).
			$("input[name=registAclWindowOpenRange][value='{0}']".format(aclDetail.acl_type)).prop("checked", true);
			$("#registAclWindowOpenName").val(aclDetail.open_name);
			$("#registAclWindowAclName").val(aclDetail.acl_name);
			registAclWindow.grid.clearAclItemGrid();
			$(aclItems).each(function() {
				registAclWindow.grid.addRowData(this);
			});
		},
		// Pre-fill the popup from an existing ACL for copying (no acl_id/name).
		setCopyAcl : function(aclDetail, aclItems) {
			registAclWindow.postData.acl_type = aclDetail.acl_type;
			registAclWindow.postData.open_id = aclDetail.open_id;
			registAclWindow.postData.open_name = aclDetail.open_name;
			registAclWindow.postData.open_isgroup = aclDetail.open_isgroup;
			registAclWindow.postData.sort_index = aclDetail.sort_index;
			// BUGFIX: missing closing ']' in the attribute selector (see setModifyAcl).
			$("input[name=registAclWindowOpenRange][value='{0}']".format(aclDetail.acl_type)).prop("checked", true);
			$("#registAclWindowOpenName").val(aclDetail.open_name);
			registAclWindow.grid.clearAclItemGrid();
			$(aclItems).each(function() {
				registAclWindow.grid.addRowData(this);
			});
		},
		// Enable/disable the share-range radios according to the user's role.
		setOpenRangeRole : function() {
			var disableArr = [];
			switch (exsoft.user.acl_menu_part) {
			case Constant.ROLE.SYSTEM_ROLE :
			case Constant.MENU.MENU_ALL :
				disableArr = [true, true, true, true, true];
				break;
			case Constant.MENU.MENU_GROUP :
				disableArr = [false, false, false, true, true];
				break;
			case Constant.MENU.MENU_TEAM :
				disableArr = [false, false, true, true, false];
				break;
			default :
				disableArr = [false, true, true, true, false];
				break;
			}
			// BUGFIX: all four selectors were missing the closing ']' of the
			// [value=...] attribute test, which jQuery rejects as malformed.
			$("input[name=registAclWindowOpenRange][value='PRIVATE']").attr("disabled", disableArr[0]);
			$("input[name=registAclWindowOpenRange][value='TEAM']").attr("disabled", disableArr[1]);
			$("input[name=registAclWindowOpenRange][value='DEPT']").attr("disabled", disableArr[2]);
			$("input[name=registAclWindowOpenRange][value='ALL']").attr("disabled", disableArr[3]);
			registAclWindow.ui.setVisibleGroupSearch(disableArr[4]);
		},
		setVisibleGroupSearch : function(isVisible) {
			if (isVisible) {
				$("#registAclWindowSearch").show();
			} else {
				$("#registAclWindowSearch").hide();
			}
		}
	},
	callbackFunctions : {
		// Group picker callback: store the selected group as the open target.
		selectGroup : function(groupInfo) {
			// BUGFIX: the check compared against the nonexistent Constant.ACL.TYPE;
			// the message refers to the "include sub-departments" range, i.e.
			// Constant.ACL.TYPE_DEPT.
			if (groupInfo[0].mapId == "PROJECT" && registAclWindow.postData.acl_type == Constant.ACL.TYPE_DEPT) {
				jAlert("프로젝트 그룹은 하위부서 포함 공개범위를 선택할 수 없습니다.");
				return;
			}
			// BUGFIX: the handler referenced the undefined name `returnGroup`;
			// the selected group arrives in the `groupInfo` parameter.
			registAclWindow.postData.open_id = groupInfo[0].id;
			registAclWindow.postData.open_name = groupInfo[0].text;
			registAclWindow.postData.open_isgroup = "T";
			$("#registAclWindowOpenName").val(groupInfo[0].text);
		},
		// User picker callback: store the selected user as the open target.
		selectUser : function(userInfo) {
			registAclWindow.postData.open_id = userInfo.user_id;
			registAclWindow.postData.open_name = userInfo.user_name_ko;
			registAclWindow.postData.open_isgroup = "F";
			$("#registAclWindowOpenName").val(userInfo.user_name_ko);
		},
		// Accessor picker callback: append the chosen accessors to the grid.
		selectAccessorCallback : function(aclItemRowList) {
			exsoft.util.grid.gridSetAclItemAddCallback("registAclItemGridList", aclItemRowList);
		}
	}
}
/**
* 환경설정 관련 스크립트
*/
var exsoftAdminMenuFunc = {
pageSize : "",
gRoleId : "",
gRoleNm : "",
gridId : "",
gBrowser : exsoft.util.layout.browser(),
init : {
initPage : function(pageSize,gridId) {
exsoftAdminMenuFunc.pageSize = pageSize;
exsoftAdminMenuFunc.gridId = gridId;
}
},
open : {
// ROLE ADD
writeFunc : function() {
exsoft.util.layout.divLayerOpen('user_role_wrapper', 'user_role');
exsoft.util.common.formClear('frm');
$("#type").val('insert');
$("#code_id").removeClass("readonly");
$("#code_id").prop("readonly",false);
}
},
layer : {
},
close : {
// ROLE DEL
delFunc : function() {
var id = $("#roleGrid").getGridParam('selarrrow');
var data = "";
for (var i = 0; i < id.length; i++) {
var rowdata = $("#roleGrid").getRowData(id[i]);
if(rowdata.is_sys == 'Y') {
jAlert("시스템제공 작업권한은 삭제할 수 없습니다.",'알림',0);
return false;
}
data += rowdata.code_id + ','; // 선택된 데이터를 변수에 넣어서 "," 를 붙여가면서
} // String 객체를 만든다. (ex: id1,id3,id7,id8)
if(id.length==0){
jAlert("삭제할 Role ID를 선택하세요",'알림',0);
return false;
}else {
if (id.length > 0) {
jConfirm('선택한 Role ID를 삭제하시겠습니까?', 'Role삭제',0,
function(r){
var jsonObject = { "type":"delete", "inputStr":data};
if(r) exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+"/admin/codeDelete.do", 'roleDelete',
function(data, e){
if(data.result == "true") {
exsoft.util.grid.gridRefresh('roleGrid',exsoft.contextRoot+'/admin/codePage.do');
}else {
jAlert(data.message,'확인',0);
}
});
}
);
}
}
},
},
event : {
// 작업권한(Role) 유형 TreeGrid
roleGridList : function() {
$('#roleGrid').jqGrid({
url:exsoft.contextRoot+'/admin/codePage.do',
mtype:"post",
datatype:'json',
jsonReader:{page:'page',total:'total',root:'list'},
colNames:['code_id','code_nm','is_sys'],
colModel:[
{name:'code_nm',index:'code_nm',width:50, editable:false,sortable:true,resizable:true},
{name:'code_id',index:'code_id',width:60, editable:false,sortable:true,resizable:true,hidden:false},
{name:'is_sys',index:'is_sys',width:10, editable:false,sortable:false,resizable:true,align:'center',hidden:true}
],
autowidth:true,
height:"auto",
viewrecords: true,
multiselect:true,
sortable: true,
shrinkToFit:true,
gridview: true,
sortname : "code_nm",
sortorder:"desc",
scrollOffset:0,
viewsortcols:'vertical',
rowNum : exsoftAdminMenuFunc.pageSize,
multikey: "ctrlKey",
emptyDataText: "데이터가 없습니다.",
caption:'작업권한 유형'
,ondblClickRow: function(rowid,iRow,iCol,e){
if(iCol == 0){
$("#roleGrid").jqGrid('setSelection',rowid);
}else {
// 수정FORM
exsoft.util.layout.divLayerOpen('user_role_wrapper', 'user_role');
exsoft.util.common.formClear('frm');
$("#type").val("update");
$("#code_id").val($("#roleGrid").getRowData(rowid).code_id);
$("#code_nm").val(exsoft.util.common.stripHTMLtag($("#roleGrid").getRowData(rowid).code_nm));
$("#code_id").addClass("readonly");
$("#code_id").prop("readonly",true);
$("#code_nm").focus();
}
}
,onCellSelect : function(rowid,iCol,cellcontent,e){
exsoft.util.grid.checkBox(e); // CheckBox Browser Bug Fix
if(iCol == 0){
$("#roleGrid").jqGrid('setSelection',rowid);
}else {
exsoftAdminMenuFunc.gRoleId = $("#roleGrid").getRowData(rowid).code_id;
exsoftAdminMenuFunc.gRoleNm = $("#roleGrid").getRowData(rowid).code_nm;
exsoft.util.grid.gridPostDataRefresh('menuAuthGrid',exsoft.contextRoot+'/admin/menuAuth.do', {role_id:exsoftAdminMenuFunc.gRoleId});
// 접근권한,메뉴목록 체크박스 초기화
if(exsoftAdminMenuFunc.gBrowser.name == "mozilla") {
exsoft.util.grid.treeGridChckAllReleaseFF('checkAll');
exsoft.util.grid.treeGridChckAllReleaseFF('menuAllCheck');
exsoft.util.grid.gridAllCheckFF('menuAllCheck','midx');
}else {
exsoft.util.grid.treeGridChckAllRelease(event,'checkAll');
exsoft.util.grid.treeGridChckAllRelease(event,'menuAllCheck');
exsoft.util.grid.gridAllCheck(event,'menuAllCheck','midx');
}
// 선택된 row '>' 표시
//console.log("rowid======="+rowid);
//$("#select_list").remove();
//$("#"+rowid).find('td:eq(1)').prepend("<span id='select_list' class='select_list_icon'></span>");
}
}
,loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('roleGrid');
exsoft.util.grid.gridNoDataMsgInit('roleGrid');
}
,loadComplete: function(data) {
exsoft.util.grid.gridPagerViewHide('roleGrid');
if (this.p.records === 0) {
exsoft.util.grid.gridNoRecords('roleGrid','no_data');
postData = {role_id:'NoData'} ;
exsoftAdminMenuFunc.gRoleNm = "데이터가 없습니다";
}else {
exsoft.util.grid.gridPagerShow('roleGrid');
var rowId = $("#roleGrid").getDataIDs()[0];
var gRoleId = $("#roleGrid").getRowData(rowId).code_id;
exsoftAdminMenuFunc.gRoleNm = $("#roleGrid").getRowData(rowId).code_nm;
postData = {role_id:exsoftAdminMenuFunc.gRoleId} ;
}
// 메뉴권한목록 refresh
exsoft.util.grid.gridPostDataRefresh('menuAuthGrid',exsoft.contextRoot+'/admin/menuAuth.do',postData);
// 메뉴권한목록 체크박스 초기화
if(exsoftAdminMenuFunc.gBrowser.name == "mozilla") {
exsoft.util.grid.treeGridChckAllReleaseFF('checkAll');
exsoft.util.grid.treeGridChckAllReleaseFF('menuAllCheck');
exsoft.util.grid.gridAllCheckFF('menuAllCheck','midx');
}else {
exsoft.util.grid.treeGridChckAllRelease(event,'checkAll');
exsoft.util.grid.treeGridChckAllRelease(event,'menuAllCheck');
exsoft.util.grid.gridAllCheck(event,'menuAllCheck','midx');
}
exsoft.util.grid.gridInputInit(false);
exsoft.util.grid.gridPager("#rolePager",data);
}
,gridComplete : function() {
var rowId = $("#roleGrid").getDataIDs()[0];
exsoftAdminMenuFunc.gRoleId = $("#roleGrid").getRowData(rowId).code_id;
exsoftAdminMenuFunc.gRoleNm = $("#roleGrid").getRowData(rowId).code_nm;
exsoftAdminMenuFunc.event.menuAuthGridList(exsoftAdminMenuFunc.gRoleId);
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
});
// Grid 컬럼정렬 처리
var headerData = '{"code_nm":"역할명","code_id":"역할ID"}';
exsoft.util.grid.gridColumHeader('roleGrid',headerData,'center');
},
// 메뉴목록 TreeGrid
menuGridList : function() {
var checkOption = "";
$('#menuGrid').jqGrid({
url:exsoft.contextRoot+'/admin/menuList.do',
mtype:"post",
datatype: "json",
autowidth:true,
height:"auto",
colNames:['<input type="checkbox" id="menuAllCheck" onclick="exsoft.util.grid.gridAllCheck(event,\'menuAllCheck\'\,\'midx\')" />','menu_cd','menu_nm'],
colModel:[
{name:'checkBox',index:'checkBox',width:10, editable:false,sortable:false,align:'center',
formatter: function (cellValue, option) {
return '<input type="checkbox" id="midx" name="midx" value="' +cellValue + '" onclick="javascript:exsoftAdminMenuFunc.ui.checkBoxChanged('+option.rowId+',\'midx\',event);">';
}},
{name:'menu_cd',index:'menu_cd',width:1, editable:false,sortable:false,key:true,align:'center',hidden:true},
{name:'menu_nm',index:'menu_nm',width:130, editable:false,sortable:false},
],
treeReader: {
level_field: "level",
parent_id_field: "parent",
leaf_field: "isLeaf",
expanded_field: "expanded"
},
jsonReader: {
repeatitems: false,
root:'list'
},
gridview: true,
treeGrid: true,
loadonce: true,
scrollOffset:0,
treeGridModel: 'adjacency',
ExpandColumn: 'menu_nm',
caption:'메뉴목록',
treeIcons: {leaf:'ui-icon-document'},
loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('menuGrid');
}
,loadComplete: function() {
exsoft.util.grid.gridInputInit(false);
}
});
// Grid 컬럼정렬 처리
var headerData = '{"menu_nm":"메뉴명"}';
exsoft.util.grid.gridColumHeader('menuGrid',headerData,'center');
},
// 접근권한 TreeGrid
menuAuthGridList : function(role_id) {
var checkOption = "";
var radioDisable = "";
$('#menuAuthGrid').jqGrid({
url:exsoft.contextRoot+'/admin/menuAuth.do',
mtype:"post",
datatype: "json",
height: "auto",
autowidth: true,
colNames:['<input type="checkbox" id="checkAll" onclick="exsoft.util.grid.gridAllCheck(event,\'checkAll\'\,\'idx\')" />','menu_cd','menu_nm','part','all','group','team','role_id','gcode_id'],
colModel:[
{name:'checkBox',index:'checkBox',width:15, editable:false,sortable:false,align:'center'
,formatter: function (cellValue, option) {
return '<input type="checkbox" id="idx" name="idx" value="' +cellValue + '" onclick="javascript:exsoftAdminMenuFunc.ui.checkBoxChanged('+option.rowId+',\'idx\',event);">';
}},
{name:'menu_cd',index:'menu_cd',width:50, align:'center',editable:false,sortable:false,key:true,hidden:true},
{name:'menu_nm',index:'menu_nm',width:120, editable:false,sortable:false},
{name:'part',index:'part',width:30, align:'center',editable:false,sortable:false,hidden:true},
{name: 'all', index: 'all', width: 60, align: 'center', editable:false, edittype:"radio", sortable:false,classes:'pss-jqgrid-pointer-nodecoration',
formatter: function (cellValue, option,rowObject) {
return exsoft.util.grid.radioFormatter(rowObject.menu_level,option.rowId,cellValue,"ALL");
}},
{name: 'group', index: 'group', width: 60, align: 'center', editable:false, edittype:"radio", sortable:false,classes:'pss-jqgrid-pointer-nodecoration',
formatter: function (cellValue, option,rowObject) {
return exsoft.util.grid.radioFormatter(rowObject.menu_level,option.rowId,cellValue,"GROUP");
}},
{name: 'team', index: 'team', width: 60, align: 'center', editable:false, edittype:"radio", sortable:false,classes:'pss-jqgrid-pointer-nodecoration',
formatter: function (cellValue, option,rowObject) {
return exsoft.util.grid.radioFormatter(rowObject.menu_level,option.rowId,cellValue,"TEAM");
}} ,
{name:'role_id',index:'menu_nm',width:150, editable:false,sortable:false,hidden:true},
{name:'gcode_id',index:'menu_nm',width:150, editable:false,sortable:false,hidden:true}
],
treeReader: {
level_field: "level",
parent_id_field: "parent",
leaf_field: "isLeaf",
expanded_field: "expanded"
},
jsonReader: {
repeatitems: false,
root:'list'
},
gridview: true,
treeGrid: true,
loadonce: true,
treeGridModel: 'adjacency',
ExpandColumn: 'menu_nm',
emptyDataText: "데이터가 없습니다.",
caption: '접근권한',
postData: {role_id:role_id},
treeIcons: {leaf:'ui-icon-document'}
,loadBeforeSend: function() {
exsoft.util.grid.gridTitleBarHide('menuAuthGrid');
exsoft.util.grid.gridNoDataMsgInit('menuAuthGrid');
}
,loadComplete: function() {
if ($("#menuAuthGrid").getGridParam("records")==0) {
exsoft.util.grid.gridNoDataMsg('menuAuthGrid','nolayer_data');
}
// 선택 접근권한 명 변경처리
$("#roleTitle").html(exsoftAdminMenuFunc.gRoleNm);
exsoft.util.grid.gridInputInit(false);
}
});
var headerData = '{"menu_nm":"사용가능메뉴","all":"전사","group":"하위부서포함","team":"소속부서"}';
exsoft.util.grid.gridColumHeader('menuAuthGrid',headerData,'center');
},
// 페이지이동 처리(공통)
gridPage : function(nPage) {
$(exsoftAdminMenuFunc.gridId).setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
// 권한 등록/수정 처리
managerFunc : function() {
var objForm = document.frm;
if(objForm.code_id.value.length == 0){
objForm.code_id.focus();
jAlert('Role ID를 입력하세요','확인',0);
return false;
}
if(objForm.code_nm.value.length == 0){
objForm.code_nm.focus();
jAlert('Role 명을 입력하세요','확인',0);
return false;
}
if($("#type").val() == "update") {
exsoft.util.ajax.ajaxFunctionWithCallback('frm',exsoft.contextRoot+'/admin/codeWrite.do','update',exsoftAdminMenuFunc.callback.returnAjaxFunction);
}else {
exsoft.util.ajax.ajaxFunctionWithCallback('frm',exsoft.contextRoot+'/admin/codeWrite.do','insert',exsoftAdminMenuFunc.callback.returnAjaxFunction);
}
},
// 검색처리
searchFunc :function() {
exsoft.util.grid.gridPostDataRefresh('roleGrid',exsoft.contextRoot+'/admin/codePage.do',{strKeyword:$("#strKeyword").val(),is_search:'true'});
},
// 접근권한 수정하기
modifyFunc : function() {
var data = "";
var cnt = 0;
$('input[name=idx]:checked').each(function() {
var str = $(this).val()
var dotPos = str.split("#");
var menu_cd = dotPos[2];
var selectVal = $("input:radio[name=radio_"+menu_cd+"]:checked").val();
var lastStr = str + "#"+selectVal;
data += lastStr + ",";
cnt++;
});
if(cnt == 0) {
jAlert('수정할 권한을 선택하세요.');
return false;
}
var jsonObject = { "type":"update", "inputStr":data};
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject,exsoft.contextRoot+'/admin/menuAuthManager.do','menuAuthUpdate',exsoftAdminMenuFunc.callback.returnAjaxFunction);
}
},
ui : {
// 대문자
upper : function(obj) {
var code = obj.value;
code=code.replace(/[^A-Z]/g,"");
obj.value=code;
},
// 메뉴목록 체크박스
checkBoxChanged : function(rowId,checkNm,e) {
e = e || event;
e.stopPropagation ? e.stopPropagation() : e.cancelBubble = true;
$("input:checkbox[name='"+checkNm+"']").each(function() {
if($(this).val().indexOf(rowId) != -1) {
$(this).prop("checked",true);
}
});
},
// 메뉴권한등록처리
treeGridAdd : function() {
var data = "";
var cnt = 0;
if(exsoftAdminMenuFunc.gRoleId == '') {
jAlert('등록할 권한목록을 선택하세요','확인',0);
return false;
}
$('input[name=midx]:checked').each(function() {
var str = $(this).val()
data += str + ",";
cnt++;
});
if(cnt == 0) {
jAlert('등록할 메뉴를 선택하세요.','확인',0);
return false;
}
var jsonObject = { "type":"insert", "inputStr":data,"role_id":exsoftAdminMenuFunc.gRoleId};
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject,exsoft.contextRoot+'/admin/menuAuthManager.do','menuAuthInsert',exsoftAdminMenuFunc.callback.returnAjaxFunction);
},
// 메뉴권한삭제처리
treeGridDel : function() {
var data = "";
var cnt = 0;
$('input[name=idx]:checked').each(function() {
var str = $(this).val()
data += str + ",";
cnt++;
});
if(cnt == 0) {
jAlert('삭제할 권한을 선택하세요.','확인',0);
return false;
}
var jsonObject = { "type":"delete", "inputStr":data};
exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject,exsoft.contextRoot+'/admin/menuAuthManager.do','menuAuthDelete',exsoftAdminMenuFunc.callback.returnAjaxFunction);
if(exsoftAdminMenuFunc.gBrowser.name == "mozilla") {
exsoft.util.grid.treeGridChckAllReleaseFF('checkAll');
}else {
exsoft.util.grid.treeGridChckAllRelease(event,'checkAll');
}
}
},
callback : {
// CallBack 처리
returnAjaxFunction : function(data,param) {
if(param == 'insert'){
if(data.result == "true") {
var postData = {strKeyword : $("#strKeyword").val() } ;
exsoft.util.grid.gridPostDataRefresh('roleGrid',exsoft.contextRoot+'/admin/codePage.do',postData);
exsoft.util.layout.divLayerClose('user_role_wrapper', 'user_role');
}else {
jAlert(data.message,'확인',0);
}
}else if(param == 'update'){
if(data.result == "true") {
var postData = {strKeyword : $("#strKeyword").val() } ;
exsoft.util.grid.gridPostDataRefresh('roleGrid',exsoft.contextRoot+'/admin/codePage.do',postData);
exsoft.util.layout.divLayerClose('user_role_wrapper', 'user_role');
}else {
jAlert(data.message,'확인',0);
}
}else if(param == 'menuAuthInsert'){
// 접근권한 등록처리
if(data.result == "true") {
exsoft.util.grid.gridRefresh('menuAuthGrid',exsoft.contextRoot+'/admin/menuAuth.do');
if(exsoftAdminMenuFunc.gBrowser.name == "mozilla") {
exsoft.util.grid.treeGridChckAllReleaseFF('menuAllCheck');
exsoft.util.grid.gridAllCheckFF('menuAllCheck','midx');
}else {
exsoft.util.grid.treeGridChckAllRelease(event,'menuAllCheck');
exsoft.util.grid.gridAllCheck(event,'menuAllCheck','midx');
}
}else {
jAlert(data.message,'확인',0);
}
}else if(param == 'menuAuthDelete') {
// 메뉴권한삭제처리
if(data.result == "true") {
exsoft.util.grid.gridRefresh('menuAuthGrid',exsoft.contextRoot+'/admin/menuAuth.do');
}else {
jAlert(data.message,'확인',0);
}
}else if(param == "menuAuthUpdate") {
// 접근권한 수정처리
if(data.result == "true") {
exsoft.util.grid.gridRefresh('menuAuthGrid',exsoft.contextRoot+'/admin/menuAuth.do');
}else {
jAlert(data.message,'확인',0);
}
}
}
}
}<file_sep>/EDMS3/WebContent/js/process/processList.js
/**
* 협업프로세스 JavaScript
*/
/**
 * Collaboration-process list page controller.
 * Entry point: init.initPage() from the JSP; renders the process document
 * grid (#processDocGridList) and wires up search, paging and detail view.
 * Fixes: removed trailing commas in object/array literals (syntax errors on
 * legacy IE, which this codebase supports); fixed the `stype=` typo in the
 * title-cell formatter so cursor:pointer is actually applied.
 */
var exsoftProcessFunc = {
	pageTitle : '',
	pageTitleId : '',
	pageSize : '',
	processType : Constant.PROCESS.WRITE_ING_MENU,
	// 협업문서 우측 메뉴명 — writes the current page title into the title element.
	// NOTE(review): callers pass the title as an argument, but it is read from
	// this.pageTitle here; the argument is ignored.
	pageNaviTitle : function() {
		$("#"+this.pageTitleId).html(this.pageTitle);
	},
	// 협업문서 우측메뉴명 — process type code → display name.
	ConstPageName : {
		'REQUEST' : '요청 문서',
		'WRITE_ING' : '작성중 문서',
		'APPROVAL_ING' : '승인중 문서',
		'WRITE_END' : '작성한 문서',
		'APPROVAL_END' : '승인한 문서',
		'RECEIVE' : '수신문서'
	},
	// 0. 초기화
	init : {
		// 협업문서 초기화 함수 — page bootstrap called from the JSP.
		initPage : function(menuType, processType, pageTitleId, pageSize) {
			exsoftProcessFunc.init.initDdslick(); // select box 초기화
			exsoft.util.layout.topMenuSelect(menuType); // 상단 협업 메뉴 선택 표시
			exsoftProcessFunc.pageTitle = exsoftProcessFunc.ConstPageName[processType];
			exsoftProcessFunc.pageTitleId = pageTitleId;
			//exsoftProcessFunc.init.contextRoot = exsoft.contextRoot;
			exsoftProcessFunc.pageNaviTitle(exsoftProcessFunc.pageTitle);
			exsoftProcessFunc.pageSize = pageSize;
			// 상세검색 postdata세팅
			processDetailSearch.processType = processType;
			processDetailSearch.url = "/process/processList.do";
			exsoftProcessFunc.event.processDocGridList();
		},
		// 협업좌측 메뉴 선택시 — switch to another process type without a full reload.
		menuInitPage : function(processType) {
			//alert(processType);
			exsoftProcessFunc.pageTitle = exsoftProcessFunc.ConstPageName[processType];
			exsoftProcessFunc.pageNaviTitle(exsoftProcessFunc.pageTitle);
			exsoftProcessFunc.processType = processType;
			// 상세검색 postdata세팅
			processDetailSearch.processType = exsoftProcessFunc.processType;
			processDetailSearch.url = "/process/processList.do";
			exsoft.util.grid.gridPostDataInitRefresh('#processDocGridList', exsoft.contextRoot+'/process/processList.do', {type:processType});
		},
		/**
		 * select box :: ddslick 사용 — initialise the styled select boxes.
		 */
		initDdslick : function(){
			//검색 selectbox
			exsoft.util.common.ddslick('#processList_select', 'srch_type1', '', 79, function(divId){}); // 검색 selectbox
			exsoft.util.common.ddslick('#tbl_rowCount', 'tbl_rowCount', '', 68, function(divId){}); // 목록 selectbox
		},
		// Placeholder for event-binding setup; intentionally empty.
		initBind : function() {
		}
	},
	// 1. 팝업
	open : {
	},
	//2. layer + show
	layer : {
		// 협업 업무 상세조회 호출하기 — open a layer popup and reset its form.
		processView : function(wrapperClass,layerClass,formId) {
			exsoft.util.layout.divLayerOpen(wrapperClass,layerClass);
			exsoft.util.common.formClear(formId);
		}
	},
	//3. 닫기 + hide
	close : {
	},
	//4. 화면 이벤트 처리
	event : {
		// 목록 리스트 — build the process-document jqGrid.
		processDocGridList : function(){
			$('#processDocGridList').jqGrid({
				url: exsoft.contextRoot+'/process/processList.do',
				mtype:"post",
				datatype:'json',
				postData : {
					strIndex:exsoft.util.layout.getSelectBox('processList_select','option'),
					strKeyword1:$("#strKeyword").val(),
					sdate:$("#sdate").val(),
					edate:$("#edate").val(),
					type:exsoftProcessFunc.processType
				},
				jsonReader:{
					page:'page',total:'total',root:'list'
				},
				cmTemplate: { title: false }, // tooltip 제거
				colNames:['process_id','doc_root_id','name','status_nm','expect_date','complete_date', 'author_nm', 'approval_count', 'receiver_count'],
				colModel:[
					{name:'process_id',index:'process_id',width:10, align:'center',editable:false,sortable:false,key:true,hidden:true},
					{name:'doc_root_id',index:'doc_root_id',width:10, align:'center',editable:false,sortable:false,key:true,hidden:true},
					{name:'name',index:'name',width:100, align:'left',editable:false,sortable:true,key:false,
						// Title cell opens the detail view; fixed `stype` → `style`
						// so the pointer cursor is actually rendered.
						formatter : function (cellValue, option, rowObject) {
							return '<span style="cursor:pointer" onclick=\'javascript:exsoftProcessFunc.event.processView("'+rowObject.process_id+'","'+rowObject.doc_root_id+'")\'>' + cellValue +'</span>';
						},
						cellattr: function (rowId, cellValue, rowObject) {
							var tooltip = '<p>'+rowObject.name+'</p>';
							var mouseMove = 'onmousemove="javascript:exsoft.util.common.tooltip(\'processDocGridList\',\''+tooltip+'\',\'false\',event)"';
							var mouseOut = 'onmouseout="javascript:$(\'.tooltip\').addClass(\'hide\')"';
							return mouseMove+' '+mouseOut;
						}
					},
					// NOTE(review): index 'name' below looks copy-pasted from the
					// title column — confirm the intended server-side sort key.
					{name:'status_nm',index:'name',width:20, align:'left',editable:false,sortable:true,key:false},
					{name:'expect_date',index:'expect_date',width:20, align:'left',editable:false,sortable:true,key:false},
					{name:'complete_date',index:'complete_date',width:20, align:'left',editable:false,sortable:true,key:false},
					{name:'author_nm',index:'author_nm',width:20, align:'left',editable:false,sortable:true,key:false,
						// Tooltip listing writers with their status; "●" = done, "○" = pending.
						cellattr: function (rowId, cellValue, rowObject) {
							//작성자 rowObject.write_list
							var tooltip = '';
							if(rowObject.write_list.length > 0){
								var tempList = rowObject.write_list;
								$.each(tempList, function(){
									var writers = this.split('|');
									if(writers[1] == '작성완료'){
										tooltip += '<p>● '+writers[0]+' : '+writers[1]+'</p>';
									}else{
										tooltip += '<p>○ '+writers[0]+' : '+writers[1]+'</p>';
									}
								});
							}else{
								tooltip = '<p>○ '+cellValue+'</p>';
							}
							// mouseOver 시 마우스 안따라 다님(고정)
							var mouseMove = 'onmousemove="javascript:exsoft.util.common.tooltip(\'processDocGridList\',\''+tooltip+'\',\'false\',event)"';
							var mouseOut = 'onmouseout="javascript:$(\'.tooltip\').addClass(\'hide\')"';
							return mouseMove+' '+mouseOut;
						}
					},
					{name:'approval_count',index:'approval_count',width:20, align:'left',editable:false,sortable:true,key:false,
						// Tooltip listing approvers with their status.
						cellattr: function (rowId, cellValue, rowObject) {
							// 승인자 rowObject.approval_list
							var tooltip = '';
							if(rowObject.approval_list.length > 0){
								var tempList = rowObject.approval_list;
								$.each(tempList, function(){
									var approvlers = this.split('|');
									if(approvlers[1] == '승인완료'){
										tooltip += '<p>● '+approvlers[0]+' : '+approvlers[1]+'</p>';
									}else{
										tooltip += '<p>○ '+approvlers[0]+' : '+approvlers[1]+'</p>';
									}
								});
							}else{
								tooltip = '<p>○ 승인자 없음</p>';
							}
							var mouseMove = 'onmousemove="javascript:exsoft.util.common.tooltip(\'processDocGridList\',\''+tooltip+'\',\'true\',event)"';
							var mouseOut = 'onmouseout="javascript:$(\'.tooltip\').addClass(\'hide\')"';
							return mouseMove+' '+mouseOut;
						}
					},
					{name:'receiver_count',index:'receiver_count',width:20, align:'left',editable:false,sortable:true,key:false,
						// Tooltip listing receivers with their read status.
						cellattr: function (rowId, cellValue, rowObject) {
							//수신자
							var tooltip = '';
							if(rowObject.receiver_list.length > 0){
								var tempList = rowObject.receiver_list;
								$.each(tempList, function(){
									var receivers = this.split('|');
									if(receivers[1] == '열람완료'){
										tooltip += '<p>● '+receivers[0]+' : '+receivers[1]+'</p>';
									}else{
										tooltip += '<p>○ '+receivers[0]+' : '+receivers[1]+'</p>';
									}
								});
							}else{
								tooltip = '<p>○ 열람자 없음</p>';
							}
							var mouseMove = 'onmousemove="javascript:exsoft.util.common.tooltip(\'processDocGridList\',\''+tooltip+'\',\'true\',event)"';
							var mouseOut = 'onmouseout="javascript:$(\'.tooltip\').addClass(\'hide\')"';
							return mouseMove+' '+mouseOut;
						}
					}
				],
				viewrecords: true,
				multiselect:true,
				multikey: 'ctrlKey',
				sortable: true,
				shrinkToFit:true,
				gridview: true,
				autowidth:true,
				height:'auto',
				sortname : 'create_date',
				sortorder:'desc',
				scrollOffset: 0,
				viewsortcols:'vertical',
				emptyDataText: "데이터가 없습니다.",
				rowNum : exsoftProcessFunc.pageSize,
				loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('processDocGridList');
					exsoft.util.grid.gridNoDataMsgInit('processDocGridList');
				}
				,loadComplete: function(data) {
					if ($('#processDocGridList').getGridParam("records")==0) {
						exsoft.util.grid.gridNoRecords('processDocGridList','no_data');
					}else {
						exsoft.util.grid.gridViewRecords('processDocGridList');
					}
					exsoft.util.grid.gridInputInit(false);
					exsoft.util.grid.gridPager("#processDocGridPager",data);
				}
				,loadError:function(xhr, status, error) {
					exsoft.util.error.isErrorChk(xhr);
				}
			});
			// 컬럼 헤더 정렬 및 다국어 변경 처리 //프로세스id|업무명|상태|완료예정일|최종변경일|작성자|승인|열람
			var headerData = '{"process_id":"키값1","doc_root_id":"키값2","name":"제목","status_nm":"상태","expect_date":"완료예정일","complete_date":"최종수정일","author_nm":"작성자","approval_count":"승인","receiver_count":"열람"}';
			exsoft.util.grid.gridColumHeader('processDocGridList',headerData,'left');
		}, // End of Grid .....
		// 페이지이동 처리(공통) — jump to page nPage.
		gridPage : function(nPage) {
			$('#processDocGridList').setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
		},
		// 페이지목록 선택시(공통) — change rows-per-page and reload from page 1.
		rowsPage : function(rowNum) {
			$('#processDocGridList').setGridParam({rowNum:rowNum}); // 페이지목록 설정값 변경처리
			$('#processDocGridList').setGridParam({page:1,postData:{is_search:'false',page_init:'true'}}).trigger("reloadGrid");
		},
		// 협업문서 조회 — delegate to the detail-view controller.
		processView : function(processId, doc_root_id){
			// processView.js의 init 호출
			exsoftProcessViewFunc.init.initProcessView(processId, doc_root_id);
		}
	},
	//5. 화면 UI 변경 처리
	ui : {
	},
	//6. callback 처리
	callback : {
	}
}
<file_sep>/EDMS3/WebContent/js/common/include.js
/**
* 공통으로 호출 되는 JS 및 CSS include 처리
*/
//windowType은 호출하는 페이지에서 넘겨줘야 함
if(windowType == 'userLayout'){
// 사용자 공통화면
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jquery/jquery-ui.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/bxslider/jquery.bxslider.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/ddslick/jquery.ddslick.custom.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/cookie/jquery.cookie.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jalert/jquery.alerts.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/scrollbar/jquery.tinyscrollbar.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/json/json2.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jstree/dist/jstree.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/upload/jquery.uploadfile.3.1.10.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/util.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/tree.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/constant.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/layout/layout.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/databinder.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/validator.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/ecm.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/initbind.js"></script>');
// jQgrid
/*
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/js/i18n/grid.locale-kr.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/ui.multiselect.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/js/jquery.jqGrid.src.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.tablednd.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.contextmenu.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.searchFilter.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.celledit.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.subgrid.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.treegrid.js"></script>');
*/
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/js/i18n/grid.locale-kr.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/ui.multiselect.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/js/jquery.jqGrid.min.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.tablednd.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.contextmenu.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.searchFilter.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.celledit.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.subgrid.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.treegrid.js"></script>');
// CSS inculde
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/ecm.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/reset.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/themes/jquery-ui.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/jalert/jquery.alerts.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/ddslick/jquery.ddslick.custom.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/bxslider/jquery.bxslider.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jstree/dist/themes/default/style.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/upload/uploadfile.3.1.10.css" rel="stylesheet" type="text/css">');
// jQgrid CSS include
//$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid/css/ui.jqgrid.css" rel="stylesheet" type="text/css">');
//$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/ui.multiselect.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/css/ui.jqgrid.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/ui.multiselect.css" rel="stylesheet" type="text/css">');
} else if(windowType == 'userConfig'){
// 사용자 환경설정
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/ddslick/jquery.ddslick.custom.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jalert/jquery.alerts.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/common.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/util.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/databinder.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/validator.js"></script>');
// CSS inculde
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/ecm_pop.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/reset.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/ddslick/jquery.ddslick.custom.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/jalert/jquery.alerts.css" rel="stylesheet" type="text/css">');
} else if(windowType == 'noteMain'){
// 쪽지 메인 화면
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/ddslick/jquery.ddslick.custom.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/util.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/cookie/jquery.cookie.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jalert/jquery.alerts.js"></script>');
// CSS inculde
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/ecm_note.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/reset.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/themes/jquery-ui.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/jalert/jquery.alerts.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/ddslick/jquery.ddslick.custom.css" rel="stylesheet" type="text/css">');
} else if(windowType == 'noteUserSelect'){
// 쪽지 사용자 선택 화면
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/ddslick/jquery.ddslick.custom.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/cookie/jquery.cookie.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jalert/jquery.alerts.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/util.js"><\/script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'"><\/script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'"><\/script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'"><\/script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'"><\/script>');
// CSS inculde
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/ecm_note.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/reset.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/themes/jquery-ui.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/jalert/jquery.alerts.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/ddslick/jquery.ddslick.custom.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'" rel="stylesheet" type="text/css">');
} else if(windowType == 'adminLayout'){
	// 관리자 공통화면
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jquery/jquery-ui.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/bxslider/jquery.bxslider.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/ddslick/jquery.ddslick.custom.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/cookie/jquery.cookie.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jalert/jquery.alerts.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/upload/jquery.fileupload.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/scrollbar/jquery.tinyscrollbar.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/json/json2.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jstree/dist/jstree.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/util.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/admin/ecm.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/tree.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/constant.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/layout/adminLayout.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/databinder.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/common/validator.js"></script>');
// jQgrid
/*
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/js/i18n/grid.locale-kr.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/ui.multiselect.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/js/jquery.jqGrid.src.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.tablednd.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.contextmenu.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/jquery.searchFilter.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.celledit.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.subgrid.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid/src/grid.treegrid.js"></script>');
*/
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/js/i18n/grid.locale-kr.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/ui.multiselect.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/js/jquery.jqGrid.min.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.tablednd.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.contextmenu.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/jquery.searchFilter.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.celledit.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.subgrid.js"></script>');
$('head').append('<script type="text/javascript" src="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/src/grid.treegrid.js"></script>');
// CSS inculde
$('head').append('<link href="'+exsoft.contextRoot+'" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/admin/ecm.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/common/reset.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/themes/jquery-ui.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/jalert/jquery.alerts.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/ddslick/jquery.ddslick.custom.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/css/plugins/bxslider/jquery.bxslider.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jstree/dist/themes/default/style.css" rel="stylesheet" type="text/css">');
// jQgrid CSS include
//$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid/css/ui.jqgrid.css" rel="stylesheet" type="text/css">');
//$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid/plugins/ui.multiselect.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/css/ui.jqgrid.css" rel="stylesheet" type="text/css">');
$('head').append('<link href="'+exsoft.contextRoot+'/js/plugins/jqgrid_old/plugins/ui.multiselect.css" rel="stylesheet" type="text/css">');
}
<file_sep>/EDMS3/src/kr/co/exsoft/user/vo/LoginLogVO.java
package kr.co.exsoft.user.vo;
/**
 * User login session log VO (table XR_LOGIN_LOG).
 * @author package team
 * @since 2014.07.16
 * @version 3.0
 */
public class LoginLogVO {

	private String user_id;    // user identifier
	private String session_id; // HTTP session identifier
	private String connect_ip; // client IP used for the connection
	private String login_time; // login timestamp

	/** Initialises every field to an empty string. */
	public LoginLogVO() {
		this.user_id = "";
		this.session_id = "";
		this.connect_ip = "";
		this.login_time = "";
	}

	public String getUser_id() { return user_id; }

	public void setUser_id(String user_id) { this.user_id = user_id; }

	public String getSession_id() { return session_id; }

	public void setSession_id(String session_id) { this.session_id = session_id; }

	public String getConnect_ip() { return connect_ip; }

	public void setConnect_ip(String connect_ip) { this.connect_ip = connect_ip; }

	public String getLogin_time() { return login_time; }

	public void setLogin_time(String login_time) { this.login_time = login_time; }
}
<file_sep>/EDMS3/src/kr/co/exsoft/statistics/vo/DocumentDecadeVO.java
package kr.co.exsoft.statistics.vo;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.vo.VO;
/**
 * Period-based (daily/monthly) document registration/usage statistics VO.
 * Backed by XR_DOCUMENT_USER_HT / XR_DOCUMENT_GROUP_HT (alias DecadeDocHt).
 *
 * @author package team
 * @since 2014. 11. 20.
 * @version 1.0
 */
public class DocumentDecadeVO extends VO {

	private String gdate;      // reference date for per-group statistics
	private String udate;      // reference date for per-user statistics
	private int create_cnt;    // created count
	private int read_cnt;      // read count
	private int update_cnt;    // updated count
	private int delete_cnt;    // deleted count

	// Query-only fields
	@SuppressWarnings("unused")
	private String dateStr;    // display form of the reference date (derived in getDateStr)
	private String isType;     // period type (daily/monthly)
	private String isGroup;    // subject type (user/group)
	private String order_str;  // ORDER BY column name

	public String getOrder_str() {
		return order_str;
	}
	public void setOrder_str(String order_str) {
		this.order_str = order_str;
	}
	public String getIsGroup() {
		return isGroup;
	}
	public void setIsGroup(String isGroup) {
		this.isGroup = isGroup;
	}
	public String getIsType() {
		return isType;
	}
	public void setIsType(String isType) {
		this.isType = isType;
	}
	/**
	 * Returns the reference date formatted for display ("yyyy.MM.dd"):
	 * udate for user statistics, gdate for group statistics.
	 * Returns "" when the type fields are unset (previously an NPE, since
	 * these fields default to null) or match no known combination — the
	 * same fallback the original branch chain produced.
	 */
	public String getDateStr() {
		if (isType == null || isGroup == null) {
			return "";
		}
		// Daily and monthly formatting is identical; only the subject
		// (user vs. group) selects the source column.
		String base = null;
		if (isType.equals(Constant.DAY_TERM) || isType.equals(Constant.MONTH_TERM)) {
			if (isGroup.equals(Constant.IS_USER)) {
				base = udate;
			} else if (isGroup.equals(Constant.IS_GROUP)) {
				base = gdate;
			}
		}
		return base == null ? "" : base.replace('-', '.');
	}
	public void setDateStr(String dateStr) {
		this.dateStr = dateStr;
	}
	public String getGdate() {
		return gdate;
	}
	public void setGdate(String gdate) {
		this.gdate = gdate;
	}
	public String getUdate() {
		return udate;
	}
	public void setUdate(String udate) {
		this.udate = udate;
	}
	public int getCreate_cnt() {
		return create_cnt;
	}
	public void setCreate_cnt(int create_cnt) {
		this.create_cnt = create_cnt;
	}
	public int getRead_cnt() {
		return read_cnt;
	}
	public void setRead_cnt(int read_cnt) {
		this.read_cnt = read_cnt;
	}
	public int getUpdate_cnt() {
		return update_cnt;
	}
	public void setUpdate_cnt(int update_cnt) {
		this.update_cnt = update_cnt;
	}
	public int getDelete_cnt() {
		return delete_cnt;
	}
	public void setDelete_cnt(int delete_cnt) {
		this.delete_cnt = delete_cnt;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/vo/CommentVO.java
package kr.co.exsoft.common.vo;
import kr.co.exsoft.eframework.configuration.Constant;
/**
 * Document comment VO.
 * @author <NAME>
 * @since 2015.03.02
 * @version 3.0
 *
 */
public class CommentVO {
private String com_id; // primary key :: XR_COUNTER TABLE REF
private String doc_root_id; // ROOT_ID (XR_DOCUMENT) of the document the comment belongs to
private String com_step; // thread group number: a root comment, its replies and replies-of-replies all share the same value
private String com_order; // sort order within the same COM_STEP; the root comment is always 0
private String creator_id; // comment author id
private String creator_name; // comment author name
private String parent_creator_name; // for a reply: author name of the parent post (root or parent reply)
private String create_date; // comment creation date
private String content; // comment body text
private String status; // C: comment created, D: comment deleted
/** Initialises all fields to empty strings, order "0" and status Constant.C (created). */
public CommentVO() {
this.com_id="";
this.doc_root_id="";
this.com_step="";
this.com_order="0";
this.creator_id="";
this.creator_name="";
this.parent_creator_name="";
this.create_date="";
this.content="";
this.status=Constant.C;
}
public String getCom_id() {
return com_id;
}
public void setCom_id(String com_id) {
this.com_id = com_id;
}
public String getDoc_root_id() {
return doc_root_id;
}
public void setDoc_root_id(String doc_root_id) {
this.doc_root_id = doc_root_id;
}
public String getCom_step() {
return com_step;
}
public void setCom_step(String com_step) {
this.com_step = com_step;
}
public String getCom_order() {
return com_order;
}
public void setCom_order(String com_order) {
this.com_order = com_order;
}
public String getCreator_id() {
return creator_id;
}
public void setCreator_id(String creator_id) {
this.creator_id = creator_id;
}
public String getCreator_name() {
return creator_name;
}
public void setCreator_name(String creator_name) {
this.creator_name = creator_name;
}
public String getParent_creator_name() {
return parent_creator_name;
}
public void setParent_creator_name(String parent_creator_name) {
this.parent_creator_name = parent_creator_name;
}
public String getCreate_date() {
return create_date;
}
public void setCreate_date(String create_date) {
this.create_date = create_date;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
}
<file_sep>/EDMS3/WebContent/js/process/processView.js
/**
 * Collaboration (process) detail-view controller: loads the process and its
 * document, derives the current user's capabilities for the active workflow
 * step, and renders the approval status / comment widgets.
 */
var exsoftProcessViewFunc = {
	process_id : '',
	doc_root_id : '',
	isModify : false,			// may edit attributes
	isDelete : false,			// may delete
	isApproveRequest : false,	// may request approval
	isApprove : false,			// may approve
	isApproveReject : false,	// may reject
	isFileModify : false,		// may modify files
	isContent : false,			// content field in use
	// 0. initialisation
	init : {
		/**
		 * Opens the detail layer and populates it.
		 * @param processId process identifier
		 * @param doc_root_id ROOT_ID of the attached document
		 */
		initProcessView : function(processId, doc_root_id){
			exsoftProcessViewFunc.process_id = processId;
			exsoftProcessViewFunc.doc_root_id = doc_root_id;
			$('.coopUser_detail_wrapper').removeClass('hide');
			$('.coopUser_detail').removeClass('hide');
			exsoft.util.layout.lyrPopupWindowResize($(".coopUser_detail"));
			// Reset all capability flags before evaluating the current step.
			exsoftProcessViewFunc.doFunction.setProcessRole(false,false,false,false,false,false,false);
			// 1. load the process information
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({actionType:Constant.ACTION_VIEW, process_id:processId},exsoft.contextRoot + '/process/processControl.do',null,function(data, param){
				if(data.result == 'true'){
					var processVo = data.processVo;
					var processExecutorList = data.processExecutorList;
					// Highlight the image of the current step.
					$('#processView_step li').removeClass('current');
					$('#processView_step li:nth-child('+processVo.status_number+')').addClass('current');
					// Basic process information
					$('#processView_info tr:nth-child(1)').find('span').text(processVo.name);			// task name
					$('#processView_info tr:nth-child(3)').find('span').text(processVo.creator_name);	// requester
					$('#processView_info tr:nth-child(4)').find('span').text(processVo.expect_date);	// expected completion date
					$('#processView_info tr:nth-child(5)').find('span').text(processVo.content);		// request body
					// Executor information
					$(processExecutorList).each(function(idx){
						var executor_name = this.executor_name;
						var executor_id = this.executor_id;
						switch (this.type) {
							case Constant.PROCESS.TYPE_AUTHOR:exsoftProcessViewFunc.doFunction.setExecutorName(6, executor_name);break;
							case Constant.PROCESS.TYPE_COAUTHOR:exsoftProcessViewFunc.doFunction.setExecutorName(7, executor_name);break;
							case Constant.PROCESS.TYPE_APPROVER:exsoftProcessViewFunc.doFunction.setExecutorName(8, executor_name);break;
							case Constant.PROCESS.TYPE_RECEIVER:exsoftProcessViewFunc.doFunction.setExecutorName(9, executor_name);break;
							default:break;
						};
						// Capabilities for the current step and user.
						// Steps: 1=write, 2=approve, 3=rework, 4=done
						var user_id = exsoft.user.user_id;
						if(processVo.status_number == 1 || processVo.status_number == 3){
							if(user_id == processVo.creator_id){ // requester
								// isModify,isDelete,isApproveRequest,isApprove,isApproveReject,isFileModify,isContent
								exsoftProcessViewFunc.doFunction.setProcessRole(true,true,false,false,false,true,false);
							}else if(user_id == executor_id && this.type == Constant.PROCESS.TYPE_AUTHOR){ // main author
								exsoftProcessViewFunc.doFunction.setProcessRole(true,true,true,false,false,true,true);
							}else if(user_id == executor_id && this.type == Constant.PROCESS.TYPE_COAUTHOR){ // co-author
								exsoftProcessViewFunc.doFunction.setProcessRole(false,false,false,false,false,true,false);
							}
						}else if(processVo.status_number == 2){
							// TODO : show the approval list...
							if(this.type == Constant.PROCESS.TYPE_APPROVER){
								if(user_id == executor_id && this.status == Constant.PROCESS.EXECUTOR_START){
									exsoftProcessViewFunc.doFunction.setProcessRole(true,false,false,true,true,true,true);
								}else if(user_id == executor_id && this.status != Constant.PROCESS.EXECUTOR_START){
									exsoftProcessViewFunc.doFunction.setProcessRole(true,false,false,false,false,false,false);
								}
							}
						}
						// Regardless of step, the requester and the main author keep modify/delete.
						if(user_id == processVo.creator_id || this.type == Constant.PROCESS.TYPE_AUTHOR){
							exsoftProcessViewFunc.isModify = true;
							exsoftProcessViewFunc.isDelete = true;
						}
					});
					// Approver / receiver progress tooltips
					exsoftProcessViewFunc.doFunction.setTooltip('processView_approver', 'processView_approverTooltip', processVo, Constant.PROCESS.TYPE_APPROVER);
					exsoftProcessViewFunc.doFunction.setTooltip('processView_receiver', 'processView_receiverTooltip', processVo, Constant.PROCESS.TYPE_RECEIVER);
					// Toggle the action buttons according to the capability flags.
					exsoftProcessViewFunc.isModify ? $('#processView_modify').removeClass('hide') : $('#processView_modify').addClass('hide');
					exsoftProcessViewFunc.isDelete ? $('#processView_delete').removeClass('hide') : $('#processView_delete').addClass('hide');
					exsoftProcessViewFunc.isApproveRequest ? $('#processView_approveRequest').removeClass('hide') : $('#processView_approveRequest').addClass('hide');
					exsoftProcessViewFunc.isApprove ? $('#processView_approve').removeClass('hide') : $('#processView_approve').addClass('hide');
					exsoftProcessViewFunc.isApproveReject ? $('#processView_approveReject').removeClass('hide') : $('#processView_approveReject').addClass('hide');
					exsoftProcessViewFunc.isContent ? $('#processView_content').removeClass('hide') : $('#processView_content').addClass('hide');
					exsoftProcessViewFunc.isContent ? $('.requestApproval_wordcnts').removeClass('hide') : $('.requestApproval_wordcnts').addClass('hide');
					// TODO : file handling
					// Approval-request / comment list
					exsoftProcessViewFunc.ui.printProcessSituation(processId);
				}else{
					jAlert(data.message);
				}
			});
			// 2. load the document information
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({doc_id:doc_root_id}, exsoft.contextRoot+"/document/documentDetail.do", null, function(data,param) {
				var docVO = data.documentVO;
				$('#processView_info tr:nth-child(2)').find('span').text(data.folderPath);	// base folder
				$('#processView_info tr:nth-child(10)').find('span').text(docVO.type_name);	// document type
				// Permission settings
				$("#processView_aclName").html(data.aclDetail.acl_name);
				exsoft.util.table.tableDocumentAclItemPrintList('processView_acl', data.aclItemList);
				exsoft.util.table.tableDocumentAclItemPrintList('processView_extAcl', data.aclExItemList);
			});
		},
	},
	// 1. popups
	open : {
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : {
	},
	// 4. UI event handlers
	event : {
		/**
		 * Sends an approval workflow action for the current process.
		 * @param action_type APPROVEREQUEST | APPROVE | APPROVEREJECT
		 */
		approveAction : function(action_type) {
			if(exsoftProcessViewFunc.isModify == true && $("#processWriteFrm").validation()){
				var jsonObject = {actionType : action_type,
						process_id : exsoftProcessViewFunc.process_id,
						doc_root_id : exsoftProcessViewFunc.doc_root_id,
						content : $('#processView_content').text()};
				exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+'/process/processControl.do', null, function(data, param){
					if(data.result == 'true'){
						exsoft.util.layout.divLayerClose('coopUser_detail_wrapper','coopUser_detail');
						// Refresh the process list; the grid id is defined in processList.jsp.
						if($('#processDocGridList').length > 0){
							exsoft.util.grid.gridRefresh('processDocGridList',exsoft.contextRoot+'/process/processList.do');
						}
					}
					jAlert(data.message);
				});
			}
		}
	},
	// 5. UI rendering
	ui : {
		/**
		 * Renders the processing/approval history (xr_comment) list.
		 * @param process_id process identifier
		 */
		printProcessSituation : function(process_id){
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({root_id:process_id}, exsoft.contextRoot+'/document/documentCommentList.do', null, function(data, param){
				$('.approvalResult_cnt').text(data.records);
				$('#processView_situation').empty();
				if(data.result == 'true'){
					$(data.list).each(function(idx){
						var liContent = '<li class="approvalResult_list">';
						liContent += '<p><span class="bold">'+this.creator_name +'</span>';
						liContent += '<span>' + this.create_date + '</span></p>';
						liContent += '<p>' + this.content+'</p></li>';
						$('#processView_situation').append(liContent);
					});
				}else{
					$('#processView_situation').append('<li class="approvalResult_list"><p>등록된 데이터가 없습니다.</p></li>');
				}
			});
		}
	},
	// 6. callbacks
	callback : {
	},
	// 7. worker functions
	doFunction : {
		/**
		 * Appends an executor name (comma separated) to the span of the given
		 * info-table row; a leading comma on the first entry is trimmed.
		 */
		setExecutorName : function(idx, name){
			var currentText = $('#processView_info tr:nth-child('+idx+')').find('span').text();
			$('#processView_info tr:nth-child('+idx+')').find('span').text(function(idx){
				var tempStr = currentText + ',' + name;
				var firstComma = tempStr.substring(0,1);
				if(firstComma == ','){
					tempStr = tempStr.substring(1);
				}
				return tempStr;
			});
		},
		/**
		 * Builds the approver/receiver progress tooltip.
		 * FIX: the guard used to read "executorArray.length > 0 > 0" — a typo
		 * that only coincidentally evaluated to the intended boolean.
		 */
		setTooltip : function(id, tooltipId, processVo, type){
			var tooltip = '';
			var count = 0;
			var status = '';
			var executorArray = new Array();
			if(Constant.PROCESS.TYPE_APPROVER == type){
				count = processVo.approval_count;
				status = '승인완료';
				executorArray = processVo.approval_list;
			}else{
				count = processVo.receiver_count;
				status = '열람완료';
				executorArray = processVo.receiver_list;
			}
			if(executorArray.length > 0){
				$(exsoft.util.common.getIdFormat(id)).text('['+count+']');
				var tempList = executorArray;
				$.each(tempList, function(){
					// entry format: "name|status"
					var excutors = this.split('|');
					if(excutors[1] == status){
						tooltip += '<p>● '+excutors[0]+' : '+excutors[1]+'</p>';
					}else{
						tooltip += '<p>○ '+excutors[0]+' : '+excutors[1]+'</p>';
					}
				});
				$(exsoft.util.common.getIdFormat(tooltipId)).html(tooltip);
			}
		},
		/**
		 * Sets every capability flag; an undefined argument means false.
		 * Order: isModify, isDelete, isApproveRequest, isApprove,
		 * isApproveReject, isFileModify, isContent.
		 */
		setProcessRole : function(isModify,isDelete,isApproveRequest,isApprove,isApproveReject,isFileModify,isContent){
			exsoftProcessViewFunc.isModify = isModify != undefined ? isModify : false;
			exsoftProcessViewFunc.isDelete = isDelete != undefined ? isDelete : false;
			exsoftProcessViewFunc.isApproveRequest = isApproveRequest != undefined ? isApproveRequest : false;
			exsoftProcessViewFunc.isApprove = isApprove != undefined ? isApprove : false;
			exsoftProcessViewFunc.isApproveReject = isApproveReject != undefined ? isApproveReject : false;
			exsoftProcessViewFunc.isFileModify = isFileModify != undefined ? isFileModify : false;
			exsoftProcessViewFunc.isContent = isContent != undefined ? isContent : false;
		},
	}
}
/**
 * "Add sub-department" popup controller for group administration.
 */
var registGroupWindow = {
	groupInfo : new Object(),
	groupWriteBinder : new DataBinder("#p_group_form"),
	/**
	 * 0. Initialises and opens the popup.
	 * @param fullPath full path string of the parent group
	 * @param parentId parent group id
	 * @param mapId tree map id
	 */
	init : function(fullPath, parentId, mapId) {
		// Close button inside the popup
		$('.window_close').bind("click", function(e){
			e.preventDefault();
			$(this).parents('.subDept_add').addClass('hide');
			$('.subDept_add_wrapper').addClass('hide');
		});
		// Clicking the dimmed overlay also closes the popup
		$('.subDept_add_wrapper').bind("click", function(){
			$(this).addClass('hide');
			$('.subDept_add').addClass('hide');
		});
		// Clear the popup form
		exsoft.util.common.formClear("p_group_form");
		// Remember the parameters
		registGroupWindow.groupInfo.type = "groupWrite";
		registGroupWindow.groupInfo.fullPath = fullPath;
		registGroupWindow.groupInfo.parentId = parentId;
		registGroupWindow.groupInfo.mapId = mapId;
		// DataBinder setup
		registGroupWindow.groupWriteBinder.set("parent_id",parentId);
		registGroupWindow.groupWriteBinder.set("group_full_path",fullPath);
		registGroupWindow.groupWriteBinder.set("map_id",mapId);
		registGroupWindow.groupWriteBinder.set("p_group_status","C");
		exsoft.util.common.ddslick('#p_group_status','use_yn','p_group_status',79, function(){});
		exsoft.util.layout.divLayerOpen("subDept_add_wrapper", "subDept_add");
		exsoft.util.filter.maxNumber();
	},
	// 1. popups
	open : {
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : function() {
		exsoft.util.layout.divLayerClose("subDept_add_wrapper", "subDept_add");
	},
	// 4. UI event handlers
	event : {
		selectGroup : function() {
			alert("그룹선택팝업 미처리");
		},
		// Button: save the group
		registGroupSubmit : function() {
			// Validate required fields
			if (!registGroupWindow.event.validationForm()) {
				return;
			}
			registGroupWindow.groupWriteBinder.set("type","insert");
			registGroupWindow.groupWriteBinder.set("group_name_ko",$("#p_group_name_ko").val());
			registGroupWindow.groupWriteBinder.set("group_name_en",$("#p_group_name_en").val());
			registGroupWindow.groupWriteBinder.set("sort_index",$("#p_sort_index").val());
			registGroupWindow.groupWriteBinder.set("group_status",exsoft.util.layout.getSelectBox('p_group_status','option'));
			// Server call.
			// FIX: exsoft.contextRoot was concatenated twice, producing a broken
			// request URL such as "/ctx/ctx/admin/groupInfoManager.do".
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(registGroupWindow.groupWriteBinder.getDataToJson() ,
					exsoft.contextRoot +"/admin/groupInfoManager.do", "groupWrite",
					function(data, param) {
						if(data.result == "success") {
							groupManager.event.refreshTree(); // reload the group-management tree
						} else {
							jAlert(data.message, "그룹관리", 0);
						}
					});
			// Close the popup (note: runs before the async response arrives).
			registGroupWindow.close();
		},
		// Form validation
		validationForm : function() {
			// Department name (only group_name_ko is NOT NULL, so only it is checked)
			if ($("#p_group_name_ko").val().length == 0) {
				jAlert("부서명을 입력해주세요.");
				return false;
			}
			// Sort order
			if ($("#p_sort_index").val().length == 0) {
				jAlert("정렬순서를 입력해주세요.");
				return false;
			}
			return true;
		},
		// Keeps the full-path preview in sync while the name is typed
		groupNameChanged : function() {
			registGroupWindow.groupInfo.groupNameKo = $("#p_group_name_ko").val();
			$("#p_group_full_path").val(registGroupWindow.groupInfo.fullPath + " > " + registGroupWindow.groupInfo.groupNameKo);
		},
	},
	// 5. UI updates
	ui : {
	},
	// 6. callbacks
	callback : {
	},
}
package kr.co.exsoft.user.controller;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.springmodules.validation.commons.DefaultBeanValidator;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.user.service.GroupService;
import kr.co.exsoft.user.vo.GroupVO;
/**
 * Group (department) controller.
 * @author package team
 * @since 2014.07.17
 * @version 3.0
 *
 */
@Controller
@RequestMapping("/group")
public class GroupAuthController {

	@Autowired
	private GroupService groupService;

	@Autowired
	private MessageSource messageSource;

	@Autowired
	private DefaultBeanValidator beanValidator;

	protected static final Log logger = LogFactory.getLog(GroupAuthController.class);

	/**
	 * Returns the department list for a tree node: root groups when no
	 * parent id is supplied, otherwise the children of that parent.
	 *
	 * @param model Spring MVC model (unused, kept for signature compatibility)
	 * @param request current HTTP request (unused, kept for signature compatibility)
	 * @param map request parameters (mapId, parentId, rootId, treeType)
	 * @return JSON map: "groupList" + "result" on success, or "result"=fail with "message"
	 */
	@RequestMapping(value="/groupList.do", method=RequestMethod.POST)
	@ResponseBody
	public Map<String,Object> groupLists(Model model, HttpServletRequest request,@RequestParam HashMap<String,Object> map) {
		Map<String, Object> resultMap = new HashMap<String, Object>();
		HashMap<String, Object> param = new HashMap<String, Object>();
		try {
			String mapId = map.get(Constant.TREE_MAP_ID) != null ? map.get(Constant.TREE_MAP_ID).toString() : null;
			String parentId = map.get(Constant.TREE_PARENT_ID) != null ? map.get(Constant.TREE_PARENT_ID).toString() : null;
			String rootId = map.get(Constant.TREE_ROOT_ID) != null ? map.get(Constant.TREE_ROOT_ID).toString() : null;
			// treeType lets group admin show unassigned groups when adding members
			String treeType = map.get("treeType") != null ? map.get("treeType").toString() : "";

			param.put("mapId", mapId);
			param.put("parentId", parentId);
			param.put("rootId", rootId);
			param.put("treeType", treeType);

			// Original condition was `isNull(parentId) || (isNull(parentId) && !isNull(rootId))`,
			// which reduces to `isNull(parentId)`; simplified without behaviour change.
			List<GroupVO> groupList;
			if (StringUtil.isNull(parentId)) {
				groupList = groupService.rootGroupList(param);
			} else {
				groupList = groupService.childGroupList(param);
			}
			resultMap.put("groupList", groupList);
			resultMap.put("result", Constant.SUCCESS);
		} catch (Exception e) {
			resultMap.put("result", Constant.RESULT_FAIL);
			resultMap.put("message", e.getMessage());
			// Use the class logger instead of printStackTrace().
			logger.error("groupLists failed", e);
		}
		return resultMap;
	}
}
<file_sep>/EDMS3/WebContent/js/common/util.js
/**
* EDMS HTML5 Global JS Function
* util관련 js
*/
/**
 * Extends the built-in String type with positional placeholder substitution.
 *
 * Example: 'The {0} is dead. Code {1} instead of {0}!'.format('ASP', 'PHP')
 * returns 'The ASP is dead. Code PHP instead of ASP!'.
 * Placeholders without a matching argument are left untouched.
 * @returns {String}
 */
String.prototype.format = function() {
	var result = this;
	// Replace {0}, {1}, ... sequentially, one argument at a time.
	Array.prototype.forEach.call(arguments, function(value, index) {
		result = result.replace(new RegExp('\\{' + index + '\\}', 'gi'), value);
	});
	return result;
};
exsoft.namespace('util.common');
exsoft.namespace('util.layout');
exsoft.namespace('util.ajax');
exsoft.namespace('util.check');
exsoft.namespace('util.date');
exsoft.namespace('util.filter');
exsoft.namespace('util.error');
exsoft.namespace('util.grid');
exsoft.namespace('util.table');
exsoft.namespace('util.websocket');
/***********************************************
* common
**********************************************/
/**
* 공통으로 사용되는 util
*
* @namespace : exsoft.util.common
*
*/
exsoft.util.common = {
/**
* checkbox 전부 체크
* @param name
*/
allCheck : function(name) {
var check = $('#checkAll').is(':checked');
$('input[name='+ name +']').each(function() {
if($(this).attr("disabled") != "disabled") {
this.checked = check;
}
});
},
/**
* checkbox 전부 체크
* @param checkId
* @param name
*/
allCheckBox : function(checkId,name) {
var check = $('#'+checkId).is(':checked');
$('input[name='+ name +']').each(function() {
if($(this).attr("disabled") != "disabled") {
this.checked = check;
}
});
},
/**
 * Resets (unchecks) the "select all" master checkbox.
 * @param id element id of the master checkbox (without '#')
 */
checkboxInit : function(id) {
$("#"+id).prop("checked",false);
},
/**
 * Returns how many checkboxes with the given name are currently checked.
 * @param chkName checkbox input name attribute
 * @returns {Number} checked count
 */
checkBoxCheckedLength : function(chkName) {
return $("input:checkbox[name="+chkName+"]:checked").length;
},
//////////// File download helpers ////////////
/**
 * Navigates to the single-file download URL for one attachment page.
 * The page id is wrapped in a one-element JSON array because the download
 * endpoint expects a `pageList` array parameter.
 * NOTE(review): a falsy page_id still navigates with an empty list — confirm intended.
 * @param page_id attachment page identifier
 */
fileDown : function(page_id) {
var JsonArr = [];
var JsonArrIndex = 0;
var jsonData = {page_id:""};
jsonData['page_id'] = page_id;
if(jsonData.page_id){
JsonArr[JsonArrIndex] = jsonData;
JsonArrIndex++;
}
// Full-page navigation triggers the browser's download dialog.
$(location).attr("href", exsoft.contextRoot+"/common/downLoad.do?pageList="+JSON.stringify(JsonArr));
},
/**
 * Downloads the attachments referenced by checkboxes in the given form as
 * one ZIP archive.
 * NOTE(review): the selector collects every checkbox value regardless of
 * checked state — confirm whether ":checked" was intended.
 * @param formId form element id containing the checkboxes
 * @param checkBox checkbox input name attribute
 * @returns {Boolean} false when no page ids were collected (after alerting)
 */
zipFileDown : function(formId,checkBox) {
var JsonArr = [];
var JsonArrIndex = 0;
$('#'+formId+' input[name='+ checkBox +']').each(function() {
var jsonData = {page_id:""};
jsonData['page_id'] = this.value;
if(jsonData.page_id){
JsonArr[JsonArrIndex] = jsonData;
JsonArrIndex++;
}
});
if(JsonArrIndex == 0) {
jAlert('No Files.');
return false;
}else {
// isZip=T asks the server to bundle the pages into a single ZIP.
$(location).attr("href", exsoft.contextRoot+"/common/downLoad.do?pageList="+JSON.stringify(JsonArr)+"&isZip=T");
}
},
/**
* 파일사이즈 구하기
* @param bytes Number of bytes to convert
* @param precsion Number of digits after the decimal separator
* @returns {String}
*/
bytesToSize : function(bytes,precision) {
var kilobyte = 1024;
var megabyte = kilobyte * 1024;
var gigabyte = megabyte * 1024;
var terabyte = gigabyte * 1024;
if ((bytes >= 0) && (bytes < kilobyte)) {
return bytes + ' B';
} else if ((bytes >= kilobyte) && (bytes < megabyte)) {
return (bytes / kilobyte).toFixed(precision) + ' KB';
} else if ((bytes >= megabyte) && (bytes < gigabyte)) {
return (bytes / megabyte).toFixed(precision) + ' MB';
} else if ((bytes >= gigabyte) && (bytes < terabyte)) {
return (bytes / gigabyte).toFixed(precision) + ' GB';
} else if (bytes >= terabyte) {
return (bytes / terabyte).toFixed(precision) + ' TB';
} else {
return bytes + ' B';
}
},
/**
* JQgrid 에서 File용량 계산하기(함수명 변경처리)
* @param bytes
* @returns {String}
*/
gridBytesToSize : function(bytes) {
var sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (bytes == 0) return '0B';
else if(bytes == -1) return '-1';
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024)));
var size = (i<2) ? Math.round((bytes / Math.pow(1024, i))) : Math.round((bytes / Math.pow(1024, i)) * 100)/100;
return size + ' ' + sizes[i];
},
////////// Cookie helpers //////////
/**
 * Writes a cookie on path "/".
 * @param key cookie name
 * @param value cookie value (stored escaped)
 * @param expire lifetime; values >= 1 are added as DAYS. Values < 1 are
 *        multiplied by 10 and added as HOURS — presumably fractional-day
 *        support, TODO(review) confirm this scaling is intended.
 */
setCookie : function(key, value, expire) {
var cookieDate = new Date();
if(expire < 1) {
expire = expire * 10;
cookieDate.setHours(cookieDate.getHours() + expire);
} else {
cookieDate.setDate(cookieDate.getDate() + expire);
}
document.cookie = key + "=" + escape(value) + "; expires=" + cookieDate.toGMTString() + "; path=/";
},
/**
 * Reads a cookie value by name.
 * NOTE(review): indexOf matching means a name that is a suffix of another
 * cookie name can match the wrong cookie.
 * @param key cookie name
 * @returns the unescaped value, or null when the cookie is absent
 */
getCookie : function(key) {
var cookie = document.cookie;
var first = cookie.indexOf(key+"=");
if(first >= 0) {
var str = cookie.substring(first, cookie.length);
// No trailing ";" means this is the last cookie in the string.
var last = str.indexOf(";");
if(last < 0) {
last = str.length;
}
str = str.substring(0,last).split("=");
return unescape(str[1]);
} else {
return null;
}
},
/**
*cookie 지우기
* @param key
*/
delCookie : function(key) {
today = new Date();
today.setDate(today.getDate() - 1);
document.cookie = key + "=; path=/; expires=" + today.toGMTString() + ";";
},
//////////XSS & Sql Injection 관련 //////////
/**
* Sql Injection 키워드 제거 처리
* @param str
*/
sqlInjectionReplace : function(str) {
str = str.replace("'" , "");
str = str.replace( "--" , "");
str = str.replace("--, #" , " " );
str = str.replace("/* */" , " " );
str = str.replace("' or 1=1--" , " " );
str = str.replace("union" , " " );
str = str.replace("select" , " " );
str = str.replace("delete" , " " );
str = str.replace("insert" , " " );
str = str.replace("update" , " " );
str = str.replace("drop" , " " );
str = str.replace("on error resume" , " " );
str = str.replace("execute" , " " );
str = str.replace("windows" , " " );
str = str.replace("boot" , " " );
str = str.replace("-1 or" , " " );
str = str.replace("-1' or" , " " );
str = str.replace("../" , " " )
str = str.replace("-1' or" , " " );
str = str.replace("unexisting" , " " );
str = str.replace("win.ini" , " " );
return str;
},
/**
* HTML Tag 제거처리
* @param str
* @returns
*/
stripHTMLtag : function(str) {
var objStrip = new RegExp();
objStrip = /[<][^>]*[>]/gi;
return str.replace(objStrip, "");
},
/**
 * Strips HTML tags from a value and normalises it for display: an empty or
 * whitespace-only result becomes "", otherwise double quotes are converted
 * to single quotes.
 * NOTE(review): the two ' ' comparisons below render identically here; one
 * is presumably a literal NBSP character — verify against the original file.
 * @param v any value (coerced to String first)
 * @returns the cleaned string
 */
stripHtml : function(v) {
v = String(v);
var regexp = /<("[^"]*"|'[^']*'|[^'">])*>/gi;
if (v) {
v = v.replace(regexp,"");
return (v && v !== ' ' && v !== ' ') ? v.replace(/\"/g,"'") : "";
}
return v;
},
/**
* Div 객체나 Tr 객체 삭제처리
* @param divIds
*/
removeDivIds : function(divIds) {
$("#"+divIds).remove();
},
////////// Etc //////////
/**
* 에디터 내용 가져오기
*/
editorContent : function() {
return iframe_editor.Editor.getContent();
},
/**
* 유니크값 생성 - UUID
*/
uniqueId : function() {
var d = new Date().getTime();
var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = (d + Math.random()*16)%16 | 0;
d = Math.floor(d/16);
return (c=='x' ? r : (r&0x7|0x8)).toString(16);
});
return uuid;
},
/**
* 문서 등록/수정을 위한 파일첨부 관련 옵션 정의
* @param type : 환경설정 옵션
* @param is_use : 사용유무
* @param fval : 키값
* @param defaultValue : 기본값으로 사용할 값
*/
fileConfig : function(type, is_use, fval, defaultValue) {
var ret = null;
switch(type) {
case "EXT":
if(is_use == "Y") {ret = fval.replace(";",",");}
else { ret = defaultValue; }
break;
case "FILECNT":
if(is_use == "Y") {ret = fval;}
else { ret = defaultValue; }
break;
case "FILESIZE":
if(is_use == "Y") {ret = fval*1024*1024;}
else { ret = defaultValue*1024*1024; }
break;
case "FILETOTAL":
if(is_use == "Y") {ret = fval*1024*1024;}
else { ret = defaultValue*1024*1024; }
break;
}
return ret;
},
/**
* 클립보드 복사처리 divId 내용
* @param divId
*/
copyToClipboard : function(divId) {
var text = $('#'+divId).html();
if(window.clipboardData){
// IE
window.clipboardData.setData('text', text);
}else {
// IE 외 Browser
window.prompt ("Copy to clipboard: Ctrl+C, Enter", text);
}
},
/**
* 해당 입력칸의 숫자N이면 다음값으로 포커스 이동처리
* @param N
* @param Obj
* @param nextID
* @usage onKeyUp="exsoft.util.common.nextBlank(3,this.id,'ip2')"
*/
nextBlank : function(N,Obj,nextID) {
if(document.getElementById(Obj).value.length == N) {
document.getElementById(nextID).focus();
}
},
/**
* Textarea max length 구하기
* @param obj
* @usage <textarea name="reason" id="reason" rows="3" cols="30" class="w2" maxlength="50" onKeyUp="return exsoft.util.common.isMaxlength(this);"></textarea>
*/
isMaxlength : function(obj) {
var mlength=obj.getAttribute? parseInt(obj.getAttribute("maxlength")) : ""
if (obj.getAttribute && obj.value.length > mlength) {
obj.value = obj.value.substring(0,mlength)
}
},
/**
* element의 ID 포맷이 맞는지 확인
*/
isIdFormat : function(eId) {
if (eId.indexOf("#") === 0) {
return true;
} else {
return false;
}
},
/**
* element의 ID 포맷이 맞는지 확인하고, 맞지 않으면 ID포맷으로 만들어서 리턴
*/
getIdFormat : function(eId) {
if (!exsoft.util.common.isIdFormat(eId)) {
eId = "#" + eId;
}
return eId;
},
/**
* Form 초기화
* @param formId
*/
formClear : function(formId) {
$(exsoft.util.common.getIdFormat(formId))[0].reset();
},
/**
* selectbox 디자인 변경
*/
ddslick : function(divId, className, dataBindName, width, callback, arrParam){
divId = exsoft.util.common.getIdFormat(divId);
$(divId).ddslick({
width:width,
background:"rgba(255, 255, 255, 0)",
onSelected: function(selectedData){
$(divId).addClass(className); //css변경
//databinder 사용 여부 dd-selected-value input name
if(dataBindName){ // if(dataBindName !== '') 동일한 조건문, boolean 아님 혼돈하지 말 것
$(divId + ' input[name=dd-selected-value]').attr('data-bind',dataBindName);
$(divId + ' input[name=dd-selected-value]').attr('data-select','true');
//$(divId + ' input[name=dd-selected-value]').trigger('change'); // hidden type은 강제로 change event 부여
}
callback(divId, selectedData, arrParam);
}
});
},
/**
* Chart Type 초기화
*/
chartTypeInit : function() {
$("#chartType").empty();
$("#chartType").append("<option value='bar'>막대차트</option>");
$("#chartType").append("<option value='line'>라인차트</option>");
$("#chartType").append("<option value='pie'>파이차트</option>");
},
/**
* Chart 구분 컬럼
* @param type - 구분자
*/
colTypeInit : function(type) {
$("#colType").empty();
if(type == 1){
$("#colType").append("<option value='create_cnt'>등록건수</option>");
$("#colType").append("<option value='read_cnt'>조회건수</option>");
$("#colType").append("<option value='update_cnt'>수정건수</option>");
$("#colType").append("<option value='delete_cnt'>삭제건수</option>")
}else {
$("#colType").append("<option value='doc_cnt'>문서수</option>");
$("#colType").append("<option value='page_cnt'>파일수</option>");
$("#colType").append("<option value='page_total'>용량</option>");
}
},
/**
* ACL의 가중치를 구한다 (수행 권한이 충분한지 비교하기 위함)
*/
getAclLevel : function(en_title) {
var Levels = {
NONE : 0,
BROWSE : 1,
READ : 2,
UPDATE : 3,
DELETE : 4
}
return Levels[en_title];
},
/**
* AclItem 기본권한 한글 -> 영문
*/
getAclItemTitleEn :function(ko_title) {
switch(ko_title) {
case "삭제" : return "DELETE"; break;
case "수정" : return "UPDATE"; break;
case "조회" : return "READ"; break;
case "목록" : return "BROWSE"; break;
case "없음" : return "NONE"; break;
}
},
/**
*
* @param id : jquery id
* @param obj : jqgrid row Object
* @param isLeft : 마우스 포인트 기준 show 위치 왼쪽, 오른쪽 선택
*/
tooltip : function(id, tooltip, isLeft, event){
$('.tooltip').empty().append(tooltip);
$('.tooltip').removeClass('hide');
$('.tooltip').css('top',event.clientY+'px');
if(isLeft == 'true'){
$('.tooltip').css('left',(event.clientX-$('.tooltip').width()-25)+'px');
}else{
$('.tooltip').css('left',(event.clientX+15)+'px');
}
},
/**
* 탭 브라우저 전환시 특정 div show&hide
* @param tabIdList, divId
*/
tabControldivId : function(tabIdList, divId) {
$(tabIdList).each(function(index) {
tabIdList[index] = exsoft.util.common.getIdFormat(this);
});
$(function(){
$(tabIdList.join(", ")).click(function() {
var selectTab = $(this); //현재 탭
$(tabIdList).each(function(index) {
if ('#'+selectTab.attr("id") == tabIdList[index]) {
$(tabIdList[index] + divId).removeClass("hide");
} else {
$(tabIdList[index] + divId).addClass("hide");
}
});
});
})
},
// List에서 해당 키값에 해당하는 값을 찾는다.
findCodeName : function(list,compareValue) {
var ret = null;
$(list).each(function(i) {
if (list[i].code_id == compareValue) {
ret = list[i].code_nm;
return false;
}
})
return ret;
},
/**
* 관리자 문서권한 속성부분 Show/Hide
* @param imgId - 이미지아이콘 id
* @param viewId - view영역 id
*/
showHide : function(imgId,viewId) {
if($(imgId).hasClass("down")) {
$(imgId).removeClass("down");
$(imgId).addClass("up");
$(viewId).removeClass("hide")
}else {
$(imgId).removeClass("up");
$(imgId).addClass("down");
$(viewId).addClass("hide")
}
},
/**
* ddslick 값을 가져온다
* @param targetId - ddslick ID
*/
getDdslick : function(targetId) {
return $(targetId + ' input[name=dd-selected-value]').val()
},
/**
* form객체의 엘리먼트 값을 json형식으로 반환
* @param form_id
* @returns json string
*/
getFormToJsonObject : function(form_id)
{
var obj = {};
var arr = $(exsoft.util.common.getIdFormat(form_id)).serializeArray();
$.each(arr, function() {
if (obj[this.name] !== undefined) {
if (!obj[this.name].push) {
obj[this.name] = [obj[this.name]];
}
obj[this.name].push(this.value || '');
} else {
obj[this.name] = this.value || '';
}
});
return obj;
},
/**
* XFTree 객체에서 선택된 Node 정보를 구성하여 반환한다
*
* @param treeObject - XFTrees 객체
* @param treeDivId - Tree Div ID
*/
getReturnTreeObject : function(treeObject, treeDivId, mapId) {
var objectArray = new Array();
var returnObj;
$(treeObject.getCurrentNodeIds()).each(function(index){
returnObj = new Object();
returnObj.id = this; // 선택한 노드의 ID
returnObj.parentIdList = $(treeDivId).jstree("get_path", this, "", true); // 선택한 노드부터 최상위 노드까지의 폴더 ID 목록
returnObj.fullPath = $(treeDivId).jstree("get_path", this); // 선택한 노드부터 최상위 노드까지의 폴더 이름 목록
returnObj.mapId = mapId;
returnObj.text = $(treeDivId).jstree("get_text", this); // 선택된 노드 이름
returnObj.acl_id = $(treeDivId).jstree("get_node", this).original.acl_id; // acl_id(custom data는 original 객체 안에 있다.)
returnObj.original = $(treeDivId).jstree("get_node", this).original;
returnObj.is_type = $(treeDivId).jstree("get_node", this).original.is_type; // 문서유형ID
returnObj.parentGroup = treeObject.getFolderGroupId(this);
objectArray.push(returnObj);
});
return objectArray;
},
/**
* fadin fadout Noty
*
* @param arg[0] msg - noty message
* @param arg[1] divId - noty target div id
* @param arg[2] duration - noty fade in out duration
*/
noty : function() {
if (arguments[1] === undefined && exsoft.notyDivId === undefined)
return;
var _opt = {
msg : arguments[0],
divId : arguments[1] === undefined ? exsoft.notyDivId : arguments[1],
duration : arguments[2] === undefined ? 2000 : arguments[2],
}
$(_opt.divId).html(_opt.msg).fadeIn(_opt.duration).fadeOut(_opt.duration);
},
// textarea 최대 글자 수 체크
limitStringCheck : function(obj, maxLength){
var koreaTextLength = 0;
var byteCount = 0;
var currentText = $(obj).val();
// 한글은 2byte 계산한다.
for (var k=0;k < currentText.length; k++)
{
onechar = currentText.charAt(k);
if (escape(onechar) =='%0D') {} else if (escape(onechar).length > 4) { byteCount += 2; } else { byteCount++; }
if(byteCount <= maxLength){
koreaTextLength ++;
}
}
if(byteCount > maxLength){
$(obj).val(currentText.substr(0, koreaTextLength));
$(obj).next().children('span').text(maxLength);
}else{
$(obj).next().children('span').text(byteCount);
}
}
}; // exsoft.util.common end...
exsoft.util.common.prototype = {
}; // exsoft.util.common.prototype end...
/***********************************************
* layout
**********************************************/
/**
* layout 관련 util
*
* @namespace : exsoft.util.layout
*
*/
exsoft.util.layout = {
////////// 레이아웃관련 //////////
/**
* 로그아웃
*/
logout : function(urls) {
location.href = urls
},
/**
* 레이어 empty
* @param divIds - 레이어ID
*/
layerEmpty : function(divIds) {
$(exsoft.util.common.getIdFormat(divIds)).empty();
},
browser : function() {
var s = navigator.userAgent.toLowerCase();
var match = /(webkit)[ \/](\w.]+)/.exec(s) ||
/(opera)(?:.*version)?[ \/](\w.]+)/.exec(s) ||
/(msie) ([\w.]+)/.exec(s) ||
/(mozilla)(?:.*? rv:([\w.]+))?/.exec(s) ||
[];
return { name: match[1] || "", version: match[2] || "0" };
},
/**
* 새창닫기 : IE/FireFox/Chrome
*/
windowClose : function() {
window.open('','_self').close();
},
/**
* 상단메뉴 표시처리
* @param menuType
*/
topMenuSelect : function(menuType) {
var menuArray = ['myDocMenu','myWorkMenu','workSpaceMenu','workProcessMenu','statisticsMenu'];
for (var i = 0; i < menuArray.length ; i++) {
$("#"+menuArray[i]).removeClass('selected');
$("#"+menuArray[i]+"Selected").removeClass('selected');
if(menuType == menuArray[i]) {
$("#"+menuArray[i]).addClass('selected');
$("#"+menuArray[i]+"Selected").addClass('selected');
}
}
},
/**
* 관리자 메뉴 선택시 contents 영역 변경처리
* @param url
*/
goAdminContent : function(url){
jQuery.ajax({
url: exsoft.contextRoot+"/common/sessionCheck.do",
global: false,
type: "POST",
data: {session_id:'check'},
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
$('#admContents').load(url);
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* 사용자 메뉴 선택시 contents 영역 변경처리
* @param url
*/
goUserContent : function(url){
jQuery.ajax({
url: exsoft.contextRoot+"/common/sessionCheck.do",
global: false,
type: "POST",
data: {session_id:'check'},
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
$('#userContents').load(url);
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* 레이어팝업 수직가운데 정렬
* @param obj
*/
lyrPopupWindowResize : function(obj) {
var wrap = $('.wrap');
obj.prev().css({
height:$(document).height(),
width:$(document).width()
});
obj.css({
top:(wrap.height()-obj.height())/2,
left:(wrap.width()-obj.width())/2
});
},
//레이어팝업 수직가운데 정렬
lyrPopupWindowResizeArr : function(arr) {
var wrap = $('.wrap');
for(var i = 0; i < arr.length; i++) {
arr[i].prev().css({
height:$(document).height(),
width:$(document).width()
});
arr[i].css({
top:(wrap.height()-arr[i].height())/2,
left:(wrap.width()-arr[i].width())/2
});
}
},
/**
* Layer Open
* @param wrapperClass
* @param layerClass
*/
divLayerOpen : function(wrapperClass,layerClass) {
$("."+wrapperClass).removeClass('hide');
$("."+layerClass).removeClass('hide');
exsoft.util.layout.lyrPopupWindowResize($("."+layerClass));
},
/**
* Layer Close
* @param wrapperClass
* @param layerClass
*/
divLayerClose : function(wrapperClass,layerClass) {
$("."+wrapperClass).addClass('hide');
$("."+layerClass).addClass('hide');
},
popDivLayerClose : function(layerClass) {
$("."+layerClass).addClass('hide');
},
/***
* DropMenu Proc
* @param dropMenuClass
*/
dropMenu : function(dropMenuClass) {
if($("."+dropMenuClass).hasClass('hide')) {
$("."+dropMenuClass).removeClass('hide');
}else {
$("."+dropMenuClass).addClass('hide');
}
},
/**
* SelectBox 값 설정하기 :: ddslick Version
* @param selectId
* @param setVal
*/
setSelectBox : function(selectId,setVal) {
var currVal = "";
var findDivs = "#"+selectId + " li";
$(findDivs).each(function( index ) {
currVal = $(this).find('.dd-option-value').val();
if(currVal == setVal) {
$("#"+selectId).ddslick('select', {index: $(this).index()});
}
});
},
/**
* 선택된 SelectBox 값 가져오기 :: :: ddslick Version
* @param selectId
* @param type - option,text
*/
getSelectBox : function(selectId,type) {
var retVal = "";
if(type == "option") {
retVal = $("#"+selectId+" .dd-selected-value").val();
}else {
retVal = $("#"+selectId+" .dd-selected-text").html();
}
return retVal;
},
/**
* Radio 버튼 Check
* @param radioNm
* @param setVal
*/
setRadioVal : function(radioNm,setVal) {
$("input:radio[name='"+radioNm+"']:radio[value='"+setVal+"']").prop("checked", true);
},
/**
* 선택된 Radio 값 가져오기
* @param radioNm
*/
getRadioVal : function(radioNm) {
return $(":input:radio[name="+radioNm+"]:checked").val();
},
// Layer 닫기 관련 이벤트 함수
divLayerOn : function(wrapperClass,layerClass,closeClass) {
// Layer close Event func
$('.'+closeClass).on("click", function(e){
e.preventDefault();
$(this).parents('.'+layerClass).addClass('hide');
$('.'+wrapperClass).addClass('hide');
});
// Layer 음영 Click Event Func
$('.'+wrapperClass).on("click", function(){
$(this).addClass('hide');
$('.'+layerClass).addClass('hide');
});
exsoft.util.layout.lyrPopupWindowResize($("."+layerClass));
$(window).resize(function(){
exsoft.util.layout.lyrPopupWindowResize($("."+layerClass));
});
}
}; // exsoft.util.layout end...
exsoft.util.layout.prototype = {
}; // exsoft.util.layout.prototype end...
/***********************************************
* ajax
**********************************************/
/**
* ajax 관련 util
*
* @namespace : exsoft.util.ajax
*
*/
exsoft.util.ajax = {
/**
* ajax form 공통처리 함수
* @param formId - 서버에 넘겨질 form id
* @param urls - 서버 호출 urls
* @param param - return 처리시 분기할 파라미터값
* @param divId - buttonId
*/
ajaxFunction : function(formId,urls,param) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: $("#"+formId).serialize(),
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
returnAjaxFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* ajax multi form 공통처리 함수
* @param formId1 - 서버에 넘겨질 form id1
* @param formId2 - 서버에 넘겨질 form id2
* @param urls - 서버 호출 urls
* @param param - return 처리시 분기할 파라미터값
*/
ajaxMuitiFrmFunction : function(formId1,formId2,urls,param) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: $("#"+formId1+",#"+formId2).serialize(),
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
returnAjaxMuitiFrmFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* ajax json 공통처리함수
* @param jsonObject - 서버에 넘겨질 data 값 json 형식으로 key:value
* @param urls - 서버 호출 urls
* @param param - return 처리시 분기할 파라미터값
* @usage var jsonObject = { "id":"outsider", "sex":"male" }; 형태로 파라미터를 생성해서 처리해준다.
*/
ajaxDataFunction : function(jsonObject,urls,param) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: jsonObject,
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
returnAjaxDataFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* ajax json 공통처리함수
*/
ajaxFunctionNoLodingWithCallback : function(formId,urls,param,callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: $("#"+formId).serialize(),
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
// 결과값을 받아서 처리할 함수
callbackFunction(data,param);
},
beforeSend:function() {
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* ajax json 공통처리함수
* @param jsonObject - 서버에 넘겨질 data 값 json 형식으로 key:value
* @param urls
* @param param
* @param callbackFunction
*/
ajaxDataFunctionWithCallback : function(jsonObject, urls, param, callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: jsonObject,
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
callbackFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* ajax json 공통처리함수 - 로딩메세지 없음
* @param jsonObject
* @param urls
* @param param
* @param callbackFunction
*/
ajaxDataFunctionNoLodingWithCallback : function(jsonObject, urls, param, callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: jsonObject,
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
callbackFunction(data,param);
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
*
* ajax form callback 공통처리 함수
* @param formId - 서버에 넘겨질 form id
* @param urls - 서버 호출 urls
* @param param - return 처리시 분기할 파라미터값(사용하지 않음)
* @callbackFunction - return 처리함수
*/
ajaxFunctionWithCallback : function(formId,urls,param,callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: $("#"+formId).serialize(),
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
callbackFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorChk(response);
}
});
},
/**
* 새창에 Ajax Call Function
* @param formId
* @param urls
* @param callbackFunction
*/
ajaxPopFunctionWithCallback : function(formId,urls,callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: $("#"+formId).serialize(),
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
callbackFunction(data,param);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorPopChk(response);
}
});
},
/**
* ajax json 공통처리함수
* @param jsonObject - 서버에 넘겨질 data 값 json 형식으로 key:value
* @param urls
* @param callbackFunction
*/
ajaxPopDataFunctionWithCallback : function(jsonObject, urls, callbackFunction) {
jQuery.ajax({
url: urls,
global: false,
type: "POST",
data: jsonObject,
dataType: "json",
async:true,
cache:false,
clearForm:true,
resetForm:true,
success: function(data){
exsoft.util.ajax.loadingHide();
// 결과값을 받아서 처리할 함수
callbackFunction(data);
},
beforeSend:function() {
exsoft.util.ajax.loadingShow();
},
error: function(response, textStatus, errorThrown){
exsoft.util.error.isErrorPopChk(response);
}
});
},
/**
* Ajax Loading 메세지 보이지
*/
loadingShow : function() {
$(".loading_wrapper").removeClass("hide");
$(".loading").removeClass("hide");
},
/**
* Ajax Loading 메세지 숨기기
*/
loadingHide : function() {
$(".loading_wrapper").addClass("hide");
$(".loading").addClass("hide");
},
}; // exsoft.util.ajax end...
exsoft.util.ajax.prototype = {
}; // exsoft.util.ajax.prototype end...
/***********************************************
* check
**********************************************/
/**
* check 관련 util
*
* @namespace : exsoft.util.error
*
*/
exsoft.util.check = {
////////// 유효성 체크 ////////
/**
* webfolder 파일/폴더명 체크
* @param str
* @returns {Boolean}
*/
webfolderCheck : function(str) {
var word = /[\\/:*?\"<>|]/g;
if(word.test(str)) {
return false;
} else {
return true;
}
},
/**
* 폴더경로 체크
* @param str
* @returns {Boolean}
*/
folderPathCheck : function(str) {
var word = /[\*?\"<>|]/g;
if(word.test(str)) {
return false;
} else {
return true;
}
},
/**
* 이메일 유효성 체크
* @param str
* @returns {Boolean}
*/
emailCheck : function(str) {
var email = /^[-_.0-9A-Za-z]+@[0-9A-Za-z]+[-0-9A-Za-z]*[0-9A-Za-z]+\.[A-Za-z]+(\.[A-Za-z]+)*$/i;
if(email.test(str)) {
return true;
} else {
return false;
}
},
/**
* 핸드폰 번호 유효성 체크
* @param str
* @returns {Boolean}
*/
phoneCheck : function(str) {
var phone = /^\d{2,3}-\d{3,4}-\d{4}$/;
if (phone.test(str)) {
return true;
} else {
return false;
}
},
/**
* 사용자 아이디 유효성 체크
* @param str
* @returns {Boolean}
*/
userIdCheck : function(str) {
var word = /^[a-z]{1}[a-z0-9._%-]*$/;
if(word.test(str)) {
return true;
} else {
return false;
}
},
/**
* 검색어 필터 유효성 체크
* @param str
*/
searchWordCheck : function(str) {
var filter = "!@#$%^&*._-\\\,'`~/=+;:<>/?(){}[]|";
var temp_reg = "";
var temp_qry = str;
var lengthCnt = filter.length; // 속도를 위해 for문에 선언 안함
for (var i = 0; i < lengthCnt; i++) {
temp_reg = filter.charAt(i);
temp_qry = temp_qry.split(temp_reg).join(" ");
}
if (str.length > 0 && temp_qry.trim().length <= 0) {
return false;
}else {
return true;
}
},
/**
* 통계 - 검색날짜 유효성 체크
* @param startDt - 검색시작일
* @param endDt - 검색종료일
*/
searchValid : function(startDt,endDt) {
if(startDt.length != 10 || endDt.length != 10) {
jAlert('검색기간을 정확히 입력하세요',"확인",0);
return false;
}
var startDate = startDt.split("-");
var endDate = endDt.split("-");
var sDate = new Date(startDate[0], startDate[1], startDate[2]).valueOf();
var eDate = new Date(endDate[0], endDate[1], endDate[2]).valueOf();
if( sDate > eDate ) {
jAlert("시작일이 종료일보다 클 수 없습니다.","확인",0);
return false;
}
if(exsoft.util.date.diffDays(sDate,eDate) > 365) {
jAlert("최대 검색기간은 1년을 초과할 수 없습니다.","확인",0);
return false;
}
return true;
},
/**
* url 이 유효한지 체크한다.
* @param urls
* @returns {Boolean}
*/
isValidUrl : function(urls) {
var chkExp = /http:\/\/([\w\-]+\.)+/g;
if (chkExp.test(urls)) {
return true;
} else {
return false;
}
},
	/**
	 * Check for NON-Hangul characters.
	 * NOTE(review): despite the original "한글체크" (Hangul check) label,
	 * the character class is NEGATED — this returns true when obj.value
	 * contains at least one character outside the Hangul range. Also note
	 * the literal "(" and ")" inside the class are part of the set, so
	 * parentheses in the value do NOT trigger a true result. Confirm
	 * callers expect "contains non-Korean" semantics.
	 * @param obj - input element whose .value is tested
	 * @returns {Boolean} true when a non-Hangul character is present
	 */
	hanWordCheck : function(obj) {
		var pattern = /[^(ㄱ-힣)]/; // negated Hangul class (plus literal parens)
		if (pattern.test(obj.value)) {
			return true;
		}else {
			return false;
		}
	},
/**
* 문서유형 속성리스트에서 중복된 속성값이 입력되었는지 체크한다.
* @param jsonArr
* @param param
* @returns {Boolean} :: true - 중복값 존재 false - OK
*/
inputArrayValid : function(jsonArr,param) {
var results = new Array();
for (var j=0; j<jsonArr.length; j++) {
var key = jsonArr[j][param];
if (!results[key]) results[key] = 1;
else results[key] = results[key] + 1;
}
for (var k in results) {
if(results[k] > 1) {
return true;
}
}
return false;
},
}; // exsoft.util.check end...
exsoft.util.check.prototype = {
}; // exsoft.util.check.prototype end...
/***********************************************
* date
**********************************************/
/**
* date 관련 util
*
* @namespace : exsoft.util.date
*
*/
exsoft.util.date = {
////////// Date 관련 //////////
/**
* 날짜 계산하기
* @param type - d:일 / m:월
* @param addVal - 변경한 일/월
* @param dateType - 날짜
* @param delimiter - 구분자
* @usage 2008-01-01 3일 => exsoft.util.date.addDate("d",3,"2008-01-01","-")
* @usage 2008-01-01 3개월 => exsoft.util.date.addDate("m",,3,"2008-01-01","-")
*/
addDate : function(type,addVal,dateType,delimiter) {
var yyyy,mm,dd;
var cDate,oDate;
var cYear,cMonth,cDay;
if(delimiter != "") {
dateType = dateType.replace(eval("/\\" + delimiter + "/g"),"");
}
yyyy = dateType.substr(0,4);
mm = dateType.substr(4,2);
dd = dateType.substr(6,2);
if(type == "yyyy") {
yyyy = (yyyy*1) + (addVal*1);
}else if(type == "m") {
mm = (mm*1) + (addVal*1);
}else if(type == "d") {
dd = (dd*1) + (addVal*1);
}
cDate = new Date(yyyy,mm-1,dd);
cYear = cDate.getFullYear();
cMonth = cDate.getMonth()+1;
cDay = cDate.getDate();
cMonth = cMonth < 10 ? "0" + cMonth : cMonth;
cDay = cDay < 10 ? "0" + cDay : cDay;
if(delimiter != "") {
return cYear + delimiter + cMonth + delimiter + cDay;
}else {
return cYear + cMonth + cDay;
}
},
/**
* 두 입력날짜의 차이 :: Javascript Date형
* @param start
* @param end
* @returns {Number}
*/
diffDays : function(start,end) {
return (end-start)/(1000*60*60*24)
},
/**
* DatePicker Today 구하기
*/
todayStr : function() {
var toDate = "";
var today = new Date();
var year = today.getFullYear(); // FF Bug Patch
var month = today.getMonth()+1;
if(month < 10) {
month = "0"+month;
}
var day = today.getDate();
if(day < 10) {
day = "0"+day;
}
toDate = year+"-"+month+"-"+day;
return toDate;
},
/**
* 통계 및 검색화면 날짜 선택 처리
* @param obj
*/
changeDate : function(value, sdateId, edateId) {
var obj = {};
obj.value = value;
obj.startId = sdateId;
obj.endId = edateId;
exsoft.util.date.setDate(obj);
},
/**
* 날짜 변경 처리
* @param obj
*/
setDate : function(obj) {
var toDate = exsoft.util.date.todayStr();
switch(obj.value) {
case "one_month": // 1month
$(exsoft.util.common.getIdFormat(obj.startId)).val(exsoft.util.date.addDate("m",-1,toDate,"-"));
$(exsoft.util.common.getIdFormat(obj.endId)).val(toDate);
break;
case "three_month": // 3month
$(exsoft.util.common.getIdFormat(obj.startId)).val(exsoft.util.date.addDate("m",-3,toDate,"-"));
$(exsoft.util.common.getIdFormat(obj.endId)).val(toDate);
break;
case "half_year": // 6month
$(exsoft.util.common.getIdFormat(obj.startId)).val(exsoft.util.date.addDate("m",-6,toDate,"-"));
$(exsoft.util.common.getIdFormat(obj.endId)).val(toDate);
break;
case "one_year": // 1year
$(exsoft.util.common.getIdFormat(obj.startId)).val(exsoft.util.date.addDate("m",-12,toDate,"-"));
$(exsoft.util.common.getIdFormat(obj.endId)).val(toDate);
break;
default : // 직접입력 OR 전체
$(exsoft.util.common.getIdFormat(obj.startId)).val('');
$(exsoft.util.common.getIdFormat(obj.endId)).val('');
break;
}
},
/**
* 년도 선택박스 설정하기
*/
selectYearBox : function(minYear,maxYear,divIds) {
var buffer = "";
for(var i=minYear;i<maxYear;i++) {
$("#"+divIds).append("<option value='"+i+"'>"+i+"년</option>");
}
var today = new Date();
$("#"+divIds).val(today.getFullYear());
},
}; // exsoft.util.date end...
exsoft.util.date.prototype = {
}; // exsoft.util.date.prototype end...
exsoft.util.grid = {
/**
* 페이지번호 입력창 숫자 입력 및 캡션 보이기여부
* @param isCaption - 캡션 사용여부
* @param skinId - 스킨클래스명
*/
gridInputInit : function(isCaption,skinId) {
//$('.ui-pg-input').numeric();
//$('.ui-pg-input').css("ime-mode","disabled");
if(isCaption) {
$(".ui-jqgrid-titlebar").hide();
}
// 레이아웃 공통 스킨처리 :: 기본테마=Blue
//if (skinId == undefined) { skinId = "grid_liteGray"; }
// Jqgrid 공통 CSS 변경처리
/*
$(".ui-jqgrid-htable" ).removeClass( "ui-jqgrid-htable" ).addClass( skinId );
$(".ui-jqgrid-btable" ).addClass( skinId );
$('.ui-jqgrid .ui-jqgrid-bdiv').css({ 'overflow-y': 'auto','overflow-x':'hidden' });
$('.ui-pg-input').css({'height':'19px','margin-bottom':'0px','margin-left':'0px','width':'35px'}); // 페이징 숫자 위치 조정
*/
//$('.ui-jqgrid .ui-jqgrid-bdiv').css({ 'overflow-y': 'auto','overflow-x':'hidden' });
//$('.ui-jqgrid-sortable').css({'height':'19px'}); // Table 헤더 정렬 변경
},
/**
* 그리드 Title 바 숨기기
* @param gridIds
*/
gridTitleBarHide : function(gridIds) {
$("#gview_"+gridIds+" > .ui-jqgrid-titlebar").hide();
},
/**
* 그리드 No Msg 초기화
* @param gridIds
*/
gridNoDataMsgInit : function(gridIds) {
$("#"+gridIds+"_nodata").remove();
},
/**
* 그리드 Pager 숨기기
* @param gridIds
*/
gridPagerHide : function(gridIds) {
$(".ui-pg-table",$('#'+gridIds).getGridParam('pager')).hide();
},
/**
* 그리드 레코드 없음 표시 공통처리
* @param divIds
* @param layerId
*/
gridNoRecords : function(divIds,layerId) {
$("#emptyPage").show();
$("#dataPage").hide();
exsoft.util.grid.gridPagerViewHide(divIds);
exsoft.util.grid.gridNoDataMsg(divIds,layerId);
exsoft.util.grid.gridPagerHide(divIds);
},
/**
* 그리드 레코드 수 보기 숨김
* @param gridIds
*/
gridPagerViewHide : function(gridIds) {
$(".ui-paging-info",$('#'+gridIds).getGridParam('pager')).html('');
},
/**
* 그리드 Pager 보이기
* @param gridIds
*/
gridPagerShow : function(gridIds) {
$(".ui-pg-table",$('#'+gridIds).getGridParam('pager')).show();
},
/**
* 그리드 레코드 있음 표시 공통처리
* @param divIds
*/
gridViewRecords : function(divIds) {
$("#emptyPage").hide();
$("#dataPage").show();
exsoft.util.grid.gridPagerViewHide(divIds);
exsoft.util.grid.gridPagerShow(divIds);
},
/**
* 그리드 내 페이징 처리
*
* @param gridId - 그리드 id
* @param newValue - 사용자가 입력한 페이지값
* @param pgButton - 페이지 버튼 객체
*/
onPager : function(gridId,newValue,pgButton) {
var requestedPage = $("#"+gridId).getGridParam("page");
var lastPage = $("#"+gridId).getGridParam("lastpage");
var rows = $('#'+gridId).closest('.ui-jqgrid').find('.ui-pg-selbox').val();
var nPage = 0;
if (parseInt(requestedPage) > parseInt(lastPage) || parseInt(newValue) > parseInt(lastPage) ) {
$("#"+gridId).setGridParam({page:lastPage,postData:{is_search:'false'}}).trigger("reloadGrid");
}
if (pgButton.indexOf("next") >= 0)
nPage = ++requestedPage;
else if (pgButton.indexOf("prev") >= 0)
nPage = --requestedPage;
else if (pgButton.indexOf("last") >= 0)
nPage = $("#"+gridId).getGridParam('lastpage');
else if (pgButton.indexOf("first") >= 0)
nPage = 1;
else if (pgButton.indexOf("user") >= 0)
nPage = newValue;
// 페이지 개수 재설정 및 reload
$("#"+gridId).setGridParam({rowNum:rows});
$("#"+gridId).setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
/**
* Grid 컬럼정렬 처리
* @param gridIds 그리드ID
* @param headerData 컬럼 JsonData
* @param align 정렬기준
*/
gridColumHeader : function(gridIds,headerData,align) {
    // headerData is a JSON string of {columnName: label}; each entry relabels
    // and realigns the matching jqGrid column header.
    var result = $.parseJSON(headerData);
    $.each(result, function(key, value) {
        $("#"+gridIds).jqGrid('setLabel',key,value,{'text-align':align});
    });
},
/**
 * Number of row ids currently held by the grid (used in edit mode).
 * @param gridIds grid element id (without '#')
 * @returns row count
 */
gridEditRowCnt : function(gridIds) {
    var rowIDs = $("#"+gridIds).jqGrid('getDataIDs');
    return rowIDs.length;
},
/**
 * Common page-size choices shared by list views.
 * @returns array of selectable page sizes
 */
listArraySize : function() {
    var listSize = [5,10,15,20,30,50];
    return listSize;
},
/**
 * Auto-resizes the grid to track its container whenever the window resizes.
 * Fires once immediately via trigger('resize').
 * @param gridId grid element id
 * @param targetId container layer id the grid should track
 * @param resizeHeight height offset to subtract (typically 72)
 * @param resizeWidth width offset to subtract (typically 72); when omitted, 2px is used
 */
gridResize : function(gridId,targetId,resizeHeight,resizeWidth) {
    $(window).bind('resize', function() {
        var width = jQuery("#"+targetId).width();
        var height = jQuery("#"+targetId).height();
        // Size adjustment to follow CSS changes
        if (resizeWidth == undefined) {
            $("#"+gridId).setGridWidth(width-2);
        }else {
            $("#"+gridId).setGridWidth(width-resizeWidth);
        }
        $("#"+gridId).setGridHeight(height-resizeHeight);
    }).trigger('resize');
},
/**
 * One-shot grid resize for the left-menu expand/collapse event.
 * @param gridId grid element id
 * @param targetId container layer id
 * @param resizeHeight height offset to subtract (typically 72)
 * @param resizeWidth width offset to subtract (typically 72)
 */
gridIsLeftMenuResize : function(gridId,targetId,resizeHeight,resizeWidth){
    var width = jQuery("#"+targetId).width();
    var height = jQuery("#"+targetId).height();
    // Size adjustment to follow CSS changes
    $("#"+gridId).setGridWidth(width-resizeWidth);
    $("#"+gridId).setGridHeight(height-resizeHeight);
},
/**
 * Clears every row checkbox of the grid.
 * @param gridIds grid element id
 */
gridCheckBoxInit : function(gridIds) {
    $('#'+gridIds).jqGrid('resetSelection');
},
/**
 * Deletes the checked rows of the grid.
 * @param gridIds grid element id
 * @param noDeleteRowId row id that must NOT be deleted
 * @param msg exclusion message (currently unused by the implementation)
 * @param isDeleteRow when true, the noDeleteRowId row may be deleted as well
 * @returns {Boolean} false when nothing was selected
 */
gridDeleteRow : function(gridIds, noDeleteRowId, msg, isDeleteRow) {
    if(!this.gridSelectCheck(gridIds)) {
        jAlert("삭제할 항목을 선택하세요.");
        return false;
    }
    // Iterate in reverse: deleting selected rows forwards corrupts the selection array
    try {
        var selectedRowIds = $("#"+gridIds).getGridParam('selarrrow');
        for(var i=selectedRowIds.length-1; i>=0; i--){
            if( !isDeleteRow && selectedRowIds[i] == noDeleteRowId){
                $('#'+gridIds).jqGrid('setSelection',selectedRowIds[i],false); // untick the checkbox
            }
            else {
                $("#"+gridIds).delRowData(selectedRowIds[i]);
            }
        }
    }finally{
        selectedRowIds = null;
    }
},
/**
 * Reloads the grid from the given URL.
 * @param divId grid element id
 * @param urls data URL
 */
gridRefresh : function(divId,urls) {
    $("#"+divId).jqGrid('setGridParam', {
        url: urls,
        datatype: "json"
    }).trigger("reloadGrid");
},
/**
 * jqGrid excel download.
 * @param divIds grid element id
 * @param urls export URL
 */
excelDown : function(divIds,urls) {
    $("#"+divIds).jqGrid('excelExport',{tag:'excel',url:urls});
},
/**
 * Reloads the grid, merging postData into the existing request parameters.
 * @param divId grid element id
 * @param urls data URL
 * @param postData JSON data to send
 */
gridPostDataRefresh : function(divId,urls,postData) {
    // Existing postData keys that do not collide with the new postData stay unchanged.
    $(exsoft.util.common.getIdFormat(divId)).jqGrid('setGridParam', {
        url: urls,
        datatype: "json",
        postData: postData
    }).trigger("reloadGrid");
},
/**
 * Reloads the grid after clearing ALL previous postData.
 * @param divId grid element id
 * @param urls data URL
 * @param postData JSON data to send
 */
gridPostDataInitRefresh : function(divId,urls,postData) {
    // Reset the previous postData entirely before applying the new one
    $(exsoft.util.common.getIdFormat(divId)).setGridParam ({postData: null});
    $(exsoft.util.common.getIdFormat(divId)).jqGrid('setGridParam', {
        url: urls,
        datatype: "json",
        postData: postData
    }).trigger("reloadGrid");
},
/**
 * Shows the grid's "no data" message (its emptyDataText param) below the grid.
 * @param gridIds grid element id
 * @param classId no_data: main page / nolayer_data: layer popup
 */
gridNoDataMsg : function(gridIds,classId) {
    var noMessage = "<div id='"+gridIds+"_nodata' class='"+classId+"'>"+$('#'+gridIds).getGridParam('emptyDataText')+"</div>";
    $(exsoft.util.common.getIdFormat(gridIds)).after(noMessage);
},
/**
* JQgrid 에서 File용량 계산하기
* @param bytes
* @returns {String}
*/
bytes2Size : function(bytes) {
var sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
if (bytes == 0) return '0B';
else if(bytes == -1) return '-1';
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024)));
var size = (i<2) ? Math.round((bytes / Math.pow(1024, i))) : Math.round((bytes / Math.pow(1024, i)) * 100)/100;
return size + ' ' + sizes[i];
},
/**
 * Renders jqGrid paging controls (phase-1 redesign markup).
 * @param gridPagerId pager element selector (passed to $() as-is)
 * @param data response object carrying pre-rendered paging HTML in data.pagingInfo
 * @returns {jQuery} the pager element
 */
gridPager : function(gridPagerId,data){
    $(gridPagerId).empty();
    return $(gridPagerId).append(data.pagingInfo.strLinkPagePrev)
        .append(data.pagingInfo.strLinkPageList)
        .append(data.pagingInfo.strLinkPageNext);
},
/**
* AclItem 기본권한 영문 -> 한글
*/
getAclItemTitle : function(en_title) {
switch(en_title) {
case "DELETE" : return "삭제"; break;
case "UPDATE" : return "수정"; break;
case "READ" : return "조회"; break;
case "BROWSE" : return "목록"; break;
case "NONE" : return "권한없음"; break;
default : return "권한없음"; break;
}
},
/**
 * Whether at least one grid row checkbox is ticked.
 * @param gridIds grid element id
 * @returns {Boolean} true when one or more rows are selected
 */
gridSelectCheck : function(gridIds) {
    var list = $("#"+gridIds).getGridParam('selarrrow');
    if(list.length == 0) {
        return false;
    }
    return true;
},
/**
 * Compares a specific cell of a given row against a value.
 * @param gridIds grid element id
 * @param row_id id of the row to inspect
 * @param row_name cell (column) name within that row
 * @param compare_value value to compare the cell against
 * @returns {Boolean} true when the cell equals compare_value
 */
gridIsRowDataExist : function(gridIds, row_id, row_name, compare_value){
    // Note: row_name is a variable, so bracket access is used instead of chaining
    if( $(exsoft.util.common.getIdFormat(gridIds)).getRowData(row_id)[row_name] == compare_value)
        return true;
    else
        return false;
},
/**
 * Collects the given column's value from every CHECKED row.
 * @param gridIds grid element id
 * @param param column name
 * @returns {String} comma-separated values (no trailing comma)
 */
gridSelectData : function(gridIds,param) {
    var result = "";
    var list = $("#"+gridIds).getGridParam('selarrrow');
    for (var j = 0; j < list.length; j++) {
        var rowdata = $("#"+gridIds).getRowData(list[j]);
        result += rowdata[param];
        // Skip the separator after the last element
        if ((j + 1) != list.length) {
            result += ",";
        }
    }
    return result;
},
/**
 * Whether the given row is currently in inline-edit mode.
 * @param gridIds grid element id
 * @param rowId row id
 * @returns {String} "1": edited, "0": not edited
 */
gridEditMode : function(gridIds,rowId) {
    var edited = "0";
    var ind = $("#"+gridIds).getInd(rowId,true);
    if(ind != false){
        edited = $(ind).attr("editable");
    }
    return edited;
},
/**
 * "Select all" checkbox in a tree-grid header: (un)ticks every enabled
 * checkbox of the given name to match the header checkbox state.
 * @param e click event (falls back to the global event object)
 * @param id id of the header checkbox
 * @param name name attribute shared by the row checkboxes
 */
gridAllCheck : function(e,id,name) {
    // grid colum header exception
    e = e || event;
    e.stopPropagation ? e.stopPropagation() : e.cancelBubble = true;
    var check = $('#'+id).is(':checked');
    $('input[name='+ name +']').each(function() {
        if($(this).attr("disabled") != "disabled") {
            this.checked = check;
        }
    });
},
// Firefox variant of gridAllCheck: same behaviour, no event argument.
gridAllCheckFF : function(id,name) {
    var check = $('#'+id).is(':checked');
    $('input[name='+ name +']').each(function() {
        if($(this).attr("disabled") != "disabled") {
            this.checked = check;
        }
    });
},
/**
* 접근권한 라디오 선택 처리.
* @param level - 메뉴레벨
* @param rowId - 메뉴아이디
* @param cellValue - 접근권한(전사/하위부서포함/소속부서)
*/
radioFormatter : function(level,rowId,cellValue,type) {
var disableOpt = "";
var returnVal = "";
if(level == 0){ disableOpt = "disabled"; }
switch(type) {
case 'ALL' :
if(cellValue == "ALL") {
returnVal = '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="ALL" name="radio_' + rowId + '" checked />';
}else {
returnVal = '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="ALL" name="radio_' + rowId + '"/>';
}
break;
case 'GROUP' :
if(cellValue == "GROUP") {
returnVal = '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="GROUP" name="radio_' + rowId + '" checked />';
}else {
returnVal = '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="GROUP" name="radio_' + rowId + '"/>';
}
break;
case 'TEAM' :
if(cellValue == "TEAM") {
return '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="TEAM" name="radio_' + rowId + '" checked />';
}else {
return '<input type="radio" ' + disableOpt + ' onclick="javascript:exsoft.util.grid.checkBoxChanged(\''+ rowId +'\',event);" value="TEAM" name="radio_' + rowId + '"/>';
}
break;
}
return returnVal;
},
// Radio selection handler: ticks every 'idx' row checkbox whose value
// contains the given menu code.
checkBoxChanged : function(menu_cd,e) {
    e = e || event;
    e.stopPropagation ? e.stopPropagation() : e.cancelBubble = true;
    $("input:checkbox[name='idx']").each(function() {
        var str = $(this).val();
        if(str.indexOf(menu_cd) != -1) {
            $(this).prop("checked",true);
        }
    });
},
/**
 * Un-ticks the tree-grid "select all" checkbox (event-aware variant).
 * @param e click event (falls back to the global event object)
 * @param checkBox id of the header checkbox
 */
treeGridChckAllRelease : function(e,checkBox) {
    e = e || event;
    e.stopPropagation ? e.stopPropagation() : e.cancelBubble = true;
    $("#"+checkBox).attr('checked',false);
},
/**
 * Un-ticks the tree-grid "select all" checkbox (Firefox variant, no event arg).
 * @param checkBox id of the header checkbox
 */
treeGridChckAllReleaseFF : function(checkBox) {
    $("#"+checkBox).attr('checked',false);
},
// Checkbox click helper: only stops event bubbling (browser bug workaround).
checkBox : function(e) {
    e = e||event;/* get IE event ( not passed ) */
    e.stopPropagation? e.stopPropagation() : e.cancelBubble = true;
},
/**
 * Collects the given column's value from EVERY row of the grid.
 * @param gridIds grid element id
 * @param param column name
 * @returns {String} comma-separated values (no trailing comma)
 */
gridSelectDataAllRow : function(gridIds,param) {
    var result = "";
    var list = $("#"+gridIds).jqGrid('getDataIDs');
    for (var j = 0; j < list.length; j++) {
        var rowdata = $("#"+gridIds).getRowData(list[j]);
        result += rowdata[param];
        // Skip the separator after the last element
        if ((j + 1) != list.length) {
            result += ",";
        }
    }
    return result;
},
/**
 * Common callback for selectAccessorWindow:
 * appends the accessors picked in the popup to the grid, skipping rows whose
 * accessor_id already exists, and opens each new row in inline-edit mode.
 * @param gridIds grid element id
 * @param aclItemRowList rows chosen in the accessor-selection popup
 */
gridSetAclItemAddCallback : function(gridIds, aclItemRowList) {
    var aclItemIdList = exsoft.util.grid.gridSelectArrayDataAllRow(gridIds, "accessor_id", "accessor_id");
    // Append the selected accessor rows
    $(aclItemRowList).each(function() {
        // Duplicate filtering
        var isDuplicate = false;
        var row = this;
        $(aclItemIdList).each(function(i) {
            if (this.accessor_id == row.accessor_id)
                isDuplicate = true;
        })
        if (!isDuplicate) {
            $("#"+gridIds).jqGrid("addRowData", this.accessor_id, this);
            $('#'+gridIds).editRow(this.accessor_id,true);
        }
    });
},
/**
 * Collects one column from every row into an array of {key: value} objects,
 * dropping falsy values.
 * @param gridIds grid element id
 * @param param source column name
 * @param key property name used in the result objects
 * @returns {Array}
 * NOTE(review): the finally block nulls the locals AFTER the return value is
 * captured, so the caller still receives the array — this memory-release
 * idiom is used throughout the file.
 */
gridSelectArrayDataAllRow : function(gridIds,param,key) {
    var jsonArr = [];
    var jsonArrIndex = 0;
    var list = $("#"+gridIds).jqGrid('getDataIDs');
    for (var j = 0; j < list.length; j++) {
        var result = {};
        var rowdata = $("#"+gridIds).getRowData(list[j]);
        result[key] = rowdata[param];
        if(result[key]) {
            jsonArr[jsonArrIndex] = result;
            jsonArrIndex++;
        }
    }
    try {
        return jsonArr;
    }finally{
        result = null;
        rowdata = null;
        jsonArr = null;
    }
},
}; // exsoft.util.grid end...
// NOTE(review): exsoft.util.grid is a plain object literal, so assigning a
// .prototype property has no effect on instances; kept as a file-wide convention.
exsoft.util.grid.prototype = {
}; // exsoft.util.grid.prototype end...
/***********************************************
* filter
**********************************************/
/**
* filter 관련 util
*
* @namespace : exsoft.util.filter
*
*/
exsoft.util.filter = {
    ////////// Filter helpers //////////
    /**
     * Key filter for input boxes.
     * @param filter regex character class, e.g. [0-9], [a-z], [A-Z]; only matching keys pass
     * @returns {Boolean} false when the pressed key does not match the filter
     * NOTE(review): relies on the implicit global `event` (keyCode) — callers
     * pass no event argument, so the signature cannot be changed compatibly;
     * confirm this is only wired to inline on* handlers.
     */
    inputBoxFilter : function(filter) {
        if(filter) {
            var sKey = String.fromCharCode(event.keyCode);
            var re = new RegExp(filter);
            if(!re.test(sKey)) return false;
        }
        return true;
    },
    /**
     * Restricts elements with class 'numline' to digits only and disables IME.
     * The keyup/focusout scrubbers cover browsers other than IE.
     */
    numLine : function() {
        $('.numline').numeric();
        $('.numline').css("ime-mode","disabled");
        // Fallback scrubbing for non-IE browsers
        $('.numline').keyup(function(event) {
            this.value=this.value.replace(/[^0-9]/g,'');
        });
        $('.numline').focusout(function(event) {
            this.value=this.value.replace(/[^0-9]/g,'');
        });
    },
    /**
     * Enforces maxlength on all inputs, counting multi-byte characters
     * (e.g. Hangul) as 2 bytes and ASCII as 1 byte; truncates and refocuses
     * the field when the limit is exceeded.
     */
    maxNumber : function() {
        $(":input").keyup(function(){
            var inputLength = 0;
            var strTemp = "";
            var input = 0;
            // Byte-aware length: multi-byte(2) / ASCII(1); hoisted for speed
            var lengthCnt = $(this).val().length;
            for(var i=0;i<lengthCnt;i++) {
                if (escape($(this).val().charAt(i)).length >= 4) {
                    inputLength += 2;
                }else if (escape($(this).val().charAt(i)) != "%0D") {
                    inputLength++;
                }
                // Compare against the element's maxlength attribute
                if (inputLength > $(this).attr('maxlength')) {
                    strTemp = $(this).val().substr(0,input);
                    break;
                }
                input++;
            }
            if(strTemp.length > 0) {
                $(this).prop('value',strTemp);
                $(this).focus();
            }
        });
    },
    /**
     * Allows only digit keys (top row and numpad) plus backspace, delete and
     * left/right arrows.
     * @param e keypress event
     * @usage onKeyPress="exsoft.util.filter.numInput(event);"
     */
    numInput : function(e) {
        // BUGFIX: previously assigned to the implicit global `event` and
        // ignored the `e` parameter, which throws in browsers without a
        // global window.event object.
        e = e || window.event;
        var keyID = (e.which) ? e.which : e.keyCode;
        if( ( keyID >=48 && keyID <= 57 ) || ( keyID >=96 && keyID <= 105 )
                || keyID == 8 || keyID == 46 || keyID == 37 || keyID == 39 ){
            return;
        }else{
            return false;
        }
    },
}; // exsoft.util.filter end...
exsoft.util.filter.prototype = {
}; // exsoft.util.filter.prototype end...
/***********************************************
* error
**********************************************/
/**
* error 관련 util
*
* @namespace : exsoft.util.error
*
*/
exsoft.util.error = {
    ////////// Error Handling //////////
    /**
     * Parses an XML string into an XML document
     * (ActiveX on legacy IE, DOMParser elsewhere).
     * @param text XML source string
     * @returns XML document object
     */
    stringToXml : function(text) {
        var doc = "";
        if (window.ActiveXObject){
            doc=new ActiveXObject('Microsoft.XMLDOM');
            doc.async='false';
            doc.loadXML(text);
        } else {
            var parser=new DOMParser();
            doc=parser.parseFromString(text,'text/xml');
        }
        return doc;
    },
    /**
     * Session/error handler for AJAX failures in the MAIN window.
     * Alerts the server MESSAGE; on session expiry ("common.session.error")
     * also redirects the top window to "/".
     * @param response XHR whose responseText carries the server's XML error envelope
     * @returns {Boolean} false when an error envelope was found (undefined otherwise)
     */
    isErrorChk : function(response){
        var xmlDoc = exsoft.util.error.stringToXml(response.responseText);
        if(xmlDoc != null && xmlDoc.getElementsByTagName("RESPONSES").length != 0) {
            var kk = xmlDoc.getElementsByTagName("RESULT")[0].childNodes[0].nodeValue;
            if(kk == "common.session.error"){
                alert(xmlDoc.getElementsByTagName("MESSAGE")[0].childNodes[0].nodeValue);
                top.location.href = "/";
                return false;
            }else {
                alert(xmlDoc.getElementsByTagName("MESSAGE")[0].childNodes[0].nodeValue);
                return false;
            }
        }
    },
    /**
     * POPUP variant of isErrorChk: on session expiry closes the popup window
     * instead of redirecting the top window.
     * @param response XHR whose responseText carries the server's XML error envelope
     * @returns {Boolean} false when an error envelope was found (undefined otherwise)
     */
    isErrorPopChk : function(response){
        var xmlDoc = exsoft.util.error.stringToXml(response.responseText);
        if(xmlDoc != null && xmlDoc.getElementsByTagName("RESPONSES").length != 0) {
            var kk = xmlDoc.getElementsByTagName("RESULT")[0].childNodes[0].nodeValue;
            if(kk == "common.session.error"){
                alert(xmlDoc.getElementsByTagName("MESSAGE")[0].childNodes[0].nodeValue);
                exsoft.util.layout.windowClose();
                return false;
            }else {
                alert(xmlDoc.getElementsByTagName("MESSAGE")[0].childNodes[0].nodeValue);
                return false;
            }
        }
    },
}; // exsoft.util.error end...
exsoft.util.error.prototype = {
}; // exsoft.util.error.prototype end...
/***********************************************
* table
**********************************************/
/**
* table 공통 함수
*
* @namespace : exsoft.util.table
*
*/
exsoft.util.table = {
    /**
     * Maps a default ACL code to its Korean display label
     * (table variant: "NONE" renders as "없음"; unknown codes return undefined).
     */
    getAclItemTitle : function(en_title) {
        switch(en_title) {
            case "DELETE" : return "삭제";
            case "UPDATE" : return "수정";
            case "READ" : return "조회";
            case "BROWSE" : return "목록";
            case "NONE" : return "없음";
        }
    },
    // ACL code -> suffix of the prev_{x}.png permission icon.
    getAclImg : function(aclItem) {
        switch(aclItem) {
            case "DELETE" : return "d";
            case "UPDATE" : return "u";
            case "READ" : return "r";
            case "BROWSE" : return "r";
            case "NONE" : return "r";
        }
    },
    // "T" -> auth_pass.png (empty infix), anything else -> auth_nopass.png.
    getAclCheckerImg : function(aclItem) {
        if (aclItem == "T") return "";
        else return "no";
    },
    /**
     * Removes every row of the table except the <thead>.
     */
    tableRemoveAll : function(tableId) {
        $(exsoft.util.common.getIdFormat(tableId) + " tbody").empty();
    },
    /**
     * Appends one row to the table body, cloned from the first header row.
     */
    tableAddRow : function(tableId) {
        var trModel = $(exsoft.util.common.getIdFormat(tableId) + " thead tr:eq(0)").clone();
        // If the template row is hidden, make the clone visible
        if (!trModel.is(":visible")) {
            trModel.css("display","");
        }
        // Convert th cells to td
        trModel.children("th").replaceWith(function(i, html) {
            return "<td>" + html + "</td>";
        });
        // tr, td css remove
        trModel.removeClass();
        trModel.find("td").removeClass();
        $(exsoft.util.common.getIdFormat(tableId)+' tbody').append(trModel);
    },
    /**
     * Renders a data list into the table. Each <th name="..."> in the header
     * names the data property rendered into the corresponding cell.
     * @param tableId id of the table that displays the data
     * @param data row list
     * @param isCheckBox render a checkbox in the first column
     * @param isRemoveAll clear all existing body rows first
     */
    tablePrintList : function(tableId, data, isCheckBox, isRemoveAll) {
        // Remove every existing row
        if(isRemoveAll)
            exsoft.util.table.tableRemoveAll(tableId);
        // Append a row per data item and fill in the cell values
        $(data).each(function(index){
            exsoft.util.table.tableAddRow(tableId);
            var tdCols = $(exsoft.util.common.getIdFormat(tableId) + " thead tr:eq(0) th");
            $(tdCols).each(function(tdIndex) {
                var colName = $(this).attr("name");
                if(colName != null) {
                    if(isCheckBox && tdIndex == 0){
                        //<input type="checkbox" id="\'{0}\'" name="\'{0}\'">
                        $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html('<input type="checkbox" id="{0}" name="{1}">'.format(tableId+'_'+data[index][colName], tableId+'_'+colName));
                    } else {
                        $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html(data[index][colName]);
                    }
                }
            });
        });
    },
    /**
     * Main-page variant of tablePrintList: can prefix the doc_name_limit cell
     * with the page's file-extension icon.
     * @param tableId id of the table that displays the data
     * @param data row list
     * @param isCheckBox render a checkbox in the first column
     * @param isRemoveAll clear all existing body rows first
     * @param isExtension prefix doc_name_limit cells with page_extension_img
     * @param contextRoot web context root used to build the image src
     */
    tablePrintMainList : function(tableId, data, isCheckBox, isRemoveAll,isExtension,contextRoot) {
        // Remove every existing row
        if(isRemoveAll)
            exsoft.util.table.tableRemoveAll(tableId);
        // Append a row per data item and fill in the cell values
        $(data).each(function(index){
            exsoft.util.table.tableAddRow(tableId);
            var tdCols = $(exsoft.util.common.getIdFormat(tableId) + " thead tr:eq(0) th");
            $(tdCols).each(function(tdIndex) {
                var colName = $(this).attr("name");
                if(colName != null) {
                    if(isCheckBox && tdIndex == 0){
                        //<input type="checkbox" id="\'{0}\'" name="\'{0}\'">
                        $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html('<input type="checkbox" id="{0}" name="{1}">'.format(tableId+'_'+data[index][colName], tableId+'_'+colName));
                    } else {
                        if(colName == "doc_name_limit" && isExtension) {
                            var imgsrc = "<img src='"+contextRoot+data[index]['page_extension_img']+"'>";
                            $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html(imgsrc + data[index][colName]);
                        }else {
                            $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html(data[index][colName]);
                        }
                    }
                }
            });
        });
    },
    /**
     * Note-list variant of tablePrintList.
     * @param tableId id of the table that displays the data
     * @param data row list
     * @param isCheckBox render a checkbox in the first column
     * @param isRemoveAll clear all existing body rows first
     * NOTE(review): only index 11 (the 12th entry) is skipped; entries after
     * it are still rendered — looks like an attempt to cap the list, confirm
     * the intent against the caller.
     */
    tablePrintNoteList : function(tableId, data, isCheckBox, isRemoveAll) {
        // Remove every existing row
        if(isRemoveAll)
            exsoft.util.table.tableRemoveAll(tableId);
        // Append a row per data item and fill in the cell values
        $(data).each(function(index){
            if(index == 11) return;
            exsoft.util.table.tableAddRow(tableId);
            var tdCols = $(exsoft.util.common.getIdFormat(tableId) + " thead tr:eq(0) th");
            $(tdCols).each(function(tdIndex) {
                var colName = $(this).attr("name");
                if(colName != null) {
                    if(isCheckBox && tdIndex == 0){
                        $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html('<input type="checkbox" id="{0}" name="{1}">'.format(tableId+'_'+data[index][colName], tableId+'_'+colName));
                    } else {
                        $(exsoft.util.common.getIdFormat(tableId) + " tr:last td:eq(" + tdIndex + ")").html(data[index][colName]);
                    }
                }
            });
        });
    },
    /**
     * Renders the ACL (accessor permission) table for a DOCUMENT.
     * @param tableId id of the table that displays the data
     * @param aclItemList accessor permission rows
     */
    tableDocumentAclItemPrintList : function(tableId, aclItemList) {
        var tableId = exsoft.util.common.getIdFormat(tableId); // local shadow now holds the '#id' selector
        // Remove every row except the header
        exsoft.util.table.tableRemoveAll(tableId);
        // NOTE(review): rowSpanVal is only used by the commented-out header code below
        var rowSpanVal = aclItemList == null || aclItemList.length == 0 ? 2 : aclItemList.length+1;
        // thead and tbody rows cannot share a rowspan
        /* commented out after a layout change
        $(tableId + " thead tr:eq(0)").addClass('hide');
        var trHead = $(tableId + " thead tr:eq(0)").clone();
        trHead.removeClass('hide');
        trHead.children('th:eq(0)').attr("rowspan", rowSpanVal);
        $(tableId+' tbody').append(trHead);
        */
        $(aclItemList).each(function(index){
            var trModel = '<tr>';
            trModel += "<td>{0}</td>".format(this.accessor_name);
            trModel += "<td><img src='{0}/img/icon/prev_{1}.png' class='auth_grade'>{2}</td>".format(exsoft.contextRoot, exsoft.util.table.getAclImg(this.doc_default_acl), exsoft.util.table.getAclItemTitle(this.doc_default_acl));
            trModel += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_create));
            trModel += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_cancel_checkout));
            trModel += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_change_permission));
            trModel += '</tr>';
            $(tableId+' tbody').append(trModel);
        });
        // Additional accessors only
        if(aclItemList == null || aclItemList.length == 0){
            // commented out after a layout change
            //trHead.children('th:eq(0)').attr("rowspan", rowSpanVal);
            $(tableId+' tbody').append("<tr><td colspan='5'>데이타가 없습니다.</td></tr>");
        }else{
        }
    },
    /**
     * Renders the ACL (accessor permission) table for a FOLDER
     * (folder permissions plus the document defaults inherited from it).
     * @param tableId id of the table that displays the data
     * @param aclItemList accessor permission rows
     */
    tableFolderAclItemPrintList : function(tableId, aclItemList) {
        var tableId = exsoft.util.common.getIdFormat(tableId); // local shadow now holds the '#id' selector
        // Remove every row except the header
        exsoft.util.table.tableRemoveAll(tableId);
        var tableStr = "";
        $(aclItemList).each(function(idx) {
            tableStr += "<tr>";
            tableStr += "<td>{0}</td>".format(this.accessor_name);
            tableStr += "<td><img src='{0}/img/icon/prev_{1}.png' class='auth_grade'>{2}</td>".format(exsoft.contextRoot, exsoft.util.table.getAclImg(this.fol_default_acl), exsoft.util.table.getAclItemTitle(this.fol_default_acl));
            tableStr += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.fol_act_create));
            tableStr += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.fol_act_change_permission));
            tableStr += "<td><img src='{0}/img/icon/prev_{1}.png' class='auth_grade'>{2}</td>".format(exsoft.contextRoot, exsoft.util.table.getAclImg(this.doc_default_acl), exsoft.util.table.getAclItemTitle(this.doc_default_acl));
            tableStr += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_create));
            tableStr += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_cancel_checkout));
            tableStr += "<td><img src='{0}/img/icon/auth_{1}pass.png'></td>".format(exsoft.contextRoot, exsoft.util.table.getAclCheckerImg(this.doc_act_change_permission));
            tableStr += "</tr>";
        });
        $(tableId + ' tbody').append(tableStr);
    },
    /**
     * Renders extended document-type attributes (formerly base.extendTypePrint).
     * @param tableId id of the table that displays the attributes
     * @param attrItemList extended-type attribute rows
     * @param type C: create, U: update, V: view (read-only)
     */
    tableExtendTypeItemPrintList : function(tableId, attrItemList, type){
        var tableId = exsoft.util.common.getIdFormat(tableId);
        var buffer = "";
        var defaultVal = "";
        var splitVal = new Array();
        // Remove every row except the header
        exsoft.util.table.tableRemoveAll(tableId);
        $(attrItemList).each(function(index){
            buffer += "<tr>";
            buffer += "<th>"+this.attr_name+"</th>";
            buffer += "<td colspan='3'>";
            // When XR_ATTRITEM rows exist for this attribute
            if(this.has_item == "T") {
                // SELECT/RADIO/CHECK/INPUT
                if(this.display_type == "SELECT" && type !='V') {
                    buffer += "<select name='"+this.attr_id+"'>";
                }
                // Parse the XR_ATTRITEM rows
                $(this.item_list).each(function(idx){
                    // Default value differs between create and update/view
                    if(type == "C") {
                        defaultVal = attrItemList[index].default_item_index;
                    }else {
                        defaultVal = attrItemList[index].attr_value;
                    }
                    if(type != 'V') {
                        if(attrItemList[index].display_type == "RADIO") {
                            if(this.item_index == defaultVal) {
                                buffer += "<input type='radio' name='"+this.attr_id+"' value='"+this.item_index+"' checked /><span class='radio'>" + this.item_name+" </span>";
                            }else {
                                buffer += "<input type='radio' name='"+this.attr_id+"' value='"+this.item_index+"' /><span class='radio'>" + this.item_name+" </span>";
                            }
                        }else if(attrItemList[index].display_type == "SELECT") {
                            if(this.item_index == defaultVal) {
                                buffer += "<option value='"+this.item_index+"' selected>"+this.item_name+"</option>";
                            }else {
                                buffer += "<option value='"+this.item_index+"'>"+this.item_name+"</option>";
                            }
                        }else if(attrItemList[index].display_type == "CHECK") {
                            if(type =="C") {
                                if(this.item_index == defaultVal) {
                                    buffer += "<input type='checkbox' name='"+this.attr_id+"' value='"+this.item_index+"' checked/><span class='checkbox'>"+this.item_name+" </span>";
                                }else {
                                    buffer += "<input type='checkbox' name='"+this.attr_id+"' value='"+this.item_index+"'/><span class='checkbox'>"+this.item_name+" </span>";
                                }
                            }
                            else {
                                // On update, attr_value may hold several checked indexes
                                if(defaultVal != -1 && defaultVal.indexOf(this.item_index) != -1) {
                                    buffer += "<input type='checkbox' name='"+this.attr_id+"' value='"+this.item_index+"' checked/><span class='checkbox'>"+this.item_name+" </span>";
                                }else {
                                    buffer += "<input type='checkbox' name='"+this.attr_id+"' value='"+this.item_index+"'/><span class='checkbox'>"+this.item_name+" </span>";
                                }
                            }
                        }
                    }else {
                        // View mode: only the selected item is shown, read-only
                        if(this.item_index == defaultVal) {
                            buffer += "<input type='text' name='"+this.item_index+"' value='"+this.item_name+"' size='60' maxlength='100' readonly/>";
                        }
                    }
                    defaultVal = "";
                    splitVal = new Array();
                });
                if(attrItemList[index].display_type == "SELECT" && type !='V') {
                    buffer += "</select>";
                }
            }else {
                // Free-form INPUT attribute (no XR_ATTRITEM rows)
                if(type != 'V') {
                    buffer += "<input type='text' name='"+this.attr_id+"' value='"+this.attr_value+"' size='60' maxlength='100'/>";
                }else {
                    buffer += "<input type='text' name='"+this.attr_id+"' value='"+this.attr_value+"' size='60' maxlength='100' readonly/>";
                }
            }
            buffer += "</td>";
            buffer += "</tr>";
        });
        $(tableId+' tbody').append(buffer);
    },
}; // exsoft.util.table end...
exsoft.util.table.prototype = {
}; // exsoft.util.table.prototype end...
/***********************************************
* websocket
**********************************************/
/**
* agent를 통한 통신
*
* @namespace : exsoft.util.websocket
*
*/
/**
 * Communication with the locally installed agent over WebSocket.
 * Message format: SERVICEID '|' payload, where service id '0' is
 * control/PING and '1' is a command; multi-part values use '^' and '*'.
 * NOTE(review): `socket` is assigned without var and therefore lives as a
 * global shared by every method here — appears intentional, confirm no other
 * script uses the same name.
 */
exsoft.util.websocket = {
    /**
     * Opens the socket to the local agent, announces the user id and starts
     * the 5-second keep-alive ping.
     * @param userId current user id
     * @param callback invoked as callback(action, result) for '1'-type messages
     */
    connect : function(userId, callback){
        try{
            socket = new WebSocket(exsoft.util.websocket.prototype.host);
            socket.onopen = function(){
                socket.send('0'+ '|' + userId);
                setTimeout(exsoft.util.websocket.keepAlive, 5000);
            };
            socket.onmessage = function(msg){
                var str = msg.data;
                var tokens = str.split('|');
                if(tokens[0] == "1"){
                    if (callback != null) {
                        callback(tokens[1],tokens[2]); // action, result
                    }
                }
            };
            socket.onclose = function(){};
        } catch(exception){
            alert('Error'+exception);
        }
    },
    /**
     * Sends a PING every 5 seconds while the socket is OPEN (readyState 1).
     */
    keepAlive : function() {
        if (socket && socket.readyState==1) {
            socket.send('0'+ '|PING');
            // BUGFIX: was setTimeout(keepAlive, 5000) — `keepAlive` is not a
            // global, so the rescheduled tick threw ReferenceError and the
            // keep-alive silently stopped after the first ping.
            setTimeout(exsoft.util.websocket.keepAlive, 5000);
        }
    },
    // Queues a file for download on the agent.
    addToDownload : function(docId,pageId,fileName) {
        // SERVICEID | CMD | DOCID1 * PAGEID1 * FILENAME1 ^ DOCID2 * PAGEID2 * FILENAME2
        socket.send('1' + '|ADD_TO_DOWNLOAD' + "|" + docId +"*" + pageId + "*" + fileName );
    },
    // Checks out a document version through the agent.
    checkout : function (docId,pageId,fileName,versionNo,isCurrent,ownerId) {
        // SERVICEID | CMD | DOCID ^ PAGEID ^ FILENAME ^ VERSION ^ IS_CURRENT ^ OWNERID
        socket.send('1' + '|CHECKOUT' + "|" + docId + "^" + pageId + "^" + fileName + "^" + versionNo + "^" + isCurrent +"^" + ownerId);
    },
    // Asks the agent to verify the current user (payload is a dummy token).
    checkUser : function() {
        socket.send('1' + '|CHECK_USER' + "|DUMMY");
    },
    // Clears the agent's pending download queue.
    clearDownload : function() {
        socket.send('1' + '|CLEAR_DOWNLOAD' + "|DUMMY" );
    },
    // Starts downloading everything queued on the agent.
    doDownload : function() {
        socket.send('1' + '|DO_DOWNLOAD' + "|DUMMY" );
    },
    // Opens a document in the agent's viewer.
    view : function(docId,pageId,fileName) {
        // SERVICEID | CMD | DOCID ^ PAGEID ^ FILENAME
        socket.send('1' + '|VIEW' + "|" + docId + "^" + pageId +"^" + fileName) ;
    },
}; // exsoft.util.websocket end...
exsoft.util.websocket.prototype = {
    host : 'ws://localhost:23232/EdmHelper', // Never change: this URL is fixed by the agent
}; // exsoft.util.websocket.prototype end...
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/controller/TempDocDelete.java
package kr.co.exsoft.quartz.controller;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.util.ConfigData;
import kr.co.exsoft.quartz.service.QuartzService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobExecutionContext;
/**
 * Batch program that purges expired documents from the temporary work box.
 *
 * Steps: (1) write a batch-work log row, (2) fetch the list of expired temp
 * documents, (3) delete each one, (4) record the final work state. Failures
 * are logged and reflected in the work log; they are never re-thrown, so the
 * scheduler keeps running.
 *
 * @author 패키지팀
 * @since 2014. 10. 13.
 * @version 1.0
 *
 */
public class TempDocDelete extends QuartzJob {

    protected static final Log logger = LogFactory.getLog(TempDocDelete.class);

    // BUGFIX: the pattern used "hh" (12-hour clock) without an AM/PM marker,
    // making logged timestamps ambiguous; "HH" gives the intended 24-hour clock.
    protected SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    @Override
    protected void executeJob(JobExecutionContext context){
        CommonService commonService = (CommonService)super.getBean("commonService");
        QuartzService quartzService = (QuartzService)super.getBean("quartzService");
        List<HashMap<String,Object>> tempDocList = new ArrayList<HashMap<String,Object>>();
        HashMap<String,Object> resultMap = new HashMap<String,Object>();
        HashMap<String,Object> param1 = new HashMap<String,Object>();
        long work_idx = 0;
        long sTime = System.currentTimeMillis();
        try {
            logger.info("TempDocDelete START ="+df.format(sTime));
            // 1. Register the batch-work log entry
            work_idx = commonService.commonNextVal(Constant.COUNTER_ID_BATCH_WORK);
            quartzService.batchWorkWrite(work_idx,Constant.WORK_BATCH,Constant.BATCH_TEMP_DOC);
            // 2. Fetch the temp-doc rows old enough to delete (TEMP_DOC_DECADE config)
            param1.put("decade", ConfigData.getInt("TEMP_DOC_DECADE"));
            tempDocList = quartzService.tempDelDocList(param1);
            // 3. Delete each expired temp document
            if(tempDocList != null && tempDocList.size() > 0) {
                for(HashMap<String,Object> docInfo : tempDocList) {
                    HashMap<String,Object> param2 = new HashMap<String,Object>();
                    param2.put("root_id",docInfo.get("root_id"));
                    param2.put("user_id",docInfo.get("user_id"));
                    quartzService.tempDocDelete(param2);
                }
                resultMap.put("message","임시작업함 삭제건수 :: "+tempDocList.size());
            }else {
                resultMap.put("message","삭제 대상 목록이 없습니다.");
            }
            // 4. Record success
            resultMap.put("work_state",Constant.T);
        }catch(Exception e) {
            // Keep the stack trace; logging only getMessage() hid the failure origin.
            logger.error(e.getMessage(), e);
            resultMap.put("work_state",Constant.F);
            resultMap.put("message","EXCEPTION ERROR");
        }finally{
            // Always close out the work-log row, even on failure.
            Date now = new Date();
            resultMap.put("work_idx",work_idx);
            resultMap.put("work_edate",df.format(now));
            try {
                quartzService.batchWorkUpdate(resultMap);
            }catch(Exception e){
                logger.error(e.getMessage(), e);
            }
        }
        long eTime = System.currentTimeMillis();
        logger.info("TempDocDelete END ="+df.format(eTime));
    }
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/library/LocaleLibrary.java
package kr.co.exsoft.eframework.library;
import java.util.Locale;
import org.springframework.web.servlet.i18n.SessionLocaleResolver;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import kr.co.exsoft.eframework.configuration.Constant;
/**
 * Locale helper: maps language codes (Constant.JPN/ENG/CHN, anything else =>
 * Korean) to java.util.Locale and stores the choice in the Spring
 * SessionLocaleResolver session attribute or the JVM default.
 *
 * The duplicated if/else mapping that existed in all three public methods is
 * consolidated into {@link #resolveLocale(String)}; external behaviour is
 * unchanged.
 *
 * @author <NAME>
 * @since 2014.07.15
 * @version 3.0
 *
 */
public class LocaleLibrary {

    /** Truncates codes such as "en_US" to their 2-letter language part. */
    private static String normalizeLang(String lang) {
        return (lang.length() > 2) ? lang.substring(0, 2) : lang;
    }

    /**
     * Maps a language code to a Locale; codes other than Constant.JPN/ENG/CHN
     * fall back to Korean.
     */
    private static Locale resolveLocale(String lang) {
        if (lang.equals(Constant.JPN)) {
            return Locale.JAPAN;
        } else if (lang.equals(Constant.ENG)) {
            return Locale.ENGLISH;
        } else if (lang.equals(Constant.CHN)) {
            return Locale.CHINESE;
        }
        return Locale.KOREAN;
    }

    /**
     * Stores the resolved locale in the current session.
     * @param request current request (session obtained via getSession())
     * @param lang language code; longer codes are truncated to 2 characters
     */
    public void modifyLocale(HttpServletRequest request, String lang) {
        HttpSession session = request.getSession();
        session.setAttribute(SessionLocaleResolver.LOCALE_SESSION_ATTRIBUTE_NAME,
                resolveLocale(normalizeLang(lang)));
    }

    /**
     * Sets the JVM default locale.
     * @param lang language code; longer codes are truncated to 2 characters
     * @return the new default Locale
     */
    public static Locale setLocale(String lang) {
        Locale.setDefault(resolveLocale(normalizeLang(lang)));
        return Locale.getDefault();
    }

    /***
     * Stores the resolved locale in the session (created if absent).
     * NOTE(review): historically this method did NOT truncate lang to two
     * characters, unlike the other two; that behaviour is preserved here —
     * confirm whether callers ever pass longer codes.
     * @param req current request
     * @param lang language code
     */
    public static void setLocaleInfo(HttpServletRequest req,String lang) {
        HttpSession session = req.getSession(true);
        session.setAttribute(SessionLocaleResolver.LOCALE_SESSION_ATTRIBUTE_NAME,
                resolveLocale(lang));
    }
}
<file_sep>/EDMS3/WebContent/js/process/processWrite.js
/**
 * Cowork (collaboration) registration / modification JavaScript.
 * State and behavior live on the single exsoftProcessWrite object below.
 */
var exsoftProcessWrite = {
	binder : new DataBinder("#processWrite"),        // data-bind helper scoped to the write form
	coworkList : {},                                 // authorList/coauthorList/approverList/receiverList
	wFileUploadJsonArr : new Array(),                // files already pushed to the exRep ECM (pending submit)
	actionType : "C",                                // 'C' = create, 'U' = update
	defaultDocType : null,                           // first doc-type value seen; restored when dialog reopens
	// 0. Initialization
	init : {
		// Initialize the cowork registration dialog (create mode).
		initProcessWrite : function(){
			exsoftProcessWrite.binder.set("actionType", Constant.ACTION_CREATE);
			exsoftProcessWrite.actionType = 'C'
			// Document-type select-box
			exsoftProcessWrite.init.initDdslick();
			$("#processWrite").validation.clearShadowBox();
			exsoft.util.table.tableDocumentAclItemPrintList('processWrite_acl', null);
			exsoft.util.table.tableDocumentAclItemPrintList('processWrite_extAcl', null);
			exsoftProcessWrite.wFileUploadJsonArr = new Array();
			exsoftProcessWrite.coworkList = {};
			// Render the "recently used tasks" list.
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(null,exsoft.contextRoot + '/process/processRecentlyList.do',null,function(data, param){
				var $ul = $('.coop_recent_wrap ul');
				$ul.empty();
				$(data.list).each(function(idx){
					// Truncate long task names to 15 chars for display.
					var name = this.name.length > 15 ? this.name.substr(0,15)+'...' : this.name;
					var strHtml = '';
					strHtml += '<li id="'+this.recently_id+'">';
					strHtml += '<a href="javascript:exsoftProcessWrite.event.setInfoByRecent(\''+this.process_id+'\');">'+name+'</a>'
					strHtml += '<a href="javascript:exsoftProcessWrite.event.deleteRecentRow(\''+this.recently_id+'\');" class="coop_recent_del"><img src="'+exsoft.contextRoot+'/img/icon/recent_doc_del.png"></a>'
					strHtml += '</li>';
					$ul.append(strHtml);
				});
			});
			// File-upload plug-in setup; uploads report back to callback.fileupload.
			exsoft.common.file.init.initSettings('processfileuploader', exsoftProcessWrite.callback.fileupload);
			// Seed form defaults from the logged-in user.
			exsoftProcessWrite.binder.set("requestorName", exsoft.user.user_name);
			exsoftProcessWrite.binder.set("requestorId", exsoft.user.user_id);
			exsoftProcessWrite.binder.set("actionType", Constant.ACTION_CREATE);
			// Restore the document type to its default value (dialog reopened).
			if(exsoftProcessWrite.defaultDocType != null){
				exsoftProcessWrite.doFunction.setExtendTypeAttrItem(exsoftProcessWrite.defaultDocType);
				exsoftProcessWrite.binder.set("doc_type", exsoftProcessWrite.defaultDocType);
				$('#processWrite_docType').ddslick('enable'); // re-enable doc-type selectbox
			}
			exsoftProcessWrite.binder.bindingElement(); // bind all data-bind elements
		},
		// Initialize the dialog for cowork modification (update mode).
		initProcessModify : function(){
			exsoftProcessWrite.binder.set("actionType", Constant.ACTION_UPDATE);
			exsoftProcessWrite.actionType = 'U';
		},
		/**
		 * Select boxes :: rendered via the ddslick plug-in.
		 */
		initDdslick : function(type){
			// Document-type selectbox
			exsoft.util.common.ddslick('#processWrite_docType', 'srch_type1', 'doc_type', 85, function(divId, selectedData){
				exsoftProcessWrite.binder.set("doc_type", selectedData.selectedData.value);
				// Remember the first value as the default :: used to restore the
				// doc type when the registration dialog is closed and reopened.
				if(exsoftProcessWrite.defaultDocType == null){
					exsoftProcessWrite.defaultDocType = selectedData.selectedData.value;
				}
				// Show the extended attributes matching the selected document type.
				exsoftProcessWrite.doFunction.setExtendTypeAttrItem(selectedData.selectedData.value);
			}); // doc-type selectbox
		},
	},
// 1. 팝업
open : {
// 협업자 팝업
processCoworkWindow : function(){
exsoftProcessCoworkWindow.init.initProcessCoworkWindow(exsoftProcessWrite.coworkList, exsoftProcessWrite.callback.processCoworkWindow);
},
// 기본폴더 선택
selectFolderWindow : function() {
selectSingleFolderWindow.init(exsoftProcessWrite.callback.selectFolderWindow);
},
},
//2. layer + show
layer : {
},
//3. 닫기 + hide
close : {
layerClose : function(isFileDelete){
if(true){
// exRep ECM에 등록된 물리적 파일 삭제 대상으로 등록
exsoftProcessWrite.doFunction.deleteUploadFile()
}
// fileupload plug-in 목록 초기화
exsoft.common.file.prototype.wUploadObj.cancelAll();
exsoft.util.layout.divLayerClose('coop_register_wrapper','coop_register');
},
},
	//4. Screen event handlers
	event : {
		// When an entry in the "recent tasks" list is clicked, copy its basic
		// info (name, folder, ACL, collaborators) into the current form.
		setInfoByRecent : function(process_id){
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({process_id : process_id},exsoft.contextRoot + '/process/selectProcessRecently.do',null,function(data, param){
				if(data.result == 'true'){
					// Task name
					exsoftProcessWrite.binder.set("name", data.processName);
					// Default folder
					exsoftProcessWrite.binder.set("full_path", data.full_path);
					exsoftProcessWrite.binder.set("folder_id", data.folder_id);
					exsoftProcessWrite.binder.set("map_id", data.map_id);
					exsoftProcessWrite.binder.set("acl_id", data.acl_id);
					// Apply the folder's ACL
					exsoftProcessWrite.doFunction.setAclItem(data.acl_id);
					// Split returned workers into the four collaborator roles.
					var coworkerObj = {};
					coworkerObj.authorList = new Array();
					coworkerObj.coauthorList = new Array();
					coworkerObj.approverList = new Array();
					coworkerObj.receiverList = new Array();
					$(data.list).each(function(idx){
						switch (this.type) {
						case Constant.PROCESS.TYPE_AUTHOR:coworkerObj.authorList.push(this);break;
						case Constant.PROCESS.TYPE_COAUTHOR:coworkerObj.coauthorList.push(this);break;
						case Constant.PROCESS.TYPE_APPROVER:coworkerObj.approverList.push(this);break;
						case Constant.PROCESS.TYPE_RECEIVER:coworkerObj.receiverList.push(this);break;
						default:break;
						}
					});
					exsoftProcessWrite.callback.processCoworkWindow(coworkerObj);
				}else{
					jAlert(data.message);
				}
			});
		},
		// Delete one entry from the "recent tasks" list (server + DOM).
		deleteRecentRow : function(recently_id){
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({recently_id : recently_id},exsoft.contextRoot + '/common/deleteRecently.do',null,function(data, param){
				if(data.result == 'true'){
					$('.coop_recent_wrap ul').find(exsoft.util.common.getIdFormat(recently_id)).remove();
				}else{
					jAlert(data.message);
				}
			});
		},
		// Change document ACL.
		// NOTE(review): docId "DOC000000033756" is hard-coded — looks like a
		// test leftover; confirm it should come from the current document.
		chagneDocumentAcl : function() {
			selectAclWindow.initDocument(""/*AclId*/, Constant.ACL.TYPE_DOC, "DOC000000033756"/*docId*/,exsoftProcessWrite.callback.selectAcl);
		},
		// Submit the form: wait until every attached file is uploaded, then
		// post collaborators + file metadata to processControl.do.
		submit : function(){
			// NOTE(review): console debug leftovers below ("[stephan]...").
			console.log("[stephan][fileCounter]"+exsoft.common.file.prototype.wUploadObj.fileCounter);
			console.info(exsoftProcessWrite.binder.getDataToJson());
			if ($("#processWrite").validation()) {
				/**********************************************************************
				// fileCounter :: number of files in the upload area
				// 1        : register the document with no attachments
				// 2 or more: upload the attachments first, then register
				// upCounter :: number of target files uploaded successfully
				**********************************************************************/
				if(exsoft.common.file.prototype.wUploadObj.fileCounter == 1 ||
						(exsoft.common.file.prototype.wUploadObj.fileCounter -1) == exsoft.common.file.prototype.wUploadObj.upCounter) {
					// 1. Pack collaborator info into jsonArrays and call the server.
					exsoftProcessWrite.binder.setJsonArray("authorList", exsoftProcessWrite.coworkList.authorList);
					exsoftProcessWrite.binder.setJsonArray("coauthorList", exsoftProcessWrite.coworkList.coauthorList);
					exsoftProcessWrite.binder.setJsonArray("approverList", exsoftProcessWrite.coworkList.approverList);
					exsoftProcessWrite.binder.setJsonArray("receiverList", exsoftProcessWrite.coworkList.receiverList);
					var jsonObject = exsoftProcessWrite.binder.getDataToJson();
					jsonObject.fileList = JSON.stringify(exsoftProcessWrite.wFileUploadJsonArr);
					jsonObject.page_cnt = exsoft.common.file.prototype.wUploadObj.fileCounter - 1;
					console.info(exsoftProcessWrite.binder.getDataToJson());
					exsoft.util.ajax.ajaxDataFunctionWithCallback(jsonObject ,exsoft.contextRoot + '/process/processControl.do',null,function(data, param){
						if(data.result == 'true'){
							jAlert(data.message);
							exsoftProcessWrite.close.layerClose(false);
						}else{
							jAlert(data.message);
						}
					});
				}else {
					// Files still pending: start uploading them; callback.fileupload
					// re-invokes submit() once all uploads have finished.
					$("#loading_message").show();
					exsoft.common.file.prototype.wUploadObj.startUpload();
				}
			} else {
				jAlert("validation 실패");
			}
		},
	},
//5. 화면 UI 변경 처리
ui : {
getWorkerList : function(bindName, arrayObj){
var textList = '';
$(arrayObj).each(function(index){
textList += this.user_nm+';';
});
exsoftProcessWrite.binder.set(bindName, textList);
}
},
	//6. Callback handlers
	callback : {
		// Store the collaborator lists chosen in the cowork popup and refresh
		// the read-only display fields.
		processCoworkWindow : function(coworkerObj){
			exsoftProcessWrite.coworkList.authorList = coworkerObj.authorList;
			exsoftProcessWrite.coworkList.coauthorList = coworkerObj.coauthorList;
			exsoftProcessWrite.coworkList.approverList = coworkerObj.approverList;
			exsoftProcessWrite.coworkList.receiverList = coworkerObj.receiverList;
			// Render the collaborator names into the form.
			exsoftProcessWrite.ui.getWorkerList('coworkAuthor', exsoftProcessWrite.coworkList.authorList);
			exsoftProcessWrite.ui.getWorkerList('coworkCoauthor', exsoftProcessWrite.coworkList.coauthorList);
			exsoftProcessWrite.ui.getWorkerList('coworkApprover', exsoftProcessWrite.coworkList.approverList);
			exsoftProcessWrite.ui.getWorkerList('coworkReceiver', exsoftProcessWrite.coworkList.receiverList);
		},
		// Apply the folder chosen in the folder picker (path, ids, ACL,
		// and — when the folder pins a type — the document type).
		selectFolderWindow : function(nodeInfo) {
			console.info(nodeInfo);
			exsoftProcessWrite.binder.set("full_path", nodeInfo.full_path.join("/"));
			exsoftProcessWrite.binder.set("folder_id", nodeInfo.id);
			exsoftProcessWrite.binder.set("map_id", nodeInfo.mapId);
			exsoftProcessWrite.binder.set("acl_id", nodeInfo.original.acl_id);
			// Document type: free choice when the folder allows ALL_TYPE,
			// otherwise lock the selectbox to the folder's type.
			if(nodeInfo.original.is_type == 'ALL_TYPE'){
				$('#processWrite_docType').ddslick('enable');
			}else{
				$('#processWrite_docType').ddslick('disable');
				exsoftProcessWrite.binder.set("doc_type", nodeInfo.original.is_type);
				// Show the extended attributes matching the forced document type.
				exsoftProcessWrite.doFunction.setExtendTypeAttrItem(nodeInfo.original.is_type);
			}
			// Apply the folder's ACL.
			exsoftProcessWrite.doFunction.setAclItem(nodeInfo.original.acl_id);
		},
		// Per-file upload callback; once the last file is in, resubmit the form.
		fileupload : function(files,data,xhr){
			exsoftProcessWrite.doFunction.setUploadFile(data);
			// All files uploaded → continue the submit flow.
			if((exsoft.common.file.prototype.wUploadObj.fileCounter -1) == exsoft.common.file.prototype.wUploadObj.upCounter) {
				exsoftProcessWrite.event.submit();
			}
		},
		// ACL-picker callback.
		// NOTE(review): currently only logs; the selected ACL is not applied.
		selectAcl : function(aclInfo) {
			console.log("selectAclCallback");
			console.log(aclInfo);
			/*
			 * aclInfo contents:
			 * .aclDetail  [object]
			 * .aclId      [var]
			 * .aclItems   [list]
			 * .exAclItems [list]
			 *
			 * Check the console log and use whatever fields you need.
			 */
		}
	},
	// 7. Internal helper functions
	doFunction : {
		// Post-processing after a file was successfully stored in the exRep ECM:
		// record its metadata and bump the success counter.
		setUploadFile : function(data){
			exsoftProcessWrite.wFileUploadJsonArr.push({orgFile:data.orgFile,contentPath:data.contentPath,fileSize:data.fileSize,volumeId:data.volumeId});
			exsoft.common.file.prototype.wUploadObj.upCounter += 1;
		},
		// On registration cancel, delete the files already uploaded to the ECM.
		deleteUploadFile : function(){
			console.log('[stephan][exsoftProcessWrite.wFileUploadJsonArr.length] : '+exsoftProcessWrite.wFileUploadJsonArr.length);
			if(exsoftProcessWrite.wFileUploadJsonArr.length >0){
				var jsonObject = {"fileList":JSON.stringify(exsoftProcessWrite.wFileUploadJsonArr)};
				exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot+"/common/fileDelete.do", null, function(){})
			}
		},
		// Fetch and render the folder's default ACL entries.
		setAclItem : function(acl_id){
			exsoft.util.ajax.ajaxDataFunctionWithCallback({"acl_id" : acl_id}, exsoft.contextRoot+"/permission/aclItemList.do", "", function(data, acl_id) {
				// Default accessors table
				exsoft.util.table.tableDocumentAclItemPrintList('processWrite_acl', data.list);
			})
		},
		// Show/hide the extended-attribute table for the selected document type
		// (triggered by the ddslick doc-type selectbox).
		setExtendTypeAttrItem : function(selectValue){
			var jsonObject = {"type_id":selectValue,"is_extended":"T"};
			exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject,exsoft.contextRoot+'/type/attrList.do', '#processWrite_docAttrView', function(data, tableId){
				// Render the extended-attribute table.
				exsoft.util.table.tableExtendTypeItemPrintList(tableId, data.list, exsoftProcessWrite.actionType);
				if(data.records != 0){
					$(tableId).removeClass('hide');
					exsoftProcessWrite.binder.set("is_extended", 'T');
					// Convert any select elements inside the table to ddslick widgets
					// and wire inputs into the data binder.
					var $extendType = $(tableId + ' tbody').find('input, select');
					$($extendType).each(function(idx){
						var name = $(this).attr('name');
						if($(this).is('select')){
							$(this).attr('id', name);
							$(this).attr('data-select', 'true');
							exsoft.util.common.ddslick(name,'srch_type1',name,80, function(divId, selectValue){
								exsoftProcessWrite.binder.set(name, selectValue);
							});
						}else{
							$(this).attr('data-bind', name);
						}
					});
					exsoftProcessWrite.binder.bindingElement(); // bind all data-bind elements
				}else{
					$(tableId).addClass('hide');
					exsoftProcessWrite.binder.set("is_extended", 'F');
				}
			}); // extended attributes set
		}
	},
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/service/CacheServiceImpl.java
package kr.co.exsoft.common.service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.folder.dao.FolderDao;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
/**
 * In-memory cache service backed by Ehcache.
 *
 * Currently caches the full folder hierarchy (id → parent-id and
 * id → Korean name maps) loaded from {@link FolderDao}, and offers
 * hierarchy walks (menu authorization, full-path resolution) on top of it.
 *
 * @author package development team
 * @since 2014. 11. 4.
 * @version 1.0
 *
 */
@Service("CacheService")
public class CacheServiceImpl implements CacheService {
	@Autowired
	CacheManager cachemanager;
	@Autowired
	@Qualifier("sqlSession")
	private SqlSession sqlSession;
	protected static final Log logger = LogFactory.getLog(CacheServiceImpl.class);
	/**
	 *
	 * <pre>
	 * 1. Purpose : load the raw data for a given cache name/key from the DB.
	 * 2. Details : for the folder-list cache, builds either the
	 *              folder_id → parent_id map or the folder_id → name map.
	 * </pre>
	 * @Method Name : getData
	 * @param cacheName cache region name (see Constant.EHCACHE_CACHE_NAME_*)
	 * @param cacheKey  key within the region (see Constant.EHCACHE_CACHE_KEY_*)
	 * @return the freshly built map, or null for an unknown name/key
	 * @throws Exception Object
	 */
	private Object getData(String cacheName, String cacheKey) throws Exception {
		FolderDao folderDao = sqlSession.getMapper(FolderDao.class);
		Object result = null;
		if(cacheName.equals(Constant.EHCACHE_CACHE_NAME_FOLDERLIST)) {
			Map<String, String> folderIDs = new HashMap<String, String>();
			Map<String, String> folderNames = new HashMap<String, String>();
			List<CaseInsensitiveMap> folderList = new ArrayList<CaseInsensitiveMap>();
			// 1. Fetch the folder rows.
			folderList = folderDao.folderIdsList();
			// 2. Fill the maps: [key]folder_id, [value]parent_id :: personal folders excluded
			for(CaseInsensitiveMap caseMap : folderList) {
				folderIDs.put(caseMap.get("FOLDER_ID").toString(), caseMap.get("PARENT_ID").toString());
				folderNames.put(caseMap.get("FOLDER_ID").toString(), caseMap.get("FOLDER_NAME_KO").toString());
			}
			switch (cacheKey) {
			case Constant.EHCACHE_CACHE_KEY_FOLDERIDS:result = folderIDs;break;
			case Constant.EHCACHE_CACHE_KEY_FOLDERNAMES:result = folderNames;break;
			default:break;
			}
		}
		return result;
	}
	// Returns the cached value, lazily loading (and storing) it on a miss.
	@Override
	public Object getCache(String cacheName, String cacheKey) throws Exception {
		Object result = null;
		// 1. Obtain the cache region.
		Cache cache = cachemanager.getCache(cacheName);
		// 2. Look up the stored element.
		Element element = cache.get(cacheKey);
		// 3. On a miss, build and store the value.
		if(element == null) {
			logger.debug("["+cacheName+"]["+cacheKey+"][getCache()] : 메모리에 캐쉬 미존재 신규 생성");
			result = getData(cacheName, cacheKey);
			cache.put(new Element(cacheKey, result));
		} else {
			logger.debug("["+cacheName+"]["+cacheKey+"][getCache()] : 메모리에 캐쉬 존재");
			result = element.getObjectValue();
		}
		return result;
	}
	// Replaces (or creates) a cache entry with the supplied object.
	@Override
	public void replaceCache(String cacheName, String cacheKey, Object obj) throws Exception {
		// 1. Obtain the cache region.
		Cache cache = cachemanager.getCache(cacheName);
		// 2. Look up the stored element.
		Element element = cache.get(cacheKey);
		// 3. Create when absent, otherwise remove-and-put.
		if(element == null) {
			logger.debug("["+cacheName+"]["+cacheKey+"][replaceCache] : 메모리에 캐쉬가 존재하지 않습니다. 미존재 신규 생성");
			cache.put(new Element(cacheKey, obj));
		} else {
			logger.debug("["+cacheName+"]["+cacheKey+"][replaceCache] : 메모리에 캐쉬를 변경 합니다.");
			// An Element cannot be mutated in place, so Cache.replace is not used here.
			cache.removeElement(element);
			cache.put(new Element(cacheKey, obj));
		}
	}
	// Walks up the cached folder tree from folder_id; returns true when the
	// folder corresponding to group_id is an ancestor (or the folder itself).
	@Override
	public boolean menuAuthByFolderID(String folder_id, String group_id) throws Exception {
		@SuppressWarnings("unchecked")
		Map<String, String> srcFolderMap = (HashMap<String, String>)getCache(Constant.EHCACHE_CACHE_NAME_FOLDERLIST, Constant.EHCACHE_CACHE_KEY_FOLDERIDS);
		folder_id = !StringUtil.isEmpty(folder_id) ? folder_id : "";
		group_id = !StringUtil.isEmpty(group_id) ? group_id : "";
		String group_folder_id = group_id.replace(Constant.ID_PREFIX_GROUP, Constant.ID_PREFIX_FOLDER);
		boolean isAuth = false;
		if(group_folder_id.equals(folder_id))
			return true;
		int loopCnt = 0;
		while(!StringUtil.isEmpty(folder_id) && !folder_id.equals(Constant.FOLDER_TOP_ID)){
			if(srcFolderMap.containsKey(folder_id)){
				if(group_folder_id.equals(folder_id))
					return true;
				folder_id = srcFolderMap.get(folder_id);
			} else {
				return false;
			}
			// Infinite-loop guard :: at most 10000 department depth levels
			loopCnt++;
			if( loopCnt == 10000)
				break;
		}
		return isAuth;
	}
	// Builds the "/a/b/c/" style full path name for a folder by walking the
	// cached id → parent and id → name maps up to the root.
	@Override
	public String getFolderFullpathNameByFolderId(String folder_id, boolean isFristSlash) throws Exception {
		Map<String, String> objFoldeIds = (HashMap<String, String>)getCache(Constant.EHCACHE_CACHE_NAME_FOLDERLIST, Constant.EHCACHE_CACHE_KEY_FOLDERIDS);
		Map<String, String> objFolderName = (HashMap<String, String>)getCache(Constant.EHCACHE_CACHE_NAME_FOLDERLIST, Constant.EHCACHE_CACHE_KEY_FOLDERNAMES);
		String fullPathName = "";
		String tempFolderId = folder_id;
		int loopCnt = 0;
		while(objFolderName.containsKey(tempFolderId)){
			fullPathName = objFolderName.get(tempFolderId)+"/"+fullPathName;
			tempFolderId = objFoldeIds.get(tempFolderId); //parent folder ID
			// Infinite-loop guard :: at most 1000 department depth levels
			loopCnt++;
			if( loopCnt == 1000)
				break;
			// Self-parented node → stop (would otherwise loop forever).
			if(tempFolderId.equals(objFoldeIds.get(tempFolderId)))
				break;
		}
		return isFristSlash ? "/"+fullPathName : fullPathName;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/statistics/dao/AuditDao.java
package kr.co.exsoft.statistics.dao;
import java.util.HashMap;
import java.util.List;
import org.apache.commons.collections.map.CaseInsensitiveMap;
import kr.co.exsoft.quartz.vo.AuditTrailVO;
import org.springframework.stereotype.Repository;
/**
 * Audit-trail (bulk document view auditing) DAO / MyBatis mapper.
 *
 * @author package team
 * @since 2014. 9. 15.
 * @version 1.0
 *
 */
@Repository(value = "auditDao")
public interface AuditDao {
	/**
	 *
	 * <pre>
	 * 1. Purpose : total row count for the bulk-document-view audit list (paging).
	 * 2. Details :
	 * </pre>
	 * @Method Name : auditPagingCount
	 * @param map search conditions
	 * @return int
	 */
	public int auditPagingCount(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : paged bulk-document-view audit list.
	 * 2. Details :
	 * </pre>
	 * @Method Name : auditPagingList
	 * @param map search + paging conditions
	 * @return List<AuditTrailVO>
	 */
	public List<AuditTrailVO> auditPagingList(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : row count for the audit detail list.
	 * 2. Details :
	 * </pre>
	 * @Method Name : auditDetailCount
	 * @param map search conditions
	 * @return int
	 */
	public int auditDetailCount(HashMap<String,Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : audit detail rows.
	 * 2. Details : keys are case-insensitive column names.
	 * </pre>
	 * @Method Name : auditDetailList
	 * @param map search conditions
	 * @return List<CaseInsensitiveMap>
	 */
	public List<CaseInsensitiveMap> auditDetailList(HashMap<String,Object> map);
	/**
	 * Inserts one audit record.
	 * @param map audit record fields
	 * @return number of rows inserted
	 */
	public int writeAudit(HashMap<String,Object> map);
}
<file_sep>/EDMS3/src/kr/co/exsoft/common/dao/HistoryDao.java
package kr.co.exsoft.common.dao;
import java.util.HashMap;
import java.util.List;
import org.springframework.stereotype.Repository;
import kr.co.exsoft.common.vo.DocumentHtVO;
import kr.co.exsoft.common.vo.HistoryVO;
import kr.co.exsoft.common.vo.PageHtVO;
/**
 * History DAO / MyBatis mapper (folder, document-type, ACL and document
 * change history plus attachment view history).
 * @author package development team
 * @since 2014.07.21
 * @version 3.0
 *
 */
@Repository(value = "historyDao")
public interface HistoryDao {
	/**
	 *
	 * <pre>
	 * 1. Purpose : insert a folder / document-type / ACL history record.
	 * 2. Details :
	 * </pre>
	 * @Method Name : historyWrite
	 * @param historyVO history record to insert
	 * @return int
	 */
	public int historyWrite(HistoryVO historyVO);
	/**
	 *
	 * <pre>
	 * 1. Purpose : insert a document history record.
	 * 2. Details :
	 * </pre>
	 * @Method Name : documentHtWrite
	 * @param documenthtVO document history record to insert
	 * @return int
	 */
	public int documentHtWrite(DocumentHtVO documenthtVO);
	/**
	 *
	 * <pre>
	 * 1. Purpose : insert an attachment-view history record.
	 * 2. Details :
	 * </pre>
	 * @Method Name : pageHtWrite
	 * @param pageHtVO attachment-view record to insert
	 * @return int
	 */
	public int pageHtWrite(PageHtVO pageHtVO);
	/**
	 * <pre>
	 * 1. Purpose : fetch the document history list. :: scheduled for removal
	 * 2. Details :
	 * </pre>
	 * @Method Name : documentHtList
	 * @param map search conditions
	 * @return
	 */
	public List<DocumentHtVO> documentHtList(HashMap<String, Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : fetch the document history list with paging.
	 * 2. Details :
	 * </pre>
	 * @Method Name : docHtList
	 * @param map search + paging conditions
	 * @return List<DocumentHtVO>
	 */
	public List<DocumentHtVO> docHtList(HashMap<String, Object> map);
	/**
	 *
	 * <pre>
	 * 1. Purpose : total row count for the document history list.
	 * 2. Details :
	 * </pre>
	 * @Method Name : docHtPagingCount
	 * @param map search conditions
	 * @return int
	 */
	public int docHtPagingCount(HashMap<String, Object> map);
}
<file_sep>/EDMS3/WebContent/js/docadmin/typeManager.js
/**
* 문서유형관련 스크립트
*/
var exsoftAdminTypeFunc = {
buttons : ['atrrDelBtn','attrUpdateBtn','attrItemAdd','attrItemDel','is_hidden','attrCancelBtn'],
gRowData : [ { attr_id:"", attr_name:"", attr_size:100,sort_index:0,is_mandatory:"F",is_editable:"T",is_search:"F",display_type:"INPUT",config:"" } ],
gRowId : 0,
mRowId : 0,
gTypeId : "",
mList : 0,
gridId : "",
gridUrl : "",
pageSize : "",
gAttrItemIdx : 0, // 항목설정 INDEX 값
gRowItemId : "", // 항목설정 BTN의 GRID ROW 정보
gAttrItemType : "",
binder : new DataBinder("#frmView"),
gParameters : {
"successfunc" : null,
"url" : 'clientArray',
"extraparam" : {},
"aftersavefunc" : function( response ) {},
"errorfunc": null,
"afterrestorefunc" : null,
"restoreAfterError" : true,
"mtype" : "POST"
},
	init : {
		// Page initializer for the document-type admin screen
		// (original comment said "login history init" — copy/paste leftover).
		initPage : function(gridId,gridUrl,pageSize) {
			// Main grid settings
			exsoftAdminTypeFunc.gridId = gridId;
			exsoftAdminTypeFunc.gridUrl = gridUrl;
			exsoftAdminTypeFunc.pageSize = pageSize;
			exsoft.util.common.ddslick('#is_hiddenU','use_yn','is_hiddenU',79, function(){});
			exsoft.util.common.ddslick('#is_hiddenC','use_yn','is_hiddenC',79, function(){});
		},
		// Enable/disable the action buttons listed in `buttons`
		// ('T' → disable, anything else → enable) :: TODO
		btnStateChange : function(flag) {
			$.each(exsoftAdminTypeFunc.buttons,function(index,item){
				if(flag == 'T') {
					$("#"+item).prop('disabled', true);
					$("#"+item).addClass("disabled");
				}else {
					$("#"+item).prop('disabled', false);
					$("#"+item).removeClass("disabled");
				}
			});
		},
	},
	open : {
		// Open the "register document type" layer with a cleared form and
		// a freshly initialized attribute grid.
		typeAdd : function() {
			exsoft.util.layout.divLayerOpen('register_docuType_wrapper', 'register_docuType');
			exsoft.util.common.formClear('frm');
			exsoftAdminTypeFunc.event.fAttrGridList();
			exsoft.util.grid.gridInputInit(false);
		}
	},
	layer : {
	},
	close : {
	},
event : {
		// Document-type list grid :: TODO adjust column-header sizes
		fTypeListGrid : function() {
			$("#typeGridList").jqGrid({
				url:exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl,
				mtype:"post",
				datatype:'json',
				jsonReader:{
					page:'page',toatl:'toatl',root:'list'
				},
				colNames:['type_id','type_name','is_base','create_date','status_nm','is_system'],
				colModel:[
					{name:'type_id',index:'type_id',width:25, editable:false,sortable:true,resizable:true,align:'left'},
					{name:'type_name',index:'type_name',width:25, editable:false,sortable:true,resizable:true,align:'center'},
					{name:'is_base',index:'is_base',width:10, editable:false,sortable:false,resizable:true,align:'center',hidden:true},
					{name:'create_date',index:'create_date',width:10, editable:false,sortable:false,resizable:true,align:'center',hidden:true},
					{name:'status_nm',index:'status_nm',width:20, editable:false,sortable:false,resizable:true,align:'center'},
					{name:'is_system',index:'is_system',width:10, editable:false,sortable:false,resizable:true,align:'center',hidden:true}
				],
				autowidth:true,height:"auto",viewrecords: true,
				multiselect:true,
				sortname : "type_name",
				sortorder:"desc",
				sortable: true,
				shrinkToFit:true,
				gridview: true,
				multikey: "ctrlKey",
				viewsortcols:'vertical',
				rowNum : exsoftAdminTypeFunc.pageSize,
				emptyDataText: "데이터가 없습니다.",
				caption:'문서유형 목록'
				// Column 0 hosts the row checkbox; any other column opens the
				// detail view for the clicked row.
				,onCellSelect : function(rowid,iCol,cellcontent,e){
					exsoft.util.grid.checkBox(e);
					if(iCol == 0){
						$("#typeGridList").jqGrid('setSelection',rowid);
					}else {
						exsoftAdminTypeFunc.ui.typeViewRefresh('typeGridList',rowid);
						//$("#select_list").remove();
						//$("#"+rowid).find('td:eq(1)').prepend("<span id='select_list' class='select_list_icon'></span>");
					}
				}
				,loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('typeGridList');
					exsoft.util.grid.gridNoDataMsgInit('typeGridList');
				}
				// After load: toggle the right pane, auto-select the first row,
				// and refresh the pager.
				,loadComplete: function(data) {
					if ($("#typeGridList").getGridParam("records")==0) {
						$(".sub_right").addClass("hide");
						exsoft.util.grid.gridNoRecords('typeGridList','no_data');
					}else {
						$(".sub_right").removeClass("hide");
						exsoft.util.grid.gridViewRecords('typeGridList');
						// Display the detail view for the first row.
						var rowId = $("#typeGridList").getDataIDs()[0];
						exsoftAdminTypeFunc.gTypeId = $("#typeGridList").getRowData(rowId).type_id;
						exsoftAdminTypeFunc.ui.typeViewRefresh('typeGridList',rowId);
						exsoft.util.grid.gridPager("#typeGridPager",data);
					}
					exsoft.util.grid.gridInputInit(false);
				}
				,loadError:function(xhr, status, error) {
					exsoft.util.error.isErrorChk(xhr);
				}
			});
			var headerData = '{"type_id":"문서유형ID","type_name":"문서유형명","status_nm":"상태"}';
			exsoft.util.grid.gridColumHeader('typeGridList',headerData,'center');
		},
		// Document-type attribute view grid (inline-editable unless the type is a system type)
		fAttrViewList : function() {
			$('#attrViewList').jqGrid({
				url:exsoft.contextRoot+'/admin/attrList.do',
				mtype:"post",
				datatype:'json',
				jsonReader:{
					root:'list'
				},
				colNames:['attr_id','attr_name','attr_size','sort_index','is_mandatory','is_editable','is_search','display_type','config','has_item','has_item_list','default_item_index','is_locked'],
				colModel:[
					{name:'attr_id',index:'attr_id',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'text',
						editoptions: {
							dataInit: function (elem) {
								$(elem).keypress(function(){
									return exsoft.util.filter.inputBoxFilter('^[A-Za-z0-9_]');
								});
								// Force attr_id to uppercase alphanumerics/underscore.
								$(elem).keyup(function(){
									elem.value = elem.value.replace(/[^a-zA-Z0-9_]/g,'')
									elem.value = elem.value.toUpperCase();
								});
								// Patch: avoid attr_id corruption when an existing row is in
								// edit mode and a new row then enters edit mode.
								$(elem).click(function (e) {
									var row = jQuery(e.target).closest('tr.jqgrow');
									var rowid = row.attr('id');
									if(!exsoftAdminTypeFunc.ui.isSystem(exsoftAdminTypeFunc.gTypeId)) {
										exsoftAdminTypeFunc.ui.gridNoEditColum('attrViewList','attr_id',rowid);
										$('#attrViewList').editRow(rowid,false);
									}
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^a-zA-Z0-9_]/g,'')
								});
							},
							size:'15', maxlength:'20'
						}
					},
					{name:'attr_name',index:'attr_name',width:50, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							dataInit: function (elem) {
								$(elem).keypress(function(){
									// Special characters not allowed.
									return exsoft.util.filter.inputBoxFilter('[A-Za-z0-9]');
								})
							},
							size:'15',maxlength:'20'
						}
					},
					{name:'attr_size',index:'attr_size',width:30, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							dataInit: function (elem) {
								// digits only (Chrome-compatible handlers)
								$(elem).keyup(function(){
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
							},
							size:'5',maxlength:'4'
						}
					},
					{name:'sort_index',index:'sort_index',width:40, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							size:'5',maxlength:'2',
							dataInit: function (elem) {
								// digits only (Chrome-compatible handlers)
								$(elem).keyup(function(){
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
							}
						}
					},
					{name:'is_mandatory',index:'is_mandatory',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'is_editable',index:'is_editable',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'is_search',index:'is_search',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'display_type',index:'display_type',width:100, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
						editoptions:{
							value:"RADIO:RADIO;INPUT:INPUT;CHECK:CHECK;SELECT:SELECT"
							// Show the item-config button for every display type except INPUT.
							,dataEvents:[{ type:'change', fn: function(e){
								var row = jQuery(e.target).closest('tr.jqgrow');
								if($(e.target).val() == "INPUT") {
									$("#vconf"+row.attr('id')).hide();
								}else {
									$("#vconf"+row.attr('id')).show();
								}
								// Reset the attribute grid's item values.
								$("#attrViewList").setRowData(row.attr('id'),{ 'has_item_list': '' });
								$("#attrViewList").setRowData(row.attr('id'),{ 'default_item_index': '' });
							}}]
						}
					},
					{name:'config',index:'config',width:50, editable:false,sortable:true,resizable:true,align:'center'
						,formatter: function (cellValue, option,rowObject) {
							return "<button type='button' id='vconf"+option.rowId+"' style='display:none;' onclick='javascript:exsoftAdminTypeFunc.ui.attrConfig(\""+option.rowId+"\",\"attrViewList\")'><span>설정</span></button>";
						}
					},
					{name:'has_item',index:'has_item',width:10, editable:false,sortable:true,resizable:true,align:'center',hidden:true},
					{name:'has_item_list',index:'has_item_list',width:10, editable:false,sortable:true,resizable:true,align:'center',hidden:true},
					{name:'default_item_index',index:'default_item_index',width:10, editable:false,sortable:true,resizable:true,align:'center',hidden:true},
					{name:'is_locked',index:'is_locked',width:10, editable:false,sortable:true,resizable:true,align:'center',hidden:true}
				],
				autowidth:true,
				height:"auto",
				viewrecords: true,
				multiselect:true,
				sortable: true,
				shrinkToFit:true,
				gridview: true,
				postData : {type_id:exsoftAdminTypeFunc.gTypeId},
				emptyDataText: "데이터가 없습니다.",
				caption:'문서유형 목록 조회'
				,loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('attrViewList');
					exsoft.util.grid.gridNoDataMsgInit('attrViewList');
				}
				// After load: remember the last row id and toggle item-config buttons.
				,loadComplete: function(data) {
					if ($("#attrViewList").getGridParam("records") ==0) {
						exsoft.util.grid.gridNoDataMsg('attrViewList','nolayer_data');
						exsoftAdminTypeFunc.mRowId = 0;
					}else {
						var rowIDs = $("#attrViewList").jqGrid('getDataIDs');
						exsoftAdminTypeFunc.mRowId = rowIDs[rowIDs.length-1];
					}
					exsoftAdminTypeFunc.mList = exsoftAdminTypeFunc.mRowId; // used to prevent editing of existing attr_id values
					exsoftAdminTypeFunc.ui.isConfigProc(); // show/hide the item-config buttons
					exsoft.util.grid.gridInputInit(false);
					$(".jqgfirstrow" ).addClass('hide');
				}
				// Cell click: enter inline-edit mode unless this is a system type.
				,onCellSelect: function(rowid, iCol,cellcontent,e){
					exsoft.util.grid.checkBox(e);
					if(!exsoftAdminTypeFunc.ui.isSystem(exsoftAdminTypeFunc.gTypeId)) {
						exsoftAdminTypeFunc.ui.gridNoEditColum('attrViewList','attr_id',rowid);
						$('#attrViewList').editRow(rowid,false);
					}
				}
				// Row select toggles edit mode: deselect saves, select edits.
				,onSelectRow: function(rowid,status,e){
					if(!exsoftAdminTypeFunc.ui.isSystem(exsoftAdminTypeFunc.gTypeId)) {
						exsoftAdminTypeFunc.ui.gridNoEditColum('attrViewList','attr_id',rowid);
						// status == false → save the row.
						if(!status) {
							$('#attrViewList').jqGrid('saveRow', rowid, exsoftAdminTypeFunc.gParameters );
						}else {
							if(exsoft.util.grid.gridEditMode('attrViewList',rowid) == "0") {
								$('#attrViewList').editRow(rowid,false);
							}
						}
					}
				}
			});
			// Grid column-header alignment.
			var headerData = '{"attr_id":"속성ID","attr_name":"속성명","attr_size":"길이","sort_index":"정렬순서","is_mandatory":"필수","is_editable":"편집","is_search":"검색","display_type":"입력유형","config":"항목설정"}';
			exsoft.util.grid.gridColumHeader('attrViewList',headerData,'center');
		},
		// Attribute grid for the document-type REGISTRATION dialog (client-side rows)
		fAttrGridList : function() {
			$('#attrGridList').jqGrid({
				mtype:"post",
				datatype:'json',
				colNames:['attr_id','attr_name','attr_size','sort_index','is_mandatory','is_editable','is_search','display_type','config','has_item','default_item_index'],
				colModel:[
					{name:'attr_id',index:'attr_id',width:80, editable:true,sortable:false,resizable:true,align:'center',edittype:'text',
						editoptions: {
							dataInit: function (elem) {
								$(elem).keypress(function(){
									return exsoft.util.filter.inputBoxFilter('^[A-Za-z0-9_]');
								})
								// Force attr_id to uppercase alphanumerics/underscore.
								$(elem).keyup(function(){
									// Chrome-compatible handler
									elem.value = elem.value.replace(/[^a-zA-Z0-9_]/g,'')
									elem.value = elem.value.toUpperCase();
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^a-zA-Z0-9_]/g,'')
								});
							},size:'15', maxlength:'20'
						}
					},
					{name:'attr_name',index:'attr_name',width:80, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							dataInit: function (elem) {
								$(elem).keypress(function(){
									// Special characters not allowed.
									return exsoft.util.filter.inputBoxFilter('[A-Za-z0-9]');
								})
							},size:'15',maxlength:'20'
						}
					},
					{name:'attr_size',index:'attr_size',width:45, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							dataInit: function (elem) {
								// digits only (Chrome-compatible handlers)
								$(elem).keyup(function(){
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
							},size:'5',maxlength:'4'
						}
					},
					{name:'sort_index',index:'sort_index',width:45, editable:true,sortable:false,resizable:true,align:'center',
						editoptions: {
							size:'5',maxlength:'2',
							dataInit: function (elem) {
								// digits only (Chrome-compatible handlers)
								$(elem).keyup(function(){
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
								$(elem).focusout(function () {
									elem.value = elem.value.replace(/[^0-9]/g,'')
								});
							}
						}
					},
					{name:'is_mandatory',index:'is_mandatory',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'is_editable',index:'is_editable',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'is_search',index:'is_search',width:40, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',editoptions:{value:"T:T;F:F"}},
					{name:'display_type',index:'display_type',width:80, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
						editoptions:{
							value:"RADIO:RADIO;INPUT:INPUT;CHECK:CHECK;SELECT:SELECT"
							// Show the item-config button for every display type except INPUT.
							,dataEvents:[{ type:'change', fn: function(e){
								var row = jQuery(e.target).closest('tr.jqgrow');
								var rowid = row.attr('id');
								var option = $(e.target).val();
								if(option == "INPUT") {
									$("#conf"+rowid).hide();
								}else {
									$("#conf"+rowid).show();
								}
								// Reset the attribute grid's item values.
								$("#attrGridList").setRowData(rowid,{ 'has_item': '' });
								$("#attrGridList").setRowData(rowid,{ 'default_item_index': '' });
							}}]
						}
					},
					{name:'config',index:'config',width:50, editable:false,sortable:true,resizable:true,align:'center'
						,formatter: function (cellValue, option,rowObject) {
							return "<button type='button' id='conf"+option.rowId+"' style='display:none;' onclick='javascript:exsoftAdminTypeFunc.ui.attrConfig(\""+option.rowId+"\",\"attrGridList\")'><span>설정</span></button>";
						}
					},
					{name:'has_item',index:'has_item',width:10, editable:false,sortable:true,resizable:true,align:'center',hidden:true},
					{name:'default_item_index',index:'default_item_index',width:50, editable:false,sortable:true,resizable:true,align:'center',hidden:true}
				],
				autowidth:true,
				height:"auto",
				viewrecords: true,
				multiselect:true,
				sortable: true,
				shrinkToFit:true,
				gridview: true,
				caption:'문서유형 등록'
				,loadBeforeSend: function() {
					exsoft.util.grid.gridTitleBarHide('attrGridList');
				}
				,loadComplete: function() {
					exsoft.util.grid.gridInputInit(false);
				}
				,onCellSelect: function(rowid, iCol,cellcontent,e){
					exsoft.util.grid.checkBox(e);
					$('#attrGridList').editRow(rowid,false);
				}
				// Row select toggles edit mode: deselect saves, select edits.
				,onSelectRow: function(rowid,status,e){
					// Is the row currently in edit mode?
					var edited = exsoft.util.grid.gridEditMode('attrGridList',rowid);
					// status == false → save the row.
					if(!status) {
						$('#attrGridList').jqGrid('saveRow', rowid, exsoftAdminTypeFunc.gParameters );
					}else {
						if(edited == "0") {
							$('#attrGridList').editRow(rowid,false);
						}
					}
				},
			});
			// Grid column-header alignment.
			var headerData = '{"attr_id":"속성ID","attr_name":"속성명","attr_size":"길이","sort_index":"정렬순서","is_mandatory":"필수","is_editable":"편집","is_search":"검색","display_type":"입력유형","config":"항목설정"}';
			exsoft.util.grid.gridColumHeader('attrGridList',headerData,'center');
			// Remove any existing attribute rows (reset the grid).
			var rowIDs = $("#attrGridList").jqGrid('getDataIDs');
			for (var i = 0; i < rowIDs.length ; i++) {
				$("#attrGridList").jqGrid("delRowData",rowIDs[i]);
			}
			exsoftAdminTypeFunc.gRowId = 0;
		},
// 페이지이동 처리(공통)
gridPage : function(nPage) {
$(exsoftAdminTypeFunc.gridId).setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
// 문서유형 검색처리
searchFunc : function() {
var postData = {strKeyword:$("#strKeyword").val(),strIndex:'TYPE_NAME',is_search:'true'} ;
exsoft.util.grid.gridPostDataRefresh(exsoftAdminTypeFunc.gridId,exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl,postData);
},
// Delete the document types checked in the list grid.
// System-provided types (is_system == 'T') may not be deleted; the user is
// asked to confirm before the delete request is sent to the server.
typeDel : function() {
    var jsonArr = [];
    var jsonArrIndex = 0;
    // At least one row must be checked.
    if(!exsoft.util.grid.gridSelectCheck('typeGridList')) {
        jAlert("삭제할 문서유형을 선택하세요.",'확인',0);
        return false;
    }else {
        var id = $("#typeGridList").getGridParam('selarrrow');
        for (var i = 0; i < id.length; i++) {
            var rowData = {type_id:"",};
            var rowId = $("#typeGridList").getRowData(id[i]);
            // Abort the whole operation when any selected row is a system type.
            if(rowId.is_system == 'T') {
                jAlert("시스템제공 문서유형은 삭제할 수 없습니다.","확인",0);
                return false;
            }
            rowData['type_id'] = rowId.type_id; // jsonObject
            if(rowData.type_id){
                jsonArr[jsonArrIndex] = rowData;
                jsonArrIndex++;
            }
        }
        if (id.length > 0) {
            // Confirm, then post the id list and refresh the grid on success.
            jConfirm('선택한 문서유형을 삭제하시겠습니까?', '확인',0,
                function(r){
                    var jsonObject = { "type":"delete", "typeIdList":JSON.stringify(jsonArr)};
                    if(r) {
                        exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot + "/admin/typeControl.do" , "typeDelete",
                            function(data,e) {
                                if(data.result == "true"){
                                    exsoft.util.grid.gridRefresh('typeGridList',exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl);
                                }else {
                                    jAlert(data.message,'확인',0);
                                }
                            });
                    }
                });
        }
    }
},
// Validate and persist a document type (shared by the registration and the
// view/update screens). type == "regist" inserts a new type from the
// registration form; any other value updates the type shown in the view form.
applyTypeWrite : function(type) {
    // Pick the form and attribute grid for the current screen (XR_TYPE check).
    var gridIds = "";
    var objForm = null;
    if(type == "regist") {
        objForm = document.frm;
        gridIds = "attrGridList";
    }else {
        objForm = document.frmView;
        gridIds = "attrViewList";
    }
    // Required form fields: type id, type name, sort order.
    if (objForm.type_id.value.length == 0) {
        jAlert("문서유형 ID를 입력하세요.","확인",0);
        return false;
    }
    if (objForm.type_name.value.length == 0) {
        jAlert("문서유형명을 입력하세요.","확인",0);
        return false;
    }
    if (objForm.sortIndex.value.length == 0) {
        jAlert("정렬순서를 입력하세요.","확인",0);
        return false;
    }
    // XR_ATTR & XR_ATTRITEM check: at least one attribute row must exist.
    if(exsoft.util.grid.gridEditRowCnt(gridIds) == 0){
        jAlert("문서유형 속성을 입력하세요.","확인",0);
        return false;
    }
    // Per-row validation of the attribute grid.
    var chkVal = exsoftAdminTypeFunc.ui.validAttrItem(gridIds);
    if(chkVal) {
        // Build the attribute array to send to the server.
        var jsonArr = exsoftAdminTypeFunc.ui.returnAttrItem(gridIds);
        // Reject duplicated attribute ids / names.
        if(exsoft.util.check.inputArrayValid(jsonArr,'attr_id')) {
            jAlert("중복된 속성ID를 입력하셨습니다.");
            return false;
        }else if(exsoft.util.check.inputArrayValid(jsonArr,'attr_name')) {
            jAlert("중복된 속성명을 입력하셨습니다.");
            return false;
        }
        // Bind the payload and send it to the server.
        exsoftAdminTypeFunc.binder.set("type_id",objForm.type_id.value);
        exsoftAdminTypeFunc.binder.set("type_name",objForm.type_name.value);
        exsoftAdminTypeFunc.binder.set("sortIndex",objForm.sortIndex.value);
        exsoftAdminTypeFunc.binder.set("attrArrayList",JSON.stringify(jsonArr));
        if(type == "regist") {
            exsoftAdminTypeFunc.binder.set("type","insert");
            exsoftAdminTypeFunc.binder.set("is_hidden",exsoft.util.layout.getSelectBox('is_hiddenC','option'));
            exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(exsoftAdminTypeFunc.binder.getDataToJson(),exsoft.contextRoot +'/admin/typeControl.do','typeInsert',
                exsoftAdminTypeFunc.callback.returnAjaxDataFunction);
        }else {
            exsoftAdminTypeFunc.binder.set("type","update");
            exsoftAdminTypeFunc.binder.set("is_hidden",exsoft.util.layout.getSelectBox('is_hiddenU','option'));
            exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(exsoftAdminTypeFunc.binder.getDataToJson(),exsoft.contextRoot +'/admin/typeControl.do','typeUpdate',
                exsoftAdminTypeFunc.callback.returnAjaxDataFunction);
        }
    }
},
// Delete the single document type currently open in the detail/view screen.
// Reads the type id from the #type_id field, confirms with the user, then
// posts the delete and refreshes the list grid on success.
typeViewDel : function() {
    var jsonArr = [];
    var rowData = {type_id:"",};
    rowData['type_id'] = $("#type_id").val();
    if(rowData.type_id){
        jsonArr[0] = rowData;
    }
    jConfirm('선택한 문서유형을 삭제하시겠습니까?', 'Confirm',0,
        function(r){
            var jsonObject = { "type":"delete", "typeIdList":JSON.stringify(jsonArr)};
            if(r) {
                exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback(jsonObject, exsoft.contextRoot + "/admin/typeControl.do" , "typeDelete",
                    function(data,e) {
                        if(data.result == "true"){
                            exsoft.util.grid.gridRefresh('typeGridList',exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl);
                        }else {
                            jAlert(data.message,'확인',0);
                        }
                    });
            }
        });
}
},
ui : {
// 수정화면 취소 처리
cancelProc : function() {
exsoftAdminTypeFunc.ui.attrDetailCall(exsoftAdminTypeFunc.gTypeId);
},
// 상세화면 보기
typeViewRefresh : function(gridIds,rowid) {
exsoftAdminTypeFunc.gTypeId = $("#"+gridIds).getRowData(rowid).type_id;
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({ "type_id":exsoftAdminTypeFunc.gTypeId}, exsoft.contextRoot + "/admin/typeInfo.do" , "typeDetail",exsoftAdminTypeFunc.callback.returnAjaxDataFunction);
},
// 문서유형 수정 후 상세보기 갱신
attrDetailCall : function(type_id) {
exsoft.util.ajax.ajaxDataFunctionNoLodingWithCallback({ "type_id":type_id}, exsoft.contextRoot + "/admin/typeInfo.do" , "typeDetail",exsoftAdminTypeFunc.callback.returnAjaxDataFunction);
},
// 속성ID 컬럼 수정못하게 처리
gridNoEditColum : function(gridIds,column,rowid) {
var rowData = $('#'+gridIds).jqGrid('getRowData',rowid);
if(rowData.is_locked != null && rowData.is_locked == 'T') {
$('#'+gridIds).setColProp(column,{editable:false});
}else {
$('#'+gridIds).setColProp(column,{editable:true});
}
},
// Show/hide the per-row item-config ("설정") buttons in the view grid.
// Non-editable rows are greyed out and their button hidden; otherwise the
// button is shown only for display types that have selectable items
// (anything other than INPUT / empty).
isConfigProc : function() {
    var rowIDs = $("#attrViewList").jqGrid('getDataIDs');
    for (var i = 0; i < rowIDs.length ; i++) {
        var row =$("#attrViewList").getRowData(rowIDs[i]);
        if(row.is_editable == 'F') { // non-editable rows cannot be modified
            $('#'+rowIDs[i], '#attrViewList').addClass('not-editable-row');
            $("#vconf"+rowIDs[i]).hide();
        }else {
            if((row.display_type != null
                && (row.display_type == "INPUT" || row.display_type == "") )) {
                $("#vconf"+rowIDs[i]).hide();
            }else {
                $("#vconf"+rowIDs[i]).show();
            }
        }
    }
},
isSystem : function(type_id) {
var rowIDs = $("#typeGridList").jqGrid('getDataIDs');
for (var i = 0; i < rowIDs.length ; i++) {
var rowID = rowIDs[i];
var row =$("#typeGridList").getRowData(rowID);
if(type_id == row.type_id && row.is_system == "T" ) {
return true;
}
}
return false;
},
// Append a new editable attribute row, using the gRowData template, to the
// registration grid ("regist") or the view/update grid (anything else).
// NOTE(review): the loop condition `i <= gRowData.length` iterates one past
// the last index, so gRowData[length] (undefined) is also passed to
// addRowData — suspected off-by-one; confirm whether the extra row is
// intentional before changing it.
addRowReg : function(type) {
    if(type == "regist"){ // registration screen
        exsoftAdminTypeFunc.gRowId++;
        for (var i = 0; i <= exsoftAdminTypeFunc.gRowData.length; i++) {
            $("#attrGridList").jqGrid('addRowData', exsoftAdminTypeFunc.gRowId, exsoftAdminTypeFunc.gRowData[i]);
            $('#attrGridList').editRow(exsoftAdminTypeFunc.gRowId,false);
        }
    }else { // view/update screen
        exsoftAdminTypeFunc.mRowId++;
        // Newly added rows must allow editing of attr_id.
        $('#attrViewList').jqGrid('setColProp', 'attr_id', {editable:true});
        for (var i = 0; i <= exsoftAdminTypeFunc.gRowData.length; i++) {
            $("#attrViewList").jqGrid('addRowData', exsoftAdminTypeFunc.mRowId, exsoftAdminTypeFunc.gRowData[i]);
            $('#attrViewList').editRow(exsoftAdminTypeFunc.mRowId,false);
        }
    }
},
// Remove the checked attribute rows from the given grid, then reset the
// grid's checkbox state. (`type` is unused; kept for caller compatibility.)
delRowReg : function(type,gridIds) {
    exsoft.util.grid.gridDeleteRow(gridIds, null, null, true);
    exsoft.util.grid.gridCheckBoxInit(gridIds);
},
// 문서유형속성값 유효성 체크
validAttrItem : function(gridIds) {
var rowIDs = $("#"+gridIds).jqGrid('getDataIDs');
for (var i = 0; i < rowIDs.length ; i++) {
$('#'+gridIds).jqGrid('saveRow', rowIDs[i], exsoftAdminTypeFunc.gParameters ); // row 자동저장 처리
var rowId =$("#"+gridIds).getRowData( rowIDs[i]);
if(rowId.attr_id.length == 0 || rowId.attr_id.indexOf("input name") != -1) {
jAlert("속성ID를 입력하세요.","확인",0);
return false;
}
// 컬럼명은 영문으로 시작해야함 **
if(rowId.attr_id.substring(0,1) == "_" || rowId.attr_id.substring(0,1) == "0" || rowId.attr_id.substring(0,1) == "1" || rowId.attr_id.substring(0,1) == "2" || rowId.attr_id.substring(0,1) == "3"
|| rowId.attr_id.substring(0,1) == "4" || rowId.attr_id.substring(0,1) == "5" || rowId.attr_id.substring(0,1) == "6" || rowId.attr_id.substring(0,1) == "7" || rowId.attr_id.substring(0,1) == "8"
|| rowId.attr_id.substring(0,1) == "9" ) {
jAlert("속성ID는 반드시 영문자로 시작해야 됩니다.","확인",0);
return false;
}
if(rowId.attr_name.length == 0 || rowId.attr_name.indexOf("input name") != -1) {
jAlert("속성명을 입력하세요.","확인",0);
return false;
}
if(rowId.attr_size.length == 0 || rowId.attr_size.indexOf("input name") != -1) {
jAlert("길이를 입력하세요.","확인",0);
return false;
}
if(rowId.sort_index.length == 0 || rowId.sort_index.indexOf("input name") != -1) {
jAlert("정렬순서를 입력하세요.","확인",0);
return false;
}
}
return true;
},
// 문서유형 속성 Array 생성
returnAttrItem : function(gridIds) {
var jsonArr = [];
var jsonArrIndex = 0;
var rowIDs = $("#"+gridIds).jqGrid('getDataIDs');
for (var i = 0; i < rowIDs.length ; i++) {
var rowData = {attr_id:"",attr_name:"",attr_size:"",sort_index:"",is_mandatory:"",is_editable:"",is_search:"",display_type:"",has_item:"",default_item_index:""};
var rowId =$("#"+gridIds).getRowData( rowIDs[i]);
rowData['attr_id'] = rowId.attr_id;
rowData['attr_name'] = rowId.attr_name;
rowData['attr_size'] = rowId.attr_size;
rowData['sort_index'] = rowId.sort_index;
rowData['is_mandatory'] = rowId.is_mandatory;
rowData['is_editable'] = rowId.is_editable;
rowData['is_search'] = rowId.is_search;
rowData['display_type'] = rowId.display_type;
if(gridIds == "attrGridList") {
rowData['has_item'] = rowId.has_item;
}else {
rowData['has_item'] = rowId.has_item_list;
}
rowData['default_item_index'] = rowId.default_item_index;
if(rowData.attr_id){
jsonArr[jsonArrIndex] = rowData;
jsonArrIndex++;
}
}
return jsonArr;
},
// 세부항목등록
attrItemAdd : function() {
var buffer = "";
var addFlag = false;
// 이전 컬럼에 값이 입력되었는지 체크한다.
$('input[name="item_name[]"]').each(function() {
var aValue = $(this).val();
if( $(this).val().length == 0){
addFlag = true;
return;
}
});
if(!addFlag) { // 기존 항목 입력완료 이후에 추가할 수 있다.
exsoftAdminTypeFunc.gAttrItemIdx++;
buffer += "<tr id='itemIdx"+exsoftAdminTypeFunc.gAttrItemIdx+"' >";
buffer += "<td><input type='checkbox' id='itemIdx' name='itemIdx' value='itemIdx"+exsoftAdminTypeFunc.gAttrItemIdx+"' /></td>";
buffer += "<td><input type='text' class='line' name='item_name[]' id='item_name[]' size='20' maxlength='20' onkeypress='return exsoft.util.filter.inputBoxFilter(\""+"[A-Za-z0-9]"+"\");' /></td>";
buffer += "<td><input type='radio' class='line' name='is_default' id='is_default' value='"+exsoftAdminTypeFunc.gAttrItemIdx+"' /></td>";
buffer += "<input type='hidden' name='item_index[]' id='item_index[]' value='"+exsoftAdminTypeFunc.gAttrItemIdx+"'/>";
buffer += "</tr>";
$("#attrItemList").append(buffer);
}
},
//세부항목삭제
attrItemDel : function() {
$('input[name=itemIdx]:checked').each(function() {
$("#"+$(this).val()).remove();
});
},
// Open the attribute-item configuration popup for one grid row.
// Re-populates the popup table from the row's stored item string
// (has_item / has_item_list, format "name:index,name:index") and pre-selects
// the default item radio.
attrConfig : function(rowid,gridIds) {
    var buffer = "";
    var has_item = "";
    exsoftAdminTypeFunc.gRowItemId = rowid;
    exsoft.util.layout.divLayerOpen('register_docuCateType_wrapper', 'register_docuCateType');
    // Check whether this row already has items configured.
    if(gridIds == "attrGridList") {
        has_item = $("#"+gridIds).getRowData(rowid).has_item; // registration
        exsoftAdminTypeFunc.gAttrItemType = "regist";
    }else {
        has_item = $("#"+gridIds).getRowData(rowid).has_item_list; // view
        exsoftAdminTypeFunc.gAttrItemType = "update";
    }
    var default_item_index = $("#"+gridIds).getRowData(rowid).default_item_index;
    $('#attrItemList tr:gt(0)').remove();
    if(has_item != null && has_item.length != 0) {
        // has_item e.g. "1234:1,5678:2", default_item_index e.g. "1"
        var result = has_item.split(",");
        for (var i = 0; i < result.length; i++) {
            var attrItems = result[i].split(":");
            buffer += "<tr id='itemIdx"+attrItems[1]+"' >";
            buffer += "<td><input type='checkbox' id='itemIdx' name='itemIdx' value='itemIdx"+attrItems[1]+"' /></td>";
            buffer += "<td><input type='text' class='line' name='item_name[]' id='item_name[]' size='20' maxlength='20' value='"+attrItems[0]+"' onkeypress='return exsoft.util.filter.inputBoxFilter(\""+"[A-Za-z0-9]"+"\");' /></td>";
            if(attrItems[1] == default_item_index) {
                buffer += "<td class='center'><input type='radio' class='line' name='is_default' id='is_default' value='"+attrItems[1]+"' checked/></td>";
            }else {
                buffer += "<td class='center'><input type='radio' class='line' name='is_default' id='is_default' value='"+attrItems[1]+"' /></td>";
            }
            buffer += "<input type='hidden' name='item_index[]' id='item_index[]' value='"+attrItems[1]+"'/>";
            buffer += "</tr>";
            $("#attrItemList").append(buffer);
            exsoftAdminTypeFunc.gAttrItemIdx = attrItems[1];
            buffer = "";
        }
    }else {
        exsoftAdminTypeFunc.gAttrItemIdx = 0;
    }
    exsoft.util.common.checkboxInit('itemCheck');
},
// 문서유형 항목설정 등록||수정 처리
applyAttrItem : function() {
// 기본여부 선택
if( $("input[name='is_default']:checked").length == 0 ) {
jAlert("기본여부를 선택하세요.","확인",0);
return false;
}
// default_item_index
var defaultIdx = $("input[name='is_default']:checked").val();
// has_item
$('input[name="item_name[]"]').each(function() {
if( $(this).val().length == 0){
jAlert("항목명을 입력하세요.","확인",0);
return false;
}
});
var has_item = [];
$('input[name="item_name[]"]').each(function(index) {
var name = $("input[name='item_name[]']").get(index);
var index = $("input[name='item_index[]']").get(index);
has_item.push(name.value+":"+index.value);
});
// 문서유형 속성 GRID 값 변경적용
if(exsoftAdminTypeFunc.gAttrItemType == "regist") {
$("#attrGridList").setRowData(exsoftAdminTypeFunc.gRowItemId,{ 'has_item': has_item });
$("#attrGridList").setRowData(exsoftAdminTypeFunc.gRowItemId,{ 'default_item_index': defaultIdx });
}else {
$("#attrViewList").setRowData(exsoftAdminTypeFunc.gRowItemId,{ 'has_item_list': has_item });
$("#attrViewList").setRowData(exsoftAdminTypeFunc.gRowItemId,{ 'default_item_index': defaultIdx });
}
// 문서유형속성 아이템창 닫기
exsoft.util.layout.divLayerClose('register_docuCateType_wrapper','register_docuCateType');
}
},
callback : {
    // Shared AJAX callback. `param` names the operation:
    //  - typeInsert: close the registration popup and refresh the list grid.
    //  - typeUpdate: notify, refresh the list grid and the open detail view.
    //  - typeDetail: bind the returned type fields into the view form and
    //    (re)load the attribute grid for that type.
    returnAjaxDataFunction : function(data,param) {
        if(param == 'typeInsert'){
            if(data.result == "true") {
                exsoft.util.grid.gridRefresh('typeGridList',exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl);
                exsoft.util.layout.divLayerClose('register_docuType_wrapper','register_docuType');
            }else {
                jAlert(data.message,"확인",0);
            }
        }else if(param == 'typeUpdate'){
            if(data.result == "true") {
                jAlert("수정완료 되었습니다.","확인",0);
                exsoft.util.grid.gridRefresh('typeGridList',exsoft.contextRoot+exsoftAdminTypeFunc.gridUrl);
                exsoftAdminTypeFunc.ui.attrDetailCall(exsoftAdminTypeFunc.gTypeId);
            }else {
                jAlert(data.message,"확인",0);
            }
        }else if(param == 'typeDetail'){
            if(data.result == "true"){
                // Bind the returned detail fields into the view form.
                exsoftAdminTypeFunc.binder.set("attrTitle",data.typeVO.type_name);
                exsoftAdminTypeFunc.binder.set("type_id",data.typeVO.type_id);
                exsoftAdminTypeFunc.binder.set("type_name",data.typeVO.type_name);
                exsoftAdminTypeFunc.binder.set("sort_index",data.typeVO.sort_index);
                exsoftAdminTypeFunc.binder.set("create_date",data.typeVO.create_date);
                exsoftAdminTypeFunc.binder.set("is_hiddenU",data.typeVO.is_hidden);
                // Buttons: disabled when the type is a system-provided one.
                exsoftAdminTypeFunc.init.btnStateChange(data.typeVO.is_system);
                // XR_ATTR attribute list: reload if the grid exists, otherwise create it.
                if($('#attrViewList')[0].grid != undefined) {
                    var postData = {type_id:exsoftAdminTypeFunc.gTypeId} ;
                    exsoft.util.grid.gridPostDataRefresh('#attrViewList',exsoft.contextRoot+'/admin/attrList.do',postData);
                }else {
                    exsoftAdminTypeFunc.event.fAttrViewList();
                }
            }else {
                jAlert(data.message,'확인',0);
            }
        }
    }
}
}<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/PagingAjaxUtil.java
package kr.co.exsoft.eframework.util;
/**
 * Paging helper for AJAX-driven list pages. Given the current page, the
 * total row count and the paging window sizes, it pre-renders the pager
 * fragments (first/prev/next/last buttons and the numbered page list) as
 * HTML strings whose links invoke the JavaScript callback named by strLink.
 *
 * @author package development team
 * @since 2014.07.21
 * @version 3.0
 */
public class PagingAjaxUtil {
	public int nPage;               // current page number (1-based)
	public int nTotLineNum;         // total number of rows
	public int nMaxListLine;        // maximum rows shown on one page
	public int nMaxListPage;        // number of page links shown in one window
	public String strLink;          // name of the JS function invoked on page moves
	public int nTotPageSize;        // total number of pages
	public String strFirstPage;     // "first page" button HTML
	public String strLastPage;      // "last page" button HTML
	public String strLinkPageList;  // numbered page list ([1][2]...)
	public String strLinkNextPage;  // "next page" button HTML
	public String strLinkPrevPage;  // "previous page" button HTML
	public String strLinkPageNext;  // "next window" button HTML (includes the last-page button)
	public String strLinkPagePrev;  // "previous window" button HTML (includes the first-page button)
	public int nListNum;            // number of rows shown on the current page
	public int currentPageSetUp;    // page number just before the first page of the current window
	public int nFirstArticleNum;    // row number of the first row on the current page
	public String strLinkQuery;     // query string describing the current page
	public String contextRoot;      // context root path (used in image URLs)
	public PagingAjaxUtil() { }
	/**
	 * Initializes all paging values and pre-renders the pager HTML.
	 * @param nPage current page (0 is normalized to 1)
	 * @param nTotLineNum total row count
	 * @param nMaxListLine rows per page
	 * @param nMaxListPage page links per window
	 * @param strLink JS callback name placed in each link's href
	 * @param contextRoot context root used to build image URLs
	 */
	public PagingAjaxUtil(int nPage, int nTotLineNum,int nMaxListLine, int nMaxListPage, String strLink,String contextRoot) {
		this.contextRoot = contextRoot;
		this.nPage = nPage == 0 ? 1 : nPage;
		this.nTotLineNum = nTotLineNum;
		this.nMaxListLine = nMaxListLine;
		this.nMaxListPage = nMaxListPage;
		this.strLink = strLink;
		this.currentPageSetUp = getPageSetUp();
		this.nTotPageSize = getTotPageSize();
		this.strFirstPage = getFirstPage();
		this.strLastPage = getLastPage();
		this.strLinkPageList = getLinkPageList();
		this.strLinkNextPage = getNextPage();
		this.strLinkPrevPage = getPrevPage();
		this.strLinkPageNext = getLinkPageNext() + "\n" + this.strLastPage;
		this.strLinkPagePrev = this.strFirstPage + "\n" + getLinkPagePrev();
		this.nListNum = getListNum();
		this.nFirstArticleNum = getFirstArticleNum();
		this.strLinkQuery = "nPage=" + nPage + strLink;
	}
	/**
	 * Computes the total number of pages (always at least 1).
	 * @return int
	 */
	protected int getTotPageSize() {
		if (nTotLineNum == 0)
			nTotPageSize = 1;
		else if ((nTotLineNum % nMaxListLine) != 0)
			nTotPageSize = nTotLineNum / nMaxListLine + 1;
		else
			nTotPageSize = nTotLineNum / nMaxListLine;
		return nTotPageSize;
	}
	/**
	 * Computes the page number just before the first page of the current
	 * paging window (e.g. 0 for pages 1-10, 10 for pages 11-20).
	 * @return int
	 */
	protected int getPageSetUp() {
		int currentPageSetUp = (nPage / nMaxListPage) * nMaxListPage;
		if (nPage % nMaxListPage == 0)
			currentPageSetUp -= nMaxListPage;
		return currentPageSetUp;
	}
	/**
	 * Number of rows shown on the current page (smaller on the last page).
	 * @return int
	 */
	protected int getListNum() {
		if (nTotPageSize == nPage && (nTotLineNum % nMaxListLine) != 0)
			nListNum = nTotLineNum % nMaxListLine;
		else
			nListNum = nMaxListLine;
		return nListNum;
	}
	/**
	 * HTML for the "previous window" button (jumps back one page window);
	 * rendered inactive on the first window.
	 * @return String
	 */
	protected String getLinkPagePrev() {
		if (nPage > nMaxListPage)
			return new StringBuffer("<li class='prev'><a href=\"")
					.append(strLink)
					.append("(")
					.append(currentPageSetUp - nMaxListPage + 1)
					.append(");\"")
					.append("><img src='"+contextRoot+"/img/icon/pg_prev2.png' border='0'></a></li>").toString(); // image swapped
		else
			return "<li class='prev'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_prev.png' border='0'></a></li>";
	}
	/**
	 * HTML for the "first page" button; inactive when already on page 1.
	 * @return String
	 */
	protected String getFirstPage() {
		if (nPage > 1)
			return new StringBuffer("<li class='first'><a href=\"")
					.append(strLink)
					.append("(1);\"><img src='"+contextRoot+"/img/icon/pg_first2.png' border='0'></a></li>").toString();
		else
			return "<li class='first'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_first.png' border='0'></a></li>";
	}
	/**
	 * HTML for the "previous page" button; inactive when on page 1.
	 * @return String
	 */
	protected String getPrevPage() {
		int nPrevPage = nPage - 1;
		if (nPage > 1)
			return new StringBuffer("<li class='prev'><a href=")
					.append("\"" + strLink + "(" + nPrevPage + ");\"")
					.append("><img src='"+contextRoot+"/img/icon/pg_prev2.png' border='0'></a></li>").toString();
		else
			return "<li class='prev'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_prev.png' border='0'></a></li>";
	}
	/**
	 * HTML for the numbered page list of the current window; the current
	 * page is rendered as inactive with the 'curr' class.
	 * @return String
	 */
	protected String getLinkPageList() {
		StringBuffer buf = new StringBuffer("");
		for (int i = (currentPageSetUp + 1); i <= (nTotPageSize)
				&& i <= (currentPageSetUp + nMaxListPage); i++) {
			if (i != nPage)
				buf.append("<li><a href =\"").append(strLink).append("(").append(i).append("); \">").append(i).append(" </a></li>");
			else
				buf.append("<li class='curr'><a href='javascript:void(0);'>").append(i).append("</a></li>"); // current page
		}
		return buf.toString();
	}
	/**
	 * HTML for the "next page" button; inactive on the last page.
	 * @return String
	 */
	protected String getNextPage() {
		int nNextPage = nPage + 1;
		if (nPage < nTotPageSize)
			return new StringBuffer("<li class='next'><a href=\"")
					.append(strLink)
					.append("(")
					.append(nNextPage)
					.append(");\"><img src='"+contextRoot+"/img/icon/pg_next2.png' border='0'></a></li>").toString();
		else
			return "<li class='next'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_next.png' border='0'></a></li>";
	}
	/**
	 * HTML for the "next window" button (jumps ahead one page window);
	 * inactive on the last window.
	 * @return String
	 */
	protected String getLinkPageNext() {
		if ((nTotPageSize - currentPageSetUp) > nMaxListPage)
			return new StringBuffer("<li class='next'><a href=\"")
					.append(strLink)
					.append("(")
					.append(currentPageSetUp + nMaxListPage + 1)
					.append(");\"><img src='"+contextRoot+"/img/icon/pg_next2.png' border='0'></a></li>").toString();
		else
			return "<li class='next'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_next.png' border='0'></a></li>";
	}
	/**
	 * HTML for the "last page" button; inactive on the last page.
	 * @return String
	 */
	protected String getLastPage() {
		if (nPage < nTotPageSize)
			return new StringBuffer("<li class='last'><a href=\"")
					.append(strLink)
					.append("(")
					.append(nTotPageSize)
					.append(");\"><img src='"+contextRoot+"/img/icon/pg_last2.png' border='0'></a></li>").toString();
		else
			return "<li class='last'><a href='javascript:void(0);'><img src='"+contextRoot+"/img/icon/pg_last.png' alt='' title=''></a></li>";
	}
	/**
	 * Row number of the first row shown on the current page (1-based).
	 * @return int
	 */
	protected int getFirstArticleNum() {
		int nFirstArticleNum = (nPage - 1) * nMaxListLine + 1;
		if (nFirstArticleNum <= 0)
			nFirstArticleNum = 1;
		return nFirstArticleNum;
	}
	/**
	 * URL-encodes a value passed along on page moves.
	 * NOTE(review): the exception is silently swallowed, and ISO-8859-1
	 * cannot represent Korean text — confirm callers before relying on this.
	 * @param str
	 * @return String
	 */
	protected String getUrlLink(String str) {
		try {
			str = java.net.URLEncoder.encode(str, "ISO-8859-1");
		} catch (java.io.UnsupportedEncodingException e) {
		}
		return str;
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/eframework/util/ZipUtils.java
package kr.co.exsoft.eframework.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipInputStream;
import org.apache.tools.zip.ZipOutputStream;
import org.apache.tools.zip.ZipEntry;
/**
 * Utility methods for creating and reading ZIP archives, both on disk and
 * in memory. Archive creation uses the Apache Ant zip classes (they allow
 * an explicit entry-name encoding); in-memory extraction uses java.util.zip.
 *
 * @author package team
 * @since 2014. 10. 28.
 * @version 1.0
 */
public class ZipUtils {
	// Shared copy buffer. NOTE(review): static mutable state — these methods
	// are not safe to call from multiple threads concurrently.
	private static final byte[] buf = new byte[1024];
	/**
	 * Zips the file or directory at targetPath into zipPath.
	 * Fails when the parent directory of zipPath does not exist.
	 */
	public static void createZipFile(String targetPath, String zipPath)throws Exception{
		createZipFile(targetPath, zipPath, false);
	}
	/**
	 * Zips the file or directory at targetPath into zipPath.
	 * @param isDirCre when true, missing parent directories of zipPath are created
	 */
	public static void createZipFile(String targetPath, String zipPath, boolean isDirCre)throws Exception{
		File fTargetPath = new File(targetPath);
		File[] files;
		if(fTargetPath.isDirectory()){
			files = fTargetPath.listFiles();
		}else{
			files = new File[] { fTargetPath };
		}
		File path = new File(zipPath);
		if(isDirCre){
			// Create missing parent directories on request.
			new File(path.getParent()).mkdirs();
		}
		ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream(path));
		try{
			// Write all entries; closing the stream finalizes the archive.
			makeZipFile(files, zipOut, "");
		}finally{
			// BUGFIX: the stream used to leak when makeZipFile threw.
			zipOut.close();
		}
	}
	/**
	 * Zips an explicit list of files into zipPath.
	 * ex) String[] arrZip = new String[]{"C:\\aaa.txt", "C:\\bbb.txt", "C:\\ccc.txt"}
	 *     ZipUtils.createZipFile(arrZip, "C:\\test.zip");
	 */
	public static void createZipFile(String[] targetFiles, String zipPath)throws Exception{
		createZipFile(targetFiles, zipPath, false);
	}
	/**
	 * Zips an explicit list of files into zipPath (optionally creating the
	 * target directory first).
	 */
	public static void createZipFile(String[] targetFiles, String zipPath, boolean isDirCre)throws Exception{
		File[] files = new File[targetFiles.length];
		for(int i = 0; i < files.length; i++){
			files[i] = new File(targetFiles[i]);
		}
		File path = new File(zipPath);
		if(isDirCre){
			// Create the directory when it does not exist.
			new File(path.getParent()).mkdirs();
		}
		// Output stream of the zip file.
		ZipOutputStream zipOut = new ZipOutputStream(new FileOutputStream(path));
		try{
			makeZipFile(files, zipOut, "");
		}finally{
			// BUGFIX: close even when makeZipFile throws (was leaked before).
			zipOut.close();
		}
	}
	/** Compresses a byte array and returns the zipped bytes (single entry "temp.tmp"). */
	public static byte[] compressToZip(byte[] src)throws Exception{
		byte[] retSrc = null;
		ByteArrayOutputStream baos = null;
		try{
			ByteArrayInputStream bais = new ByteArrayInputStream(src);
			baos = new ByteArrayOutputStream();
			ZipOutputStream zos = new ZipOutputStream(baos) ;
			zos.putNextEntry(new ZipEntry("temp.tmp"));
			int bytes_read = 0;
			// Copy src through the zip stream.
			while((bytes_read = bais.read(buf)) != -1){
				zos.write(buf, 0, bytes_read);
			}
			bais.close();
			zos.close();
			// The byte array is only complete after the zip stream is closed.
			retSrc = baos.toByteArray();
		}catch(Exception e){
			throw new Exception(e);
		}finally{
			// BUGFIX: null-check — baos is null when its construction failed,
			// and the old unconditional close() raised an NPE that masked the
			// original exception.
			if(baos != null){
				baos.close();
			}
		}
		return retSrc;
	}
	/** Writes already-zipped bytes to a file at zipPath. */
	private static void makeZipFile(byte[] src, String zipPath)throws Exception{
		FileOutputStream fos = null;
		ByteArrayInputStream bais = null;
		try{
			fos = new FileOutputStream(zipPath);
			bais = new ByteArrayInputStream(src);
			int bytes_read = 0;
			while((bytes_read = bais.read(buf)) != -1){
				fos.write(buf, 0, bytes_read);
			}
		}catch(Exception e){
			throw new Exception(e);
		}finally{
			// BUGFIX: null-checks — if the FileOutputStream constructor threw
			// (bad path), the old finally block raised an NPE hiding the
			// real error.
			if(fos != null){
				fos.close();
			}
			if(bais != null){
				bais.close();
			}
		}
	}
	/** Unzips a zipped byte array (first entry only) back into raw bytes. */
	public static byte[] unZip(byte[] src)throws Exception{
		byte[] retSrc = null;
		ByteArrayOutputStream baos = null;
		ZipInputStream zis = null;
		int bytes_read = 0;
		try{
			zis = new ZipInputStream(new ByteArrayInputStream(src));
			baos = new ByteArrayOutputStream();
			zis.getNextEntry(); // archives produced by this class hold exactly one entry
			while((bytes_read = zis.read(buf)) != -1){
				baos.write(buf, 0, bytes_read);
			}
			retSrc = baos.toByteArray();
		}catch(Exception e){
			throw new Exception(e);
		}finally{
			// BUGFIX: null-checks to avoid an NPE masking the real exception.
			if(baos != null){
				baos.close();
			}
			if(zis != null){
				zis.close();
			}
		}
		return retSrc;
	}
	/** Compresses a string (UTF-8) and returns the zipped bytes. */
	public static byte[] compressToZip(String src)throws Exception{
		return compressToZip(src.getBytes("UTF-8"));
	}
	/** Compresses a byte array and writes the result as a zip file. */
	public static void srcToZipFile(byte[] src, String zipPath)throws Exception{
		byte[] retSrc = null;
		// compress
		retSrc = compressToZip(src);
		// write to file
		makeZipFile(retSrc, zipPath);
	}
	/** Compresses a string (UTF-8) and writes the result as a zip file. */
	public static void srcToZipFile(String src, String zipPath)throws Exception{
		byte[] retSrc = null;
		// compress
		retSrc = compressToZip(src.getBytes("UTF-8"));
		// write to file
		makeZipFile(retSrc, zipPath);
	}
	/**
	 * Reads a zip file and returns its uncompressed contents.
	 * NOTE(review): never implemented in the original — it always returns
	 * null. Kept as-is so existing callers see unchanged behavior.
	 * TODO implement or remove.
	 */
	public static byte[] zipFileToSrc(String zipPath)throws Exception{
		byte[] retSrc = null;
		return retSrc;
	}
	/**
	 * Recursively writes the given files into an open ZipOutputStream.
	 * Directory trees are walked, but each entry is named after the file
	 * only (the targetDir prefix is computed yet not used for entry names —
	 * behavior preserved from the original).
	 */
	public static void makeZipFile(File[] files, ZipOutputStream zipOut, String targetDir) throws Exception{
		for(int i = 0; i < files.length; i++){
			File compPath = new File(files[i].getPath());
			if(compPath.isDirectory()){
				File[] subFiles = compPath.listFiles();
				makeZipFile(subFiles, zipOut, targetDir+compPath.getName()+"/");
				continue;
			}
			FileInputStream in = new FileInputStream(compPath);
			try{
				// BUGFIX: set the entry-name encoding before the entry is
				// written; it used to be set after putNextEntry, so the first
				// entry's name was encoded with the platform default instead
				// of euc-kr.
				zipOut.setEncoding("euc-kr");
				//zipOut.putNextEntry(new ZipEntry(targetDir+"/"+files[i].getName()));
				// changed for apache-ant-zip-1.8.0.jar
				zipOut.putNextEntry(new ZipEntry(compPath.getName()));
				int data;
				while((data = in.read(buf)) > 0){
					zipOut.write(buf, 0, data);
				}
				zipOut.closeEntry();
			}finally{
				// BUGFIX: the input stream used to leak when a write threw.
				in.close();
			}
		}
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/note/service/NoteServiceImpl.java
package kr.co.exsoft.note.service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.library.ExsoftAbstractServiceImpl;
import kr.co.exsoft.eframework.util.CommonUtil;
import kr.co.exsoft.eframework.util.PagingAjaxUtil;
import kr.co.exsoft.eframework.util.StringUtil;
import kr.co.exsoft.note.dao.NoteDao;
import kr.co.exsoft.note.vo.NoteManageVO;
import kr.co.exsoft.note.vo.NoteVO;
import kr.co.exsoft.user.dao.GroupDao;
import kr.co.exsoft.user.vo.GroupVO;
import kr.co.exsoft.user.vo.GroupedVO;
import net.sf.json.JSONArray;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
/**
* Note 서비스 구현 부분
* @author 패키지 개발팀
* @since 2015.03.02
* @version 3.0
*
*/
@Service("noteService")
public class NoteServiceImpl extends ExsoftAbstractServiceImpl implements NoteService {
@Autowired
@Qualifier("sqlSession")
private SqlSession sqlSession;
@Autowired
private CommonService commonService;
	/**
	 * Creates a new note and its management rows.
	 *
	 * Flow: allocate a new note id (which is also the thread/root id), insert
	 * the XR_NOTE row, insert one XR_NOTEMANAGE row for the sender (type "S"),
	 * then one XR_NOTEMANAGE row per recipient (type "R") plus an
	 * XR_NOTE_QUEUE row for Rgate integration. Any failed insert aborts with
	 * a "common.system.error" exception.
	 *
	 * @param noteList recipient entries; each map is expected to carry
	 *                 "accessor_id" and "accessor_isgroup" — NOTE(review):
	 *                 accessor_isgroup is read but currently unused
	 * @param map note payload; "content" and "note_from" are read
	 * @param sessionVO current user session (sender id)
	 * @return map containing "result" = Constant.RESULT_TRUE on success
	 */
	public Map<String, Object> noteListForInserting(List<HashMap<String, Object>> noteList, HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
		int result=0;
		HashMap<String,Object> resultMap = new HashMap<String, Object>();
		NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
		//GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
		// Allocate a new note id; a brand-new note is its own thread root.
		int noteid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTE);
		String newNoteID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTE, noteid_seq);
		String newRootID = newNoteID;
		// NOTE(review): gid and ntlist are never used — leftovers from a test.
		String gid = "";
		String ntlist ="";// "accessor_isgroup=accessor_id:"", accessor_isgroup:""";
		//resultList = getNoteVOFromReciverList(noteList,groupDao);
		// XR_NOTE insert payload.
		HashMap<String,Object> param = new HashMap<String, Object>();
		param.put("note_id", newNoteID);
		param.put("root_id", newRootID);
		param.put("creator_id", sessionVO.getSessId());
		param.put("content", map.get("content"));
		param.put("note_from", map.get("note_from"));
		param.put("note_from_userid", "");
		param.put("note_from_groupid", "");
		param.put("note_from_pgroupid", "");
		//XR_NOTE INSERT
		result = noteDao.noteWrite(param);
		if(result == 0) { throw processException("common.system.error"); }
		//XR_NOTEMANAGE INSERT
		// NOTE(review): manageVOList is populated nowhere — unused.
		List<NoteManageVO> manageVOList = new ArrayList<NoteManageVO>();
		NoteManageVO manageVO = new NoteManageVO();
		// Sender row (note_type "S").
		int notemid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTEMANAGE);
		String newManageID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTEMANAGE, notemid_seq);
		manageVO.setManage_id(newManageID);
		manageVO.setNote_id(newNoteID);
		manageVO.setRoot_id(newRootID);
		manageVO.setNote_target_id(sessionVO.getSessId());
		manageVO.setNote_type("S");
		manageVO.setNote_save("N");
		manageVO.setNote_read("N");
		result = noteDao.noteManageWrite(manageVO);
		if(result == 0) { throw processException("common.system.error"); }
		// Recipient rows (note_type "R"), one per entry in noteList.
		for(HashMap<String,Object> nMap : noteList) {
			String accessor_id = nMap.get("accessor_id").toString();
			String accessor_isgroup = nMap.get("accessor_isgroup").toString();
			manageVO = new NoteManageVO();
			notemid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTEMANAGE);
			newManageID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTEMANAGE, notemid_seq);
			manageVO.setManage_id(newManageID);
			manageVO.setNote_id(newNoteID);
			manageVO.setRoot_id(newRootID);
			manageVO.setNote_target_id(accessor_id);
			manageVO.setNote_type("R");
			manageVO.setNote_save("N");
			manageVO.setNote_read("N");
			result = noteDao.noteManageWrite(manageVO);
			if(result == 0) { throw processException("common.system.error"); }
			// For Rgate integration: XR_NOTE_QUEUE insert.
			result = noteDao.noteQueueWrite(manageVO);
			if(result == 0) { throw processException("common.system.error"); }
		}
		resultMap.put("result",Constant.RESULT_TRUE);
		return resultMap;
	}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 :
* </pre>
* @Method Name : noteListForInserting
* @param map
* @return List<NoteVO>
*/
public Map<String, Object> noteListForReInserting(HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
// TODO Auto-generated method stub
int result=0;
HashMap<String,Object> resultMap = new HashMap<String, Object>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
//GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
//
int noteid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTE);
String newNoteID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTE, noteid_seq);
//XR_NOTE INSERT
HashMap<String,Object> param = new HashMap<String, Object>();
param.put("note_id", newNoteID);
param.put("root_id", map.get("root_id"));
param.put("creator_id", sessionVO.getSessId());
param.put("content", map.get("content"));
param.put("note_from", map.get("note_from"));
param.put("note_from_userid",map.get("note_from_userid"));
param.put("note_from_groupid", "");
param.put("note_from_pgroupid", "");
//XR_NOTE INSERT
result = noteDao.noteWrite(param);
if(result == 0) { throw processException("common.system.error"); }
//XR_NOTEMANAGE INSERT
// 수신자 Insert
List<NoteManageVO> manageVOList = new ArrayList<NoteManageVO>();
NoteManageVO manageVO = new NoteManageVO();
// 발신자 Insert
//============================================
int notemid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTEMANAGE);
String newManageID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTEMANAGE, notemid_seq);
/*
System.out.println("=======================================");
System.out.println("newManageID = " + newManageID);
System.out.println("=======================================");*/
manageVO.setManage_id(newManageID);
manageVO.setNote_id(newNoteID);
manageVO.setRoot_id(map.get("root_id").toString());
manageVO.setNote_target_id(sessionVO.getSessId());
manageVO.setNote_type("S");
manageVO.setNote_save("N");
manageVO.setNote_read("N");
result = noteDao.noteManageWrite(manageVO);
if(result == 0) { throw processException("common.system.error"); }
//=============================================
manageVO = new NoteManageVO();
notemid_seq = commonService.commonNextVal(Constant.COUNTER_ID_NOTEMANAGE);
newManageID = CommonUtil.getStringID(Constant.ID_PREFIX_NOTEMANAGE, notemid_seq);
manageVO.setManage_id(newManageID);
manageVO.setNote_id(newNoteID);
manageVO.setRoot_id(map.get("root_id").toString());
manageVO.setNote_target_id(map.get("note_from_userid").toString());
manageVO.setNote_type("R");
manageVO.setNote_save("N");
manageVO.setNote_read("N");
result = noteDao.noteManageWrite(manageVO);
if(result == 0) { throw processException("common.system.error"); }
//Rgate 연계를 위해
//XR_NOTE_QUEUE INSERT
result = noteDao.noteQueueWrite(manageVO);
if(result == 0) { throw processException("common.system.error"); }
resultMap.put("result",Constant.RESULT_TRUE);
return resultMap;
}
/**
*
* <pre>
* 1. 개용 : AclItemListVO를 이용하여 NoteVO를 만듬
* </pre>
* @Method Name : aclWriteValid
* @param map
* @return
* @throws Exception List<AclItemVO>
*/
public static HashMap<String,Object> getNoteVOFromReciverList(String reciverListArrayList,GroupDao groupDao) throws Exception{
/*reciverListArrayList = "[{\"accessor_id\" : \"user011\", \"accessor_isgroup\":\"F\"},"
+ "{\"accessor_id\" : \"GRP000000000004\", \"accessor_isgroup\":\"GT\"},"
+ "{\"accessor_id\":\"GRP000000000005\", \"accessor_isgroup\":\"GT\"}]";
*/
if(StringUtil.isEmpty(reciverListArrayList)){
throw new Exception("common.required.error");
}
// JsonArray 객체 생성하기
JSONArray jsonArray = JSONArray.fromObject(reciverListArrayList);
String from_user_id="";
String from_group_id="";
String from_pgroup_id="";
HashMap<String,Object> reciver = new HashMap<String,Object>();
List<GroupedVO> groupUserList = new ArrayList<GroupedVO>();
GroupedVO userid= new GroupedVO();
List list = new ArrayList();
if(jsonArray.size() > 0 ) {
for(int j=0; j<jsonArray.size(); j++){
if(jsonArray.getJSONObject(j).getString("accessor_isgroup").equals("GT")){
reciver = new HashMap<String,Object>();
//그룹아이디로 user_id 추출
groupUserList = groupDao.groupedList(jsonArray.getJSONObject(j).getString("accessor_id"));
reciver.put("USERID", groupUserList);
list.add(reciver);
from_group_id += jsonArray.getJSONObject(j).getString("accessor_id")+"#";
}else if(jsonArray.getJSONObject(j).getString("accessor_isgroup").equals("PT")){
reciver = new HashMap<String,Object>();
//프로젝트아이디로 user_id 추출
groupUserList = groupDao.groupedList(jsonArray.getJSONObject(j).getString("accessor_id"));
reciver.put("USERID", groupUserList);
list.add(reciver);
from_pgroup_id += jsonArray.getJSONObject(j).getString("accessor_id")+"#";
}else{
reciver = new HashMap<String,Object>();
from_user_id += jsonArray.getJSONObject(j).getString("accessor_id")+"#";
userid.setUser_id(jsonArray.getJSONObject(j).getString("accessor_id"));
groupUserList.add(userid);
reciver.put("USERID", groupUserList);
list.add(reciver);
}
}
}/*
System.out.println("==================================");
System.out.println("LIST_SIZE = " +list.size());
System.out.println("from_group_id = " +from_group_id);
System.out.println("from_pgroup_id = " +from_pgroup_id);
System.out.println("from_user_id = " +from_user_id);
System.out.println("==================================");*/
groupUserList = new ArrayList<GroupedVO>();
HashMap<String,Object> reciver01 = new HashMap<String,Object>();
List list2 = new ArrayList();
for(int i=0;i<list.size(); i++) {
reciver01 = (HashMap<String, Object>) list.get(i);
list2.add((List<GroupedVO>) reciver01.get("USERID"));
}
HashMap<String,Object> reciver02 = new HashMap<String,Object>();
reciver02.put("note_target_id", list2);
reciver02.put("from_user_id", from_user_id);
reciver02.put("from_group_id", from_group_id);
reciver02.put("from_pgroup_id", from_pgroup_id);
return reciver02;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 :
* </pre>
* @Method Name : noteNewTopNInfoList
* @param map
* @return List<NoteVO>
*/
public Map<String, Object> noteNewTopNInfoList(HashMap<String, Object> map) {
HashMap<String,Object> resultMap = new HashMap<String, Object>();
List<NoteVO> noteList = new ArrayList<NoteVO>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
// 1.user_id로 읽지 않은 쪽지 목록 리스트 가져오기
noteList = noteDao.noteNewTopNInfoList(map);
resultMap.put("list",noteList);
resultMap.put("count",noteList.size()); // TOP & MENU 쪽지 개수 표시
//TEST ========================================================================
/*GroupDao groupDao = sqlSession.getMapper(GroupDao.class);
HashMap<String,Object> resultList = new HashMap<String, Object>();
String ntlist ="";// "accessor_isgroup=accessor_id:"", accessor_isgroup:""";
try{
resultList = getNoteVOFromReciverList(ntlist,groupDao);
String gid = (String) resultList.get("from_group_id");
String pid = (String) resultList.get("from_pgroup_id");
String uid = (String) resultList.get("from_user_id");
List<GroupedVO> groupUserList = new ArrayList<GroupedVO>();
List list = (List) resultList.get("note_target_id");
for(int i=0;i<list.size(); i++) {
groupUserList = (List<GroupedVO>) list.get(i);
for(int j=0;j<groupUserList.size(); j++) {
System.out.println("==================================");
System.out.println("GROUP_ID = " +groupUserList.get(j).getGroup_id());
System.out.println("USER_ID = " +groupUserList.get(j).getUser_id());
System.out.println("==================================");
}
}
}catch (Exception e) {
e.printStackTrace();
}*/
//========================================================================
/*
System.out.println("==========================" );
System.out.println("NOTE RESULT SIZE : " + noteList.size());
//System.out.println("sender_name = " +noteList.get(0).getSender_name());
//System.out.println("newnote_cnt = " +noteList.get(0).getNewnote_cnt());
System.out.println("==========================" );*/
return resultMap;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 :
* </pre>
* @Method Name : noteAllReceiveSendInfoList
* @param map
* @return List<NoteVO>
*/
public Map<String, Object> noteAllReceiveSendInfoList(HashMap<String, Object> map) {
HashMap<String,Object> resultMap = new HashMap<String, Object>();
List<NoteVO> noteList = new ArrayList<NoteVO>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
// 1.user_id로 모든 쪽지 목록 리스트를 최신순으로 가져오기
noteList = noteDao.noteAllReceiveSendInfoList(map);
resultMap.put("list",noteList);
resultMap.put("result",Constant.RESULT_TRUE);
return resultMap;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 : 보낸쪽지 받은 쪽지의 보관 상태 update
* </pre>
* @Method Name : noteSaveUpdate
* @param map
* @return List<NoteVO>
*/
public int noteSaveUpdate(HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
int result = 0;
//HashMap<String, Object> resultMap = new HashMap<String, Object>();
String manageId = StringUtil.getMapString(map, "manage_id");
/*
System.out.println("+++++++++++++++++++++++++++++++++++++++++++++");
System.out.println("mangeID = "+ manageId);
System.out.println("+++++++++++++++++++++++++++++++++++++++++++++");
*/
// 보관한 저장 상태값 update
HashMap<String,Object> param = new HashMap<String, Object>();
param.put("user_id", sessionVO.getSessId());
param.put("manage_id", manageId);
result = noteDao.noteSaveUpdate(param);
if(result == 0) { throw processException("common.system.error"); }
return result;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 : 받은 쪽지의 읽음 상태 update
* </pre>
* @Method Name : noteReadUpdate
* @param map
* @return List<NoteVO>
*/
public int noteReadUpdate(HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
int result = 0;
//HashMap<String, Object> resultMap = new HashMap<String, Object>();
String manageId = StringUtil.getMapString(map, "manage_id");
// 보관한 저장 상태값 update
HashMap<String,Object> param = new HashMap<String, Object>();
param.put("user_id", sessionVO.getSessId());
param.put("manage_id", manageId);
result = noteDao.noteReadUpdate(param);
if(result == 0) { throw processException("common.system.error"); }
return result;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 : 보낸쪽지 받은 쪽지의 보관 상태 update
* </pre>
* @Method Name : noteSaveUpdate
* @param map
* @return int
*/
public int noteDelete(HashMap<String, Object> map, SessionVO sessionVO) throws Exception {
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
//HashMap<String, Object> resultMap = new HashMap<String, Object>();
int result = 0;
String noteId = StringUtil.getMapString(map, "note_id");
String manageId = StringUtil.getMapString(map, "manage_id");
String rootId = StringUtil.getMapString(map, "root_id");
/*
System.out.println("+++++++++++++++++++++++++++++++++++++++++++++");
System.out.println("noteId = "+ noteId);
System.out.println("manageId = "+ manageId);
System.out.println("rootId = "+ rootId);
System.out.println("+++++++++++++++++++++++++++++++++++++++++++++");*/
// parameter setting
HashMap<String,Object> param = new HashMap<String, Object>();
param.put("user_id", sessionVO.getSessId());
param.put("manage_id", manageId);
param.put("root_id", rootId);
param.put("note_id", noteId);
param.put("del_kbn", map.get("del_kbn"));
//noteManage 테이블의 보관데이터가 아닌 쪽지 삭제
result = noteDao.noteManageDelete(param);
//root_id로 보관데이터의 갯수를 가져옴
int selectresult = noteDao.noteRootCount(param);
//보관 데이터가 없으면 XR_NOTE의 데이터 삭제
if(result>0 && selectresult==0){
result = noteDao.noteDelete(param);
}
if(result == 0) { throw processException("common.system.error"); }
return result;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 : 대화함 리스트 가져오기
* </pre>
* @Method Name : noteTalkList
* @param map
* @return List<NoteVO>
*/
public Map<String, Object> noteTalkList(HashMap<String, Object> map) {
HashMap<String,Object> resultMap = new HashMap<String, Object>();
List<NoteVO> noteList = new ArrayList<NoteVO>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
NoteVO nvo= new NoteVO();
// 1.user_id로 대화함 목록 리스트 가져오기
noteList = noteDao.noteTalkList(map);
for(int i=0; i<noteList.size(); i++){
nvo= new NoteVO();
nvo = noteList.get(i);
String note_id= noteList.get(i).getNote_id();//.substring(3);
String root_id= noteList.get(i).getRoot_id();//.substring(3);
/*
System.out.println("==================================================================");
System.out.println("root_id = "+ root_id+":note_id =" + note_id);
System.out.println("==================================================================");*/
//if(note_id.equals(root_id) && noteList.get(i).getCreator_id().equals(noteList.get(i).getNote_target_id())){
if(note_id.equals(root_id)){
nvo.setNote_kbn(0);
}else{
nvo.setNote_kbn(1);
}/*
System.out.println("==================================================================");
System.out.println(noteList.get(i).getCreator_id()+":" + noteList.get(i).getNote_kbn());
System.out.println("==================================================================");*/
noteList.set(i, nvo);
}
resultMap.put("list",noteList);
return resultMap;
}
/**
*
* <pre>
* 1. 개용 :
* 2. 처리내용 : 대화함 리스트 가져오기
* </pre>
* @Method Name : noteTalkList
* @param map
* @return List<NoteVO>
*/
public Map<String, Object> noteTalkDetailList(HashMap<String, Object> map) {
HashMap<String,Object> resultMap = new HashMap<String, Object>();
List<NoteVO> noteList = new ArrayList<NoteVO>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
// 1.user_id, root_id 로 대화함 목록 상세 리스트 가져오기
noteList = noteDao.noteTalkDetailList(map);
resultMap.put("list",noteList);
return resultMap;
}
public List<HashMap<String, Object>> noteValidList(HashMap<String,Object> map) throws Exception {
List<HashMap<String, Object>> ret = new ArrayList<HashMap<String, Object>>();
NoteDao noteDao = sqlSession.getMapper(NoteDao.class);
/***********************************************************************************************************
* 주) 쪽지 받는사람 처리 파라미터정의 JsonArry 형식이며 아래의 필수값 포함해야됨.
* delDocList=[
* {"accessor_isgroup":"T","accessor_id":"GRP0000000001"},
* {"accessor_isgroup":"F","accessor_id":"user021"},
* {"accessor_isgroup":"F","accessor_id":"user034"}]
***********************************************************************************************************/
String reciverList = map.get("reciveList") != null ? map.get("reciveList").toString() : "";
// 1.입력값 유효성 체크
if(reciverList.equals("") || reciverList.equals("")) {
throw processException("common.required.error");
}
// 2.JsonArray To List
JSONArray jsonArray = JSONArray.fromObject(reciverList);
if(jsonArray.size() > 0 ) {
for(int j=0;j < jsonArray.size();j++) {
HashMap<String, Object> docInfo = new HashMap<String, Object>();
docInfo.put("accessor_isgroup",jsonArray.getJSONObject(j).getString("accessor_isgroup").toString());
docInfo.put("accessor_id",jsonArray.getJSONObject(j).getString("accessor_id").toString());
// docInfo.put("accessor_name",jsonArray.getJSONObject(j).getString("accessor_name").toString());
/*
System.out.println("================================================");
System.out.println(jsonArray.getJSONObject(j).getString("accessor_id").toString());
System.out.println("================================================");*/
ret.add(docInfo);
}
}
return ret;
}
}
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/controller/AdminTrashDeleteQuartz.java
package kr.co.exsoft.quartz.controller;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.service.CommonService;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.service.DocumentService;
import kr.co.exsoft.eframework.configuration.Constant;
import kr.co.exsoft.eframework.exception.BizException;
import kr.co.exsoft.quartz.service.QuartzService;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.quartz.JobExecutionContext;
/**
 * Trash management - scheduled permanent deletion of the system (admin) trash.
 *
 * Quartz batch job: loads the trash retention policy from XR_DOC_CONFIG and
 * discards system-trash documents whose retention period has expired.
 *
 * @author Package Team
 * @since 2014. 9. 29.
 * @version 1.0
 *
 */
public class AdminTrashDeleteQuartz extends QuartzJob {
	protected static final Log logger = LogFactory.getLog(AdminTrashDeleteQuartz.class);
	protected SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
	// Policy holders filled by setTrashConfig(): strash = system/admin trash,
	// ptrash = personal trash. Each holds "is_use" and "decade" (retention).
	protected HashMap<String,Object> strash = new HashMap<String,Object>();
	protected HashMap<String,Object> ptrash = new HashMap<String,Object>();
	// Chunk size: documents are handed to adminTrashDeleteDoc() in batches.
	protected int delimiter = 10;
	@Override
	protected void executeJob(JobExecutionContext context) {
		CommonService commonService = (CommonService)super.getBean("commonService");
		DocumentService documentService = (DocumentService)super.getBean("documentService");
		QuartzService quartzService = (QuartzService)super.getBean("quartzService");
		List<HashMap<String,Object>> trashConfig = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> historyUser = new HashMap<String,Object>();
		HashMap<String,Object> param = new HashMap<String,Object>();
		long sTime = System.currentTimeMillis();
		try {
			logger.info("TerminateTrashDeleteQuartz START ="+df.format(sTime));
			// 1. XR_DOCUMENT_HT history user info :: the system administrator
			historyUser = quartzService.systemUserInfo(param);
			// 2. Load the trash-management policy settings.
			param.put("stype",Constant.SYS_TYPE_TRASH);
			trashConfig = commonService.trashConfig(param);
			this.setTrashConfig(trashConfig);
			// 3. Auto-delete (discard) the admin/system trash.
			this.strashDelete(strash,historyUser,commonService,documentService,quartzService);
		}catch(Exception e) {
			logger.error(e.getMessage());
		}finally{
		}
		long eTime = System.currentTimeMillis();
		logger.info("TerminateTrashDeleteQuartz END ="+df.format(eTime));
	}
	/**
	 *
	 * <pre>
	 * 1. Overview : Stores the trash (personal/system) policy settings into the
	 *               ptrash / strash holders.
	 * 2. Details  : Each policy row carries "doc_type", "is_use" and "decade".
	 * </pre>
	 * @Method Name : setTrashConfig
	 * @param trashConfig policy rows loaded from XR_DOC_CONFIG
	 */
	public void setTrashConfig(List<HashMap<String,Object>> trashConfig) {
		if(trashConfig != null && trashConfig.size() > 0) {
			for(HashMap<String,Object> map : trashConfig) {
				if(map.get("doc_type") != null && map.get("doc_type").toString().equals(Constant.PRIVATE_TRASH)) {
					// 1.1 Personal trash policy
					this.ptrash.put("is_use",map.get("is_use"));
					this.ptrash.put("decade",map.get("decade"));
				}else {
					// 1.2 Admin (system) trash policy
					this.strash.put("is_use",map.get("is_use"));
					this.strash.put("decade",map.get("decade"));
				}
			}
		}
	}
	/**
	 *
	 * <pre>
	 * 1. Overview : Permanently deletes expired documents from the system trash.
	 * 2. Details  : Builds the candidate list, flushes it to adminTrashDeleteDoc()
	 *               in chunks of {@code delimiter} rows, and records the run in
	 *               the batch-work log (success/failure counts, final state).
	 * </pre>
	 * @Method Name : strashDelete
	 * @param strash system-trash policy ("is_use", "decade")
	 * @param historyUser system-administrator info recorded in XR_DOCUMENT_HT
	 * @param commonService
	 * @param documentService
	 * @param quartzService void
	 */
	public void strashDelete(HashMap<String,Object> strash,HashMap<String,Object> historyUser,CommonService commonService,
			DocumentService documentService,QuartzService quartzService) {
		/********************************************************************
		 * System trash processing :: XR_DOCUMENT_DEL WASTE_DATE
		 * Policy settings         :: XR_DOC_CONFIG DOC_TYPE=STRASH
		 ********************************************************************/
		List<HashMap<String,Object>> strashList = new ArrayList<HashMap<String,Object>>();
		List<HashMap<String,Object>> strashDelList = new ArrayList<HashMap<String,Object>>();
		HashMap<String,Object> resultMap = new HashMap<String,Object>();
		HashMap<String,Object> param = new HashMap<String,Object>();
		SessionVO sessionVO = new SessionVO(); // empty session object (for the XR_DOCUMENT_HT history records)
		long work_idx = 0;
		long successCnt = 0;
		long failCnt = 0;
		long sTime = System.currentTimeMillis();
		try {
			logger.info("시스템휴지통 처리시작 ="+df.format(sTime));
			// 1. Register the batch-work log entry for this run.
			work_idx = commonService.commonNextVal(Constant.COUNTER_ID_BATCH_WORK);
			quartzService.batchWorkWrite(work_idx,Constant.WORK_BATCH,Constant.BATCH_STRASH_DOC);
			// On completion the state moves Constant.NO => Constant.YES
			if(strash.get("is_use") != null && strash.get("is_use").toString().equals(Constant.YES)) {
				// 2.0 Apply the system-trash deletion cutoff (retention "decade").
				param.put("workType",Constant.STRASH);
				param.put("decade",this.strash.get("decade"));
				// 2.1 Build the system-trash candidate list.
				strashList = quartzService.batchDocList(param);
				// 2.2 Discard the candidates :: database processing in chunks.
				if(strashList != null && strashList.size() > 0) {
					for(int i=0;i<strashList.size();i++) {
						HashMap<String,Object> strashInfo = new HashMap<String,Object>();
						strashInfo = (HashMap<String,Object>)strashList.get(i);
						strashDelList.add(strashInfo);
						if( i !=0 && ( i % delimiter == 0 ) ) {
							// Flush a full chunk; a failing chunk only counts its own rows as failed.
							try {
								documentService.adminTrashDeleteDoc(strashDelList,historyUser,sessionVO);
								successCnt = successCnt + strashDelList.size();
							}catch(Exception e){
								failCnt = failCnt + strashDelList.size();
							}
							strashDelList.clear();
						}else if(i == (strashList.size()-1) ) {
							// Flush the trailing partial chunk on the last element.
							try {
								documentService.adminTrashDeleteDoc(strashDelList,historyUser,sessionVO);
								successCnt = successCnt + strashDelList.size();
							}catch(Exception e){
								failCnt = failCnt + strashDelList.size();
							}
							strashDelList.clear();
						}
					}
					resultMap.put("message","시스템 휴지통 삭제처리건수 성공건수::"+successCnt + "/ 실패건수::"+failCnt);
				}else {
					resultMap.put("message","시스템 휴지통 삭제처리건수 없음");
				}
			}else {
				resultMap.put("message","시스템 휴지통 자동 비우기 사용안함");
			}
			resultMap.put("work_state",Constant.T);
		}catch(BizException e){
			logger.error(e.getMessage());
			resultMap.put("work_state",Constant.F);
			resultMap.put("message","비지니스 로직 에러");
		}catch(Exception e) {
			logger.error(e.getMessage());
			resultMap.put("work_state",Constant.F);
			resultMap.put("message","EXCEPTION ERROR");
		}finally{
			// Always close out the batch-work log entry, even on failure.
			Date now = new Date();
			resultMap.put("work_idx",work_idx);
			resultMap.put("work_edate",df.format(now));
			try {
				quartzService.batchWorkUpdate(resultMap);
			}catch(Exception e){
				logger.error(e.getMessage());
			}
		}
		long eTime = System.currentTimeMillis();
		logger.info("시스템휴지통 처리완료 ="+df.format(eTime));
	}
}
<file_sep>/EDMS3/src/kr/co/exsoft/document/dao/TypeDao.java
package kr.co.exsoft.document.dao;
import java.util.HashMap;
import java.util.List;
import org.springframework.stereotype.Repository;
import kr.co.exsoft.document.vo.TypeVO;
import kr.co.exsoft.document.vo.AttrVO;
import kr.co.exsoft.document.vo.AttrItemVO;
/**
 * Mapper interface for document-type (Type) management.
 *
 * Covers CRUD for document types, their attributes, attribute items
 * (select-list values) and extended attribute values, plus DDL helpers
 * (create/drop/alter) for the dynamically created per-type extension tables.
 *
 * @author Package Development Team
 * @since 2014.07.21
 * @version 3.0
 *
 */
@Repository(value = "typeDao")
public interface TypeDao {
	/**
	 * Fetches the document-type list.
	 * @param map search conditions
	 * @return matching TypeVO rows
	 */
	public List<TypeVO> typeList(HashMap<String,Object> map);
	/**
	 * Counts the document-type rows for paging.
	 * @param map search conditions
	 * @return total row count
	 */
	public int typePagingCount(HashMap<String,Object> map);
	/**
	 * Fetches one page of the document-type list.
	 * @param map search conditions plus paging parameters
	 * @return TypeVO rows for the requested page
	 */
	public List<TypeVO> typePagingList(HashMap<String,Object> map);
	/**
	 * Creates the extension table for a document type (DDL).
	 * @param map table definition parameters
	 * @return affected count reported by the driver
	 */
	public int createType(HashMap<String,Object> map);
	/**
	 * Drops a document-type extension table (DDL).
	 * @param tbl_name name of the table to drop
	 * @return affected count reported by the driver
	 */
	public int dropType(String tbl_name);
	/**
	 * Alters a document-type extension table (DDL), e.g. adding a column.
	 * @param map alteration parameters
	 * @return affected count reported by the driver
	 */
	public int alterType(HashMap<String,Object> map);
	/**
	 * Inserts a document-type row.
	 * @param typeVO type definition to insert
	 * @return inserted row count
	 */
	public int typeWrite(TypeVO typeVO);
	/**
	 * Updates a document-type row.
	 * @param map updated type values
	 * @return updated row count
	 */
	public int typeUpdate(HashMap<String,Object> map);
	/**
	 * Deletes a document-type row.
	 * @param map delete conditions (type id)
	 * @return deleted row count
	 */
	public int typeDelete(HashMap<String,Object> map);
	/**
	 * Inserts a document-type attribute row.
	 * @param attrVO attribute definition to insert
	 * @return inserted row count
	 */
	public int attrWrite(AttrVO attrVO);
	/**
	 * Updates a document-type attribute row.
	 * @param map updated attribute values
	 * @return updated row count
	 */
	public int attrUpdate(HashMap<String,Object> map);
	/**
	 * Deletes a document-type attribute row.
	 * @param map delete conditions (attribute id)
	 * @return deleted row count
	 */
	public int attrDelete(HashMap<String,Object> map);
	/**
	 * Fetches the item (select-list value) list of a document-type attribute.
	 * @param map search conditions (attribute id)
	 * @return matching AttrItemVO rows
	 */
	public List<AttrItemVO> attrItemList(HashMap<String,Object> map);
	/**
	 * Inserts a document-type attribute item row.
	 * @param attrItemVO item definition to insert
	 * @return inserted row count
	 */
	public int attrItemWrite(AttrItemVO attrItemVO);
	/**
	 * Deletes document-type attribute item row(s).
	 * @param map delete conditions
	 * @return deleted row count
	 */
	public int attrItemDelete(HashMap<String,Object> map);
	/**
	 * Inserts an extended-attribute value row.
	 * @param map value parameters
	 * @return inserted row count
	 */
	public int attrValueWrite(HashMap<String,Object> map);
	/**
	 * Fetches one extended-attribute value.
	 * (Original javadoc named this attrValueList returning a list; the actual
	 * signature returns a single String.)
	 * @param map lookup conditions
	 * @return the stored attribute value
	 */
	public String attrValueDetail(HashMap<String,Object> map);
	/**
	 * Updates an extended-attribute value.
	 * @param map updated value parameters
	 * @return updated row count
	 */
	public int attrValueUpdate(HashMap<String,Object> map);
	/**
	 * Deletes extended-attribute value row(s).
	 * @param map delete conditions
	 * @return deleted row count
	 */
	public int attrValueDelete(HashMap<String,Object> map);
	/**
	 * Fetches the detail of one document type.
	 * @param map lookup conditions (type id)
	 * @return the matching TypeVO, or null when not found
	 */
	public TypeVO typeDetailInfo(HashMap<String,Object> map);
	/**
	 * Fetches the attribute list of a document type.
	 * @param map search conditions (type id)
	 * @return matching AttrVO rows
	 */
	public List<AttrVO> attrList(HashMap<String,Object> map);
	/**
	 * Checks whether the extension table for a document-type id exists.
	 * @param map lookup conditions (table/type id)
	 * @return number of matching tables (0 when the table does not exist)
	 */
	public int tableInfo(HashMap<String,Object> map);
	/**
	 * Inserts attribute values into a document-type extension table.
	 * @param map column/value parameters
	 * @return inserted row count
	 */
	public int insertAttrValue(HashMap<String,Object> map);
	/**
	 * Updates attribute values in a document-type extension table.
	 * Handled via a dynamic map because the type's attributes are dynamic.
	 * @param map column/value parameters
	 * @return updated row count
	 */
	public int updateAttrValue(HashMap<String,Object> map);
}
<file_sep>/EDMS3/WebContent/js/docadmin/aclManager.js
var aclManager = {
currentAclId : null,
init : function(pageSize) {
aclManager.grid.pageSize = pageSize;
aclManager.grid.initAclGridList();
aclManager.grid.initAclItemGridList();
exsoft.util.common.ddslick('#strIndex', 'srch_type', '', 120, function(divId, selectedData){
});
exsoft.util.common.ddslick('#acl_type', 'srch_type', '', 120, function(divId, selectedData){
});
},
ajax : {
aclDetail : function(aclId) {
exsoft.util.ajax.ajaxDataFunctionWithCallback({"acl_id" : aclId}, exsoft.contextRoot+"/admin/aclDetail.do", "", function(data, param) {
//XR_ACL Info View
$('#acl_id').val(data.aclDetail.acl_id); // hidden
$('#src_acl_name').val(data.aclDetail.acl_name); // hidden
$('#open_id').val(data.aclDetail.open_id); // hidden
$('#open_isgroup').val(data.aclDetail.open_isgroup); // hidden
$('#creator_id').val(data.aclDetail.creator_id); // hidden
$('#acl_sort_index').text(data.aclDetail.sort_index); // hidden
$('#acl_name_title').text(data.aclDetail.acl_name);
$('#acl_name').val(data.aclDetail.acl_name);
// $('#acl_type').val(data.aclDetail.acl_type);
exsoft.util.layout.setSelectBox('acl_type', data.aclDetail.acl_type);
$('#open_name').val(data.aclDetail.open_name);
$('#create_date').text(data.aclDetail.create_date);
$('#creator_name').text(data.aclDetail.creator_name);
// XR_ACLITEM 속성 리스트
aclManager.grid.refreshAclItemList();
// if($('#aclItemGridList')[0].grid != undefined) {
// var postData = {acl_id:aclManager.currentAclId} ;
// exsoft.util.grid.gridPostDataRefresh('aclItemGridList',exsoft.contextRoot+'/permission/aclItemList.do',postData);
// }else {
// aclItemGridList();
// }
});
}
},
grid : {
pageSize : 0,
mRowId : 0, // 수정컬럼ID
mList : 0,
paging : function (pageNum) {
// $("#mypageDocList").setGridParam({page:nPage,postData:{is_search:'false',page_init:'false'}}).trigger("reloadGrid");
},
refreshAclList : function() {
exsoft.util.grid.gridRefresh('aclGridList', exsoft.contextRoot + '/permission/aclList.do');
},
refreshAclItemList : function() {
if($('#aclItemGridList')[0].grid != undefined) {
exsoft.util.grid.gridPostDataRefresh('aclItemGridList',exsoft.contextRoot+'/permission/aclItemList.do', {acl_id:aclManager.currentAclId});
}
},
initAclGridList : function() {
$('#aclGridList').jqGrid({
url: exsoft.contextRoot + '/permission/aclList.do',
mtype:"post",
datatype:'json',
jsonReader:{
page:'page',total:'total',root:'list'
},
colNames:['acl_id','권한명','공개대상','공유범위','공유범위','정렬'],
colModel:[
{name:'acl_id',index:'acl_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
{name:'acl_name',index:'acl_name',width:130, editable:false,sortable:true,resizable:true},
{name:'open_name',index:'open_name',width:70, editable:false,sortable:true,resizable:true,align:'center'},
{name:'acl_type_name',index:'acl_type_name',width:5, editable:false,sortable:false,resizable:true,align:'center',hidden:true},
{name:'acl_type',index:'acl_type',width:70, editable:false,sortable:true,resizable:true,align:'center',
formatter:function(cellValue, option) {
switch(cellValue){
case 'ALL' : return '전사'; break;
case 'DEPT' : return '하위부서포함'; break;
case 'TEAM' : return '부서'; break;
case 'PRIVATE' : return '공유안함'; break;
};
}
},
{name:'sort_index',index:'sort_index',width:3, editable:false,hidden:true},
],
autowidth:true,
viewrecords: true,multiselect:true,sortable:true,shrinkToFit:true,gridview: true,
scrollOffset:0,
sortname:"sort_index", // 최초 정렬은 하위부서포함>부서>전사>개인
sortorder:"asc",
multikey: "ctrlKey",
viewsortcols:'vertical',
rowNum : aclManager.grid.pageSize,
rowList:exsoft.util.grid.listArraySize(),
emptyDataText: "데이터가 없습니다.",
caption:'권한 목록'
,onCellSelect : function(rowid,iCol,cellcontent,e){
if(iCol == 0){
// 체크시 row값을 set한다.(선택시 : rowid셋팅, 해제시 : rowid제거)
$("#aclGridList").jqGrid('setSelection',rowid);
} else {
aclManager.currentAclId = rowid;
aclManager.ajax.aclDetail(rowid);
}
}
,loadBeforeSend: function() {
exsoft.util.grid.gridNoDataMsgInit('aclGridList');
exsoft.util.grid.gridTitleBarHide('aclGridList');
}
,loadComplete: function(data) {
if ($("#aclGridList").getGridParam("records")==0) {
exsoft.util.grid.gridNoRecords('aclGridList','nolayer_data');
}else {
console.log("records != 0");
exsoft.util.grid.gridViewRecords('aclGridList');
// exsoft.util.grid.gridPagerViewHide('aclGridList');
// exsoft.util.grid.gridPagerShow('aclGridList');
// 조회화면 DISPLAY
var rowId = $("#aclGridList").getDataIDs()[0];
aclManager.currentAclId = $("#aclGridList").getRowData(rowId).acl_id;
aclManager.ajax.aclDetail(aclManager.currentAclId);
}
exsoft.util.grid.gridPager("#aclGridPager",data);
exsoft.util.grid.gridInputInit(false); // 페이지 창 숫자만 test
// exsoft.util.grid.gridResize('aclGridList','targetAclGrid',55); //페이지 div 맨 하단에
}
,loadError:function(xhr, status, error) {
exsoft.util.error.isErrorChk(xhr);
}
// ,onPaging: function (pgButton) {
// // 사용자 입력한 페이지 숫자
// var pagerId = this.p.pager.substr(1);
// var inputPage = $('input.ui-pg-input', "#pg_" + $.jgrid.jqID(pagerId)).val();
// exsoft.util.grid.onPager('aclGridList',inputPage,pgButton);
// }
});
},
initAclItemGridList : function() {
$('#aclItemGridList').jqGrid({
// url:exsoft.contextRoot+'/permission/aclItemList.do',
list : "",
mtype:"post",
datatype:'json',
jsonReader:{
root:'list'
},
colNames:['accessor_id', 'accessor_isgroup', 'accessor_isalias', '접근자','기본권한','폴더등록','권한변경','기본권한','문서등록','반출취소','권한변경'],
colModel:[
{name:'accessor_id',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
{name:'accessor_isgroup',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
{name:'accessor_isalias',index:'accessor_id',width:5, align:'center',editable:false,sortable:false,key:true,hidden:true},
{name:'accessor_name',index:'accessor_name',width:30, editable:false,sortable:false,resizable:true,align:'center',hidden:false},
{name:'fol_default_acl',index:'fol_default_acl',width:30, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
editoptions:{
value:"DELETE:삭제;UPDATE:수정;READ:조회;BROWSE:목록"
},formatter:'select' //formatter의 역활은 value값으로 grid에 표시함.
},
{name:'fol_act_create',index:'fol_act_create',width:30, editable:true,sortable:false,resizable:true,align:'center',
edittype:'checkbox',
editoptions:{value:'T:F'},
fomatter:'checkbox'
},
{name:'fol_act_change_permission',index:'fol_act_change_permission',width:30, editable:true,sortable:false,resizable:true,align:'center',
edittype:'checkbox',
editoptions:{value:'T:F'},
fomatter:'checkbox'
},
{name:'doc_default_acl',index:'doc_default_acl',width:30, editable:true,sortable:false,resizable:true,align:'center',edittype:'select',
editoptions:{
value:"NONE:없음;DELETE:삭제;UPDATE:수정;READ:조회;BROWSE:목록"
},formatter:'select'
},
{name:'doc_act_create',index:'doc_act_create',width:30, editable:true,sortable:false,resizable:true,align:'center',
edittype:'checkbox',
editoptions:{value:'T:F'},
fomatter:'checkbox'
},
{name:'doc_act_cancel_checkout',index:'doc_act_cancel_checkout',width:30, editable:true,sortable:false,resizable:true,align:'center',
edittype:'checkbox',
editoptions:{value:'T:F'},
fomatter:'checkbox'
},
{name:'doc_act_change_permission',index:'doc_act_change_permission',width:30, editable:true,sortable:false,resizable:true,align:'center',
edittype:'checkbox',
editoptions:{value:'T:F'},
fomatter:'checkbox'
},
],
autowidth:true,
viewrecords: true,
multiselect:true,
sortable: true,
shrinkToFit:true,
scrollOffset: 0,
gridview: true,
postData : {acl_id:aclManager.currentAclId},
emptyDataText: "데이터가 없습니다.",
caption:'접근자 목록'
,loadBeforeSend: function() {
exsoft.util.grid.gridNoDataMsgInit('aclItemGridList');
exsoft.util.grid.gridTitleBarHide('aclItemGridList');
}
,loadComplete: function(data) {
if ($("#aclItemGridList").getGridParam("records") ==0) {
exsoft.util.grid.gridNoDataMsg('aclItemGridList','nolayer_data');
aclManager.grid.mRowId = 0;
}else {
var rowIDs = $("#aclItemGridList").jqGrid('getDataIDs');
aclManager.grid.mRowId = rowIDs[rowIDs.length-1];
}
aclManager.grid.mList = aclManager.grid.mRowId; // atrr_id 수정못하게 처리위한 변수
}
,onCellSelect: function(rowid, iCol,cellcontent,e){
if(base.gridIsRowDataExist('aclItemGridList', rowid, 'accessor_id', 'OWNER')) {
jAlert('소유자는 수정할 수 없습니다.');
$('#aclItemGridList').jqGrid('setSelection',rowid,false); ////checkbox 해제
} else {
$('#aclItemGridList').editRow(rowid,false);
}
}
,onSelectRow: function(rowid,status,e){
if(!base.gridIsRowDataExist('aclItemGridList', rowid, 'accessor_id', 'OWNER')) {
// 에디터모드인지 체크
var edited = base.gridEditMode('aclItemGridList',rowid);
// false 이면 row 저장처리
if(!status) {
$('#aclItemGridList').jqGrid('saveRow', rowid, gParameters );
}else {
if(edited == "0") {
$('#aclItemGridList').editRow(rowid,false);
}
}
}
}
});
// 헤더 colspan
jQuery("#aclItemGridList").jqGrid('setGroupHeaders', {
useColSpanStyle: true,
groupHeaders:[
{startColumnName: 'fol_default_acl', numberOfColumns: 3, titleText: '폴더권한'},
{startColumnName: 'doc_default_acl', numberOfColumns: 4, titleText: '문서권한'}
]
});
}
},
	event : {
		// Re-queries the ACL grid using the current search-form values.
		searchAclList : function() {
			var postData = {
				strIndex: exsoft.util.common.getDdslick("#strIndex"),	// selected search field (via getDdslick helper)
				strKeyword1:$("#strKeyword1").val(),
				strKeyword2:$("#strKeyword2").val(),
				is_search:'true'
			};
			exsoft.util.grid.gridPostDataRefresh('aclGridList',exsoft.contextRoot+'/permission/aclList.do',postData);
		}
	},
	// UI helpers (none yet)
	ui : {
	},
	// Callback hooks (none yet)
	callbackFunctions : {
	}
}<file_sep>/EDMS3/WebContent/js/process/processCoworkWindow.js
/**
 * Co-worker (collaborator) selection popup.
 * Lets the caller pick author / co-author / approver / receiver users from a
 * department tree + user grid and returns the selection through a callback.
 */
var exsoftProcessCoworkWindow = {
	callbackFunction : null,	// invoked with the selected user lists on submit
	treeObject : null,			// XFTree instance for the department tree
	// 0. Initialization
	init : {
		/**
		 * Opens the popup and prepares the tree, the user grid and the four
		 * selection tables.
		 * @param coworkList object with authorList/coauthorList/approverList/receiverList
		 * @param callback   function receiving the final selection object
		 */
		initProcessCoworkWindow : function(coworkList, callback){
			// Keep the callback for event.submit.
			exsoftProcessCoworkWindow.callbackFunction = callback;
			// Open the popup layer.
			exsoftProcessCoworkWindow.open.layerOpen();
			// 1. Initialize the department tree (create once, then refresh).
			if (exsoftProcessCoworkWindow.treeObject == undefined) {
				var treeOption = {
						divId : "#coopUser_tree",
						context : exsoft.contextRoot,
						url : "/group/groupList.do"
				};
				exsoftProcessCoworkWindow.treeObject = new XFTree(treeOption);
				exsoftProcessCoworkWindow.treeObject.callbackSelectNode = function(e, data) {
					// Reset the search fields when a tree node is selected.
					$("#coworkUserName").val("");
					$("#coworkGroupName").val("");
					var param = {
						groupName : "",
						userName : "",
						groupId : data.node.id
					};
					// Load the user list of the selected department.
					exsoft.util.grid.gridPostDataRefresh('#coopUser_searchUserList',exsoft.contextRoot + '/user/searchUserList.do', param);
				};
				exsoftProcessCoworkWindow.treeObject.init();
			} else {
				exsoftProcessCoworkWindow.treeObject.refresh();
			}
			// 2. Build the user grid the first time the popup opens.
			if ($("#coopUser_searchUserList")[0].grid == undefined) {
				$('#coopUser_searchUserList').jqGrid({
					// BUGFIX: the original URL was the literal string
					// '${contextRoot}/user/searchUserList.do'. JSP EL is never
					// expanded inside a static .js file, so the literal text was
					// sent as part of the URL; build it with exsoft.contextRoot
					// as every other call site in this module does.
					url:exsoft.contextRoot+'/user/searchUserList.do',
					mtype:"post",
					datatype:'json',
					jsonReader:{
						page:'page',total:'total',root:'list'
					},
					colNames:['group_nm','user_nm','user_id'],
					colModel:[
						{name:'group_nm',index:'group_nm',width:30, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
						{name:'user_nm',index:'user_nm',width:50, editable:false,sortable:true,resizable:true,hidden:false,align:'center'},
						{name:'user_id',index:'user_id',width:50, editable:false,sortable:false,resizable:true,hidden:false,align:'center'},
					],
					autowidth:true,
					viewrecords: true,multiselect:true,sortable: true,shrinkToFit:true,gridview: true,
					sortname : "user_nm",
					sortorder:"asc",
					scroll: true,
					scrollOffset: 0,
					rowNum : 10,
					rowList:exsoft.util.grid.listArraySize(),
					emptyDataText: "데이터가 없습니다.",
					caption:'사용자 목록',
					pagerpos: 'center',
					pginput: true,
					loadError:function(xhr, status, error) {
						exsoft.util.error.isErrorChk(xhr);
					}
					,loadBeforeSend: function() {
						exsoft.util.grid.gridTitleBarHide('coopUser_searchUserList');
					}
					,loadComplete: function() {
						exsoft.util.grid.gridInputInit(false);
					}
				});
				// Localized column headers.
				var headerData = '{"group_nm":"부서명","user_nm":"사용자명", "user_id":"사용자 ID"}';
				exsoft.util.grid.gridColumHeader('coopUser_searchUserList',headerData,'center');
			} else {
				// Grid already exists: just clear the previous data.
				$("#coopUser_searchUserList").jqGrid("clearGridData");
			}
			// Fill the four selection tables with the previously chosen users.
			exsoft.util.table.tablePrintList('tableAuthor', coworkList.authorList, true, true);
			exsoft.util.table.tablePrintList('tableCoauthor', coworkList.coauthorList, true, true);
			exsoft.util.table.tablePrintList('tableApprver', coworkList.approverList, true, true);
			exsoft.util.table.tablePrintList('tableReceiver', coworkList.receiverList, true, true);
			// 3. Show a "no data" row for every table that is still empty.
			exsoftProcessCoworkWindow.ui.addTableNoData('tableAuthor');
			exsoftProcessCoworkWindow.ui.addTableNoData('tableCoauthor');
			exsoftProcessCoworkWindow.ui.addTableNoData('tableApprver');
			exsoftProcessCoworkWindow.ui.addTableNoData('tableReceiver');
		},
	},
	// 1. Popup open
	open : {
		layerOpen : function() {
			exsoft.util.layout.divLayerOpen("coopUser_choose_wrapper", "coopUser_choose");
		},
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : {
		layerClose : function() {
			exsoft.util.layout.divLayerClose("coopUser_choose_wrapper", "coopUser_choose");
		},
	},
	// 4. UI event handlers
	event : {
		/**
		 * Adds the users checked in the grid to the given selection table.
		 */
		addTableRow : function(tableId){
			var gridUserList = $("#coopUser_searchUserList").getGridParam('selarrrow');
			var data = new Array();
			// 1. Validation (the author table accepts exactly one user).
			if(gridUserList.length == 0){
				jAlert('사용자를 선택하세요.');
				return;
			}else if(tableId == 'tableAuthor' && gridUserList.length > 1){
				jAlert('작성자는 1명만 선택 가능합니다.');
				return;
			}
			$(gridUserList).each(function(index){
				var tempData = $("#coopUser_searchUserList").getRowData(gridUserList[index]);
				// Skip users that are already present in the table.
				if($(exsoft.util.common.getIdFormat(tableId)+'_'+tempData.user_id).length == 0){
					data.push(tempData);
				}
			});
			if(data.length > 0){
				$(exsoft.util.common.getIdFormat(tableId)+'_noData').remove();
				if(tableId == 'tableAuthor'){
					exsoft.util.table.tablePrintList(tableId, data, true, true);
				}else{
					exsoft.util.table.tablePrintList(tableId, data, true, false);
				}
			}
			// Uncheck everything in the grid.
			$("#coopUser_searchUserList").resetSelection();
		},
		/**
		 * Removes the checked rows from the given selection table.
		 */
		delTableRow : function(tableId, allChkBoxId){
			$(exsoft.util.common.getIdFormat(allChkBoxId)).prop("checked",false);
			var delChecked = $(exsoft.util.common.getIdFormat(tableId)+' tbody input:checked');
			if(delChecked.length == 0){
				jAlert('목록에서 제외할 사용자를 선택하세요.');
			}else{
				$(delChecked).each(function(index){
					$(this).parent().parent().remove();
				});
				exsoftProcessCoworkWindow.ui.addTableNoData(tableId);
			}
		},
		/**
		 * Table check-all / uncheck-all.
		 */
		selectChkBoxAll : function(allChkBoxId, chkBoxName) {
			exsoft.util.common.allCheckBox(allChkBoxId, chkBoxName);
		},
		// Searches users by group / user name (tree selection cleared server-side
		// by passing an empty groupId).
		searchGroupUser : function() {
			var param = {
				groupName : $("#coworkGroupName").val(),
				userName : $("#coworkUserName").val(),
				groupId : ''
			};
			// (Removed a leftover "console.log('stephan : ' + param)" debug line.)
			exsoft.util.grid.gridPostDataRefresh('#coopUser_searchUserList',exsoft.contextRoot + '/user/searchUserList.do', param);
		},
		/**
		 * Validates the selection and hands it to the caller's callback.
		 */
		submit : function(){
			var callbackObj = {};
			var authorList = exsoftProcessCoworkWindow.dofunction.getTableList('tableAuthor');
			var coauthorList = exsoftProcessCoworkWindow.dofunction.getTableList('tableCoauthor');
			var approverList = exsoftProcessCoworkWindow.dofunction.getTableList('tableApprver');
			var receiverList = exsoftProcessCoworkWindow.dofunction.getTableList('tableReceiver');
			// Validation: an author and at least one approver are mandatory.
			if(authorList.length == 0){
				jAlert('작성자를 지정하세요.');
			}else if(approverList.length == 0){
				jAlert('승인자는 1명 이상 지정하세요.');
			}else{
				callbackObj.authorList = authorList;
				callbackObj.coauthorList = coauthorList;
				callbackObj.approverList = approverList;
				callbackObj.receiverList = receiverList;
				exsoftProcessCoworkWindow.callbackFunction(callbackObj);
				exsoftProcessCoworkWindow.close.layerClose();
			}
		}
	},
	// 5. UI helpers
	ui : {
		/**
		 * Appends a "no users added" placeholder row when the table body is empty.
		 */
		addTableNoData : function(tableId){
			if($(exsoft.util.common.getIdFormat(tableId)+' tbody').children('tr').length == 0){
				$(exsoft.util.common.getIdFormat(tableId)+' tbody').append('<tr id="'+tableId+'_noData"><td colspan=4>추가된 사용자가 없습니다.</td></tr>');
			}
		},
	},
	// 6. Callbacks
	callback : {
	},
	// 7. Internal helpers
	dofunction : {
		/**
		 * Reads the given table back into an array of user objects of the
		 * shape {sort_index, group_nm, user_nm, user_id}.
		 */
		getTableList : function(tableId){
			$(exsoft.util.common.getIdFormat(tableId)+'_noData').remove();
			var arrayList = new Array();
			$(exsoft.util.common.getIdFormat(tableId)+' tbody').children('tr').each(function(trIndex){
				var userObject = {};
				userObject.sort_index = trIndex;
				// Map cell positions onto the user object (cell 0 is the checkbox).
				$(this).children('td').each(function(tdIndex){
					switch (tdIndex) {
						case 1:userObject.group_nm = $(this).html();break;
						case 2:userObject.user_nm = $(this).html();break;
						case 3:userObject.user_id = $(this).html();break;
						default:break;
					}
				}); // end td each
				arrayList.push(userObject);
			}); // end tr each
			exsoftProcessCoworkWindow.ui.addTableNoData(tableId);
			return arrayList;
		},
	}
}
<file_sep>/EDMS3/WebContent/html/readme.txt
2015-04-24
/html/user_add1.html
/html/user_add2.html
/html/user_add3.html
-> 부서/사용자/구성원 추가 각각 레이아웃으로 정리
2015-04-16
/js/plugins/upload/jquery.uploadfile.3.1.10.js :: line 637
- 스크립트 width 지정 값 수정(원본은 그 뒤에 주석으로 남겨놓음)
2015-04-13
/html/admin/rgate/*.html
- 레이어 팝업 중 사용자 선택부분 위치 수정
/jsp/selectAccessorWindow.jsp
- 레이아웃 틀어진 부분 수정
2015-04-10
/html/admin
- /doc_config.html
- /layout.html
- /rgate_config.html
- /sys_config.html : 위 4개 파일은 변동사항 없음
- /document : 문서관리
- /bin.html : 휴지통
- /config.html : 문서유형 관리
- /dup_file.html : 중복파일 관리
- /expired.html : 만기 문서 관리
- /folder.html : 폴더 관리
- /mass_audit.html : 대량문서 열람 감사 관리
- /transfer_owner.html : 소유권 이전 관리
- /rgate : rGate
- /app_folder.html : 프로그램별 작업폴더 관리
- /application.html : 저장 허용 프로그램 관리
- /client_passwd.html : 클라이언트 삭제 비밀번호 관리
- /extension.html : 저장금지 확장자 관리
- /local.html : 로컬 저장 허용 관리
- /network.html : 네트워크 접속 허용 관리
- /usb.html : USB 저장 허용 관리
- /system : 시스템 관리
- /acl.html : 권한(ACL) 관리
- /config.html : 환경설정 관리
- /group.html : 그룹 관리
- /menu.html : 메뉴 접근권한 관리
- /user.html : 사용자 관리
/css/admin/ecm.css
- 페이지 추가에 따른 css 추가
/js/admin/ecm.js
- 페이지 추가에 따른 코드 추가
2015-04-07
/css/ecm.css
- datepicker 테두리 추가
/img/icon
- 신규 아이콘 추가
엑셀파일 psd / 퍼블리싱 작업 된 페이지 비교대조표 작성
2015-04-03
/css/pluagin/bxslider/jquery.bxslider.css
- 추가
/html/main.html
- 추가
/js/plugins/jqgrid/js/jquery.jqGrid.src.js
- 4009 라인 수정(원본은 주석처리)
/js/plugins/jqgrid/css/jquery.jqGrid.css
- 85라인 (loading) 수정
아이콘파일 추가
2015-04-02
/css/plugin/ddslick/jquery.ddslick.custom.css
- 점화 시 생기는 css 테두리 수정
/css/common/ecm.css
- 컨텐츠 내 탑 검색창 폰트 색상 수정
/css/plugins/themes/jquery-ui.css
- grid 관련 수정
/js/plugins/jqgrid/css/ui.jqgrid.css
- grid 관련 수정
- jquery-ui 그루핑 관련 css는 여기에 포함 되어있습니다.
/js/common/include.js
- jqgrid.min.js -> jqgrid.src.js로 수정
- jquery.jqGrid.src.js 수정 내용
line 2883 : comment /* class 부분이 추가되었습니다 */
line 2884 : class 부분이 추가되었습니다
before :: .append($('<div style="position:relative;'+(isMSIE && $.jgrid.msiever() < 8 ? "height:0.01%;" : "")+'"></div>').append('<div></div>').append(this))
after :: .append($('<div class="ui-jqgrid-bdiv-subwrapper" style="position:relative;'+(isMSIE && $.jgrid.msiever() < 8 ? "height:0.01%;" : "")+'"></div>').append('<div></div>').append(this))
2015-04-01
/css/common/ecm.css
- .loading_wrapper background color 변경
- <div class="depth_navi"> 밑의 태그 변경에 따른 스타일 변경
- 협업관리 트리메뉴 css 추가
- 퀵메뉴 화살표 위치 조정
/jsp/processMenu.jsp
- 협업관리 트리메뉴 css 추가
/js/common/ecm.js
- 협업관리 fold/expand 추가
2015-03-31
/html/admin/*
- footer 수정
/css/common/admin/ecm.css
- footer css 수정
/jsp/layout/userTopMenu.jsp
- <div class="top_menu_close"> ~ scriptlet 주석 처리(X 버튼 이미지입니다. 삭제대신에 주석처리 하였습니다.)
2015-03-30
/html/table_test.html
/html/table_test1.html
- 테이블 구조 변경 샘플파일 입니다.
/html/doc_integratedsearch.html
- 새로운 테이블 레이아웃으로 변경
/html/doc_mydocs7_tempbox.html
- 작업카트제외, 다운로드, 즐겨찾기 추가 이벤트 추가
/html/login.html
- 관리자 로그인 버튼 클릭 시 관리자 로그인 레이어 팝업 출현
/css/common/ecm.css
- 신규 테이블 레이아웃 추가
- ** 테이블 레이아웃에서 width값을 지정하는 class같은 것이 없으면 width가 정해진 요소들을 제외하고 1/n으로 width를 나눔 **
/js/common/ecm.js
- 테이블 레이아웃 변경에 따른 jquery 이벤트 추가
/html/admin/*
- 신규추가
/css/common/admin/*
- 신규추가
/js/common/admin/*
- 신규추가
2015-03-27
/jsp/myPage/myPageDocList.jsp - 수평, 수직스크롤 안의 내용 추가
/html/layout.html - 수평,수직 스크롤 안의 내용 추가
/html/doc_detail2.html
- 문서 상세조회 창 너비 조정
- 협업 탭 메뉴 추가
/css/common/ecm.css
- 상기 변동사항 반영
- 즐겨찾기 선택 / 즐겨찾기 선택 설정 z-index 변경
2015-03-26
/css/common/ecm.css
- 문서등록 > 관련문서 부분 수정
- 문서수정 > css 추가
- 내 소유문서 컨텍스트 메뉴
- 소유권 이전, 소유권 전체 이전 삭제에 따른 주석처리
- 내 소유문서 > 문서 등록
- css 추가
/html/doc_modify.html
- 클래스명 변경
/html/doc_mydocs2.html
- 나의 문서 > 문서 등록창 추가
- 나의 문서 > 문서 수정창 추가
/html/doc_integratedsearch.html
- 좌측메뉴, 토글버튼 주석처리(삭제)
/js/common/ecm.js
- 문서수정 클래스로 창 닫기, 음영부분으로 창 닫기 추가
- 내 소유문서 > 문서 등록창 추가로 인한 창 닫기, 음영부분으로 창 닫기 추가
2015-03-25
/html/pop_myNote.html - 쪽지관리 팝업창
- 윈도우 사이즈 : 730 x 690px(padding 포함)
- 사용자 선택 시 : 920 x 630px(padding 포함)
/css/common/ecm_note.css - ecm.css 부분에서 쪽지 쓰기 관련 부분만 추출
/js/common/ecm_note.js - ecm.js 부분에서 쪽지 쓰기 관련 부분만 추출
/js/common/ecm.js - 상기 이유로 인하여 쪽지 쓰기 부분이 주석 처리(제외) 되었습니다.
/jsp/layout/userHeader.jsp - 환경설정 옆에 드롭다운 화살표 제거
2015-03-24
/jsp/layout/myPageMenu.jsp 수정
/jsp/layout/treeMenu.jsp 수정
/jsp/layout/statisticsMenu.jsp 수정(통계 세부 리스트 펼침/접힘 기능 추가)
이로 인하여
- /img/icon/tree_fold.png 변경
- /img/icon/tree_expand.png 추가
- /js/common/ecm.js 변경
- /css/common/ecm.css 변경
/js/common/ecm.js 수정
- layer popup 표시되는 위치 조절
2015-03-23
html 태그 내 id => class 변경
(* datepicker, dropdown 제외)
- 파일
/jsp/layout/staticsMenu.jsp 수정
/jsp/statistics/loginStatistics.jsp 수정
2015-03-19
- 파일
- doc_coop.html : 업무 상세조회 - 작성단계
- doc_coop_approval.html : 업무 상세조회 - 승인단계
- doc_coop_supplement.html : 업무 상세조회 - 보완단계
- doc_coop_complete.html : 업무 상세조회 - 완료단계
- doc_detailsearch.html : 탑메뉴 상세검색
- doc_integratedsearch.html : 통합검색
- doc_mydocs2.html : 나의문서 - 나의 문서 트리메뉴가 html코드와 css로 정의되어 있습니다.
- div id="tbody" 태그 부분 확인 부탁 드립니다.
(기존 : 웹 페이지상 thead tbody 합쳐서 scrolling -> 이후 : tbody 부분만 scrolling)
- doc_statics.html : 통계
- error.html : 에러페이지
- myNote.html : 나의 쪽지 - 쪽지관리
<file_sep>/EDMS3/src/kr/co/exsoft/quartz/service/QuartzService.java
package kr.co.exsoft.quartz.service;
import java.util.HashMap;
import java.util.List;
import org.springframework.transaction.annotation.Transactional;
import kr.co.exsoft.document.vo.PageVO;
import kr.co.exsoft.eframework.repository.EXrepClient;
import kr.co.exsoft.quartz.vo.FileQueueDeleteVO;
/**
 * Batch-program (scheduler) service interface.
 *
 * @author package development team
 * @since 2014.07.17
 * @version 3.0
 */
@Transactional
public interface QuartzService {
	/**
	 * Registers a batch-work log entry (marks the start of a batch job).
	 *
	 * @param work_idx  batch work index
	 * @param work_type batch work type code
	 * @param work_nm   batch work name
	 * @return number of affected rows
	 * @throws Exception on failure
	 */
	public int batchWorkWrite(long work_idx,String work_type,String work_nm) throws Exception ;
	/**
	 * Marks a batch-work log entry as completed.
	 *
	 * @param map update parameters
	 * @return number of affected rows
	 * @throws Exception on failure
	 */
	public int batchWorkUpdate(HashMap<String,Object> map) throws Exception ;
	/**
	 * Checks whether the batch work should be executed.
	 *
	 * @param map query parameters
	 * @return true when the batch work is to run — TODO confirm exact semantics in the implementation
	 * @throws Exception on failure
	 */
	public boolean isBatchWork(HashMap<String,Object> map) throws Exception ;
	/**
	 * Lists users who exceeded the configured document-read-count threshold.
	 *
	 * @param map query parameters
	 * @return list of users over the threshold
	 * @throws Exception on failure
	 */
	public List<HashMap<String,Object>> auditExceedList(HashMap<String,Object> map) throws Exception ;
	/**
	 * Registers a read-count-threshold violation (audit record).
	 *
	 * @param map audit data
	 * @return number of affected rows
	 * @throws Exception on failure
	 */
	public int writeAudit(HashMap<String,Object> map) throws Exception ;
	/**
	 * Lists documents targeted for batch processing
	 * (expired / personal recycle bin / system recycle bin).
	 *
	 * @param map query parameters
	 * @return list of target documents
	 * @throws Exception on failure
	 */
	public List<HashMap<String, Object>> batchDocList(HashMap<String,Object> map) throws Exception ;
	/**
	 * Fetches the system administrator's information.
	 *
	 * @param map query parameters
	 * @return administrator information
	 * @throws Exception on failure
	 */
	public HashMap<String,Object> systemUserInfo(HashMap<String,Object> map) throws Exception;
	/**
	 * Aggregates the per-user / per-department document status.
	 *
	 * @param map processing parameters
	 * @throws Exception on failure
	 */
	public void docStatusProc(HashMap<String,Object> map) throws Exception;
	/**
	 * Lists attachment pages of discarded documents that must be deleted.
	 *
	 * @param map query parameters
	 * @return list of pages to delete
	 * @throws Exception on failure
	 */
	public List<PageVO> delPageList(HashMap<String,Object> map) throws Exception;
	/**
	 * Deletes the attachment files of discarded documents through the eXrep
	 * repository client.
	 *
	 * @param pageList    pages to delete
	 * @param eXrepClient repository client used for the physical deletion
	 * @return result map
	 * @throws Exception on failure
	 */
	public HashMap<String,Object> delPageProc(List<PageVO> pageList,EXrepClient eXrepClient ) throws Exception;
	/**
	 * Lists DELETEFILE_QUEUE entries whose files are to be deleted.
	 *
	 * @param map query parameters
	 * @return deletion targets
	 * @throws Exception on failure
	 */
	public List<FileQueueDeleteVO> fileQueueDeleteList(HashMap<String,Object> map) throws Exception;
	/**
	 * Deletes the queued files (DELETEFILE_QUEUE) through the eXrep client.
	 *
	 * @param fileQueueDeleteList queue entries to delete
	 * @param eXrepClient         repository client used for the physical deletion
	 * @return result map
	 * @throws Exception on failure
	 */
	public HashMap<String,Object> delFileProc(List<FileQueueDeleteVO> fileQueueDeleteList,EXrepClient eXrepClient ) throws Exception;
	/**
	 * Lists temporary-workbox documents targeted for deletion.
	 *
	 * @param map query parameters
	 * @return list of target documents
	 * @throws Exception on failure
	 */
	public List<HashMap<String,Object>> tempDelDocList(HashMap<String,Object> map) throws Exception;
	/**
	 * Deletes a temporary-workbox document.
	 *
	 * @param map deletion parameters
	 * @throws Exception on failure
	 */
	public void tempDocDelete(HashMap<String,Object> map) throws Exception;
}
<file_sep>/EDMS3/WebContent/js/common/constant.js
/**
 * EDMS HTML5 global constant values.
 */
var Constant = {
	// Tree-related map / work-type identifiers
	MAP_MYPAGE : "MYPAGE",
	MAP_MYDEPT : "MYDEPT",
	MAP_PROJECT : "PROJECT",
	WORK_MYPAGE : "WORK_MYPAGE",
	WORK_MYDEPT : "WORK_MYDEPT",
	WORK_ALLDEPT : "WORK_ALLDEPT",
	WORK_PROJECT : "WORK_PROJECT",
	WORK_FREEDOC : "WORK_FREEDOC",
	// Work (collaboration) process constants
	PROCESS : {
		REQUEST_MENU : "REQUEST",			// requested documents
		WRITE_ING_MENU : "WRITE_ING",		// documents being written
		APPROVAL_ING_MENU : "APPROVAL_ING",	// documents under approval
		WRITE_END_MENU : "WRITE_END",		// written documents
		APPROVAL_END_MENU : "APPROVAL_END",	// approved documents
		RECEIVE_MENU : "RECEIVE",			// received documents
		// Status flow: request(Q) > writing(W) > approving(A) > approved(AE) > supplementing(M) > done(E)
		REQUEST : 'Q',			// work requested
		WRITE_ING : 'W',		// writing
		WRITE_END : 'WE',		// writing finished
		APPROVAL_ING : 'A',		// approving
		APPROVAL_END : 'AE',	// approval finished
		MODIFY_ING : 'M',		// supplementing (rework)
		MODIFY_END : 'ME',		// supplement finished
		PROCESS_END : 'E',		// work finished
		RECEIVE_ING : 'R',		// being read
		RECEIVE_END : 'RE',		// reading finished
		TYPE_REQUESTOR : "Q",	// work requestor
		TYPE_AUTHOR : "R",		// Responsible (primary author)
		TYPE_COAUTHOR : "C",	// co-author
		TYPE_APPROVER : "A",	// Accountable (decision maker / approver)
		TYPE_RECEIVER : "I",	// Informed (notified of the result / receiver)
		EXECUTOR_START : "S",	// executor: stage started
		EXECUTOR_END : "E",		// executor: stage finished
	},
	// Permission (ACL) selection popup constants
	ACL : {
		TYPE_FOLDER : "FOLDER",		// folder-permission popup
		TYPE_DOC : "DOCUMENT",		// document-permission popup
		TYPE_ALL : "ALL",
		TYPE_TEAM : "TEAM",
		TYPE_DEPT : "DEPT",
		TYPE_PRIVATE: "PRIVATE",
		ACL_NONE : "NONE",
		ACL_READ : "READ",
		ACL_DELETE : "DELETE",
		ACL_UPDATE : "UPDATE",
		ACL_BROWSE : "BROWSE",
	},
	// Menu permission scopes
	MENU : {
		MENU_ALL : "ALL",
		MENU_GROUP : "GROUP",
		MENU_TEAM : "TEAM"
	},
	// Role constants
	ROLE : {
		SYSTEM_ROLE : "SYSTEM_OPERATOR",
	},
	// Message (note) related
	ALLDATA : "ALL",
	LIST_TYPE : {
		DOCUMENT_LIST_TYPE_TEMPDOC : "TEMPDOC",
		DOCUMENT_LIST_TYPE_RECENTLYDOC : "RECENTLYDOC"
	},
	// action type
	ACTION_CREATE : "CREATE",
	ACTION_UPDATE : "UPDATE",
	ACTION_DELETE : "DELETE",
	ACTION_REPLY : "REPLY",
	ACTION_VIEW : "VIEW",
}
package kr.co.exsoft.eframework.exception;
import kr.co.exsoft.eframework.handler.ExceptionHandler;
import javax.annotation.Resource;
import org.springframework.context.MessageSource;
import org.springframework.util.PathMatcher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/***
 * Base implementation of the exception-handling manager
 * (the default implementation behind ExceptionHandlerService).
 *
 * @author package development team
 * @since 2014.07.15
 * @version 3.0
 */
public abstract class AbsExceptionHandleManager {
	protected static final Log logger = LogFactory.getLog(AbsExceptionHandleManager.class);
	@Resource(name = "messageSource")
	protected MessageSource messageSource;
	/** Exception last handed to this manager via {@link #setException}. */
	protected Exception ex;
	/** Fully qualified name of the class compared against the patterns. */
	protected String thisPackageName;
	/** Ant-style package/class name patterns that select this manager. */
	protected String[] patterns;
	/** Handlers notified when a pattern matches. */
	protected ExceptionHandler[] handlers;
	/** Matcher used to evaluate the Ant-style patterns; may be null. */
	protected PathMatcher pm;
	/**
	 * Registers the Ant-style package/class name patterns.
	 *
	 * @param patterns pattern list
	 */
	public void setPatterns(String[] patterns) {
		this.patterns = patterns;
	}
	/**
	 * Registers the ExceptionHandler list.
	 *
	 * @param handlers handler list
	 */
	public void setHandlers(ExceptionHandler[] handlers) {
		this.handlers = handlers;
	}
	/**
	 * Sets the class name that will be compared against the patterns.
	 *
	 * @param canonicalName fully qualified class name
	 */
	public void setPackageName(String canonicalName) {
		this.thisPackageName = canonicalName;
	}
	public String getPackageName() {
		return this.thisPackageName;
	}
	/**
	 * Stores the exception to be handled.
	 *
	 * @param be exception
	 */
	public void setException(Exception be) {
		this.ex = be;
	}
	/**
	 * Sets a PathMatcher used to evaluate the patterns.
	 *
	 * @param pm the PathMatcher
	 */
	public void setReqExpMatcher(PathMatcher pm) {
		this.pm = pm;
	}
	/**
	 * Returns whether a PathMatcher has been configured.
	 *
	 * @return true when a matcher is available
	 */
	public boolean hasReqExpMatcher() {
		return this.enableMatcher();
	}
	/**
	 * Returns whether a PathMatcher has been configured.
	 *
	 * @return true when a matcher is available
	 */
	public boolean enableMatcher() {
		// Simplified from the original "(this.pm == null) ? false : true".
		return this.pm != null;
	}
	/**
	 * Runs every registered handler once when any pattern matches the target
	 * class name. Subclasses are expected to override this default behavior.
	 *
	 * @param exception the exception that occurred
	 * @return false when no matcher is configured, true otherwise
	 */
	public boolean run(Exception exception) throws Exception {
		if (!enableMatcher()) {
			return false;
		}
		for (String pattern : patterns) {
			if (pm.match(pattern, thisPackageName)) {
				for (ExceptionHandler eh : handlers) {
					eh.occur(exception, getPackageName());
				}
				// Only the first matching pattern triggers the handlers.
				break;
			}
		}
		return true;
	}
}
<file_sep>/EDMS3/WebContent/js/popup/selectGroupWindow.js
/**
 * Group / project selection popup. Shows a department or project tree and
 * returns the checked nodes to the caller through a callback.
 */
var selectGroupWindow = {
	callbackFunction : null,	// receives the selection when OK is clicked
	groupTree : null,			// JSTree group (department) tree object
	projectTree : null,			// JSTree project tree object
	currentTree : null,			// currently displayed tree object
	mapId : "MYDEPT",			// current map id (MYDEPT | PROJECT)
	popTreeOption : null,		// tree option (GROUP / PROJECT / ALL)
	gSelectGroup_rootId : null,	// optional managed-department id used as the tree root
	// 0. Initialization
	init : {
		initPage : function(callback, treeOption, manage_group_id) {
			selectGroupWindow.callbackFunction = callback;
			selectGroupWindow.popTreeOption = treeOption;
			// FIX: the original test "manage_group_id != undefined || manage_group_id != null"
			// was redundant — under loose equality both operands are the same
			// predicate. A single loose null-check covers null and undefined.
			if (manage_group_id != null)
				selectGroupWindow.gSelectGroup_rootId = manage_group_id;
			// Show only the tab matching the requested tree option.
			if (selectGroupWindow.popTreeOption == "GROUP" || selectGroupWindow.popTreeOption == "MYDEPT") {
				selectGroupWindow.mapId = "MYDEPT";
				$("#popup_groupTree").removeClass("hide");
				$("#popup_projectTree").addClass("hide");
			} else if (selectGroupWindow.popTreeOption == "PROJECT"){
				selectGroupWindow.mapId = "PROJECT";
				$("#popup_projectTree").removeClass("hide");
				$("#popup_groupTree").addClass("hide");
			} else {
				selectGroupWindow.mapId = "MYDEPT";
			}
			selectGroupWindow.init.initTree();
			selectGroupWindow.open();
		},
		// Creates (or refreshes) the tree for the current mapId.
		initTree : function() {
			var treeOption = null;
			if (selectGroupWindow.mapId == "MYDEPT") {
				treeOption = {
						context : exsoft.contextRoot,
						url : "/group/groupList.do",
						divId : "#popup_groupTree",
						mapId : Constant.MAP_MYDEPT,
						workType : Constant.WORK_MYDEPT,
						manageGroupId : selectGroupWindow.gSelectGroup_rootId
				};
				if(selectGroupWindow.groupTree == undefined) {
					selectGroupWindow.groupTree = new XFTree(treeOption);
					selectGroupWindow.groupTree.init();
				} else {
					selectGroupWindow.groupTree.refresh();
				}
				selectGroupWindow.currentTree = selectGroupWindow.groupTree;
			} else if (selectGroupWindow.mapId == "PROJECT") {
				treeOption = {
						context : exsoft.contextRoot,
						url : "/group/groupList.do",
						divId : "#popup_projectTree",
						mapId : Constant.MAP_PROJECT,
						workType : Constant.WORK_PROJECT
				};
				if(selectGroupWindow.projectTree == undefined) {
					selectGroupWindow.projectTree = new XFTree(treeOption);
					selectGroupWindow.projectTree.init();
				} else {
					selectGroupWindow.projectTree.refresh();
				}
				selectGroupWindow.currentTree = selectGroupWindow.projectTree;
			}
		}
	},
	// 1. Popup open
	open : function() {
		exsoft.util.layout.divLayerOpen("dept_choose_wrapper", "dept_choose");
	},
	// 2. layer + show
	layer : {
	},
	// 3. close + hide
	close : function() {
		exsoft.util.layout.divLayerClose("dept_choose_wrapper", "dept_choose");
	},
	// 4. UI event handlers
	event : {
		// Switches between the department and project trees.
		changeMap : function() {
			var map_id = $("#map_id").val();
			if(map_id == "MYDEPT") {
				$("#popup_groupTree").removeClass("hide");
				$("#popup_projectTree").addClass("hide");
			} else {
				$("#popup_projectTree").removeClass("hide");
				$("#popup_groupTree").addClass("hide");
			}
			selectGroupWindow.mapId = map_id;	// FIX: added missing semicolon
			selectGroupWindow.init.initTree();
		},
		// OK button: collect the checked tree nodes and hand them to the caller.
		selectGroupSubmit : function() {
			var returnObjects = exsoft.util.common.getReturnTreeObject(selectGroupWindow.currentTree,
					exsoft.util.common.getIdFormat(selectGroupWindow.currentTree.divId), selectGroupWindow.mapId);
			// Invoke the callback passed to initPage.
			selectGroupWindow.callbackFunction(returnObjects);
			// Hide the popup.
			selectGroupWindow.close();
		}
	},
	// 5. UI helpers
	ui : {
	},
	// 6. Callbacks
	callback : {
	},
}
package kr.co.exsoft.common.dao;
import java.util.HashMap;
import java.util.List;
import kr.co.exsoft.common.vo.MenuVO;
import kr.co.exsoft.common.vo.MenuAuthVO;
import org.springframework.stereotype.Repository;
/**
 * MyBatis mapper interface for menus and menu permissions.
 *
 * @author package development team
 * @since 2014.07.21
 * @version 3.0
 */
@Repository(value = "menuDao")
public interface MenuDao {
	/**
	 * Fetches the menu-permission list.
	 *
	 * @param map query parameters
	 * @return list of menu-permission entries
	 */
	public List<MenuAuthVO> menuAuthList(HashMap<String,Object> map);
	/**
	 * Deletes a menu permission.
	 *
	 * @param menuAuthVO permission to delete
	 * @return number of affected rows
	 */
	public int menuAuthDelete(MenuAuthVO menuAuthVO);
	/**
	 * Deletes the permissions of the sub-menus.
	 *
	 * @param menuAuthVO parent-menu permission whose children are removed
	 * @return number of affected rows
	 */
	public int subMenuAuthDelete(MenuAuthVO menuAuthVO);
	/**
	 * Inserts a menu permission.
	 *
	 * @param menuAuthVO permission to insert
	 * @return number of affected rows
	 */
	public int menuAuthWrite(MenuAuthVO menuAuthVO);
	/**
	 * Looks up a single menu-permission entry (used to check whether a
	 * permission is already registered).
	 * NOTE(review): the original Javadoc documented a MenuAuthVO parameter,
	 * but the signature takes a HashMap.
	 *
	 * @param map query parameters
	 * @return the matching permission entry; presumably null when none exists — verify against the mapper XML
	 */
	public MenuAuthVO menuAuthDetail(HashMap<String,Object> map);
	/**
	 * Fetches the menu list.
	 *
	 * @param map query parameters
	 * @return list of menus
	 */
	public List<MenuVO> menuList(HashMap<String,Object> map);
	/**
	 * Fetches a single menu's details.
	 *
	 * @param map query parameters
	 * @return menu information
	 */
	public MenuVO menuDetail(HashMap<String,Object> map);
}
<file_sep>/EDMS3/src/kr/co/exsoft/folder/service/FolderService.java
package kr.co.exsoft.folder.service;
import java.sql.SQLException;
import java.util.*;
import kr.co.exsoft.common.vo.RecentlyObjectVO;
import kr.co.exsoft.common.vo.SessionVO;
import kr.co.exsoft.document.vo.DocumentVO;
import kr.co.exsoft.folder.vo.FavoriteFolderVO;
import kr.co.exsoft.folder.vo.FolderVO;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
/**
 * Service interface for the EDMS folder tree: folder CRUD, ancestor/
 * descendant traversal, shared folders, favorite folders, handover
 * (owner-change) helpers, the folder cache, and recently-used folders.
 *
 * @author <NAME>
 * @since 2014.07.17
 * @version 3.0
 */
@Transactional
public interface FolderService {

	/** Root folder list for the given map id / parent id. */
	public List<FolderVO> rootFolderList(HashMap<String, Object> map) throws Exception;

	/** Child (sub) folder list of the folder identified in {@code map}. */
	public List<FolderVO> childFolderList(HashMap<String, Object> map) throws Exception;

	/** Detail record of a single folder. */
	public FolderVO folderDetail(HashMap<String, Object> map) throws Exception;

	/** Creates a folder; returns a result map. */
	public Map<String, Object> folderWrite(FolderVO folderVO, HashMap<String, Object> map, SessionVO sessionVO) throws Exception;

	/** Updates a folder in a required transaction; rolls back on any exception. */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public Map<String, Object> folderUpdate(FolderVO folderVO, HashMap<String, Object> map, SessionVO sessionVO) throws Exception;

	/**
	 * Deletes a folder. Per the original contract, deletion is refused when
	 * sub folders exist or documents remain in the folder (including
	 * documents for this folder sitting in the personal trash).
	 */
	public Map<String, Object> folderDelete(HashMap<String, Object> map, SessionVO sessionVO) throws Exception;

	/**
	 * All descendant folders of the given root, collected recursively into
	 * a flat list. NOTE (from original author): the recursion holds its
	 * resources until the whole scan finishes; an iterative (for-loop)
	 * rewrite should be considered.
	 */
	public List<FolderVO> getFolderTreeToLow(HashMap<String, Object> param) throws Exception;

	/** Builds the chain from the given folder up to its root folder. */
	public FolderVO getFolderTreeToHigh(String folderId, FolderVO childFolder) throws Exception;

	/** Descendant list of a favorite-folder subtree (used for handover). */
	public List<FavoriteFolderVO> getFavoriteFolderTreeToLow(HashMap<String, Object> param) throws Exception;

	/** Clones a source folder into the given owner's handover folder tree. */
	public FolderVO cloneMyFolder(String ownerId, FolderVO srcFolder) throws Exception;

	/** Rebuilds a document's previous folder path under a new owner during handover. */
	public FolderVO ownerChangeFolder(String ownerId,DocumentVO documentVO,String preFolderPath) throws Exception;

	/**
	 * Updates the folder cache entry (e.g. parent_id) for {@code folder_id}.
	 * {@code type} is one of CREATE|UPDATE|DELETE|MOVE.
	 */
	public void changeFolderCacheByFolderID(String folder_id, FolderVO folderVo, String type) throws Exception;

	/**
	 * Bulk ACL change for sub folders/documents when a folder is moved or
	 * updated; only entries whose is_inherit_acl value is 'F' are changed.
	 */
	@Transactional(propagation = Propagation.REQUIRED,rollbackFor ={ Exception.class,SQLException.class})
	public void changeAclFromSubFolNDoc(String folder_id, String parent_acl_id) throws Exception;

	/**
	 * Ancestor id chain for a folder id, root first,
	 * e.g. FOL000000000008 -&gt; FOL000000000000, FOL000000000002, FOL000000000008.
	 */
	public List<String> folderFullpathIdsByfolderID(String folder_id) throws Exception;

	/** Descendant folder ids below {@code folder_id} for the given menu part. */
	public List<String> childFolderIdsByfolderId(String folder_id, String folder_menu_part) throws Exception;

	/** Root list of shared folders. */
	public List<FolderVO> rootShareFolderList(HashMap<String, Object> map) throws Exception;

	/** Root list of favorite folders. */
	public List<FavoriteFolderVO> rootFavoriteFolderList(HashMap<String, Object> map) throws Exception;

	/** Child list of a favorite folder. */
	public List<FavoriteFolderVO> childFavoriteFolderList(HashMap<String, Object> map) throws Exception;

	/** Adds a favorite folder. */
	public void writeFavoriteFolder(HashMap<String, Object> map) throws Exception;

	/** Updates a favorite folder. */
	public void updateFavoriteFolder(HashMap<String, Object> map) throws Exception;

	/** Deletes a favorite folder. */
	public void deleteFavoriteFolder(HashMap<String, Object> map) throws Exception;

	/** Duplicate check for a favorite folder; the result map reports existence. */
	public Map<String, Object> existsFavoriteFolder(HashMap<String, Object> map) throws Exception;

	/** Swaps the ordering index of two favorite folders. */
	public void swapFavoriteFolderIndex(HashMap<String, Object> map) throws Exception;

	/** Moves a favorite folder to a new parent. */
	public void moveFavoriteFolder(HashMap<String, Object> map) throws Exception;

	/**
	 * Root (group) folder of the given folder — used to check the storage
	 * quota of the tree the folder belongs to.
	 */
	public FolderVO getRootFolder(String MapId, String folderId, FolderVO childFolder) throws Exception;

	/** Recently-used folders for a user. */
	public List<RecentlyObjectVO> recentlyFolderList(String userId) throws Exception;

	/** Removes one recently-used-folder entry by its idx. */
	public Map<String, Object> recentlyFolderDelete(String idx) throws Exception;
}
| 7971da56d8f6717c3346cbb6756c01cc98ff9cdf | [
"JavaScript",
"Java",
"Text"
] | 106 | JavaScript | asiojs/finotek | fd44e392355e03ad718cb247b35cd0f18be52679 | 36793a9b0230ca327f9d758451f7c584c2df91c1 |
refs/heads/master | <repo_name>hector-sanchez/abaris_widget_corp<file_sep>/app/helpers/products_helper.rb
include ActionView::Helpers::DateHelper
module ProductsHelper
def days_in_the_market(product)
distance_of_time_in_words(product.created_at, DateTime.now)
end
end
<file_sep>/app/models/seller.rb
# STI subclass of User for accounts that list products for sale
# (rows live in the users table).
class Seller < User
  # Products listed by this seller; the FK column on products is user_id.
  has_many :products, foreign_key: :user_id
  # Offers buyers have made on any of this seller's products.
  has_many :purchase_offers, through: :products
end<file_sep>/spec/features/product_spec.rb
require 'rails_helper'

# Feature specs (Capybara) for a seller managing their own products:
# index scoping, new/edit navigation, and the create/edit workflows.
describe Product do
  before do
    @seller = FactoryGirl.create(:seller)
    login_as(@seller, :scope => :user)
  end

  describe 'index' do
    before do
      wrong_seller = FactoryGirl.create(:seller)
      FactoryGirl.create_list(:product, 2, seller: @seller)
      FactoryGirl.create_list(:product, 2, user_id: wrong_seller.id)
      visit products_path
    end

    it 'can be reached successfully' do
      expect(page.status_code).to eq(200)
    end

    it 'can see the products' do
      expect(page).to have_content /Products/
    end

    it 'shows only products that belong to current user (seller)' do
      within('table#my-products') do
        # 2 product rows plus the header row
        expect(page).to have_xpath(".//tr", :count => 3) # one for the <th>
      end
    end
  end

  describe 'new' do
    it 'has a link from the home page' do
      visit products_path
      click_link 'new_product_from_nav'
      expect(page.status_code).to eq 200
    end
  end

  describe 'edit' do
    before do
      @product = FactoryGirl.create(:product, seller: @seller)
    end

    it 'can be reached by clicking edit on the product line' do
      visit products_path
      click_link "edit_#{@product.id}"
      expect(page.status_code).to eq 200
    end

    it 'can be edited' do
      visit edit_product_path(@product)
      fill_in 'product[title]', with: 'My EDITED awesome product'
      fill_in 'product[description]', with: Faker::Lorem.paragraphs[0]
      fill_in 'product[price]', with: Faker::Number.decimal(2)
      click_on 'Save'
      expect(page).to have_content 'My EDITED awesome product'
    end
  end

  describe 'create product' do
    before do
      visit new_product_path
    end

    it 'reaches the create product form' do
      expect(page.status_code).to eq 200
    end

    it 'can be created from new form page' do
      fill_in 'product[title]', with: 'My awesome product'
      fill_in 'product[description]', with: Faker::Lorem.paragraphs[0]
      fill_in 'product[price]', with: Faker::Number.decimal(2)
      click_on 'Save'
      expect(page).to have_content 'My awesome product'
    end

    it 'redirects to the new action if creation error' do
      # Submitting an empty form fails validation and re-renders :new.
      click_on 'Save'
      expect(page).to_not have_content 'My awesome product'
    end

    it 'product will belong to a seller' do
      fill_in 'product[title]', with: 'My awesome product'
      fill_in 'product[description]', with: Faker::Lorem.paragraphs[0]
      fill_in 'product[price]', with: Faker::Number.decimal(2)
      click_on 'Save'
      expect(@seller.products.count).to eq 1
    end
  end
end<file_sep>/db/migrate/20170703005150_remove_photo_from_product.rb
# Drops the Paperclip photo attachment columns from products.
class RemovePhotoFromProduct < ActiveRecord::Migration[5.1]
  def change
    # remove_attachment is reversible, so this migration can be rolled back.
    remove_attachment :products, :photo
  end
end
<file_sep>/db/migrate/20170701043008_create_purchase_offer.rb
# Join table recording a buyer's offer on a product.
class CreatePurchaseOffer < ActiveRecord::Migration[5.1]
  def change
    create_table :purchase_offers do |t|
      t.references :user, foreign_key: true, index: true # the buyer
      t.references :product, foreign_key: true, index: true
      t.decimal :offer_amount
      t.integer :status # integer state column (enum-style)
    end
  end
end
<file_sep>/app/controllers/application_controller.rb
class ApplicationController < ActionController::Base
  protect_from_forgery with: :exception
  before_action :authenticate_user!

  # Post-sign-in landing page: sellers go straight to their product
  # list, everyone else to the storefront root.
  def after_sign_in_path_for(resource_or_scope)
    resource_or_scope.is_a?(Seller) ? products_path : root_path
  end
end
<file_sep>/spec/factories/users.rb
FactoryGirl.define do
factory :user do
first_name { Faker::Name.first_name }
last_name { Faker::Name.last_name }
password '<PASSWORD>'
password_confirmation '<PASSWORD>'
sequence :username do |n|
"foo#{n}#{(0..10000000).to_a.sample}"
end
sequence :email do |n|
"foo#{n}#{(0..10000000).to_a.sample}@bar.baz"
end
end
factory :buyer, parent: :user, class: Buyer do
end
factory :seller, parent: :user, class: Seller do
end
end<file_sep>/db/seeds.rb
# Seed data: two sellers (30 and 40 products each) and one buyer.
# NOTE: create! raises on re-run because usernames/emails are unique —
# run this against a fresh database.

# Creates one seller plus a batch of products owned by them.
def seed_seller(username, email, product_count)
  seller = Seller.create!(first_name: Faker::Name.first_name,
                          last_name: Faker::Name.last_name,
                          username: username,
                          email: email,
                          password: '<PASSWORD>', password_confirmation: '<PASSWORD>')
  puts "#{seller.username} was created."
  product_count.times do |product|
    Product.create!(user_id: seller.id, title: "#{product} - #{Faker::Name.title}", description: Faker::Lorem.paragraphs[0], price: Faker::Number.decimal(2))
  end
  puts "#{product_count} Products have been created."
  seller
end

seed_seller('killer_seller', '<EMAIL>', 30)
seed_seller('bank_breaker', '<EMAIL>', 40)

buyer = Buyer.create!(first_name: Faker::Name.first_name,
                      last_name: Faker::Name.last_name,
                      username: 'big_baller',
                      email: '<EMAIL>',
                      password: '<PASSWORD>', password_confirmation: '<PASSWORD>')
puts "#{buyer.username} was created."
<file_sep>/spec/models/user_spec.rb
require 'rails_helper'

# Model specs for User: validation matchers plus #full_name.
RSpec.describe User, type: :model do
  describe 'validations' do
    it { is_expected.to validate_presence_of :first_name }
    it { is_expected.to validate_presence_of :last_name }
    it { is_expected.to validate_presence_of :username }
    it { is_expected.to validate_uniqueness_of(:username).ignoring_case_sensitivity }
    it { is_expected.to validate_presence_of :email }
    it { is_expected.to validate_uniqueness_of(:email).ignoring_case_sensitivity }
    it { is_expected.to allow_value('<EMAIL>').for(:email) }
    it { is_expected.to allow_value('a@a.aa').for(:email) }
    it { is_expected.not_to allow_value('').for(:email) }
    it { is_expected.not_to allow_value('asdf').for(:email) }
    it { is_expected.not_to allow_value('a@a.a').for(:email) }
    it { is_expected.not_to allow_value('a'*100 + '@example.com').for(:email) }
  end

  describe '#full_name' do
    # FIX: the original used a describe-level local variable, which is
    # built once at file-load time; let is lazily built per example.
    let(:user) { FactoryGirl.build(:user, first_name: 'Mike', last_name: 'Wasowski') }

    it 'returns the full name of the user' do
      expect(user.full_name).to eq '<NAME>'
    end
  end
end
<file_sep>/config/routes.rb
Rails.application.routes.draw do
  resources :products do
    # GET /products/:product_id/purchase_offer/new
    # (handled by PurchaseOffersController#new)
    resource :purchase_offer, only: [:new]
  end

  devise_for :users
  root to: 'dashboard#homepage'
end
<file_sep>/spec/models/seller_spec.rb
require 'rails_helper'

# Association specs for the Seller STI model.
RSpec.describe Seller, type: :model do
  describe 'associations' do
    it { is_expected.to have_many :products }
    it { is_expected.to have_many(:purchase_offers).through(:products) }
  end
end<file_sep>/app/controllers/purchase_offers_controller.rb
class PurchaseOffersController < ApplicationController
def new
product = Product.find(params[:product_id])
product.purchase_offers.create(user_id: current_user.id)
redirect_to root_path, notice: "You have successfully added #{product.title} to your cart."
end
end<file_sep>/app/models/user.rb
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable

  validates :first_name, :last_name, :email, :username, presence: true
  validates :email, :username, uniqueness: true
  validates :email,
            format: /\A[^@]+@[^@]+\z/.freeze,
            length: { in: 6..80 }

  # "First Last" display name.
  def full_name
    [first_name, last_name].join(' ')
  end
end
<file_sep>/app/controllers/dashboard_controller.rb
# Public storefront.
class DashboardController < ApplicationController
  # Lists every product; eager-loads seller and purchase_offers to
  # avoid N+1 queries in the homepage view.
  def homepage
    @products = Product.all.includes(:seller, :purchase_offers)
  end
end<file_sep>/app/models/product.rb
# A sellable item listed by a Seller; buyers attach PurchaseOffers.
class Product < ApplicationRecord
  belongs_to :seller, foreign_key: :user_id, class_name: 'Seller'
  has_many :purchase_offers
  has_many :buyers, through: :purchase_offers

  validates :title, :description, :price, presence: true
  validates :price, numericality: { greater_than: 0 }
end
<file_sep>/app/models/buyer.rb
# STI subclass of User for purchasing accounts.
class Buyer < User
  # Offers this buyer has made; FK column on purchase_offers is user_id.
  has_many :purchase_offers, foreign_key: :user_id
end<file_sep>/spec/models/purchase_offer_spec.rb
require 'rails_helper'

# Association and validation specs for the PurchaseOffer join model.
RSpec.describe PurchaseOffer, type: :model do
  describe 'associations' do
    it { is_expected.to belong_to :buyer }
    it { is_expected.to belong_to :product }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of :user_id }
    it { is_expected.to validate_presence_of :product_id }
  end
end<file_sep>/spec/models/product_spec.rb
require 'rails_helper'

# Association and validation specs for Product.
RSpec.describe Product, type: :model do
  describe 'associations' do
    it { is_expected.to belong_to :seller }
    it { is_expected.to have_many :purchase_offers }
    it { is_expected.to have_many(:buyers).through(:purchase_offers) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of :title }
    it { is_expected.to validate_presence_of :description }
    it { is_expected.to validate_presence_of :price }
    it { is_expected.to validate_numericality_of(:price).is_greater_than(0) }
  end
end
<file_sep>/spec/models/buyer_spec.rb
require 'rails_helper'

# Association specs for the Buyer STI model.
RSpec.describe Buyer, type: :model do
  describe 'associations' do
    it { is_expected.to have_many :purchase_offers }
  end
end<file_sep>/spec/features/purchase_offer_spec.rb
require 'rails_helper'

# Feature spec: a logged-in buyer can reach the make-offer link for a
# seller's product.
describe PurchaseOffer do
  before do
    @seller = FactoryGirl.create(:seller)
    @buyer = FactoryGirl.create(:buyer)
    login_as(@buyer, :scope => :user)
  end

  describe 'present offer' do
    it 'offer page can be reached successfully' do
      product1 = FactoryGirl.create(:product, seller: @seller)
      visit root_path
      click_link "make_offer_product_#{product1.id}"
      expect(page.status_code).to eq(200)
    end
  end
end<file_sep>/README.md
## Abaris' WidgetCorp Commerce
### Associations
#### Seller
X A seller has many products
- A seller has many purchase offers through products
#### Product
X A product belongs to a seller
X A product has many purchase offers
X A product has many buyers through purchase offers
#### Purchase Offers
X A purchase offer belongs to a buyer
X A purchase offer belongs to a product
#### Buyer
X A buyer has many purchase offers
<file_sep>/spec/features/authentication_spec.rb
require 'rails_helper'
describe 'Authentication' do
describe 'seller' do
before do
FactoryGirl.create(:seller, email: '<EMAIL>', password: '<PASSWORD>', password_confirmation: '<PASSWORD>')
seller = FactoryGirl.create(:seller)
login_as(seller, :scope => :user)
end
it 'sees the seller links only' do
visit root_path
expect(page).to have_link 'My Products'
end
it 'lands seller on products page after login' do
visit root_path
click_link 'Account'
expect(page).to have_content 'Logout'
click_link 'Logout'
expect(page).to have_content 'Log in'
fill_in 'user[email]', with: '<EMAIL>'
fill_in 'user[password]', with: '<PASSWORD>'
click_on 'Log in'
expect(page).to have_current_path(products_url, url: true)
end
end
describe 'buyer' do
before do
buyer = FactoryGirl.create(:buyer)
FactoryGirl.create(:buyer, email: '<EMAIL>', password: '<PASSWORD>', password_confirmation: '<PASSWORD>')
login_as(buyer, :scope => :user)
end
it 'sees the buyer links only' do
visit root_path
expect(page).not_to have_link 'My Products'
end
it 'lands buyer on products page after login' do
visit root_path
click_link 'Account'
expect(page).to have_content 'Logout'
click_link 'Logout'
expect(page).to have_content 'Log in'
fill_in 'user[email]', with: '<EMAIL>'
fill_in 'user[password]', with: '<PASSWORD>'
click_on 'Log in'
expect(page).to have_current_path(root_url, url: true)
end
end
def login_logout_workflow
end
end<file_sep>/app/controllers/products_controller.rb
# CRUD for the current seller's products. Every listing action is
# scoped to current_user, so sellers only ever see/modify their own rows.
class ProductsController < ApplicationController
  before_action :set_product, only: [:show, :edit, :update, :destroy]

  # Only this seller's products, eager-loading offers for the view.
  def index
    @products = current_user.products.includes(:purchase_offers)
  end

  def new
    @product = Product.new
  end

  def edit
  end

  # Builds the product through the association so user_id is set implicitly.
  def create
    @product = current_user.products.new(product_params)
    if @product.save
      redirect_to @product, notice: 'Your product was successfully created.'
    else
      render :new
    end
  end

  def update
    if @product.update(product_params)
      redirect_to @product, notice: 'Your product was successfully updated.'
    else
      render :edit
    end
  end

  def show
  end

  def destroy
    @product.destroy
    redirect_to products_path, notice: 'Your product was successfully deleted.'
  end

  private

  # NOTE(review): looks up by id globally, not via current_user.products —
  # presumably any logged-in user can edit any product; confirm intended.
  def set_product
    @product = Product.find(params[:id])
  end

  # Strong parameters for create/update.
  def product_params
    params.require(:product).permit(:title, :description, :price)
  end
end
<file_sep>/app/models/purchase_offer.rb
# Join model between a Buyer and a Product recording an offer to buy.
class PurchaseOffer < ApplicationRecord
  # The offering buyer; stored in the user_id column.
  belongs_to :buyer, foreign_key: :user_id
  belongs_to :product

  validates_presence_of :user_id, :product_id
end | 2196a659a60d9034b2206d68513bce74ae7f239f | [
"Markdown",
"Ruby"
] | 24 | Ruby | hector-sanchez/abaris_widget_corp | 5fd6e956f7ce26076b11f8ae33c9538122e63e7d | 92291ab72e2c1ef7eac6ab6c3f7131184f14b3c4 |
refs/heads/master | <file_sep>package test.vo;
import java.sql.Date;
public class MembersVo {
private int num;
private String name;
private String phone;
private String addr;
private Date regdate;
public MembersVo() {
}
public MembersVo(int num, String name, String phone, String addr, Date regdate) {
super();
this.num = num;
this.name = name;
this.phone = phone;
this.addr = addr;
this.regdate = regdate;
}
public int getNum() {
return num;
}
public void setNum(int num) {
this.num = num;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getPhone() {
return phone;
}
public void setPhone(String phone) {
this.phone = phone;
}
public String getAddr() {
return addr;
}
public void setAddr(String addr) {
this.addr = addr;
}
public Date getRegdate() {
return regdate;
}
public void setRegdate(Date regdate) {
this.regdate = regdate;
}
}
<file_sep>package test.main;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import test.vo.MyUsersVo;
/**
 * Command-line smoke test for the myusers MyBatis mapper.
 * Previously also contained commented-out insert/update/delete/
 * select-by-id examples; see MyUsersMapper.xml for those statement ids.
 */
public class TestMain {
	public static void main(String[] args) {
		SqlSession session = null;
		try {
			String resource = "mybatis/mybatis-config.xml";
			InputStream inputStream = Resources.getResourceAsStream(resource);
			SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
			session = sqlSessionFactory.openSession();

			// List every row in myusers.
			List<MyUsersVo> list = session.selectList("mybatis.MyUsersMapper.selectAll");

			for (MyUsersVo vo : list) {
				System.out.println("아이디:" + vo.getId());
				System.out.println("비밀번호:" + vo.getPw());
				System.out.println("이메일:" + vo.getEmail());
				System.out.println("=============================");
			}
		} catch (IOException e) {
			System.out.println(e.getMessage());
		} finally {
			// BUG FIX: the session was never closed, leaking the underlying
			// JDBC connection on every run.
			if (session != null) session.close();
		}
	}
}
<file_sep>package test.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import test.dao.MembersDao;
/** Updates one member row, then forwards to the member list (or error page). */
@WebServlet("/members/update.do")
public class UpdateController extends HttpServlet {
	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		// BUG FIX: decode the request as UTF-8 *before* reading any
		// parameter, otherwise non-ASCII name/addr values arrive garbled.
		req.setCharacterEncoding("utf-8");
		int num = Integer.parseInt(req.getParameter("num"));
		String name = req.getParameter("name");
		String phone = req.getParameter("phone");
		String addr = req.getParameter("addr");
		try {
			MembersDao dao = new MembersDao();
			int n = dao.update(num, name, phone, addr);
			if (n > 0) {
				req.setAttribute("updateResult", n);
				req.getRequestDispatcher("/members/list.do").forward(req, resp);
			} else {
				req.setAttribute("errMsg", "회원수정 실패!");
				req.getRequestDispatcher("error.jsp").forward(req, resp);
			}
		} catch (Exception e) {
			// BUG FIX: the old catch only logged, leaving the client with a
			// blank response; forward to the error page as well.
			System.out.println(e.getMessage());
			req.setAttribute("errMsg", "회원수정 실패!");
			req.getRequestDispatcher("error.jsp").forward(req, resp);
		}
	}
}
<file_sep>package test.controller;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import test.dao.MembersDao;
import test.vo.MembersVo;
// Lists all members: loads every row via MembersDao and forwards to
// /members/list.jsp with the rows under request attribute "list".
@WebServlet("/members/list.do")
public class ListController extends HttpServlet {
	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		MembersDao dao =new MembersDao();
		ArrayList<MembersVo> list =dao.getList();
		req.setAttribute("list", list);
		req.getRequestDispatcher("/members/list.jsp").forward(req, resp);
	}
}
<file_sep>package mybatis;
import java.io.IOException;
import java.io.InputStream;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
/**
 * Process-wide holder for the MyBatis SqlSessionFactory, built once in a
 * static initializer from mybatis/mybatis-config.xml.
 */
public class SqlSessionFactoryService {
	private static SqlSessionFactory sqlSessionFactory;
	static {
		String resource = "mybatis/mybatis-config.xml";
		sqlSessionFactory = null;
		try {
			InputStream is = Resources.getResourceAsStream(resource);
			sqlSessionFactory = new SqlSessionFactoryBuilder().build(is);
		} catch(IOException e) {
			// BUG FIX: the original swallowed the error, leaving the factory
			// null so every later openSession() failed with an NPE far from
			// the root cause. Fail loudly at class-load time instead.
			throw new ExceptionInInitializerError(e);
		}
	}
	public static SqlSessionFactory getSqlSessionFactory() {
		// FIX: removed the debug println that ran on every DAO call.
		return sqlSessionFactory;
	}
}
<file_sep>package test.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import db.JDBCUtil;
@WebServlet("/update.do")
public class UpdateServlet extends HttpServlet {

	/** GET: renders an edit form pre-filled from the myusers row for ?id=... */
	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		// BUG FIX: setCharacterEncoding() only affects parameters that have
		// not yet been parsed, so it must run before the first getParameter().
		req.setCharacterEncoding("utf-8");
		String id = req.getParameter("id");
		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "select * from myusers where id=?";
			pstmt = con.prepareStatement(sql);
			pstmt.setString(1, id);
			rs = pstmt.executeQuery();
			resp.setContentType("text/html;charset=utf-8");
			PrintWriter pwr = resp.getWriter();
			while (rs.next()) {
				String pw = rs.getString("pw");
				String email = rs.getString("email");
				Date mydate = rs.getDate("mydate");
				// SECURITY FIX: DB values are escaped before being embedded
				// in HTML attributes (stored-XSS guard).
				pwr.print("<form method='post' action='update.do'>");
				pwr.print("<p>아이디 <input type='text' name='id' value='" + escape(id) + "'></p>");
				pwr.print("<p>비밀번호 <input type='text' name='pw' value='" + escape(pw) + "'></p>");
				pwr.print("<p>이메일 <input type='text' name='email' value='" + escape(email) + "'></p>");
				pwr.print("<p>가입일 <input type='text' name='mydate' value='" + mydate + "'></p>");
				pwr.print("<p><input type='submit' value='저장'></p>");
				pwr.print("</form>");
			}
			pwr.close();
		} catch (SQLException s) {
			System.out.println(s.getMessage());
		} finally {
			JDBCUtil.close(rs, pstmt, con);
		}
	}

	/** POST: applies the edited pw/email for the id, then redirects to the list. */
	@Override
	protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		req.setCharacterEncoding("utf-8");
		String id = req.getParameter("id");
		String pw = req.getParameter("pw");
		String email = req.getParameter("email");
		Connection con = null;
		PreparedStatement pstmt = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "update myusers set pw=?, email=? where id=?";
			pstmt = con.prepareStatement(sql);
			pstmt.setString(1, pw);
			pstmt.setString(2, email);
			pstmt.setString(3, id);
			// SECURITY FIX: removed debug printlns that wrote the raw
			// password into the server log.
			int n = pstmt.executeUpdate();
			if (n > 0) {
				System.out.println("수정성공");
				resp.sendRedirect("list.do");
			} else {
				System.out.println("수정실패");
			}
		} catch (SQLException s) {
			s.printStackTrace();
		} finally {
			JDBCUtil.close(null, pstmt, con);
		}
	}

	/** Minimal HTML-attribute escaping for values echoed into the form. */
	private static String escape(String s) {
		if (s == null) return "";
		return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
				.replace("\"", "&quot;").replace("'", "&#39;");
	}
}
<file_sep>package test.vo;
/**
 * Value object for one row of the movie table.
 */
public class MovieVo {
	private int mnum;        // primary key
	private String title;
	private String content;  // synopsis/description
	private String director;

	/** No-arg constructor for bean-style frameworks/mappers. */
	public MovieVo() {
	}

	/** Full constructor covering every column. */
	public MovieVo(int mnum, String title, String content, String director) {
		this.mnum = mnum;
		this.title = title;
		this.content = content;
		this.director = director;
	}

	public int getMnum() { return mnum; }
	public void setMnum(int mnum) { this.mnum = mnum; }

	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }

	public String getContent() { return content; }
	public void setContent(String content) { this.content = content; }

	public String getDirector() { return director; }
	public void setDirector(String director) { this.director = director; }
}
<file_sep>package member.controller;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import data.access.object.MemberDAO;
import value.object.MyusersVO;
/** Searches users by id/email keyword and forwards results to the index page. */
@WebServlet("/search")
public class UserSearch extends HttpServlet {
	@Override
	protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		// BUG FIX: decode the POSTed form as UTF-8 before reading params,
		// otherwise a non-ASCII search keyword arrives garbled.
		req.setCharacterEncoding("utf-8");
		String keyword = req.getParameter("keyword");
		String id = req.getParameter("id");
		String email = req.getParameter("email");

		HashMap<String, String> map = new HashMap<String, String>();
		map.put("keyword", keyword);
		map.put("id", id);
		map.put("email", email);

		MemberDAO dao = new MemberDAO();
		List<MyusersVO> vo = dao.searchList(map);

		// NOTE(review): the result is stored in the *session*, so it outlives
		// this request; request scope would be the usual choice — confirm
		// whether later pages depend on the session attribute.
		HttpSession session = req.getSession();
		session.setAttribute("userlistALL", vo);
		req.setAttribute("loginPage", "/alluserList.jsp");
		req.getRequestDispatcher("index.jsp").forward(req, resp);
	}
}
<file_sep>package js4_servlet;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Sign-up endpoint: GET redirects to the form, POST reports success. */
@WebServlet("/insert.do")
public class InsertServlet extends HttpServlet {
	// service() would run for both GET and POST; this servlet overrides
	// doGet/doPost separately instead.
	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		System.out.println("doGet 메소드 호출됨");
		resp.sendRedirect("test01.html");
	}

	@Override
	protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		System.out.println("doPost 메소드 호출됨");
		System.out.println("회원정보 db 저장 완료!");
		// BUG FIX: the original called
		// resp.setCharacterEncoding("text/html;charset=utf-8"), but that
		// method expects a bare charset name — the content type was never
		// set, so the UTF-8 HTML was served garbled. setContentType() is
		// the correct call for a full MIME type with charset.
		resp.setContentType("text/html;charset=utf-8");
		PrintWriter pw = resp.getWriter();
		pw.print("<h1>회원가입 성공</h1>");
	}
}
<file_sep>package test.dao;
import java.util.HashMap;
import java.util.List;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import mybatis.SqlSessionFactoryService;
import vo.BoardVo;
/**
 * MyBatis DAO for board rows (mapper namespace mybatis.MyBoardMapper).
 * Each call opens its own SqlSession and always closes it in finally.
 */
public class BoardDao {
	private SqlSessionFactory sqlSessionFactory = null;
	private static final String NAMESPACE = "mybatis.MyBoardMapper";

	/** Inserts one row and commits; returns the affected-row count. */
	public int insert(BoardVo vo) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			int n = session.insert(NAMESPACE + ".insert", vo);
			session.commit();
			return n;
		} finally {
			if (session != null) session.close();
		}
	}

	/**
	 * Filtered list via the "select" statement.
	 * FIX: dropped the pointless commit() on a read-only statement.
	 */
	public List<BoardVo> getList(HashMap<String, String> map) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			return session.selectList(NAMESPACE + ".select", map);
		} finally {
			if (session != null) session.close();
		}
	}

	/** Filtered list via the "select1" statement. */
	public List<BoardVo> getList1(HashMap<String, String> map) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			return session.selectList(NAMESPACE + ".select1", map);
		} finally {
			if (session != null) session.close();
		}
	}

	/**
	 * Rows for one board number ("selectDoGet").
	 * FIX: removed debug printlns and the commit() on a read.
	 */
	public List<BoardVo> getList(int num) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			return session.selectList(NAMESPACE + ".selectDoGet", num);
		} finally {
			if (session != null) session.close();
		}
	}

	/**
	 * Deletes one row and commits; returns the affected-row count.
	 * FIX: the old "삭제완료" println ran before the statement executed and
	 * regardless of outcome — callers should check the return value instead.
	 */
	public int delete(int num) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			int n = session.delete(NAMESPACE + ".delete", num);
			session.commit();
			return n;
		} finally {
			if (session != null) session.close();
		}
	}

	/** Updates one row and commits; returns the affected-row count. */
	public int update(BoardVo vo) {
		sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
		SqlSession session = null;
		try {
			session = sqlSessionFactory.openSession();
			int n = session.update(NAMESPACE + ".update", vo);
			session.commit();
			return n;
		} finally {
			if (session != null) session.close();
		}
	}
}
<file_sep>package test.filter;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
@WebFilter("/member/*")
public class LoginFilter implements Filter {

    @Override
    public void destroy() {
        // Nothing to clean up.
    }

    /**
     * Gate for everything under /member/*: the request passes through only when
     * the existing session holds a non-null "id" attribute (set at login);
     * otherwise the client is redirected to the login page.
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        HttpServletRequest req = (HttpServletRequest) request;
        // BUG FIX: the original called session.getAttribute("id") BEFORE its
        // session null check, so the check could never protect anything.
        // getSession(false) also avoids creating an empty session just to
        // discover the user is not logged in.
        HttpSession session = req.getSession(false);
        boolean login = session != null && session.getAttribute("id") != null;
        if (login) {
            chain.doFilter(request, response);
        } else {
            HttpServletResponse resp = (HttpServletResponse) response;
            resp.sendRedirect(req.getContextPath() + "/login/login.jsp");
        }
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
        // No configuration needed.
    }
}
<file_sep>package controller;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import dao.MyBoardDao;
import vo.MyBoardVo;
@WebServlet("/board/list")
public class ListController extends HttpServlet {

    /** Rows shown per page. */
    private static final int PAGE_SIZE = 5;
    /** Page links shown per navigation block. */
    private static final int BLOCK_SIZE = 4;

    /**
     * Loads one page of board posts plus the pagination numbers the JSP needs
     * (pageCount, startPageNum, endPageNum, pageNum) and forwards to list.jsp.
     */
    @Override
    protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        MyBoardDao dao = new MyBoardDao();

        // Requested page number; default to the first page when absent or malformed.
        String spageNum = req.getParameter("pageNum");
        int pageNum = 1;
        if (spageNum != null) {
            try {
                pageNum = Integer.parseInt(spageNum);
            } catch (NumberFormatException e) {
                pageNum = 1; // robustness: a garbled pageNum parameter no longer 500s
            }
        }

        // Row window for this page (1-based, inclusive) for the ROWNUM query.
        int startRow = (pageNum - 1) * PAGE_SIZE + 1;
        int endRow = startRow + PAGE_SIZE - 1;

        // Total number of pages.
        int pageCount = (int) Math.ceil(dao.getMaxCount() / (double) PAGE_SIZE);

        // BUG FIX: the original computed startPageNum = (pageNum / 5) * 4 + 1,
        // mixing a divisor of 5 with a displayed block of 4 pages
        // (endPageNum = startPageNum + 3). For pageNum = 9 that produced the
        // block [5, 8], which does not even contain the current page. Advance
        // the block start every BLOCK_SIZE pages instead.
        int startPageNum = ((pageNum - 1) / BLOCK_SIZE) * BLOCK_SIZE + 1;
        int endPageNum = startPageNum + BLOCK_SIZE - 1;
        if (pageCount < endPageNum) {
            endPageNum = pageCount;
        }

        req.setAttribute("pageCount", pageCount);
        req.setAttribute("startPageNum", startPageNum);
        req.setAttribute("endPageNum", endPageNum);
        req.setAttribute("pageNum", pageNum);

        ArrayList<MyBoardVo> list = dao.getList(startRow, endRow);
        req.setAttribute("list", list);
        req.getRequestDispatcher("/board/list.jsp").forward(req, resp);
    }
}
<file_sep>package test.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import db.ConnectionPool;
import test.vo.MovieVo;
/**
 * JDBC DAO for the MOVIE table. Singleton: obtain via {@link #getInstance()}.
 */
public class MovieDao {

    /** Single shared instance, created eagerly at class-load time. */
    private static final MovieDao instance = new MovieDao();

    // Private constructor enforces the singleton.
    private MovieDao() {}

    public static MovieDao getInstance() {
        return instance;
    }

    /**
     * Loads one movie row by its primary key.
     *
     * @param mnum primary key of the movie
     * @return a populated MovieVo, or null when no row matches or a SQL error occurs
     */
    public MovieVo getinfo(int mnum) {
        Connection conn = null;
        PreparedStatement stmt = null;
        ResultSet result = null;
        try {
            conn = ConnectionPool.getCon();
            stmt = conn.prepareStatement("select * from movie where mnum =?");
            stmt.setInt(1, mnum);
            result = stmt.executeQuery();
            if (!result.next()) {
                return null; // no such movie
            }
            // Column order mirrors the MovieVo(mnum, title, content, director) constructor.
            return new MovieVo(
                    result.getInt("mnum"),
                    result.getString("title"),
                    result.getString("content"),
                    result.getString("director"));
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return null;
        } finally {
            // Always hand the pooled resources back, even on early return.
            ConnectionPool.close(stmt, result, conn);
        }
    }
}
<file_sep>package dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import db.JDBCUtil;
/*
 * [ Singleton pattern ]
 * - Create one shared instance of the object and reuse it everywhere.
 * - Recipe:
 *   1. Hold the instance in a static member of the class itself.
 *   2. Expose a method that returns that instance.
 *   3. Make the constructor private.
 */
public class LoginDao {

    private static final LoginDao dao = new LoginDao();

    // Private constructor: instances are only obtainable through getDao().
    private LoginDao() {}

    public static LoginDao getDao() {
        return dao;
    }

    /**
     * Checks whether the credentials in the map match a row in MYUSERS.
     *
     * @param map expects the keys "id" and "pwd"
     * @return 1 when the credentials match a row, 2 when they do not, -1 on SQL error
     */
    public int isMember(HashMap<String, String> map) {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = JDBCUtil.getConn();
            // Parameterized query: id/pwd never concatenated into the SQL text.
            pstmt = con.prepareStatement("select * from myusers where id=? and pw=?");
            pstmt.setString(1, map.get("id"));
            pstmt.setString(2, map.get("pwd"));
            rs = pstmt.executeQuery();
            return rs.next() ? 1 : 2;
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return -1;
        } finally {
            JDBCUtil.close(rs, pstmt, con);
        }
    }
}
<file_sep>package dao;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import db.ConnectionPool;
import vo.MyBoardVo;
/**
 * Plain-JDBC DAO for the MYBOARD table (pooled connections via ConnectionPool).
 * Error convention: write methods return -1 on SQLException, read methods
 * return null (or 0 for getMaxCount), after logging the message.
 */
public class MyBoardDao {

    /**
     * Inserts one post; num comes from myboard_seq and regdate from sysdate.
     *
     * @param vo post data (writer, pwd, title, content)
     * @return affected row count, or -1 on SQL error
     */
    public int insert(MyBoardVo vo) {
        Connection con = null;
        PreparedStatement pstmt = null;
        try {
            con = ConnectionPool.getCon();
            String sql = "insert into myboard values(myboard_seq.nextval, ?, ?, ?, ?, sysdate)";
            pstmt = con.prepareStatement(sql);
            pstmt.setString(1, vo.getWriter());
            pstmt.setString(2, vo.getPwd());
            pstmt.setString(3, vo.getTitle());
            pstmt.setString(4, vo.getContent());
            return pstmt.executeUpdate();
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return -1;
        } finally {
            ConnectionPool.close(pstmt, null, con);
        }
    }

    /**
     * Loads one post by its number.
     *
     * @param num primary key of the post
     * @return the post, or null when not found / on SQL error
     */
    public MyBoardVo getList(int num) {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = ConnectionPool.getCon();
            String sql = "select * from myboard where num = ?";
            pstmt = con.prepareStatement(sql);
            pstmt.setInt(1, num);
            rs = pstmt.executeQuery();
            MyBoardVo vo = null;
            if (rs.next()) {
                String writer = rs.getString("writer");
                String pwd = rs.getString("pwd");
                String title = rs.getString("title");
                String content = rs.getString("content");
                Date regdate = rs.getDate("regdate");
                vo = new MyBoardVo(num, writer, pwd, title, content, regdate);
            }
            return vo;
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return null;
        } finally {
            ConnectionPool.close(pstmt, rs, con);
        }
    }

    /**
     * Loads one page of posts, newest first, using Oracle ROWNUM paging.
     *
     * @param startRow first row of the page (1-based, inclusive)
     * @param endRow   last row of the page (inclusive)
     * @return the page of posts, or null on SQL error
     */
    public ArrayList<MyBoardVo> getList(int startRow, int endRow) {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = ConnectionPool.getCon();
            // Inner query orders, middle query numbers the rows, outer query slices.
            String sql = "select * from (\r\n" +
                    "select aa.*, rownum rnum from (\r\n" +
                    "select * from myboard order by num desc\r\n" +
                    ")aa\r\n" +
                    ")\r\n" +
                    "where rnum >= ? and rnum <= ?";
            pstmt = con.prepareStatement(sql);
            pstmt.setInt(1, startRow);
            pstmt.setInt(2, endRow);
            rs = pstmt.executeQuery();
            ArrayList<MyBoardVo> list = new ArrayList<MyBoardVo>();
            while (rs.next()) {
                int num = rs.getInt("num");
                String writer = rs.getString("writer");
                String pwd = rs.getString("pwd");
                String title = rs.getString("title");
                String content = rs.getString("content");
                Date regdate = rs.getDate("regdate");
                list.add(new MyBoardVo(num, writer, pwd, title, content, regdate));
            }
            return list;
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return null;
        } finally {
            ConnectionPool.close(pstmt, rs, con);
        }
    }

    /**
     * Searches posts whose chosen column contains the keyword.
     *
     * BUG FIX: the original ran "select * from book where bookname like ?" —
     * a different table whose columns do not even match the MyBoardVo fields
     * read below (copy/paste error) — and ignored the op parameter entirely.
     * Now searches MYBOARD, with op selecting the column. The column name is
     * taken from a fixed whitelist, never from user input, so the dynamic SQL
     * cannot be injected into.
     *
     * @param keyword substring to match (wrapped in %...%)
     * @param op      column selector: "writer" or "content"; anything else
     *                (including null) falls back to "title" — TODO confirm the
     *                op values the search form actually sends
     * @return matching posts, or null on SQL error
     */
    public ArrayList<MyBoardVo> search(String keyword, String op) {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = ConnectionPool.getCon();
            String column;
            if ("writer".equals(op)) {
                column = "writer";
            } else if ("content".equals(op)) {
                column = "content";
            } else {
                column = "title";
            }
            String sql = "select * from myboard where " + column + " like ?";
            pstmt = con.prepareStatement(sql);
            pstmt.setString(1, "%" + keyword + "%");
            rs = pstmt.executeQuery();
            ArrayList<MyBoardVo> list = new ArrayList<MyBoardVo>();
            while (rs.next()) {
                int num = rs.getInt("num");
                String writer = rs.getString("writer");
                String pwd = rs.getString("pwd");
                String title = rs.getString("title");
                String content = rs.getString("content");
                Date regdate = rs.getDate("regdate");
                list.add(new MyBoardVo(num, writer, pwd, title, content, regdate));
            }
            return list;
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return null;
        } finally {
            ConnectionPool.close(pstmt, rs, con);
        }
    }

    /**
     * Deletes a post only when the supplied password matches its pwd column.
     *
     * @return affected row count (0 when num/pwd did not match), or -1 on SQL error
     */
    public int delete(String pwd, int num) {
        Connection con = null;
        PreparedStatement pstmt = null;
        try {
            con = ConnectionPool.getCon();
            String sql = "delete from myboard where num=? and pwd=?";
            pstmt = con.prepareStatement(sql);
            pstmt.setInt(1, num);
            pstmt.setString(2, pwd);
            return pstmt.executeUpdate();
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return -1;
        } finally {
            ConnectionPool.close(pstmt, null, con);
        }
    }

    /**
     * @return total number of posts, or 0 when the table is empty or on SQL error
     */
    public int getMaxCount() {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = ConnectionPool.getCon();
            String sql = "select NVL(count(num),0) cnt from myboard";
            pstmt = con.prepareStatement(sql);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                return rs.getInt("cnt");
            }
            return 0;
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return 0;
        } finally {
            ConnectionPool.close(pstmt, rs, con);
        }
    }
}
<file_sep>package test.beans;
/*
 * JavaBeans conventions:
 *  - member fields named after the request parameters they carry
 *  - a public no-argument constructor
 *  - a getter/setter pair for every property
 * An object following these rules is a "JavaBean" and can be populated
 * automatically (e.g. by <jsp:setProperty>).
 */
public class Members {

    private String name;   // member name
    private String phone;  // phone number
    private String addr;   // address

    /** No-arg constructor required by the JavaBeans specification. */
    public Members() {
    }

    public String getName() {
        return name;
    }

    public String getPhone() {
        return phone;
    }

    public String getAddr() {
        return addr;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setPhone(String phone) {
        this.phone = phone;
    }

    public void setAddr(String addr) {
        this.addr = addr;
    }
}
<file_sep><!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<script type="text/javascript" src="js/jquery-3.5.1.js"></script>
<style type="text/css">
#content {
width: 80%;
margin: auto;
}
#quickmenu {
width: 180px;
height: 230px;
border: 2px solid red;
background-color: orange;
position: absolute;
right: 200px;
top: 50px;
}
</style>
</head>
<body>
<div id="quickmenu">
<ul>
<li><a href="#">최근본페이지1</a></li>
<li><a href="#">최근본페이지2</a></li>
<li><a href="#">최근본페이지3</a></li>
<li><a href="#">최근본페이지4</a></li>
<li><a href="#">최근본페이지5</a></li>
</ul>
</div>
<div id="content">
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Eaque praesentium consequatur nihil sequi fugiat quibusdam officiis quas repellendus possimus aspernatur nisi ab numquam tempora hic expedita? Sunt fugiat enim iure temporibus cupiditate hic quo suscipit et tenetur minus eligendi dignissimos praesentium nam nesciunt iusto porro iste! Illo dolorum esse vitae dolore. Quos culpa earum ex repudiandae dolore excepturi itaque veritatis adipisci quidem reprehenderit facere ipsa natus ipsam sit eaque nam deleniti minus? Autem deserunt voluptatum recusandae laborum eum molestias reprehenderit ad quidem vero iure explicabo nisi dolorem sed! Ducimus obcaecati fuga voluptas fugiat quam totam placeat nemo tenetur maiores aliquam praesentium vero tempora corporis quibusdam nobis magni earum minima dolor provident et sapiente ut alias blanditiis debitis vitae molestias porro cum dignissimos expedita ab necessitatibus facilis dolorum ipsa. Nesciunt ea officiis repudiandae ipsum nam dolorem facilis voluptas labore quo officia error inventore ipsam dolores aperiam assumenda doloribus at vero cupiditate non consectetur rem in fugiat doloremque. Voluptas labore deserunt sapiente est quis culpa dignissimos explicabo et laboriosam fugiat! Nemo soluta illo maiores nisi adipisci quis aspernatur ea quos distinctio. Iure recusandae repudiandae corrupti perspiciatis ea dolores ullam cum ducimus fuga ut placeat consequatur aut. Quos nulla cupiditate quam consequatur adipisci eaque et accusantium accusamus facilis rem necessitatibus voluptas doloremque velit alias tempora voluptates quasi quaerat nobis esse inventore officiis sit deleniti minus! Et fuga ad vel quaerat animi a consectetur eaque qui libero eos doloribus totam quam veritatis impedit numquam consequuntur suscipit voluptates. 
Impedit nesciunt repellat molestiae omnis obcaecati dolores quia praesentium molestias laudantium possimus mollitia illo perferendis et architecto nihil quaerat in maiores labore neque corporis tempora explicabo cupiditate iusto reprehenderit eius? Voluptate ex atque deserunt dolorum beatae error eveniet impedit in sunt odio. Cupiditate quasi laborum sunt deserunt architecto nesciunt tempore? Dignissimos explicabo et sequi reiciendis voluptates officiis rem minus consequatur ex dolore voluptatum dolores voluptate quo adipisci eligendi placeat temporibus repudiandae quos nulla in dolor repellendus ut fugiat error cum rerum eum corrupti. Iusto perferendis at officiis molestiae placeat nostrum est veniam dolorem quia aliquid labore iure consequuntur ipsum fugiat doloremque saepe ratione laudantium quam hic expedita soluta accusamus cumque esse iste quis impedit nesciunt laborum adipisci ab neque enim consequatur praesentium quo debitis assumenda qui veritatis voluptatibus commodi ipsa sed. Ex laudantium provident modi odio sed cupiditate rerum veniam dolorem ipsam vitae labore hic sapiente repudiandae illo inventore placeat quasi nam aut doloremque fugit quod tenetur asperiores iure autem distinctio assumenda pariatur nulla quas nostrum excepturi vero commodi voluptatibus aliquam ipsa iste odit aliquid. Sint omnis dignissimos porro aspernatur nam sapiente nesciunt architecto recusandae quaerat cumque modi commodi vero quos debitis doloremque cum enim necessitatibus distinctio laudantium excepturi reiciendis minus in hic expedita natus dolores non maxime fugit laborum ad explicabo sunt ex aperiam molestias numquam error earum vel veniam eius beatae nemo totam! Consectetur nulla reiciendis vel iusto itaque cumque iure quaerat non totam quasi excepturi culpa enim labore est natus dolores incidunt! 
Excepturi sequi exercitationem ab blanditiis neque eaque dignissimos omnis culpa quos vel eligendi natus soluta similique explicabo saepe dolorem dolores id ad doloribus deserunt assumenda libero et. Quos placeat quidem dicta. Corporis ducimus ipsum cum autem quidem animi suscipit rerum facere earum qui consectetur quas iste dolorem odit possimus. Ab ad quasi explicabo tempore rem vero quam quibusdam itaque sapiente! Ipsa nulla ea ullam perferendis vitae ipsum corporis facilis dolore iure enim quod beatae aliquam id quo dolorem dignissimos modi asperiores distinctio voluptate amet aperiam odio quam autem inventore cupiditate placeat nesciunt animi consequuntur corrupti odit nam impedit optio recusandae veniam eum itaque magnam? Aperiam corporis amet quam praesentium blanditiis dolorum consequatur esse non nostrum assumenda natus incidunt recusandae ipsa placeat voluptatum deserunt ullam! Modi repellat rerum hic animi quidem architecto assumenda deserunt natus porro rem minus culpa totam odio! Placeat delectus at fugit laborum reprehenderit ex quis ad quo nobis voluptas minima quibusdam iure! Eveniet harum mollitia sit fugit iure rem laborum eos in dolorum! Pariatur quasi suscipit esse natus modi ducimus quos fugiat ullam tempora laudantium ex optio enim eaque id alias nesciunt illo mollitia facilis porro necessitatibus? Ea neque corporis nobis libero placeat. Dolorem ipsam rerum illum odio totam quibusdam perspiciatis nihil sint repellat commodi quisquam harum nemo optio repellendus minus excepturi error. Vero voluptatibus voluptates deserunt suscipit sit culpa maiores est animi ab eius dignissimos praesentium! Quasi natus molestiae in sapiente sequi impedit dolorem cupiditate laboriosam eligendi enim fuga dignissimos corporis quaerat dolor repellat! Totam quasi similique ad expedita tempora ipsum omnis quis delectus voluptas quaerat quo corporis doloremque! 
Tenetur unde tempora aliquam sunt aliquid cum repudiandae quos distinctio possimus enim facere deleniti atque rem ratione placeat eos et magnam ullam quam totam natus nisi tempore. Inventore error saepe nam minima pariatur dolore ipsa rerum asperiores in facere? Nam porro architecto velit et sit sequi facere ad impedit. Odit numquam iste ab nobis harum quis ad quia nisi rerum dicta veniam adipisci perferendis pariatur eveniet totam? Labore vel accusantium ipsum quam dolorum pariatur animi fugiat deleniti rerum optio provident doloribus reiciendis vitae adipisci debitis dolorem voluptas ea quia fugit numquam commodi distinctio autem. Laborum architecto sit totam sint voluptatem accusamus amet harum rem reprehenderit fugit blanditiis ut doloribus similique fugiat inventore dolor ipsam ad molestias vero nostrum. Animi veritatis praesentium rerum nemo eveniet ipsa amet expedita quos explicabo odit quam nesciunt deserunt inventore? Corporis incidunt esse possimus accusantium quidem quod voluptatem eveniet eaque voluptatibus odio ex dolores veritatis nihil eos debitis ipsam doloremque dicta ducimus rerum velit consequuntur voluptate laborum molestiae expedita repudiandae obcaecati a dolore labore fugit ut ad voluptatum iste repellat? Aliquid ratione assumenda ea aut debitis sapiente delectus reiciendis impedit praesentium eius iste quae at aspernatur dolorem eveniet minus suscipit repudiandae sunt corporis ex cumque modi numquam odio neque maxime sit animi eligendi expedita veniam facere ut perferendis exercitationem commodi ad cupiditate accusamus laboriosam perspiciatis. Beatae quos totam sequi nisi delectus dicta aperiam at provident corporis iure odit officiis animi pariatur explicabo in iste perferendis. Quas eos quo doloribus id autem illo aliquid sapiente accusamus.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Dolor dolore dolores ad perspiciatis similique minus dignissimos maxime blanditiis explicabo neque quae inventore a reiciendis eveniet ipsa! Aliquam quis veritatis sint quaerat modi ut reiciendis ex delectus sapiente dignissimos alias eaque illum doloremque odit sit sunt eligendi nam officia officiis animi deserunt totam maxime nesciunt quos numquam blanditiis explicabo quas! Amet sit perferendis nemo aperiam doloremque alias laudantium tempore omnis aspernatur asperiores magni nobis. Incidunt cupiditate explicabo aperiam laudantium esse veritatis ducimus voluptate corporis facere ad! Culpa fuga cupiditate qui quae esse unde quas cum consectetur illo sed deserunt tempora voluptatum et eveniet delectus sit inventore a soluta illum at velit blanditiis non odio earum magnam laboriosam in. Suscipit iste ipsum officiis error odio enim similique itaque voluptatibus eaque vero debitis saepe animi dicta deserunt provident cupiditate in veniam nisi qui neque porro nihil veritatis earum commodi aperiam est modi assumenda dolorem quidem! Maiores tenetur pariatur distinctio ex vel doloribus illum cum ut odio iure? Porro impedit iusto quae rem illo illum obcaecati eaque enim dolorem earum aliquid unde nisi! Sequi aspernatur dolor cupiditate eveniet dolorum non quas quis? Ipsa suscipit doloribus hic nihil ullam magni et maiores cupiditate aspernatur consequatur? Dicta soluta pariatur culpa numquam voluptas nobis. Quas aspernatur voluptatibus officia nobis praesentium eius labore debitis eos optio architecto consequuntur itaque facere veritatis nam voluptates excepturi tempore voluptate provident dolorem quod esse illum at nisi voluptas sunt ea beatae laborum est suscipit reiciendis numquam delectus quam deserunt non eum repellat cupiditate culpa accusamus dolor eveniet. Adipisci harum corporis nam similique distinctio nemo earum vitae aut hic neque perferendis numquam amet vero ad magnam dicta assumenda praesentium? 
Impedit dolore minima velit saepe dolorem molestias ullam ex commodi facilis suscipit sunt quaerat quam atque perspiciatis optio ratione accusamus vel sit quis quos nam sint repellendus corporis nihil cum soluta ipsum maxime animi a dicta illum excepturi fugiat veniam consequuntur doloremque voluptatum deleniti. Molestiae ab ad eveniet voluptates aut vel perspiciatis similique cum dignissimos quas consequuntur explicabo! Similique quibusdam dolorem incidunt quos iure voluptatibus nam id vero. Dolorum mollitia voluptatibus pariatur repellendus officia aliquam cumque porro magnam velit placeat blanditiis vel asperiores amet consectetur fugit tenetur soluta! Molestiae non culpa ducimus consequuntur libero eligendi natus numquam. Facilis architecto ipsa voluptatibus commodi voluptates similique debitis voluptatem ea ab suscipit minus dolor. Culpa facere quas quis ipsam odio vel perferendis cupiditate sint velit sed a maiores ducimus aperiam porro doloremque itaque illum commodi. Dolores modi repellendus optio ipsa perspiciatis id soluta rerum molestias dolorem laudantium reprehenderit veritatis aut quas repudiandae expedita tempore a ad maxime omnis vel illo provident porro blanditiis voluptate iste quo quibusdam atque tempora ut cupiditate veniam recusandae in fugit similique voluptatibus aspernatur eos officiis. Suscipit at cumque quasi eligendi velit cum deserunt ab inventore corporis nam recusandae quibusdam iste architecto et minus vero dicta. Porro repellat veniam facilis iste debitis nobis velit inventore recusandae sint animi maxime dolores aspernatur. Cupiditate autem quae optio obcaecati illo dolores et id sit vero ipsam?</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Incidunt alias quia commodi quo facere sequi temporibus fugiat fuga harum eaque aliquam blanditiis dicta magnam reiciendis animi. Amet distinctio dolore quos neque doloribus. Facilis beatae unde obcaecati culpa quos cum asperiores quidem reprehenderit mollitia perferendis doloremque excepturi. Dolor quibusdam tenetur consequatur rem distinctio eveniet labore error ad maxime quam fugiat perferendis praesentium dolorem mollitia soluta libero possimus nisi. Libero aliquid dignissimos. Obcaecati laudantium expedita soluta vero tempore tenetur voluptates itaque magnam rem ipsa quam mollitia repellendus fuga quas animi quae sapiente aliquid cupiditate doloremque molestiae ex incidunt quis est quos officia dolor illum necessitatibus eaque libero quisquam? Facere voluptate natus provident accusamus sapiente deserunt assumenda et excepturi commodi aliquam. Ratione soluta ipsam delectus eligendi asperiores voluptatem non expedita animi nihil velit deserunt molestias! Commodi natus dolores veniam in eum consequuntur quia quisquam omnis sed aut maiores iste ducimus a sapiente minima laudantium eligendi voluptate sequi sit aspernatur porro temporibus enim corporis facilis quae quis incidunt ipsum officiis vitae magnam! Iste cumque quae eius unde consectetur mollitia sint at eos animi recusandae nihil quia modi blanditiis repellendus sequi rem tempore consequuntur vel totam ratione nesciunt dolores labore itaque repudiandae perferendis nemo harum! Aliquam libero culpa tenetur facere praesentium fugit natus eaque vitae repellat in quaerat provident delectus inventore unde dolorum iusto incidunt eum obcaecati nihil ipsum! Laudantium sunt atque incidunt debitis quisquam inventore hic voluptatum esse alias corrupti magni repellendus distinctio odit at quo provident quaerat ipsum molestias aperiam ad nobis cum harum quas! Ipsam architecto quibusdam dolores quasi quam. 
Iste laudantium aut quas natus commodi perspiciatis obcaecati excepturi iure tempore hic expedita in blanditiis voluptas quam error quos doloremque facilis ut tenetur nostrum! Dolore odio assumenda aliquid eum expedita perferendis unde maiores repellat culpa. Nulla quod nesciunt suscipit officiis nobis sapiente excepturi deleniti voluptatibus dolor velit non quam a ducimus adipisci magnam doloribus doloremque minus perferendis. Perferendis eveniet deserunt neque similique quos. Sint facere consectetur nihil necessitatibus voluptatem earum quam pariatur sed maxime enim dolor molestiae aut qui repellendus ducimus nesciunt possimus tempora deleniti sequi voluptates iusto sunt blanditiis autem laudantium soluta eius doloribus aperiam at harum labore minima error atque voluptatum culpa voluptas esse numquam minus nemo illo velit corporis adipisci. Consequatur delectus itaque aliquam. Ut aliquam ipsa aperiam id commodi tempora placeat illum et. Voluptatem expedita ratione deleniti praesentium atque pariatur provident quasi totam odio aspernatur non quo explicabo vel corporis sapiente cumque dolores aut commodi perferendis distinctio amet voluptatibus consequatur? Veritatis nesciunt repellat ad perspiciatis vitae error numquam nulla velit dolore dolorem quo aperiam eius animi similique odit neque tempora sunt a quod natus rerum labore doloribus optio fugiat saepe distinctio sed unde autem inventore iure dignissimos eaque est doloremque. Sapiente odio itaque iste soluta sunt expedita ducimus unde magni harum accusamus asperiores officiis velit ab quod a quia impedit quis earum commodi nemo repellendus. Eos nihil recusandae illo tempore tenetur enim suscipit architecto aliquam pariatur doloribus repudiandae aliquid asperiores assumenda blanditiis quo laborum velit? Dignissimos veritatis voluptates.</p>
</div>
</body>
<script type="text/javascript">
// Keep the quick menu 50px below the top of the viewport as the user scrolls.
$(window).scroll(function(){
	// Current vertical scroll offset of the window.
	var scrollTop = $(window).scrollTop();
	var moveTop = scrollTop + 50;
	// CLEANUP: the original called stop() twice (once standalone, once chained)
	// and kept commented-out experiments; one chained stop() is enough — it
	// cancels any animation still running from a previous scroll event so the
	// menu does not lag behind rapid scrolling.
	$("#quickmenu").stop().animate({
		top: moveTop
	});
});
</script>
</html><file_sep>CREATE TABLE fileinfo (
    filenum NUMBER(5) PRIMARY KEY,   -- surrogate key, fed by fileinfo_seq
    writer VARCHAR2(20),             -- uploader name
    title VARCHAR2(20),
    content VARCHAR2(100),
    orgfilename VARCHAR2(150),       -- file name as sent by the client
    savefilename VARCHAR2(150),      -- name the file is stored under on the server
    filesize NUMBER(10)              -- file size (presumably bytes — confirm against upload code)
);
CREATE SEQUENCE fileinfo_seq;<file_sep>package test.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import db.JDBCUtil;
import test.vo.FileinfoVo;
/**
 * Plain-JDBC DAO for the FILEINFO table (upload metadata).
 * Error convention: write methods return -1 on SQLException, read methods
 * return null, after logging the message.
 */
public class FileinfoDao {

    /**
     * Inserts one file-metadata row; filenum comes from fileinfo_seq.
     *
     * @param vo row data (writer, title, content, file names, size)
     * @return affected row count, or -1 on SQL error
     */
    public int insert(FileinfoVo vo) {
        Connection con = null;
        PreparedStatement pstmt = null;
        try {
            con = JDBCUtil.getConn();
            String sql = "insert into fileinfo values(fileinfo_seq.nextval, ?, ?, ?, ?, ?, ?)";
            pstmt = con.prepareStatement(sql);
            pstmt.setString(1, vo.getWriter());
            pstmt.setString(2, vo.getTitle());
            pstmt.setString(3, vo.getContent());
            pstmt.setString(4, vo.getOrgfilename());
            pstmt.setString(5, vo.getSavafilename());
            pstmt.setLong(6, vo.getFilesize());
            return pstmt.executeUpdate();
        } catch (SQLException s) {
            // BUG FIX: the original swallowed the exception silently; every other
            // method in this DAO logs the message before returning, so do the same.
            System.out.println(s.getMessage());
            return -1;
        } finally {
            JDBCUtil.close(null, pstmt, con);
        }
    }

    /**
     * Loads every fileinfo row.
     *
     * @return all rows, or null on SQL error
     */
    public ArrayList<FileinfoVo> getList() {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        ArrayList<FileinfoVo> list = new ArrayList<FileinfoVo>();
        try {
            con = JDBCUtil.getConn();
            String sql = "select * from fileinfo";
            pstmt = con.prepareStatement(sql);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                int filenum = rs.getInt("filenum");
                String writer = rs.getString("writer");
                String title = rs.getString("title");
                String content = rs.getString("content");
                String orgfilename = rs.getString("orgfilename");
                String savefilename = rs.getString("savefilename");
                long filesize = rs.getLong("filesize");
                list.add(new FileinfoVo(filenum, writer, title, content, orgfilename, savefilename, filesize));
            }
            return list;
        } catch (SQLException e) {
            System.out.println(e.getMessage());
            return null;
        } finally {
            JDBCUtil.close(rs, pstmt, con);
        }
    }

    /**
     * Loads one row by filenum. The returned vo's orgfilename is deliberately
     * left null — callers of this overload only use the saved name.
     *
     * @return the row, or null when not found / on SQL error
     */
    public FileinfoVo getList(int filenum) {
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = JDBCUtil.getConn();
            String sql = "select * from fileinfo where filenum =?";
            pstmt = con.prepareStatement(sql);
            pstmt.setInt(1, filenum);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                String writer = rs.getString("writer");
                String title = rs.getString("title");
                String content = rs.getString("content");
                String savefilename = rs.getString("savefilename");
                long filesize = rs.getLong("filesize");
                return new FileinfoVo(filenum, writer, title, content, null, savefilename, filesize);
            }
            return null;
        } catch (SQLException e) {
            System.out.println(e.getMessage());
            return null;
        } finally {
            JDBCUtil.close(rs, pstmt, con);
        }
    }

    /**
     * Deletes the row with the given filenum.
     *
     * @return affected row count, or -1 on SQL error
     */
    public int delete(int filenum) {
        Connection con = null;
        PreparedStatement pstmt = null;
        // CLEANUP: removed an unused ResultSet local the original declared here.
        try {
            con = JDBCUtil.getConn();
            String sql = "delete from fileinfo where filenum =?";
            pstmt = con.prepareStatement(sql);
            pstmt.setInt(1, filenum);
            return pstmt.executeUpdate();
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return -1;
        } finally {
            JDBCUtil.close(null, pstmt, con);
        }
    }

    /**
     * Overwrites every column of the row identified by vo.getFilenum().
     *
     * @return affected row count, or -1 on SQL error
     */
    public int update(FileinfoVo vo) {
        Connection con = null;
        PreparedStatement pstmt = null;
        try {
            con = JDBCUtil.getConn();
            String sql = "update fileinfo set writer=?, title=?, content=?, orgfilename=?,"
                    + " savefilename=?, filesize=? where filenum=?";
            pstmt = con.prepareStatement(sql);
            pstmt.setString(1, vo.getWriter());
            pstmt.setString(2, vo.getTitle());
            pstmt.setString(3, vo.getContent());
            pstmt.setString(4, vo.getOrgfilename());
            pstmt.setString(5, vo.getSavafilename());
            pstmt.setLong(6, vo.getFilesize());
            pstmt.setInt(7, vo.getFilenum());
            return pstmt.executeUpdate();
        } catch (SQLException s) {
            System.out.println(s.getMessage());
            return -1;
        } finally {
            JDBCUtil.close(null, pstmt, con);
        }
    }
}
<file_sep>package login.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import data.access.object.MemberDAO;
import value.object.MyusersVO;
@WebServlet("/window/login")
public class LoginPage extends HttpServlet {

    /**
     * Renders the login (or member-join) page inside index.jsp.
     * GET /window/login              -> index.jsp with loginPage = /login.jsp
     * GET /window/login?member=true  -> index.jsp with loginPage = /memberJoin.jsp
     */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String memberPageChk = req.getParameter("member");
        if (memberPageChk == null) {
            memberPageChk = "";
        }
        if (memberPageChk.equals("true")) {
            req.setAttribute("loginPage", "/memberJoin.jsp");
            req.getRequestDispatcher("/index.jsp").forward(req, resp);
            // BUG FIX: the original fell through and forwarded a SECOND time
            // below, which throws IllegalStateException once the first forward
            // has committed the response.
            return;
        }
        req.setAttribute("loginPage", "/login.jsp");
        req.getRequestDispatcher("/index.jsp").forward(req, resp);
    }

    /**
     * Processes a login attempt. On success stores the id in the session under
     * "logined" and forwards to index.jsp; on failure only logs (see note).
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        String id = req.getParameter("id");
        String pwd = req.getParameter("pwd");

        MemberDAO dao = new MemberDAO();
        MyusersVO result = dao.loginChecked(new MyusersVO(id, pwd, null, null));

        HttpSession session = req.getSession();
        // NOTE(review): guard against a null result — loginChecked's contract is
        // not visible here and the original would NPE if it ever returned null.
        if (result != null && !result.getId().equals("")) {
            System.out.println("로그인 성공!");
            session.setAttribute("logined", result.getId());
            req.getRequestDispatcher("/index.jsp").forward(req, resp);
        } else {
            System.out.println("로그인 실패..");
            // TODO(review): the failure path sends no response body or redirect,
            // leaving the client with an empty page — confirm intended behavior.
        }
    }
}
<file_sep>package test.servlet;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/** Redirects every request for /member/login to the static login form page. */
@WebServlet("/member/login")
public class GoLogin extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		System.out.println("login");
		String target = req.getContextPath() + "/1/login.jsp";
		resp.sendRedirect(target);
		System.out.println(target);
	}
}
<file_sep>package test.dao;

import java.util.HashMap;
import java.util.List;

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

import mybatis.SqlSessionFactoryService;

/** Singleton DAO that reads movie comment rows through MyBatis. */
public class MovieDao {

	// Eagerly created singleton instance.
	private static final MovieDao INSTANCE = new MovieDao();

	private final SqlSessionFactory sessionFactory = SqlSessionFactoryService.getSqlSessionFactory();

	private MovieDao() {
	}

	public static MovieDao getInstance() {
		return INSTANCE;
	}

	/** Runs the mapped statement mybatis.MovieMapper.movieComments and returns its rows. */
	public List<HashMap<String, Object>> list() {
		SqlSession session = null;
		try {
			session = sessionFactory.openSession();
			return session.selectList("mybatis.MovieMapper.movieComments");
		} finally {
			if (session != null) {
				session.close();
			}
		}
	}
}
<file_sep>package test.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import db.ConnectionPool;
import test.vo.CommentsVo;
/**
 * DAO for the COMMENTS table (per-movie user comments). Singleton; every
 * method borrows a pooled connection from db.ConnectionPool and returns it
 * in a finally block. SQLExceptions are logged to stdout and reported to
 * the caller as -1 (writes) or null (reads) instead of being thrown.
 */
public class CommentsDao {
// Eagerly created singleton instance.
private static CommentsDao instance =new CommentsDao();
private CommentsDao() {}
public static CommentsDao getInstance() {
return instance;
}
/**
 * Inserts one comment; the key comes from the comments_seq sequence.
 * @return affected row count, or -1 on SQL error.
 */
public int insert(CommentsVo vo) {
String sql ="insert into comments values(comments_seq.nextval, ?, ?, ?)";
Connection con =null;
PreparedStatement pstmt =null;
try {
con =ConnectionPool.getCon();
pstmt =con.prepareStatement(sql);
pstmt.setInt(1, vo.getMnum());
pstmt.setString(2, vo.getId());
pstmt.setString(3, vo.getComments());
return pstmt.executeUpdate();
} catch(SQLException s) {
System.out.println(s.getMessage());
return -1;
} finally {
ConnectionPool.close(pstmt, null, con);
}
}
/**
 * Deletes the comment with the given primary key.
 * @return affected row count, or -1 on SQL error.
 */
public int delete(int num) {
String sql ="delete from comments where num =?";
Connection con =null;
PreparedStatement pstmt =null;
try {
con =ConnectionPool.getCon();
pstmt =con.prepareStatement(sql);
pstmt.setInt(1, num);
return pstmt.executeUpdate();
} catch(SQLException s) {
System.out.println(s.getMessage());
return -1;
} finally {
ConnectionPool.close(pstmt, null, con);
}
}
/**
 * Lists every comment belonging to one movie.
 * @param mnum the movie's key (COMMENTS.MNUM foreign key).
 * @return the comments (possibly empty), or null on SQL error.
 */
public ArrayList<CommentsVo> listAll(int mnum){
Connection con =null;
PreparedStatement pstmt =null;
ResultSet rs =null;
try {
con =ConnectionPool.getCon();
String sql ="select * from comments where mnum =?";
pstmt =con.prepareStatement(sql);
pstmt.setInt(1, mnum);
rs =pstmt.executeQuery();
ArrayList<CommentsVo> list =new ArrayList<CommentsVo>();
while(rs.next()) {
//int num, int mnum, String id, String comments
CommentsVo vo =new CommentsVo(
rs.getInt("num"),
rs.getInt("mnum"),
rs.getString("id"),
rs.getString("comments"));
list.add(vo);
}
return list;
} catch(SQLException s) {
System.out.println(s.getMessage());
return null;
} finally {
ConnectionPool.close(pstmt, rs, con);
}
}
}
<file_sep>package test.vo;

/**
 * Value object for one FILEINFO row: a board post plus the metadata of the
 * file uploaded with it (original client name, stored name, size in bytes).
 */
public class FileinfoVo {

	private int filenum;         // primary key
	private String writer;
	private String title;
	private String content;
	private String orgfilename;  // file name as submitted by the client
	private String savefilename; // name the file is stored under on disk
	private long filesize;       // size in bytes

	public FileinfoVo(int filenum, String writer, String title, String content, String orgfilename,
			String savefilename, long filesize) {
		this.filenum = filenum;
		this.writer = writer;
		this.title = title;
		this.content = content;
		this.orgfilename = orgfilename;
		this.savefilename = savefilename;
		this.filesize = filesize;
	}

	public FileinfoVo() {
	}

	public int getFilenum() {
		return filenum;
	}

	public void setFilenum(int filenum) {
		this.filenum = filenum;
	}

	public String getWriter() {
		return writer;
	}

	public void setWriter(String writer) {
		this.writer = writer;
	}

	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	public String getContent() {
		return content;
	}

	public void setContent(String content) {
		this.content = content;
	}

	public String getOrgfilename() {
		return orgfilename;
	}

	public void setOrgfilename(String orgfilename) {
		this.orgfilename = orgfilename;
	}

	/** Correctly spelled accessor for the stored file name. */
	public String getSavefilename() {
		return savefilename;
	}

	/**
	 * Misspelled legacy accessor kept so existing callers keep compiling
	 * (the DAO and download servlet call getSavafilename()).
	 *
	 * @deprecated use {@link #getSavefilename()}
	 */
	@Deprecated
	public String getSavafilename() {
		return getSavefilename();
	}

	public void setSavefilename(String savefilename) {
		this.savefilename = savefilename;
	}

	public long getFilesize() {
		return filesize;
	}

	public void setFilesize(long filesize) {
		this.filesize = filesize;
	}
}
<file_sep>package jsp21_sessionListener;

import java.util.concurrent.atomic.AtomicInteger;

import javax.servlet.http.HttpSessionEvent;
import javax.servlet.http.HttpSessionListener;

/*
 * HttpSessionListener -> listener class that handles the events fired when
 * a session is created or destroyed. Used here to keep a live user count.
 */
public class SessionCountListener implements HttpSessionListener {

	// BUG FIX: session events fire on multiple container threads, so a plain
	// static int with ++/-- could lose updates; AtomicInteger is thread-safe.
	private static final AtomicInteger userCount = new AtomicInteger(0);

	@Override
	public void sessionCreated(HttpSessionEvent se) {
		System.out.println("세션이 생성되었어요");
		userCount.incrementAndGet();
	}

	@Override
	public void sessionDestroyed(HttpSessionEvent se) {
		System.out.println("세션이 종료되었어요");
		userCount.decrementAndGet();
	}

	/** Current number of live sessions. */
	public static int getUserCount() {
		return userCount.get();
	}
}
<file_sep>package dao;

import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;

import db.JDBCUtil;
import vo.BoardVo;

/**
 * DAO for the BOARD table. Each method opens and closes its own connection
 * through JDBCUtil; SQLExceptions are logged to stdout and signalled to the
 * caller by a -1 (write methods) or null (read methods) return value.
 */
public class BoardDao {

	/** Inserts a new post (key from board_seq, write date = sysdate); rows inserted or -1. */
	public int insertBoard(BoardVo vo) {
		Connection con = null;
		PreparedStatement pstmt = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "insert into board values(board_seq.nextval, ?, ?, ?, ?, sysdate)";
			pstmt = con.prepareStatement(sql);
			pstmt.setString(1, vo.getWriter());
			pstmt.setString(2, vo.getEmail());
			pstmt.setString(3, vo.getTitle());
			pstmt.setString(4, vo.getContent());
			return pstmt.executeUpdate();
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return -1;
		} finally {
			JDBCUtil.close(null, pstmt, con);
		}
	}

	/** Returns all posts, newest first; null on database error. */
	public ArrayList<BoardVo> getList() {
		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		ArrayList<BoardVo> list = new ArrayList<BoardVo>();
		try {
			con = JDBCUtil.getConn();
			String sql = "select * from board order by num desc";
			pstmt = con.prepareStatement(sql);
			rs = pstmt.executeQuery();
			while (rs.next()) {
				list.add(new BoardVo(rs.getInt("num"), rs.getString("writer"),
						rs.getString("email"), rs.getString("title"), rs.getString("content"), rs.getDate("w_date")));
			}
			return list;
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return null;
		} finally {
			JDBCUtil.close(rs, pstmt, con);
		}
	}

	/** Returns the post with the given key, or null if absent or on error. */
	public BoardVo getList(int num) {
		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "select * from board where num =?";
			pstmt = con.prepareStatement(sql);
			pstmt.setInt(1, num);
			rs = pstmt.executeQuery();
			if (rs.next()) {
				return new BoardVo(rs.getInt("num"), rs.getString("writer"), rs.getString("email"),
						rs.getString("title"), rs.getString("content"), rs.getDate("w_date"));
			}
			return null;
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return null;
		} finally {
			JDBCUtil.close(rs, pstmt, con);
		}
	}

	/** Loads one post for the edit form (same lookup as getList(int)). */
	public BoardVo update(int num) {
		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "select * from board where num =?";
			pstmt = con.prepareStatement(sql);
			pstmt.setInt(1, num);
			rs = pstmt.executeQuery();
			if (rs.next()) {
				String writer = rs.getString("writer");
				String email = rs.getString("email");
				String title = rs.getString("title");
				String content = rs.getString("content");
				Date w_date = rs.getDate("w_date");
				return new BoardVo(num, writer, email, title, content, w_date);
			}
			return null;
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return null;
		} finally {
			JDBCUtil.close(rs, pstmt, con);
		}
	}

	/** Updates the editable columns of one post; rows changed or -1 on error. */
	public int update(int num, String writer, String email, String title, String content) {
		Connection con = null;
		PreparedStatement pstmt = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "update board set writer=?, email=?, title=?, content=? where num=?";
			pstmt = con.prepareStatement(sql);
			pstmt.setString(1, writer);
			pstmt.setString(2, email);
			pstmt.setString(3, title);
			pstmt.setString(4, content);
			pstmt.setInt(5, num);
			return pstmt.executeUpdate();
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return -1;
		} finally {
			JDBCUtil.close(null, pstmt, con);
		}
	}

	/** Deletes one post; rows deleted or -1 on error. */
	public int delete(int num) {
		Connection con = null;
		PreparedStatement pstmt = null;
		try {
			con = JDBCUtil.getConn();
			String sql = "delete from board where num=?";
			pstmt = con.prepareStatement(sql);
			pstmt.setInt(1, num);
			return pstmt.executeUpdate();
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return -1;
		} finally {
			// BUG FIX: delete() was the only method with no finally block, so
			// it leaked its PreparedStatement and Connection on every call.
			JDBCUtil.close(null, pstmt, con);
		}
	}

	/** Title substring search (LIKE '%keyword%'); null on database error. */
	public ArrayList<BoardVo> search(String keyword) {
		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		ArrayList<BoardVo> list = new ArrayList<BoardVo>();
		try {
			con = JDBCUtil.getConn();
			String sql = "select * from board where title like '%' || ? || '%'";
			pstmt = con.prepareStatement(sql);
			pstmt.setString(1, keyword);
			rs = pstmt.executeQuery();
			while (rs.next()) {
				list.add(new BoardVo(rs.getInt("num"), rs.getString("writer"),
						rs.getString("email"), rs.getString("title"), rs.getString("content"), rs.getDate("w_date")));
			}
			return list;
		} catch (SQLException e) {
			System.out.println(e.getMessage());
			return null;
		} finally {
			JDBCUtil.close(rs, pstmt, con);
		}
	}
}
<file_sep>package test.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/** Adds the n1 and n2 request parameters and forwards the sum to showSum.jsp. */
@WebServlet("/sum.do")
public class SumController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		int left = Integer.parseInt(req.getParameter("n1"));
		int right = Integer.parseInt(req.getParameter("n2"));
		req.setAttribute("sum", left + right);
		req.getRequestDispatcher("showSum.jsp").forward(req, resp);
	}
}
<file_sep>package test.servlet;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URLEncoder;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import test.dao.FileinfoDao;
import test.vo.FileinfoVo;

/**
 * Streams an uploaded file (identified by the filenum parameter) back to
 * the client as an attachment download.
 */
@WebServlet("/filedownload")
public class FileDownloadServlet extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		int filenum = Integer.parseInt(req.getParameter("filenum"));
		FileinfoDao dao = new FileinfoDao();
		// Metadata of the file to send (original name, stored name, size).
		FileinfoVo vo = dao.getList(filenum);

		// Encode the original name so non-ASCII (e.g. Korean) names survive.
		String filename = URLEncoder.encode(vo.getOrgfilename(), "utf-8");
		// BUG FIX: replaceAll returns a new string; the original code discarded
		// the result, so '+' was never rewritten to "%20" in the header value.
		filename = filename.replaceAll("\\+", "%20");

		// 1. Respond with a download dialog rather than inline content.
		resp.setContentType("application/octet-stream");
		resp.setContentLengthLong(vo.getFilesize());
		resp.setHeader("Content-Disposition", "attachment;filename=" + filename);

		// 2. Copy the stored file to the client.
		String upload = req.getServletContext().getRealPath("/upload");
		BufferedInputStream bis = null;
		BufferedOutputStream bos = null;
		try {
			bis = new BufferedInputStream(new FileInputStream(upload + File.separator + vo.getSavafilename()));
			OutputStream os = resp.getOutputStream();
			bos = new BufferedOutputStream(os);
			byte[] buf = new byte[1024];
			int n;
			while ((n = bis.read(buf)) != -1) {
				bos.write(buf, 0, n);
			}
		} finally {
			// BUG FIX: close the streams even when the copy fails, so the file
			// handle is never leaked.
			if (bos != null) bos.close();
			if (bis != null) bis.close();
		}
	}
}
<file_sep>package test.controller;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import test.dao.CommentsDao;
import test.vo.CommentsVo;

/**
 * Returns every comment of one movie (mnum parameter) as an XML document;
 * consumed by the AJAX comment list on the movie detail page.
 */
@WebServlet("/comments.do")
public class CommentsController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		CommentsDao dao = CommentsDao.getInstance();
		int mnum = Integer.parseInt(req.getParameter("mnum"));
		ArrayList<CommentsVo> list = dao.listAll(mnum);

		resp.setContentType("text/xml; charset=utf-8");
		PrintWriter pw = resp.getWriter();
		pw.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
		pw.println("<result>");
		for (CommentsVo vo : list) {
			pw.println("<comm>");
			pw.println("<num>" + vo.getNum() + "</num>");
			pw.println("<mnum>" + vo.getMnum() + "</mnum>");
			// BUG FIX: id and comment text are user input; writing them raw
			// produced malformed XML whenever they contained '&' or '<'.
			pw.println("<id>" + escapeXml(vo.getId()) + "</id>");
			pw.println("<comments>" + escapeXml(vo.getComments()) + "</comments>");
			pw.println("</comm>");
		}
		pw.println("</result>");
	}

	/** Escapes the characters that are always illegal in XML text content. */
	private static String escapeXml(String s) {
		if (s == null) {
			return "";
		}
		return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
	}
}
<file_sep>package controller;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import db.ConnectionPool;

/** Inserts one movie comment row (invoked by the comment form via AJAX). */
@WebServlet("/comment/insert")
public class InsertController extends HttpServlet {

	private static final String INSERT_SQL = "insert into comments values(comments_seq.nextval, ?, ?, ?)";

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		String writerId = req.getParameter("id");
		String body = req.getParameter("content");
		int movieNum = Integer.parseInt(req.getParameter("mnum"));

		Connection con = null;
		PreparedStatement pstmt = null;
		try {
			con = ConnectionPool.getCon();
			pstmt = con.prepareStatement(INSERT_SQL);
			pstmt.setInt(1, movieNum);
			pstmt.setString(2, writerId);
			pstmt.setString(3, body);
			pstmt.executeUpdate();
			System.out.println("인설트 완료!");
		} catch (SQLException e) {
			System.out.println(e.getMessage());
		} finally {
			ConnectionPool.close(pstmt, null, con);
		}
	}
}
<file_sep>package controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import dao.BoardDao;
import vo.BoardVo;

/** Saves a new board post and forwards the insert count to write_s.jsp. */
@WebServlet("/write.do")
public class WriteController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		req.setCharacterEncoding("utf-8");

		// Build the VO straight from the form fields; num and w_date are
		// assigned by the database (sequence / sysdate) on insert.
		BoardVo post = new BoardVo(0,
				req.getParameter("writer"),
				req.getParameter("email"),
				req.getParameter("title"),
				req.getParameter("content"),
				null);

		int inserted = new BoardDao().insertBoard(post);
		req.setAttribute("resultWrite", inserted);
		req.getRequestDispatcher("/write_s.jsp").forward(req, resp);
	}
}
<file_sep>package test.servlet.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import test.dao.BoardDao;

/** Deletes the board post named by the num parameter (sends no response body). */
@WebServlet("/delete")
public class DeleteController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		int postNum = Integer.parseInt(req.getParameter("num"));
		new BoardDao().delete(postNum);
	}
}
<file_sep>package vo;

/**
 * Value object for one GUESTBOARD row. ref/lev/step implement the classic
 * hierarchical-reply ordering scheme (thread group, depth, position).
 */
public class BoardVo {

	private int num;        // primary key
	private String writer;
	private String title;
	private String content;
	private int ref;        // thread (group) number
	private int lev;        // reply depth
	private int step;       // display order within the thread

	public BoardVo() {
	}

	public BoardVo(int num, String writer, String title, String content, int ref, int lev, int step) {
		this.num = num;
		this.writer = writer;
		this.title = title;
		this.content = content;
		this.ref = ref;
		this.lev = lev;
		this.step = step;
	}

	public int getNum() { return num; }
	public void setNum(int num) { this.num = num; }

	public String getWriter() { return writer; }
	public void setWriter(String writer) { this.writer = writer; }

	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }

	public String getContent() { return content; }
	public void setContent(String content) { this.content = content; }

	public int getRef() { return ref; }
	public void setRef(int ref) { this.ref = ref; }

	public int getLev() { return lev; }
	public void setLev(int lev) { this.lev = lev; }

	public int getStep() { return step; }
	public void setStep(int step) { this.step = step; }
}
<file_sep>package test.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import test.dao.MembersDao;

/**
 * Deletes one member and redirects to the member list; on failure forwards
 * to error.jsp with an error message.
 */
@WebServlet("/members/delete.do")
public class DeleteController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		int memberNum = Integer.parseInt(req.getParameter("num"));
		try {
			new MembersDao().delete(memberNum);
			resp.sendRedirect(req.getContextPath() + "/members/list.do");
		} catch (Exception e) {
			System.out.println(e.getMessage());
			req.setAttribute("errMsg", "회원삭제 실패!");
			req.getRequestDispatcher("error.jsp").forward(req, resp);
		}
	}
}
<file_sep>package test.main;
import java.util.List;
import test.dao.MembersDao;
import test.vo.MembersVo;
/**
 * Ad-hoc console harness for exercising MembersDao by hand. Each experiment
 * (insert, full list, delete, update, single-row lookup) is kept commented
 * out below so it can be re-enabled individually; currently main() only
 * constructs the DAO.
 */
public class TestMain {
public static void main(String[] args) {
MembersDao dao = new MembersDao();
// MembersVo vo = new MembersVo(2, "asd2f", "000", "서울", null);
// int n = dao.insert(vo);
// System.out.println(n + "명의 회원 추가됨!");
// List<MembersVo> list = dao.list();
// System.out.println("<<전체 회원 리스트>>");
// for(MembersVo vo1 : list) {
// System.out.println("회원번호 : " + vo1.getNum());
// System.out.println("이름 : " + vo1.getName());
// System.out.println("전화번호 : " + vo1.getPhone());
// System.out.println("주소 : " + vo1.getAddr());
// System.out.println("가입일 : " + vo1.getRegdate());
// System.out.println("-----------------------------------");
// }
// delete, update, single-member lookup
// delete
// int r = dao.delete(8);
// System.out.println(r + "명의 회원 삭제!");
//// // update
// MembersVo vo2 = new MembersVo(1, "test", "04302", "부산", null);
// int u = dao.update(vo2);
// System.out.println(u + "명의 회원 정보 수정!");
// single-member lookup
// List<MembersVo> list2 = dao.list2(1);
// System.out.println("<<회원 리스트>>");
// for(MembersVo vo3 : list2) {
// System.out.println("회원번호 : " + vo3.getNum());
// System.out.println("이름 : " + vo3.getName());
// System.out.println("전화번호 : " + vo3.getPhone());
// System.out.println("주소 : " + vo3.getAddr());
// System.out.println("가입일 : " + vo3.getRegdate());
// System.out.println("-----------------------------------");
// }
}
}
<file_sep>package test.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Maps the day parameter ("1"/"2") to a weather description and forwards
 * the result to showWeather.jsp; any other value yields an empty string.
 */
@WebServlet("/weather.do")
public class WeatherController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		String day = req.getParameter("day");
		String result = "";
		// BUG FIX: the two literals below had been corrupted by a wrong source
		// encoding (EUC-KR bytes read as Latin-1, e.g. "¸¼À½"); restored to
		// the intended Korean words ("clear" / "heavy rain").
		if (day.equals("1")) {
			result = "맑음";
		} else if (day.equals("2")) {
			result = "비가 많이 옴";
		}
		req.setAttribute("result", result);
		req.getRequestDispatcher("/showWeather.jsp").forward(req, resp);
	}
}
<file_sep>package test.servlet.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import dao.BoardDao;
import vo.BoardVo;

/** Loads one board post and forwards it to detail.jsp for display. */
@WebServlet("/board/detail.do")
public class DetailController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		int num = Integer.parseInt(req.getParameter("num"));
		BoardDao dao = new BoardDao();
		BoardVo vo = dao.detail(num);

		// SECURITY FIX: the content is user input that detail.jsp renders as
		// HTML; escape markup characters before converting newlines to <br>
		// so a post cannot inject script (stored XSS).
		String content = escapeHtml(vo.getContent());
		content = content.replaceAll("\n", "<br>");
		vo.setContent(content);

		req.setAttribute("vo", vo);
		req.getRequestDispatcher("/board/detail.jsp").forward(req, resp);
	}

	/** Minimal HTML escaping for text embedded into markup. */
	private static String escapeHtml(String s) {
		if (s == null) {
			return "";
		}
		return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;").replace("\"", "&quot;");
	}
}
<file_sep>package mybatis.dao;
import java.util.List;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import mybatis.MembersMapper;
import mybatis.SqlSessionFactoryService;
import mybatis.vo.MyUsersVo;
/**
 * CRUD DAO for MYUSERS rows built on the type-safe MyBatis mapper interface
 * (MembersMapper). Write methods commit before returning the affected row
 * count; every method opens its own SqlSession and closes it in finally.
 */
public class MyUsersDao {
private SqlSessionFactory sqlSessionFactory = SqlSessionFactoryService.getSqlSessionFactory();
/** Runs MembersMapper.insert for one user and commits; returns the row count. */
public int insert(MyUsersVo vo) {
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
MembersMapper mapper = session.getMapper(MembersMapper.class);
int n = mapper.insert(vo);
session.commit();
return n;
} finally {
if( session != null) session.close();
}
}
/** Runs MembersMapper.selectAll and returns its result list. */
public List<MyUsersVo> selectAll(){
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
MembersMapper mapper = session.getMapper(MembersMapper.class);
List<MyUsersVo> list = mapper.selectAll();
return list;
} finally {
if( session != null) session.close();
}
}
/** Runs MembersMapper.selectOne for the given id. */
public MyUsersVo selectOne(String id){
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
MembersMapper mapper = session.getMapper(MembersMapper.class);
MyUsersVo list = mapper.selectOne(id);
return list;
} finally {
if( session != null) session.close();
}
}
/** Runs MembersMapper.deleteOne for the given id and commits; returns the row count. */
public int deleteOne(String id){
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
MembersMapper mapper = session.getMapper(MembersMapper.class);
int result = mapper.deleteOne(id);
session.commit();
return result;
} finally {
if( session != null) session.close();
}
}
/** Runs MembersMapper.updateOne for the given user and commits; returns the row count. */
public int updateOne(MyUsersVo vo){
SqlSession session = null;
try {
session = sqlSessionFactory.openSession();
MembersMapper mapper = session.getMapper(MembersMapper.class);
int result = mapper.updateOne(vo);
session.commit();
return result;
} finally {
if( session != null) session.close();
}
}
}
<file_sep>package vo;

import java.sql.Date;

/**
 * Value object for one BOARD row (num is the board_seq key; w_date is the
 * write date assigned by the database on insert).
 */
public class BoardVo {

	private int num;
	private String writer;
	private String email;
	private String title;
	private String content;
	private Date w_date;

	public BoardVo() {
	}

	public BoardVo(int num, String writer, String email, String title, String content, Date w_date) {
		this.num = num;
		this.writer = writer;
		this.email = email;
		this.title = title;
		this.content = content;
		this.w_date = w_date;
	}

	public int getNum() { return num; }
	public void setNum(int num) { this.num = num; }

	public String getWriter() { return writer; }
	public void setWriter(String writer) { this.writer = writer; }

	public String getEmail() { return email; }
	public void setEmail(String email) { this.email = email; }

	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }

	public String getContent() { return content; }
	public void setContent(String content) { this.content = content; }

	public Date getW_date() { return w_date; }
	public void setW_date(Date w_date) { this.w_date = w_date; }
}
<file_sep>package db;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Plain-JDBC helper: opens connections to the local Oracle XE instance and
 * null-safely closes JDBC resources (close-time errors are only logged).
 */
public class JDBCUtil {

	/**
	 * Opens a new connection as scott/tiger.
	 *
	 * @throws SQLException if the driver class is missing or the connect
	 *         fails. (BUG FIX: the driver's ClassNotFoundException used to be
	 *         caught and turned into a null return, which made every caller
	 *         fail later with an opaque NullPointerException.)
	 */
	public static Connection getConn() throws SQLException {
		try {
			Class.forName("oracle.jdbc.OracleDriver");
		} catch (ClassNotFoundException e) {
			throw new SQLException("Oracle JDBC driver not found on classpath", e);
		}
		String url = "jdbc:oracle:thin:@localhost:1521:xe";
		return DriverManager.getConnection(url, "scott", "tiger");
	}

	/** Closes result set, statement and connection (in that order); null-safe. */
	public static void close(ResultSet rs, Statement psmt, Connection con) {
		try {
			if (rs != null) rs.close();
			if (psmt != null) psmt.close();
			if (con != null) con.close();
		} catch (SQLException sq) {
			System.out.println(sq.getMessage());
		}
	}

	/** Null-safe close of a single result set. */
	public static void close(ResultSet rs) {
		try {
			if (rs != null) rs.close();
		} catch (SQLException sq) {
			System.out.println(sq.getMessage());
		}
	}

	/** Null-safe close of a single statement. */
	public static void close(Statement psmt) {
		try {
			if (psmt != null) psmt.close();
		} catch (SQLException sq) {
			System.out.println(sq.getMessage());
		}
	}

	/** Null-safe close of a single connection. */
	public static void close(Connection con) {
		try {
			if (con != null) con.close();
		} catch (SQLException sq) {
			System.out.println(sq.getMessage());
		}
	}
}
<file_sep>package member.controller;

import java.io.IOException;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import data.access.object.MemberDAO;
import value.object.MyusersVO;

/** Fetches every registered user and shows them via alluserList.jsp inside index.jsp. */
@WebServlet("/member/allusers")
public class AllUsers extends HttpServlet {

	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		List<MyusersVO> users = new MemberDAO().getAllUserList();
		req.setAttribute("userlistALL", users);
		req.setAttribute("loginPage", "/alluserList.jsp");
		req.getRequestDispatcher("/index.jsp").forward(req, resp);
	}
}
<file_sep>package test.servlet;

import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import db.JDBCUtil;

/**
 * Renders every MYUSERS row as an HTML page with per-row edit/delete links.
 * (No @WebServlet annotation here; presumably mapped in web.xml — confirm.)
 */
public class ListServlet extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		req.setCharacterEncoding("utf-8");

		Connection con = null;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			con = JDBCUtil.getConn();
			pstmt = con.prepareStatement("select * from myusers");
			rs = pstmt.executeQuery();

			resp.setContentType("text/html;charset=utf-8");
			PrintWriter pwr = resp.getWriter();
			pwr.print("<html>");
			pwr.print("<head></head>");
			pwr.print("<body>");
			pwr.print("<h1>회원정보</h1>");
			while (rs.next()) {
				String id = rs.getString("id");
				String pw = rs.getString("pw");
				String email = rs.getString("email");
				Date mydate = rs.getDate("mydate");
				pwr.print("<p>id:" + id + "<br>");
				pwr.print("pw:" + pw + "<br>");
				pwr.print("email:" + email + "<br>");
				pwr.print("가입일:" + mydate + "</p><br>");
				pwr.print("<a href='update.do?id=" + id + "'>수정</a>");
				pwr.print("<a href='delete.do?id=" + id + "'>삭제</a>");
			}
			pwr.print("</body>");
			pwr.print("</html>");
			pwr.close();
		} catch (SQLException s) {
			System.out.println(s.getMessage());
		} finally {
			// BUG FIX: the result set, statement and connection were never
			// closed, leaking one database connection per request.
			JDBCUtil.close(rs, pstmt, con);
		}
	}
}
<file_sep>package controller;

import java.io.IOException;
import java.util.ArrayList;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import dao.BoardDao;
import vo.BoardVo;

/** Loads every board post and forwards the list to list.jsp. */
@WebServlet("/list.do")
public class ListController extends HttpServlet {

	@Override
	protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		BoardDao dao = new BoardDao();
		ArrayList<BoardVo> posts = dao.getList();
		req.setAttribute("list", posts);
		req.getRequestDispatcher("/list.jsp").forward(req, resp);
	}
}
<file_sep>package db;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
/**
 * Connection-pool facade over the container-managed DataSource bound at the
 * JNDI name "jdbc/myoracle" (configured in the server's context).
 */
public class ConnectionPool {
static DataSource ds =null;
static { // a static block is used to initialize static members
Context initContext;
try {
initContext = new InitialContext();
Context envContext = (Context) initContext.lookup("java:/comp/env");
ds = (DataSource) envContext.lookup("jdbc/myoracle");
} catch (NamingException e) {
// NOTE(review): if the JNDI lookup fails, ds stays null and every later
// getCon() call throws NullPointerException -- confirm whether failing
// fast (rethrowing) here would be preferable.
e.printStackTrace();
}
}
/** Borrows a pooled connection; callers must return it via close(...). */
public static Connection getCon() throws SQLException {
Connection con =ds.getConnection();
return con;
}
/** Null-safe close of statement, result set and connection (in that order). */
public static void close(PreparedStatement pstmt, ResultSet rs, Connection con) {
try {
if(pstmt!=null) pstmt.close();
if(rs!=null) rs.close();
if(con!=null) con.close();
} catch(SQLException s) {
System.out.println(s.getMessage());
}
}
}
<file_sep>-- Guest-board table; ref/lev/step implement threaded (hierarchical) replies.
CREATE TABLE guestboard (
num NUMBER(5) PRIMARY KEY, -- post number
writer VARCHAR2(10),
title VARCHAR2(10),
content VARCHAR2(50),
ref NUMBER(5), -- thread (group) number
lev NUMBER(5), -- depth (a reply, a reply to a reply, ...)
step NUMBER(5) -- display order within the thread
);
<file_sep>package test.main;
import java.io.IOException;
import java.io.InputStream;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import test.vo.MembersVo;
/**
 * Console harness for the raw (string-keyed) MyBatis API: builds a
 * SqlSessionFactory from mybatis-config.xml and runs one mapped statement.
 * Earlier insert/delete experiments are kept commented out; the live code
 * path runs the "mybatis.MembersMapper.update" statement and commits.
 */
public class TestMain {
public static void main(String[] args) {
String resource = "mybatis/mybatis-config.xml";
try {
InputStream inputStream = Resources.getResourceAsStream(resource);
SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
SqlSession session = sqlSessionFactory.openSession();
// MembersVo vo = new MembersVo(3,"홍길동","010","서울",null);
// if( 0 < session.insert("insert", vo) ) {
// session.commit();
// System.out.println("회원추가 성공");
// session.close();
// };
// if( 0 < session.delete("delete", 3) ) {
// session.commit();
// System.out.println("1명 회원 삭제 성공");
// session.close();
// }
MembersVo vo = new MembersVo(77, null, "12342", "asd2f", null);
session.update("mybatis.MembersMapper.update", vo);
session.commit();
session.close();
} catch(IOException i) {
System.out.println(i.getMessage());
}
}
}
<file_sep>package test.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/*
 * [Servlet]
 * - A Java class that runs inside a web application (a small program executed by the web app).
 * - It can receive a client's request and send a response back to the client.
 * - How to create one:
 * 1. Extend HttpServlet.
 * 2. Override the service method -> implement the request/response handling.
 * 3. Map the path that invokes the servlet (annotation or web.xml).
 * 4. Call the servlet through the mapped path.
 */
@WebServlet("/insert.do") // map the servlet to this URL pattern
public class InsertServlet extends HttpServlet {
/*
 * request : object holding request-related data and operations
 * response : object holding response-related data and operations
 *
 */
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
request.setCharacterEncoding("utf-8");
String id =request.getParameter("id");
String pwd =request.getParameter("pwd");
String email =request.getParameter("email");
System.out.println("<<사용자가 보내온 정보>>");
System.out.println("id:" + id);
System.out.println("pwd:" + pwd);
System.out.println("email:" + email);
response.setContentType("text/html;charset=utf-8");
// set the content type and character encoding of the response
PrintWriter pw =response.getWriter();
pw.print("<html>");
pw.print("<head></head>");
pw.print("<body>");
pw.print("<h1>사용자가 보내온 정보</h1>");
pw.print("id:" + id + "<br>");
pw.print("pwd:" + pwd + "<br>");
pw.print("email:" + email + "<br>");
pw.print("</body>");
pw.print("</html>");
pw.close();
}
}
<file_sep>CREATE TABLE movie (
mnum NUMBER(5) PRIMARY KEY,
title VARCHAR2(20),
content VARCHAR2(100),
director VARCHAR2(50)
);
CREATE TABLE comments (
num NUMBER(5) PRIMARY KEY,
mnum NUMBER(5) REFERENCES movie(mnum),
id VARCHAR2(20),
comments VARCHAR2(50)
);
CREATE SEQUENCE movie_seq;
CREATE SEQUENCE comments_seq;
<file_sep>package test.servlet;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
@WebServlet("/member/loginOk")
public class LoginOkServlet extends HttpServlet{
@Override
protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
String id =req.getParameter("id");
String pwd =req.getParameter("pwd");
HttpSession session =req.getSession();
session.setAttribute("id", id);
System.out.println("실행");
//리다이렉트방식으로 main.jsp로 이동해보세요.
resp.sendRedirect(req.getContextPath() + "/main.jsp");
}
}
| d81e9d10a5973b5e2ef7a270348ad95f9f5e74d4 | [
"Java",
"HTML",
"SQL"
] | 49 | Java | ohjs1/jhta | 05a09396004bbcafbeb7ddc109274421701c024e | db4a0079cd39d73ebee5c314aee601db638620b3 |
refs/heads/master | <file_sep>#!/usr/bin/env bash
{ set +x; } 2>/dev/null
find="$(find-pip)" | exit
[[ -z $find ]] && exit
while IFS= read f; do
[ -e "$f" ] || exit
done <<< "$find";:
<file_sep>#!/usr/bin/env bash
{ set +x; } 2>/dev/null
# find all pip scripts (pipX.Y)
# exclude:
# pip (without version)
[[ -z $PATH ]] && echo "ERROR: \$PATH '' EMPTY" && exit 1
IFS=':';set -- $PATH;IFS=
find="$(find "$@" -name "pip*" 2> /dev/null | sort -u | grep -E "/pip\w.\w"$)"
# exclude .pyenv/shims
find="$(echo "$find" | grep -v shims)"
finded=
set --;while IFS= read path; do
! [ -e "$path" ] && continue # symlink target must exists
file="$(file -b "$path")" || continue
# 1) script
# 2) symbolic link to
# 3) unknown
[[ "$file" == *"script"* ]] && {
[[ "$@" == *"${path##*/}"* ]] && continue
set -- "$@" "$path"; continue
}
[[ "$file" == *"symbolic"* ]] && {
[[ $OSTYPE == *"darwin"* ]] && path="$(readlink "$path")"
[[ $OSTYPE != *"darwin"* ]] && path="$(readlink -f "$path")"
[[ $path =~ [0-9]+$ ]] || continue
[[ "$@" == *"${path##*/}"* ]] && continue
set -- "$@" "$path"; continue
}
echo "ERROR: $path" 1>&2
( set -x; file "$path" )
exit 1
done <<< "$find"
[[ $# == 0 ]] && { which pip; exit; }
find="$(while :; do echo "$1"; shift; [[ $# == 0 ]] && break; done)"
v=
[[ $OSTYPE == *darwin* ]] && {
v=$(defaults read com.apple.versioner.python Version 2> /dev/null)
}
[[ -n $v ]] && {
# run default version first
find="$(echo "$find" | grep "$v";echo "$find" | grep -v "$v")";:
} || {
echo "$find"
}
:
<file_sep><!--
README generated with readmemako.py (github.com/russianidiot/readme-mako.py) and .README dotfiles (github.com/russianidiot-dotfiles/.README)
-->
<p align="center">
<b>find pip scripts</b>
</p>
#### Install
`[sudo] pip install find-pip`
`[sudo] sudo npm install -g find-pip`
#### Features
* readlink if symlink
* exclude .pyenv/shims
#### Usage
```bash
# find-pip --help
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
```
#### Example
```bash
$ find-pip
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
```
[Examples/](https://github.com/russianidiot/find-pip.sh.cli/tree/master/Examples)
Feedback
[](https://github.com/russianidiot/find-pip.sh.cli/issues)
[](https://gitter.im/russianidiot/find-pip.sh.cli)
[](https://github.com/russianidiot)
<file_sep>.. README generated with readmemako.py (github.com/russianidiot/readme-mako.py) and .README dotfiles (github.com/russianidiot-dotfiles/.README)
Install
```````
:code:`[sudo] pip install find-pip`
Features
````````
* readlink if symlink
* exclude .pyenv/shims
Usage
`````
.. code:: bash
# find-pip --help
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
Example
```````
.. code:: bash
$ find-pip
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
`Examples/`_
.. _Examples/: https://github.com/russianidiot/find-pip.sh.cli/tree/master/Examples
Feedback |github_issues| |gitter| |github_follow|
.. |github_issues| image:: https://img.shields.io/github/issues/russianidiot/find-pip.sh.cli.svg
:target: https://github.com/russianidiot/find-pip.sh.cli/issues
.. |github_follow| image:: https://img.shields.io/github/followers/russianidiot.svg?style=social&label=Follow
:target: https://github.com/russianidiot
.. |gitter| image:: https://badges.gitter.im/russianidiot/find-pip.sh.cli.svg
:target: https://gitter.im/russianidiot/find-pip.sh.cli
<file_sep><!--
README generated with readmemako.py (github.com/russianidiot/readme-mako.py) and .README dotfiles (github.com/russianidiot-dotfiles/.README)
-->
[]()
[](https://pypi.python.org/pypi/find-pip)
[](https://www.npmjs.com/package/find-pip)
[](https://drone.io/github.com/russianidiot/find-pip.sh.cli)
[](https://scrutinizer-ci.com/g/russianidiot/find-pip.sh.cli/)
[](https://semaphoreci.com/russianidiot/find-pip-sh-cli)
[](https://app.shippable.com/projects/5722758a2a8192902e1e4c0f/status/)
[](https://travis-ci.org/russianidiot/find-pip.sh.cli)
[](https://app.wercker.com/russianidiot/find-pip.sh.cli/)
<p align="center">
<b>find pip scripts</b>
</p>
#### Install
`[sudo] pip install find-pip`
`[sudo] sudo npm install -g find-pip`
#### Features
* readlink if symlink
* exclude .pyenv/shims
#### Usage
```bash
# find-pip --help
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
```
#### Example
```bash
$ find-pip
/usr/local/bin/pip2.6
/usr/local/bin/pip2.7
/usr/local/bin/pip3.5
```
[Examples/](https://github.com/russianidiot/find-pip.sh.cli/tree/master/Examples)
Feedback
[](https://github.com/russianidiot/find-pip.sh.cli/issues)
[](https://gitter.im/russianidiot/find-pip.sh.cli)
[](https://github.com/russianidiot)
| 71512f730a403fd4c53b4b61be78001e1863131d | [
"Markdown",
"reStructuredText",
"Shell"
] | 5 | Shell | russianidiot/find-pip.sh.cli | 844e93cf4d362e2ae7332308432e73f1936d29b1 | 791a316b54e2dfc1e5a496cb0544bf1e0b1ae348 |
refs/heads/master | <repo_name>AnotherSamPower/FootballRosterRepository<file_sep>/FootballRoster/footballroster_db_SETUP.sql
DROP DATABASE IF EXISTS footballroster_db;
CREATE DATABASE footballroster_db;
USE footballroster_db;
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`forename` varchar(45) NOT NULL,
`surname` varchar(45) DEFAULT NULL,
`email` varchar(45) DEFAULT NULL,
`password` varchar(20) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
DROP TABLE IF EXISTS `league`;
CREATE TABLE `league` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(45) DEFAULT NULL,
`image` varchar(45) DEFAULT NULL,
`user_id` int(11) unsigned DEFAULT NULL,
PRIMARY KEY (id),
FOREIGN KEY (user_id) REFERENCES user(id)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
DROP TABLE IF EXISTS `team`;
CREATE TABLE `team` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(45) DEFAULT NULL,
`image` varchar(45) DEFAULT NULL,
`league_id` int(11) unsigned DEFAULT NULL,
PRIMARY KEY (id),
FOREIGN KEY (league_id) REFERENCES league(id)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
DROP TABLE IF EXISTS `player`;
CREATE TABLE `player` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`forename` varchar(45) NOT NULL,
`surname` varchar(45) DEFAULT NULL,
`image` varchar(45) DEFAULT NULL,
`team_id` int(11) unsigned DEFAULT NULL,
PRIMARY KEY (id),
FOREIGN KEY (team_id) REFERENCES team(id)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
LOCK TABLES user WRITE;
INSERT INTO user VALUES
(1,'Sam','Power','<EMAIL>','admin');
LOCK TABLES league WRITE;
INSERT INTO league VALUES
(1,'The League','default_league.jpg',1);
LOCK TABLES team WRITE;
INSERT INTO team VALUES
(1,'Kilbranish FC','default_team.jpg',1);
LOCK TABLES player WRITE;
INSERT INTO player VALUES
(1,'Sam','Power','default_player.jpg',1);
UNLOCK TABLES;
<file_sep>/FootballRoster/src/main/java/com/footballroster/entities/User.java
package com.footballroster.entities;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="user") // database table
public class User {
@Id @GeneratedValue(strategy=GenerationType.IDENTITY)
private int id;
private String forename;
private String surname;
private String email;
private String password;
public int getId() {
return id;
}
public String getName() {
return forename;
}
public void setName(final String name) {
this.forename = name;
}
public String getSurname() {
return surname;
}
public void setSurname(final String surname) {
this.surname = surname;
}
public String getEmail() {
return email;
}
public void setEmail(final String email) {
this.email = email;
}
public String getPassword() {
return password;
}
public void setPassword(final String password) {
this.password = <PASSWORD>;
}
}
<file_sep>/FootballRoster/src/main/java/com/footballroster/daos/PlayerDAO.java
package com.footballroster.daos;
import java.util.List;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import com.footballroster.entities.League;
import com.footballroster.entities.Player;
import com.footballroster.entities.User;
@Stateless
@LocalBean
public class PlayerDAO {
@PersistenceContext
private EntityManager entityManager;
public List<Player> getAllPlayers() {
final Query query = entityManager.createQuery("SELECT u FROM Player u");
return query.getResultList();
}
public Player getPlayer(final int id){
return entityManager.find(Player.class, id);
}
public void save(final Player Player) {
entityManager.persist(Player);
}
public void update(final Player Player) {
entityManager.merge(Player);
}
public void delete(final int id) {
entityManager.remove(getPlayer(id));
}
}
<file_sep>/FootballRoster/src/main/java/com/footballroster/webservices/UserWS.java
package com.footballroster.webservices;
import java.util.List;
import javax.ejb.EJB;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.footballroster.daos.UserDAO;
import com.footballroster.entities.User;
@Path("/user")
@Stateless
@LocalBean
public class UserWS {
@EJB
private UserDAO userDao;
@GET
@Path("/allUsers")
@Produces({ MediaType.APPLICATION_JSON})
public Response findAll() {
System.out.println("Get all users");
final List<User> users = userDao.getAllUsers();
return Response.status(200).entity(users).build();
}
@GET
@Produces({ MediaType.APPLICATION_JSON })
@Path("/email/{email}")
public Response findUserByEmail(@PathParam("email") String email) {
System.out.println("email "+email);
User user = userDao.getUserByEmail(email);
return Response.status(200).entity(user).build();
}
@POST
@Path("/addUser")
@Produces({ MediaType.APPLICATION_JSON })
public Response saveUser(final User user) {
userDao.save(user);
return Response.status(201).entity(user).build();
}
@PUT @Path("/editUser/{id}")
@Consumes({ MediaType.APPLICATION_JSON })
public Response updateUser(User user) {
userDao.update(user);
return Response.status(200).entity(user).build();
}
@DELETE
@Path("/deleteUser/{id}")
public Response deleteUser(@PathParam("id") int id){
System.out.println("user " + id + " deleted");
userDao.delete(id);
return Response.status(204).build();
}
}
| 91ad131c7afb354374ba2a2865020e834f05521d | [
"Java",
"SQL"
] | 4 | SQL | AnotherSamPower/FootballRosterRepository | 1a2a93c8456d31baa5a1d0d72e533a5052521bc4 | 27221848aed454a8407d687efd696fb06ca33187 |
refs/heads/master | <repo_name>rahul7284/LibraryAngular<file_sep>/src/app/service/book-service.service.ts
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Book } from '../library-bookcreate/library-bookcreate.component';
@Injectable({
providedIn: 'root'
})
export class BookServiceService {
constructor(private http:HttpClient) { }
book : Book
saveAllStudents(book){
console.log("service"+book);
return this.http.post<Book>(`http://localhost:8080/book`,book);
//console.log("customised Welcome")
}
setData(Buk){
this.book = Buk;
}
}
<file_sep>/src/app/library-bookcreate/library-bookcreate.component.ts
import { Component, OnInit } from '@angular/core';
import { MatTableDataSource } from '@angular/material';
import { BookServiceService } from '../service/book-service.service';
export class College{
public collgeid : number;
public collegeName : string;
public address : string;
public orientation : string;
public college_type : string;
}
export class Book{
constructor( public bookName : string,
public authorName : string,
public bookNumber : number,
public courseName : string,
public publicationDate : string,
public bookPrice : number,
public bookid : number,
public college : College
){
}
}
@Component({
selector: 'app-library-bookcreate',
templateUrl: './library-bookcreate.component.html',
styleUrls: ['./library-bookcreate.component.css']
})
export class LibraryBookcreateComponent implements OnInit {
book: Book;
booksArrsy: Book[]
displayedLibColumns: string[] = ['authorName', 'bookName', 'bookPrice', 'courseName','bookNumber','publicationDate']
dataLibSource =new MatTableDataSource<Book>();
isResponseSuccess:boolean
errorMessage:string
constructor(private bookService : BookServiceService) { }
ngOnInit() {
this.book = new Book('','',1,'','',null,1,new College);
}
saveBooks(){
console.log(this.book.bookName+"COMPO");
this.bookService.saveAllStudents(this.book).subscribe(
response=>this.handleSuccess(response)
);
}
handleSuccess(response){
this.book = response
//this.dataLibSource = [this.book]
//this.dataLibSource.data=response as Book[];
//this.dataLibSource.data = response as Book[];
this.isResponseSuccess = true
this.showTable();
console.log(this.dataLibSource.data+"HANDLSUCCESS of Component")
}
handleError(error){
this.errorMessage = error
this.isResponseSuccess = false
}
showTable(){
let def =this.bookService.saveAllStudents(this.book);
def.subscribe
}
populateData(data){
this.dataLibSource = data;
}
}
| 9c5d3599600c6d7bcf2ee36df87ce37c87961cad | [
"TypeScript"
] | 2 | TypeScript | rahul7284/LibraryAngular | 83136acf498895977a3a5df0aa10410f891f86a5 | 33825b0e4247cfd03415e5e61ce389d00a0ef2e9 |
refs/heads/master | <repo_name>brahmantyo/webgenerator<file_sep>/bin/power.sh
#PWD=`pwd`
if [ "$1" == "--help" ]||[ "$1" == "" ]; then
echo "Usage: power.sh [OPTIONS][other_executable_script [options]]"
echo ""
echo "Options:"
echo " --help This help page"
echo ""
echo "Credit:"
echo " <NAME> (2016)"
exit 0
fi
OPR=`whoami`
export OPR
CMD=$1
shift;
sudo $CMD $*
| d21a2e2c7caa2f8d736313db0549026cd50bd408 | [
"Shell"
] | 1 | Shell | brahmantyo/webgenerator | 27641a9416b1f8579185a0d16d5f0872c51295b0 | 806a88d28b4be8c2be083109db17893d598c77da |
refs/heads/master | <file_sep><?php
//só poderá ser acessado por alguem que já esteja logado
session_start();
//se existe a sessão com o id e ela não tiver vazia
if (isset($_SESSION['id']) && empty($_SESSION['id'])== false ){
//echo "Área restrita (vai ficar o Beaba)";
}else{
//redirecionar para o login
//mandando o usuário para o login.php
header("Location: login.php");
}
?>
<html lang="pt-br">
<head>
<meta charset="utf-8"/>
<title>Projeto Beaba</title>
<link rel="stylesheet" type="text/css" href="CSS/estilos1.css">
<head>
<div id ="corpo-form">
<form >
<?php
date_default_timezone_set('America/Sao_Paulo');
echo date ('d/m/Y \à\s H:i:s');
?>
<h1>Seja bem vindo aqui está sua tela inicial</h1>
<center>
<a href="Sair.php" type="button" value="Sair">Sair</a>
</center>
</form>
<file_sep><?php
session_start();
//se o campo email for citado
if(isset($_POST['email']) && empty($_POST['email']) == false ){
$email = addslashes($_POST['email']);
$senha = md5(addslashes($_POST['senha']));
//dados do usuário
//verificar se o usuario existe
// se add o email e senha corretamente
$nomedobanco = "mysql:dbname=leticia;host=127.0.0.1";
$usuariodobanco = "root";
$senhadobanco = "";
try{
//configuração do banco
$bancodedados = new PDO($nomedobanco,$usuariodobanco,$senhadobanco);
//execute($bancodedados);
//query com resultado ou não
//$sql = $bancodedados -> query("SELECT * FROM `usuarios` WHERE `email` ='$email' AND `senha` = '$senha' ");
//$sql = $bancodedados-> query( "INSERT INTO `usuarios` ( `email`, `senha`) VALUES (NULL,:$email, :$senha)");
$sql = $bancodedados-> query("INSERT INTO `usuarios` (`id`, `email`, `senha`) VALUES (NULL, '$email', '$senha')");
//se ele digitou errado ou não existe
//metodo rowCount onde vai retornar os resultados.
//if($sql-> rowCount() > 0 ){
} catch(PDOException $e){
echo "Falhou: ".$e->getMessage();
}
}
?>
<html lang="pt-br">
<head>
<meta charset="utf-8"/>
<title>Projeto </title>
<link rel="stylesheet" type="text/css" href="CSS/estilos.css">
<head>
<div id ="corpo-form">
<h1 style = "font-family: Monaco">
Cadastro de Usuário </h1>
<form method = "POST">
<!-- E-mail:<br/> -->
<input type="email" placeholder="Email" name = "email" />
<!-- Senha:<br/> -->
<input type="password" placeholder="<PASSWORD>" name="<PASSWORD>" />
<input type ="submit" value="Cadastrar" />
</form>
</div>
</head>
</html><file_sep><?php
session_start();
//se o campo email for citado
if(isset($_POST['email']) && empty($_POST['email']) == false ){
$email = addslashes($_POST['email']);
$senha = md5(addslashes($_POST['senha']));
//dados do usuário
//verificar se o usuario existe
// se add o email e senha corretamente
$nomedobanco = "mysql:dbname=leticia;host=127.0.0.1";
$usuariodobanco = "root";
$senhadobanco = "";
try{
//configuração do banco
$bancodedados = new PDO($nomedobanco,$usuariodobanco,$senhadobanco);
//execute($bancodedados);
//query com resultado ou não
$sql = $bancodedados -> query("SELECT * FROM `usuarios` WHERE `email` ='$email' AND `senha` = '$senha' ");
//se ele digitou errado ou não existe
//metodo rowCount onde vai retornar os resultados.
//if($sql-> rowCount() > 0 ){
if($sql-> rowCount() > 0 ){
//se ele digitou tudo certo vai pegar o id e salvar na sessão
//tornou dado em um array
$dado = $sql->fetch();
$_SESSION['id'] = $dado['id'];
header("Location:index.php");
}
} catch(PDOException $e){
echo "Falhou: ".$e->getMessage();
}
}
?>
<html lang="pt-br">
<head>
<meta charset="utf-8"/>
<title>Projeto Beaba</title>
<link rel="stylesheet" type="text/css" href="CSS/estilos1.css">
<head>
<div id ="corpo-form">
<h1 style = "font-family: Monaco"> Beaba Sistema de Login </h1>
<form method = "POST">
<!-- E-mail:<br/> -->
<input type="email" placeholder="Email" name = "email" />
<!-- Senha:<br/> -->
<input type="<PASSWORD>" placeholder="<PASSWORD>" name="<PASSWORD>" />
<button type="submit">Entrar</button>
<a href="cadastro.php" type="button" value="Cadastrar">Painel</a>
<div class="load"> <i class="fa fa-cog fa-spin fa-5x fa-fw"></i><span class="sr-only">Loading...</span> </div>
</form>
</div>
</head>
</html>
<?php
//se o campo email e campo senha estiverem vazios vai retornar uma mensagem
if(isset($_POST['email']) && empty($_POST['senha']) ){
?>
<div class = "msg-erro">
Preencha todos os campos
</div>
<?php
}
?> | 9dcde785a6ec8a6eeb557392c3b116cd9d73fc68 | [
"PHP"
] | 3 | PHP | letbarros2/PHPLogin | 6eae3c5eff00dfc21e31589e2a538c8f5d3b028d | b8c0421e3bd0779da7867a5768aa21b1489942ce |
refs/heads/master | <file_sep>ENABLE_HERO_RESPAWN = false
UNIVERSAL_SHOP_MODE = false
ALLOW_SAME_HERO_SELECTION = true
HERO_SELECTION_TIME = 30
PRE_GAME_TIME = 30
POST_GAME_TIME = 60.0
TREE_REGROW_TIME = 60.0
GOLD_PER_TICK = 0
GOLD_TICK_TIME = 5
RECOMMENDED_BUILDS_DISABLED = false
CAMERA_DISTANCE_OVERRIDE = 1300
BUYBACK_ENABLED = false
USE_STANDARD_HERO_GOLD_BOUNTY = false
USE_CUSTOM_TOP_BAR_VALUES = true
TOP_BAR_VISIBLE = false
SHOW_KILLS_ON_TOPBAR = true
REMOVE_ILLUSIONS_ON_DEATH = false
DISABLE_GOLD_SOUNDS = true
END_GAME_ON_KILLS = false
USE_CUSTOM_XP_VALUES = false
DISABLE_FOG_OF_WAR_ENTIRELY = true
DISABLE_GOLD_SOUNDS = true
-- Create the GameMode class table only once; reloads of this script
-- (e.g. script_reload during development) reuse the existing instance.
if GameMode == nil then
	GameMode = class({})
end
-- Per-map configuration. Each entry describes one shipped map:
--   banner      : exact log line printed when the map is recognized
--   teamSize    : max players per team
--   customTeams : how many DOTA_TEAM_CUSTOM_<i> slots are opened (plus Radiant/Dire)
--   mode        : which of the solo/duo/trio/quartet flags to raise
--   random      : whether the map uses random hero assignment
--   instantPick : skip hero selection entirely (solo random map only)
local MAP_SETTINGS = {
    battleroyal_solo           = { banner = "ACTIVATE SOLO-MODE",           teamSize = 1, customTeams = 8, mode = "solo",    random = false },
    battleroyal_solo_random    = { banner = "ACTIVATE SOLO-RANDOM-MODE",    teamSize = 1, customTeams = 8, mode = "solo",    random = true, instantPick = true },
    battleroyal_duo            = { banner = "ACTIVATE DUO-MODE",            teamSize = 2, customTeams = 8, mode = "duo",     random = false },
    battleroyal_duo_random     = { banner = "ACTIVATE DUO-MODE-RANDOM",     teamSize = 2, customTeams = 8, mode = "duo",     random = true },
    battleroyal_trio           = { banner = "ACTIVATE TRIO-MODE",           teamSize = 3, customTeams = 6, mode = "trio",    random = false },
    battleroyal_trio_random    = { banner = "ACTIVATE TRIO-MODE-RANDOM",    teamSize = 3, customTeams = 6, mode = "trio",    random = true },
    battleroyal_quartet        = { banner = "ACTIVATE QUARTET-MODE",        teamSize = 4, customTeams = 4, mode = "quartet", random = false },
    battleroyal_quartet_random = { banner = "ACTIVATE QUARTET-MODE-RANDOM", teamSize = 4, customTeams = 4, mode = "quartet", random = true },
}

-- Caps Radiant, Dire and the first `customTeams` extra custom teams
-- at `teamSize` players each. Replaces the former copy-pasted branches.
local function SetupTeamSlots(teamSize, customTeams)
    GameRules:SetCustomGameTeamMaxPlayers(DOTA_TEAM_GOODGUYS, teamSize)
    GameRules:SetCustomGameTeamMaxPlayers(DOTA_TEAM_BADGUYS, teamSize)
    for i = 1, customTeams do
        GameRules:SetCustomGameTeamMaxPlayers(_G["DOTA_TEAM_CUSTOM_" .. i], teamSize)
    end
end

-- Raises exactly one of the global soloMode/duoMode/trioMode/quartetMode
-- flags and records whether the map is a random-hero variant.
local function SetModeFlags(modeName, random)
    randomMap = random
    soloMode = (modeName == "solo")
    duoMode = (modeName == "duo")
    trioMode = (modeName == "trio")
    quartetMode = (modeName == "quartet")
end

-- Entry point called once by the engine: configures teams for the loaded
-- map, core game rules, healthbar colors, event listeners and drop table.
function GameMode:InitGameMode()
    GameMode = self

    local settings = MAP_SETTINGS[GetMapName()]
    if settings ~= nil then
        print(settings.banner)
        SetupTeamSlots(settings.teamSize, settings.customTeams)
        if settings.instantPick then
            HERO_SELECTION_TIME = 0
        end
        SetModeFlags(settings.mode, settings.random)
    else
        -- Flags stay nil on an unknown map, matching the original behavior.
        print("ERROR -> NO LEGAL MAP")
    end

    -- Distinct healthbar colors so up to ten teams stay distinguishable.
    SetTeamCustomHealthbarColor(2, 255, 0, 0) --red
    SetTeamCustomHealthbarColor(3, 255, 255, 0) --yellow
    SetTeamCustomHealthbarColor(6, 0, 255, 0) --green
    SetTeamCustomHealthbarColor(7, 0, 255, 255) --light blue
    SetTeamCustomHealthbarColor(8, 0, 0, 255) --blue
    SetTeamCustomHealthbarColor(9, 255, 0, 255) --magenta
    SetTeamCustomHealthbarColor(10, 255, 128, 0) --orange
    SetTeamCustomHealthbarColor(11, 128, 0, 255) --lila
    SetTeamCustomHealthbarColor(12, 128, 255, 0) --neon
    SetTeamCustomHealthbarColor(13, 255, 255, 255) --white

    -- Core game rules, driven by the constants at the top of the file.
    GameRules:SetHeroRespawnEnabled(ENABLE_HERO_RESPAWN)
    GameRules:SetUseUniversalShopMode(UNIVERSAL_SHOP_MODE)
    GameRules:SetSameHeroSelectionEnabled(ALLOW_SAME_HERO_SELECTION)
    GameRules:SetHeroSelectionTime(HERO_SELECTION_TIME)
    GameRules:SetPreGameTime(PRE_GAME_TIME)
    GameRules:SetPostGameTime(POST_GAME_TIME)
    GameRules:SetTreeRegrowTime(TREE_REGROW_TIME)
    GameRules:SetUseCustomHeroXPValues(USE_CUSTOM_XP_VALUES)
    GameRules:SetGoldPerTick(GOLD_PER_TICK)
    GameRules:SetGoldTickTime(GOLD_TICK_TIME)
    GameRules:SetUseBaseGoldBountyOnHeroes(USE_STANDARD_HERO_GOLD_BOUNTY)
    GameRules:SetStrategyTime(0.0)
    GameRules:SetShowcaseTime(0.0)
    GameRules:SetStartingGold(0)
    GameRules:SetCreepMinimapIconScale(0.1)

    -- Event hooks; handler methods are defined below / elsewhere in this file.
    ListenToGameEvent('entity_killed', Dynamic_Wrap(GameMode, 'OnEntityKilled'), self)
    ListenToGameEvent('dota_player_pick_hero', Dynamic_Wrap(GameMode, 'OnPlayerPickHero'), self)
    ListenToGameEvent('player_connect_full', Dynamic_Wrap(GameMode, 'OnConnectFull'), self)
    ListenToGameEvent('npc_spawned', Dynamic_Wrap(GameMode, 'OnNPCSpawned'), self)
    ListenToGameEvent('game_rules_state_change', Dynamic_Wrap(GameMode, 'OnGameRulesStateChange'), self)
    ListenToGameEvent('player_fullyjoined', Dynamic_Wrap(GameMode, 'OnPlayerFullyjoined'), self)

    -- Item drop table consumed by RollDrops() when creeps/creatures die.
    GameRules.DropTable = LoadKeyValues("scripts/itemdrops.txt")
end
-- Cached handle to the GameModeEntity; stays nil until CaptureGameMode()
-- performs its one-time setup, then doubles as the "already configured" guard.
mode = nil
-- Fired when a player has fully connected; ensures the one-time
-- GameModeEntity configuration has been applied (idempotent).
function GameMode:OnConnectFull(keys)
    print("CONNECTED")
    -- `self` is the GameMode instance (GameMode = self in InitGameMode),
    -- so this is the same call as GameMode:CaptureGameMode().
    self:CaptureGameMode()
    print("OnConnectFull successfull")
end
-- One-time configuration of the GameModeEntity (buyback, top bar, HUD,
-- gold sounds, illusion cleanup). Guarded by the global `mode`: subsequent
-- calls log and return without touching the entity again.
function GameMode:CaptureGameMode()
    print("setting up more Gamerules")
    if mode ~= nil then
        -- Already configured on an earlier connect; nothing to do.
        print("finished")
        return
    end
    print("starting")
    mode = GameRules:GetGameModeEntity()
    mode:SetBuybackEnabled(BUYBACK_ENABLED)
    mode:SetTopBarTeamValuesVisible(false)
    mode:SetGoldSoundDisabled(DISABLE_GOLD_SOUNDS)
    mode:SetRemoveIllusionsOnDeath(REMOVE_ILLUSIONS_ON_DEATH)
    mode:SetHUDVisible(1, false)
    mode:SetHUDVisible(2, false)
    print("finishing")
    print("finished")
end
-- Fired when a player picks a hero: swaps the default TP scroll for the
-- parachute item ("item_fallschirm") used for the battle-royal drop-in,
-- with its cooldown cleared so it is usable immediately.
function GameMode:OnPlayerPickHero( keys )
    local hero = EntIndexToHScript(keys.heroindex)
    if hero == nil then
        return
    end
    -- Slot 5 is where the starting TP scroll is placed. Guard against it
    -- being absent (repick / modified loadout) — the original code called
    -- RemoveItem(nil) in that case and errored out.
    local tpscroll = hero:GetItemInSlot(5)
    if tpscroll ~= nil then
        hero:RemoveItem(tpscroll)
    end
    local item_para = CreateItem("item_fallschirm", hero, hero)
    hero:AddItem(item_para)
    -- Operate on the item handle we just created instead of assuming it
    -- landed in inventory slot 0.
    item_para:EndCooldown()
end
function GameMode:OnEntityKilled( keys )
local killer = EntIndexToHScript( keys.entindex_attacker )
local killedUnit = EntIndexToHScript( keys.entindex_killed )
if soloMode then
print("DECIDING")
if killedUnit:IsCreature() or killedUnit:IsCreep() then
print("Roll Drops for: "..tostring(killedUnit:GetUnitName()))
RollDrops(killedUnit)
elseif killedUnit:IsHero() then
print("CALCULATING")
if playercount == nil then
print("GETTING PLAYERCOUNT")
playercount = PlayerResource:GetPlayerCount()
print(playercount)
end
print( "Aegis? -> "..tostring(PlayerResource:GetAegisPickups(killedUnit:GetPlayerID())))
deathpos = killedUnit:GetOrigin()
for i=0,10 do
dead_item = killedUnit:GetItemInSlot(i)
print(tostring(dead_item).." in Slot: "..i)
if dead_item ~= nil then
dead_itemname = dead_item:GetName()
print(dead_itemname)
if dead_itemname == "item_rapier" or dead_itemname == "item_gem" then
print("Not spawning item of slot "..i)
else
local dead_item_loot = CreateItem(dead_itemname, nil, nil)
dead_item_loot:SetPurchaseTime(0)
local dead_item_pos = killedUnit:GetAbsOrigin()
local dead_item_drop = CreateItemOnPositionSync( dead_item_pos, dead_item_loot )
local dead_item_launch = dead_item_pos+RandomVector(RandomFloat(150,200))
dead_item_loot:LaunchLoot(false, 200, 1, dead_item_launch)
end
end
end
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-16000,0), 25000, 1300, false)
playercount = playercount - 1
if playercount == 1 then
print("creature? -" .. tostring(killer:IsCreature()) .. "- creep? -" .. tostring(killer:IsCreep()))
if killer:IsCreature() or killer:IsCreep() or killer == killedUnit then
print(killedUnit:GetClassname())
for i=0, PlayerResource:GetPlayerCount() do
if PlayerResource:IsValidPlayerID(i) then
lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
if lastwin_ent:IsAlive() then
lastwinner = PlayerResource:GetPlayerName(i)
lastwinnerteam = PlayerResource:GetSelectedHeroEntity(i):GetTeamNumber()
end
end
end
GameRules:SetCustomVictoryMessage("\n" .. lastwinner .. " won the Game because " .. PlayerResource:GetPlayerName(killedUnit:GetPlayerID()) .. " was killed!")
GameRules:SetGameWinner(lastwinnerteam)
else
local playerid = killer:GetPlayerOwnerID()
local heroID = PlayerResource:GetSelectedHeroName(playerid)
heroname = ""
if heroID == "npc_dota_hero_abaddon" then
heroname = "Abbadon"
elseif heroID == "npc_dota_hero_abyssal_underlord" then
heroname = "Underlord"
elseif heroID == "npc_dota_hero_alchemist" then
heroname = "Alchemist"
elseif heroID == "npc_dota_hero_ancient_apparition" then
heroname = "Ancient Apparition"
elseif heroID == "npc_dota_hero_antimage" then
heroname = "Ancient Apparition"
elseif heroID == "npc_dota_hero_arc_warden" then
heroname = "Arc Warden"
elseif heroID == "npc_dota_hero_axe" then
heroname = "Axe"
elseif heroID == "npc_dota_hero_bane" then
heroname = "Bane"
elseif heroID == "npc_dota_hero_batrider" then
heroname = "Batrider"
elseif heroID == "npc_dota_hero_beastmaster" then
heroname = "Beastmaster"
elseif heroID == "npc_dota_hero_bloodseeker" then
heroname = "Bloodseeker"
elseif heroID == "npc_dota_hero_bounty_hunter" then
heroname = "Bounty Hunter"
elseif heroID == "npc_dota_hero_brewmaster" then
heroname = "Brewmaster"
elseif heroID == "npc_dota_hero_bristleback" then
heroname = "Bristleback"
elseif heroID == "npc_dota_hero_broodmother" then
heroname = "Broodmother"
elseif heroID == "npc_dota_hero_centaur" then
heroname = "Centaur Warrunner"
elseif heroID == "npc_dota_hero_chaos_knight" then
heroname = "Chaos Knight"
elseif heroID == "npc_dota_hero_chen" then
heroname = "Chen"
elseif heroID == "npc_dota_hero_clinkz" then
heroname = "Clinkz"
elseif heroID == "npc_dota_hero_crystal_maiden" then
heroname = "Crystal Maiden"
elseif heroID == "npc_dota_hero_dark_seer" then
heroname = "Dark Seer"
elseif heroID == "npc_dota_hero_dazzle" then
heroname = "Dazzle"
elseif heroID == "npc_dota_hero_death_prophet" then
heroname = "Death Prophet"
elseif heroID == "npc_dota_hero_disruptor" then
heroname = "Disruptor"
elseif heroID == "npc_dota_hero_doom_bringer" then
heroname = "Doom"
elseif heroID == "npc_dota_hero_dragon_knight" then
heroname = "Dragon Knight"
elseif heroID == "npc_dota_hero_drow_ranger" then
heroname = "Drow Ranger"
elseif heroID == "npc_dota_earth_spirit" then
heroname = "Earth Spirit"
elseif heroID == "npc_dota_hero_earthshaker" then
heroname = "Earthshaker"
elseif heroID == "npc_dota_hero_elder_titan" then
heroname = "Elder Titan"
elseif heroID == "npc_dota_hero_ember_spirit" then
heroname = "Ember Spirit"
elseif heroID == "npc_dota_hero_enchantress" then
heroname = "Enchantress"
elseif heroID == "npc_dota_hero_enigma" then
heroname = "Enigma"
elseif heroID == "npc_dota_hero_faceless_void" then
heroname = "Faceless Void"
elseif heroID == "npc_dota_hero_furion" then
heroname = "Nature´s Prophet"
elseif heroID == "npc_dota_hero_gyrocopter" then
heroname = "Gyrocopter"
elseif heroID == "npc_dota_hero_huskar" then
heroname = "Huskar"
elseif heroID == "npc_dota_hero_invoker" then
heroname = "Invoker"
elseif heroID == "npc_dota_hero_jakiro" then
heroname = "Jakiro"
elseif heroID == "npc_dota_hero_juggernaut" then
heroname = "Juggernaut"
elseif heroID == "npc_dota_hero_keeper_of_the_light" then
heroname = "Keeper of the Light"
elseif heroID == "npc_dota_hero_kunkka" then
heroname = "Kunkka"
elseif heroID == "npc_dota_hero_legion_commander" then
heroname = "Legion Commander"
elseif heroID == "npc_dota_hero_leshrac" then
heroname = "Leshrac"
elseif heroID == "npc_dota_hero_lich" then
heroname = "Lich"
elseif heroID == "npc_dota_hero_life_stealer" then
heroname = "Life Stealer"
elseif heroID == "npc_dota_hero_lina" then
heroname = "Lina"
elseif heroID == "npc_dota_hero_lion" then
heroname = "Lion"
elseif heroID == "npc_dota_hero_lone_druid" then
heroname = "Lone Druid"
elseif heroID == "npc_dota_hero_luna" then
heroname = "Luna"
elseif heroID == "npc_dota_hero_lycan" then
heroname = "Lycan"
elseif heroID == "npc_dota_hero_magnataur" then
heroname = "Magnus"
elseif heroID == "npc_dota_hero_medusa" then
heroname = "Medusa"
elseif heroID == "npc_dota_hero_meepo" then
heroname = "Meepo"
elseif heroID == "npc_dota_hero_mirana" then
heroname = "Mirana"
elseif heroID == "npc_dota_hero_morphling" then
heroname = "Morphling"
elseif heroID == "npc_dota_hero_naga_siren" then
heroname = "Naga Siren"
elseif heroID == "npc_dota_hero_necrolyte" then
heroname = "Necrophos"
elseif heroID == "npc_dota_hero_nevermore" then
heroname = "Shadow Fiend"
elseif heroID == "npc_dota_hero_night_stalker" then
heroname = "Night Stalker"
elseif heroID == "npc_dota_hero_nyx_assassin" then
heroname = "Nyx Assassin"
elseif heroID == "npc_dota_hero_obsidian_destroyer" then
heroname = "Outworld Devourer"
elseif heroID == "npc_dota_hero_ogre_magi" then
heroname = "Ogre Magi"
elseif heroID == "npc_dota_hero_omniknight" then
heroname = "Omniknight"
elseif heroID == "npc_dota_hero_oracle" then
heroname = "Oracle"
elseif heroID == "npc_dota_hero_phantom_assassin" then
heroname = "Phantom Assassin"
elseif heroID == "npc_dota_hero_phantom_lancer" then
heroname = "Phantom Lancer"
elseif heroID == "npc_dota_hero_phoenix" then
heroname = "Phoenix"
elseif heroID == "npc_dota_hero_puck" then
heroname = "Puck"
elseif heroID == "npc_dota_hero_pudge" then
heroname = "Pudge"
elseif heroID == "npc_dota_hero_pugna" then
heroname = "Pugna"
elseif heroID == "npc_dota_hero_queenofpain" then
heroname = "Queen of Pain"
elseif heroID == "npc_dota_hero_rattletrap" then
heroname = "Clockwerk"
elseif heroID == "npc_dota_hero_razor" then
heroname = "Razor"
elseif heroID == "npc_dota_hero_riki" then
heroname = "Riki"
elseif heroID == "npc_dota_hero_rubick" then
heroname = "Rubick (the grand Magus lol)"
elseif heroID == "npc_dota_hero_sand_king" then
heroname = "Sand King"
elseif heroID == "npc_dota_hero_shadow_demon" then
heroname = "Shadow Demon"
elseif heroID == "npc_dota_hero_shadow_shaman" then
heroname = "Shadow Shaman"
elseif heroID == "npc_dota_hero_shredder" then
heroname = "Timbersaw"
elseif heroID == "npc_dota_hero_silencer" then
heroname = "Silencer"
elseif heroID == "npc_dota_hero_skeleton_king" then
heroname = "Wraith King"
elseif heroID == "npc_dota_hero_skywrath_mage" then
heroname = "Skywrath Mage"
elseif heroID == "npc_dota_hero_slardar" then
heroname = "Slardar"
elseif heroID == "npc_dota_hero_slark" then
heroname = "Slark"
elseif heroID == "npc_dota_hero_sniper" then
heroname = "Sniper"
elseif heroID == "npc_dota_hero_spectre" then
heroname = "Spectre"
elseif heroID == "npc_dota_hero_spirit_breaker" then
heroname = "Spirit Breaker"
elseif heroID == "npc_dota_hero_storm_spirit" then
heroname = "Storm Spirit"
elseif heroID == "npc_dota_hero_sven" then
heroname = "Sven"
elseif heroID == "npc_dota_hero_techies" then
heroname = "Techies"
elseif heroID == "npc_dota_hero_templar_assassin" then
heroname = "Templar Assassin"
elseif heroID == "npc_dota_hero_terrorblade" then
heroname = "Terrorblade"
elseif heroID == "npc_dota_hero_tidehunter" then
heroname = "Tidehunter"
elseif heroID == "npc_dota_hero_tinker" then
heroname = "Tinker"
elseif heroID == "npc_dota_hero_tiny" then
heroname = "Tiny"
elseif heroID == "npc_dota_hero_treant" then
heroname = "Treant Protector"
elseif heroID == "npc_dota_hero_troll_warlord" then
heroname = "Troll Warlord"
elseif heroID == "npc_dota_hero_tusk" then
heroname = "Tusk"
elseif heroID == "npc_dota_hero_undying" then
heroname = "Undying"
elseif heroID == "npc_dota_hero_ursa" then
heroname = "Ursa"
elseif heroID == "npc_dota_hero_vengefulspirit" then
heroname = "Vengeful Spirit"
elseif heroID == "npc_dota_hero_venomancer" then
heroname = "Venomancer"
elseif heroID == "npc_dota_hero_viper" then
heroname = "Viper"
elseif heroID == "npc_dota_hero_visage" then
heroname = "Visage"
elseif heroID == "npc_dota_hero_warlock" then
heroname = "Warlock"
elseif heroID == "npc_dota_hero_weaver" then
heroname = "Weaver"
elseif heroID == "npc_dota_hero_windrunner" then
heroname = "Windranger"
elseif heroID == "npc_dota_hero_winter_wyvern" then
heroname = "Winter Wyvern"
elseif heroID == "npc_dota_hero_wisp" then
heroname = "Io"
elseif heroID == "npc_dota_hero_witch_doctor" then
heroname = "Witch Doctor"
elseif heroID == "npc_dota_hero_zuus" then
heroname = "Zeus"
elseif heroID == "npc_dota_hero_dark_willow" then
heroname = "Dark Willow"
elseif heroID == "npc_dota_hero_pangolier" then
heroname = "Pangolier"
elseif heroID == "npc_dota_hero_monkey_king" then
heroname = "Monkey King"
else
heroname = "a hero!"
end
local playername = PlayerResource:GetPlayerName(playerid)
local victorymsg = playername .. " won \n the game as \n" .. heroname
GameRules:SetCustomVictoryMessage( "\n" .. victorymsg)
GameRules:SetGameWinner(killer:GetTeamNumber())
end
elseif playercount == 0 then
GameRules:SetCustomVictoryMessage(PlayerResource:GetPlayerName(killedUnit:GetPlayerID()) .. " won the game!")
GameRules:SetGameWinner(PlayerResource:GetTeam(killedUnit:GetPlayerID()))
end
end
elseif duoMode then
if killedUnit ~= nil then
killedUnitTeam = killedUnit:GetTeam()
end
if killer ~= nil then
killerTeam = killer:GetTeam()
end
print("DECIDING")
if killedUnit:IsCreature() or killedUnit:IsCreep() then
print("Killed Unit is Creep!")
print("Roll Drops for: "..tostring(killedUnit:GetUnitName()))
RollDrops(killedUnit)
elseif killedUnit:IsHero() then
print("Killed Unit is Hero!")
print("CALCULATING")
if playercount == nil then
print("GETTING PLAYERCOUNT")
playercount = PlayerResource:GetPlayerCount()
print("Playercount = "..tostring(playercount))
end
if team_setup == nil then
team_setup = true
print("Setting up Teams!")
Teams = {}
team_radiant = PlayerResource:GetPlayerCountForTeam(2)
team_dire = PlayerResource:GetPlayerCountForTeam(3)
team_1 = PlayerResource:GetPlayerCountForTeam(6)
team_2 = PlayerResource:GetPlayerCountForTeam(7)
team_3 = PlayerResource:GetPlayerCountForTeam(8)
team_4 = PlayerResource:GetPlayerCountForTeam(9)
team_5 = PlayerResource:GetPlayerCountForTeam(10)
team_6 = PlayerResource:GetPlayerCountForTeam(11)
team_7 = PlayerResource:GetPlayerCountForTeam(12)
team_8 = PlayerResource:GetPlayerCountForTeam(13)
if killedUnit:GetTeam() == 2 then
team_radiant = team_radiant - 1
elseif killedUnit:GetTeam() == 3 then
team_dire = team_dire - 1
elseif killedUnit:GetTeam() == 6 then
team_1 = team_1 - 1
elseif killedUnit:GetTeam() == 7 then
team_2 = team_2 - 1
elseif killedUnit:GetTeam() == 8 then
team_3 = team_3 - 1
elseif killedUnit:GetTeam() == 9 then
team_4 = team_4 - 1
elseif killedUnit:GetTeam() == 10 then
team_5 = team_5 - 1
elseif killedUnit:GetTeam() == 11 then
team_6 = team_6 - 1
elseif killedUnit:GetTeam() == 12 then
team_7 = team_7 - 1
elseif killedUnit:GetTeam() == 13 then
team_8 = team_8 - 1
end
end
deathpos = killedUnit:GetOrigin()
for i=0,10 do
dead_item = killedUnit:GetItemInSlot(i)
print(tostring(dead_item).." in Slot: "..i)
if dead_item ~= nil then
dead_itemname = dead_item:GetName()
print(dead_itemname)
if dead_itemname == "item_rapier" or dead_itemname == "item_gem" then
print("Not spawning item of slot "..i)
else
local dead_item_loot = CreateItem(dead_itemname, nil, nil)
dead_item_loot:SetPurchaseTime(0)
local dead_item_pos = killedUnit:GetAbsOrigin()
local dead_item_drop = CreateItemOnPositionSync( dead_item_pos, dead_item_loot )
local dead_item_launch = dead_item_pos+RandomVector(RandomFloat(150,200))
dead_item_loot:LaunchLoot(false, 200, 1, dead_item_launch)
end
end
end
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,0,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-4000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-8000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-12000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-16000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-12000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-8000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(-4000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(0,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(16000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(12000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(8000,-16000,0), 25000, 1300, false)
AddFOWViewer(killedUnit:GetTeamNumber(), Vector(4000,-16000,0), 25000, 1300, false)
print("Playercount: "..tostring(playercount))
playercount = playercount - 1
print("Playercount is now: "..tostring(playercount))
if playercount == 2 then
print("Checking if the last two Heros are in the same team!")
if killer:IsHero() then
killerID = killer:GetPlayerID()
for i=0, PlayerResource:GetPlayerCount() do
if PlayerResource:IsValidPlayerID(i) then
lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
if lastwin_ent:IsAlive() and lastwin_ent ~= PlayerResource:GetSelectedHeroEntity(killerID) then
lastwinner = PlayerResource:GetPlayerName(i)
lastwinnerteam = lastwin_ent:GetTeam()
break
end
end
end
else
for i=0, PlayerResource:GetPlayerCount() do
if PlayerResource:IsValidPlayerID(i) then
lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
if lastwin_ent:IsAlive() then
lastwinner = PlayerResource:GetPlayerName(i)
lastwinnerteam = lastwin_ent:GetTeam()
break
end
end
end
end
print("Killed Hero: "..tostring(killedUnit:GetTeam()).." First Hero: "..tostring(killer:GetTeam()).." Second Hero: "..tostring(lastwinnerteam))
if killer:GetTeam() == lastwinnerteam then
print("Team: "..tostring(lastwinnerteam).." has won!")
lastwinnerteam_msg = ""
if lastwinnerteam == 2 then
lastwinnerteam_msg = "1"
elseif lastwinnerteam == 3 then
lastwinnerteam_msg = "2"
elseif lastwinnerteam == 6 then
lastwinnerteam_msg = "3"
elseif lastwinnerteam == 7 then
lastwinnerteam_msg = "4"
elseif lastwinnerteam == 8 then
lastwinnerteam_msg = "5"
elseif lastwinnerteam == 9 then
lastwinnerteam_msg = "6"
elseif lastwinnerteam == 10 then
lastwinnerteam_msg = "7"
elseif lastwinnerteam == 11 then
lastwinnerteam_msg = "8"
elseif lastwinnerteam == 12 then
lastwinnerteam_msg = "9"
elseif lastwinnerteam == 13 then
lastwinnerteam_msg = "10"
end
GameRules:SetCustomVictoryMessage("Team "..lastwinnerteam_msg.." won the game!")
GameRules:SetGameWinner(lastwinnerteam)
end
elseif playercount == 1 then
print("Only one Player remaining!")
if killer:IsCreature() or killer:IsCreep() or killer == killedUnit then
for i=0, PlayerResource:GetPlayerCount() do
if PlayerResource:IsValidPlayerID(i) then
lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
if lastwin_ent:IsAlive() then
lastwinnerteam = PlayerResource:GetSelectedHeroEntity(i):GetTeamNumber()
break
end
end
end
lastwinnerteam_msg = ""
if lastwinnerteam == 2 then
lastwinnerteam_msg = "1"
elseif lastwinnerteam == 3 then
lastwinnerteam_msg = "2"
elseif lastwinnerteam == 6 then
lastwinnerteam_msg = "3"
elseif lastwinnerteam == 7 then
lastwinnerteam_msg = "4"
elseif lastwinnerteam == 8 then
lastwinnerteam_msg = "5"
elseif lastwinnerteam == 9 then
lastwinnerteam_msg = "6"
elseif lastwinnerteam == 10 then
lastwinnerteam_msg = "7"
elseif lastwinnerteam == 11 then
lastwinnerteam_msg = "8"
elseif lastwinnerteam == 12 then
lastwinnerteam_msg = "9"
elseif lastwinnerteam == 13 then
lastwinnerteam_msg = "10"
end
GameRules:SetCustomVictoryMessage("Team "..tostring(lastwinnerteam_msg).." won the game!")
GameRules:SetGameWinner(lastwinnerteam)
else
lastwinnerteam = killer:GetTeam()
lastwinnerteam_msg = ""
if lastwinnerteam == 2 then
lastwinnerteam_msg = "1"
elseif lastwinnerteam == 3 then
lastwinnerteam_msg = "2"
elseif lastwinnerteam == 6 then
lastwinnerteam_msg = "3"
elseif lastwinnerteam == 7 then
lastwinnerteam_msg = "4"
elseif lastwinnerteam == 8 then
lastwinnerteam_msg = "5"
elseif lastwinnerteam == 9 then
lastwinnerteam_msg = "6"
elseif lastwinnerteam == 10 then
lastwinnerteam_msg = "7"
elseif lastwinnerteam == 11 then
lastwinnerteam_msg = "8"
elseif lastwinnerteam == 12 then
lastwinnerteam_msg = "9"
elseif lastwinnerteam == 13 then
lastwinnerteam_msg = "10"
end
GameRules:SetCustomVictoryMessage("Team "..tostring(lastwinnerteam_msg).." won the game!")
GameRules:SetGameWinner(lastwinnerteam)
end
elseif playercount == 0 then
GameRules:SetCustomVictoryMessage("Team "..tostring(killedUnit:GetTeam()).." won the game!")
GameRules:SetGameWinner(PlayerResource:GetTeam(killedUnit:GetPlayerID()))
end
end
elseif trioMode then
	-- 3-player-team mode: on every kill, drop the victim's inventory, reveal
	-- the whole map to the victim's team, and end the game once only one
	-- team still has living heroes.
	-- NOTE(review): playercount / team_* / lastwinner* / durchlauf etc. are
	-- globals shared with the other mode branches of this handler — keep
	-- assigning them so state persists across kill events.
	if killedUnit ~= nil then
		killedUnitTeam = killedUnit:GetTeam()
	end
	if killer ~= nil then
		killerTeam = killer:GetTeam()
	end
	print("DECIDING")
	-- Engine team number (2, 3, 6..13) -> label "1".."10" used in the victory message.
	local TEAM_LABEL = {
		[2] = "1", [3] = "2", [6] = "3", [7] = "4", [8] = "5",
		[9] = "6", [10] = "7", [11] = "8", [12] = "9", [13] = "10",
	}
	-- Announce `team` as the winner and finish the game.
	local function declareWinner(team)
		lastwinnerteam_msg = TEAM_LABEL[team] or ""
		GameRules:SetCustomVictoryMessage("Team "..tostring(lastwinnerteam_msg).." won the game!")
		GameRules:SetGameWinner(team)
	end
	-- Record name/team of up to `needed` living heroes (skipping the killer's
	-- own hero when the killer is a hero) into the lastwinner*/lastwinnerteam*
	-- globals, exactly like the hand-unrolled loops this replaces.
	local function collectSurvivors(needed)
		local skip = nil
		if killer:IsHero() then
			killerID = killer:GetPlayerID()
			skip = PlayerResource:GetSelectedHeroEntity(killerID)
		end
		durchlauf = 0
		for i = 0, PlayerResource:GetPlayerCount() do
			if PlayerResource:IsValidPlayerID(i) then
				lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
				if lastwin_ent:IsAlive() and lastwin_ent ~= skip then
					if durchlauf == 0 then
						lastwinner = PlayerResource:GetPlayerName(i)
						lastwinnerteam = lastwin_ent:GetTeam()
					elseif durchlauf == 1 then
						lastwinner2 = PlayerResource:GetPlayerName(i)
						lastwinnerteam2 = lastwin_ent:GetTeam()
					end
					durchlauf = durchlauf + 1
					if durchlauf == needed then
						break
					end
				end
			end
		end
	end
	if killedUnit:IsCreature() or killedUnit:IsCreep() then
		print("Killed Unit is Creep!")
		print("Roll Drops for: "..tostring(killedUnit:GetUnitName()))
		RollDrops(killedUnit)
	elseif killedUnit:IsHero() then
		print("Killed Unit is Hero!")
		print("CALCULATING")
		if playercount == nil then
			print("GETTING PLAYERCOUNT")
			playercount = PlayerResource:GetPlayerCount()
			print("Playercount = "..tostring(playercount))
		end
		if team_setup == nil then
			-- First hero death: snapshot per-team player counts into globals,
			-- minus one for the team of the hero that just died.
			team_setup = true
			print("Setting up Teams!")
			Teams = {}
			team_radiant = PlayerResource:GetPlayerCountForTeam(2)
			team_dire = PlayerResource:GetPlayerCountForTeam(3)
			team_1 = PlayerResource:GetPlayerCountForTeam(6)
			team_2 = PlayerResource:GetPlayerCountForTeam(7)
			team_3 = PlayerResource:GetPlayerCountForTeam(8)
			team_4 = PlayerResource:GetPlayerCountForTeam(9)
			team_5 = PlayerResource:GetPlayerCountForTeam(10)
			team_6 = PlayerResource:GetPlayerCountForTeam(11)
			team_7 = PlayerResource:GetPlayerCountForTeam(12)
			team_8 = PlayerResource:GetPlayerCountForTeam(13)
			local killedTeam = killedUnit:GetTeam()
			if killedTeam == 2 then
				team_radiant = team_radiant - 1
			elseif killedTeam == 3 then
				team_dire = team_dire - 1
			elseif killedTeam == 6 then
				team_1 = team_1 - 1
			elseif killedTeam == 7 then
				team_2 = team_2 - 1
			elseif killedTeam == 8 then
				team_3 = team_3 - 1
			elseif killedTeam == 9 then
				team_4 = team_4 - 1
			elseif killedTeam == 10 then
				team_5 = team_5 - 1
			elseif killedTeam == 11 then
				team_6 = team_6 - 1
			elseif killedTeam == 12 then
				team_7 = team_7 - 1
			elseif killedTeam == 13 then
				team_8 = team_8 - 1
			end
		end
		deathpos = killedUnit:GetOrigin()
		-- Drop the victim's inventory on the ground; rapiers and gems are
		-- deliberately destroyed instead of dropped.
		for i = 0, 10 do
			dead_item = killedUnit:GetItemInSlot(i)
			print(tostring(dead_item).." in Slot: "..i)
			if dead_item ~= nil then
				dead_itemname = dead_item:GetName()
				print(dead_itemname)
				if dead_itemname == "item_rapier" or dead_itemname == "item_gem" then
					print("Not spawning item of slot "..i)
				else
					local dead_item_loot = CreateItem(dead_itemname, nil, nil)
					dead_item_loot:SetPurchaseTime(0)
					local dead_item_pos = killedUnit:GetAbsOrigin()
					local dead_item_drop = CreateItemOnPositionSync( dead_item_pos, dead_item_loot )
					local dead_item_launch = dead_item_pos+RandomVector(RandomFloat(150,200))
					dead_item_loot:LaunchLoot(false, 200, 1, dead_item_launch)
				end
			end
		end
		-- Reveal the whole map to the victim's team: one FOW viewer every
		-- 4000 units over a 9x9 grid spanning -16000..16000 on both axes
		-- (the same 81 points the previous hand-written call list covered).
		for x = -16000, 16000, 4000 do
			for y = -16000, 16000, 4000 do
				AddFOWViewer(killedUnit:GetTeamNumber(), Vector(x, y, 0), 25000, 1300, false)
			end
		end
		print("Playercount: "..tostring(playercount))
		playercount = playercount - 1
		print("Playercount is now: "..tostring(playercount))
		if playercount == 3 then
			print("Checking if the last three Heros are in the same team!")
			collectSurvivors(2)
			print("Killed Hero: "..tostring(killedUnit:GetTeam()).."First Hero: "..tostring(killer:GetTeam()).." Second Hero: "..tostring(lastwinnerteam))
			if killer:GetTeam() == lastwinnerteam and lastwinnerteam == lastwinnerteam2 then
				print("Team: "..tostring(lastwinnerteam).." has won!")
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 2 then
			print("Checking if the last two Heros are in the same team!")
			collectSurvivors(1)
			print("First Hero: "..tostring(killedUnit:GetTeam()).." Second Hero: "..tostring(lastwinnerteam))
			if killer:GetTeam() == lastwinnerteam then
				print("Team: "..tostring(lastwinnerteam).." has won!")
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 1 then
			print("Only one Player remaining!")
			if killer:IsCreature() or killer:IsCreep() or killer == killedUnit then
				-- Death to a creep or a deny: the last living hero's team wins.
				for i = 0, PlayerResource:GetPlayerCount() do
					if PlayerResource:IsValidPlayerID(i) then
						lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
						if lastwin_ent:IsAlive() then
							lastwinnerteam = lastwin_ent:GetTeamNumber()
							break
						end
					end
				end
				declareWinner(lastwinnerteam)
			else
				-- A hero got the kill: the killer's team wins.
				lastwinnerteam = killer:GetTeam()
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 0 then
			GameRules:SetCustomVictoryMessage("Team "..tostring(killedUnit:GetTeam()).." won the game!")
			GameRules:SetGameWinner(PlayerResource:GetTeam(killedUnit:GetPlayerID()))
		end
	end
elseif quartetMode then
	-- 4-player-team mode: same flow as the other mode branches — drop the
	-- victim's inventory, reveal the map to the victim's team, and end the
	-- game once only one team still has living heroes (checked down from
	-- four remaining players).
	-- NOTE(review): playercount / team_* / lastwinner* / durchlauf etc. are
	-- globals shared with the other mode branches of this handler — keep
	-- assigning them so state persists across kill events.
	if killedUnit ~= nil then
		killedUnitTeam = killedUnit:GetTeam()
	end
	if killer ~= nil then
		killerTeam = killer:GetTeam()
	end
	print("DECIDING")
	-- Engine team number (2, 3, 6..13) -> label "1".."10" used in the victory message.
	local TEAM_LABEL = {
		[2] = "1", [3] = "2", [6] = "3", [7] = "4", [8] = "5",
		[9] = "6", [10] = "7", [11] = "8", [12] = "9", [13] = "10",
	}
	-- Announce `team` as the winner and finish the game.
	local function declareWinner(team)
		lastwinnerteam_msg = TEAM_LABEL[team] or ""
		GameRules:SetCustomVictoryMessage("Team "..tostring(lastwinnerteam_msg).." won the game!")
		GameRules:SetGameWinner(team)
	end
	-- Record name/team of up to `needed` (1..3) living heroes (skipping the
	-- killer's own hero when the killer is a hero) into the
	-- lastwinner*/lastwinnerteam* globals, like the unrolled loops this replaces.
	local function collectSurvivors(needed)
		local skip = nil
		if killer:IsHero() then
			killerID = killer:GetPlayerID()
			skip = PlayerResource:GetSelectedHeroEntity(killerID)
		end
		durchlauf = 0
		for i = 0, PlayerResource:GetPlayerCount() do
			if PlayerResource:IsValidPlayerID(i) then
				lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
				if lastwin_ent:IsAlive() and lastwin_ent ~= skip then
					if durchlauf == 0 then
						lastwinner = PlayerResource:GetPlayerName(i)
						lastwinnerteam = lastwin_ent:GetTeam()
					elseif durchlauf == 1 then
						lastwinner2 = PlayerResource:GetPlayerName(i)
						lastwinnerteam2 = lastwin_ent:GetTeam()
					elseif durchlauf == 2 then
						lastwinner3 = PlayerResource:GetPlayerName(i)
						lastwinnerteam3 = lastwin_ent:GetTeam()
					end
					durchlauf = durchlauf + 1
					if durchlauf == needed then
						break
					end
				end
			end
		end
	end
	if killedUnit:IsCreature() or killedUnit:IsCreep() then
		print("Killed Unit is Creep!")
		print("Roll Drops for: "..tostring(killedUnit:GetUnitName()))
		RollDrops(killedUnit)
	elseif killedUnit:IsHero() then
		print("Killed Unit is Hero!")
		print("CALCULATING")
		if playercount == nil then
			print("GETTING PLAYERCOUNT")
			playercount = PlayerResource:GetPlayerCount()
			print("Playercount = "..tostring(playercount))
		end
		if team_setup == nil then
			-- First hero death: snapshot per-team player counts into globals,
			-- minus one for the team of the hero that just died.
			team_setup = true
			print("Setting up Teams!")
			Teams = {}
			team_radiant = PlayerResource:GetPlayerCountForTeam(2)
			team_dire = PlayerResource:GetPlayerCountForTeam(3)
			team_1 = PlayerResource:GetPlayerCountForTeam(6)
			team_2 = PlayerResource:GetPlayerCountForTeam(7)
			team_3 = PlayerResource:GetPlayerCountForTeam(8)
			team_4 = PlayerResource:GetPlayerCountForTeam(9)
			team_5 = PlayerResource:GetPlayerCountForTeam(10)
			team_6 = PlayerResource:GetPlayerCountForTeam(11)
			team_7 = PlayerResource:GetPlayerCountForTeam(12)
			team_8 = PlayerResource:GetPlayerCountForTeam(13)
			local killedTeam = killedUnit:GetTeam()
			if killedTeam == 2 then
				team_radiant = team_radiant - 1
			elseif killedTeam == 3 then
				team_dire = team_dire - 1
			elseif killedTeam == 6 then
				team_1 = team_1 - 1
			elseif killedTeam == 7 then
				team_2 = team_2 - 1
			elseif killedTeam == 8 then
				team_3 = team_3 - 1
			elseif killedTeam == 9 then
				team_4 = team_4 - 1
			elseif killedTeam == 10 then
				team_5 = team_5 - 1
			elseif killedTeam == 11 then
				team_6 = team_6 - 1
			elseif killedTeam == 12 then
				team_7 = team_7 - 1
			elseif killedTeam == 13 then
				team_8 = team_8 - 1
			end
		end
		deathpos = killedUnit:GetOrigin()
		-- Drop the victim's inventory on the ground; rapiers and gems are
		-- deliberately destroyed instead of dropped.
		for i = 0, 10 do
			dead_item = killedUnit:GetItemInSlot(i)
			print(tostring(dead_item).." in Slot: "..i)
			if dead_item ~= nil then
				dead_itemname = dead_item:GetName()
				print(dead_itemname)
				if dead_itemname == "item_rapier" or dead_itemname == "item_gem" then
					print("Not spawning item of slot "..i)
				else
					local dead_item_loot = CreateItem(dead_itemname, nil, nil)
					dead_item_loot:SetPurchaseTime(0)
					local dead_item_pos = killedUnit:GetAbsOrigin()
					local dead_item_drop = CreateItemOnPositionSync( dead_item_pos, dead_item_loot )
					local dead_item_launch = dead_item_pos+RandomVector(RandomFloat(150,200))
					dead_item_loot:LaunchLoot(false, 200, 1, dead_item_launch)
				end
			end
		end
		-- Reveal the whole map to the victim's team: one FOW viewer every
		-- 4000 units over a 9x9 grid spanning -16000..16000 on both axes
		-- (the same 81 points the previous hand-written call list covered).
		for x = -16000, 16000, 4000 do
			for y = -16000, 16000, 4000 do
				AddFOWViewer(killedUnit:GetTeamNumber(), Vector(x, y, 0), 25000, 1300, false)
			end
		end
		print("Playercount: "..tostring(playercount))
		playercount = playercount - 1
		print("Playercount is now: "..tostring(playercount))
		if playercount == 4 then
			print("Checking if the last four Heros are in the same team!")
			collectSurvivors(3)
			print("Killed Hero: "..tostring(killedUnit:GetTeam()).."First Hero: "..tostring(killer:GetTeam()).." Second Hero: "..tostring(lastwinnerteam).." Third Hero: "..tostring(lastwinnerteam2).." Fourth Hero: "..tostring(lastwinnerteam3))
			if killer:GetTeam() == lastwinnerteam and lastwinnerteam == lastwinnerteam2 and lastwinnerteam == lastwinnerteam3 then
				print("Team: "..tostring(lastwinnerteam).." has won!")
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 3 then
			print("Checking if the last three Heros are in the same team!")
			collectSurvivors(2)
			print("Killed Hero: "..tostring(killedUnit:GetTeam()).." First Hero: "..tostring(killer:GetTeam()).." Second Hero: "..tostring(lastwinnerteam).." Third Hero: "..tostring(lastwinnerteam2))
			if killer:GetTeam() == lastwinnerteam and lastwinnerteam == lastwinnerteam2 then
				print("Team: "..tostring(lastwinnerteam).." has won!")
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 2 then
			print("Checking if the last two Heros are in the same team!")
			collectSurvivors(1)
			print("First Hero: "..tostring(killedUnit:GetTeam()).." Second Hero: "..tostring(lastwinnerteam))
			if killer:GetTeam() == lastwinnerteam then
				print("Team: "..tostring(lastwinnerteam).." has won!")
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 1 then
			print("Only one Player remaining!")
			if killer:IsCreature() or killer:IsCreep() or killer == killedUnit then
				-- Death to a creep or a deny: the last living hero's team wins.
				for i = 0, PlayerResource:GetPlayerCount() do
					if PlayerResource:IsValidPlayerID(i) then
						lastwin_ent = PlayerResource:GetSelectedHeroEntity(i)
						if lastwin_ent:IsAlive() then
							lastwinnerteam = lastwin_ent:GetTeamNumber()
							break
						end
					end
				end
				declareWinner(lastwinnerteam)
			else
				-- A hero got the kill: the killer's team wins.
				lastwinnerteam = killer:GetTeam()
				declareWinner(lastwinnerteam)
			end
		elseif playercount == 0 then
			GameRules:SetCustomVictoryMessage("Team "..tostring(killedUnit:GetTeam()).." won the game!")
			GameRules:SetGameWinner(PlayerResource:GetTeam(killedUnit:GetPlayerID()))
		end
	end
end
end
-- Entity-spawn hook. Caches handles to the battle-royale circle marker units
-- in globals (circle1..circle8, circle1_white..circle8_white, circle_red) so
-- the circle schedule in OnGameRulesStateChange can RemoveSelf() them later.
-- Replaces 17 copy-pasted if-blocks (each re-resolving the entity) with a
-- single whitelist lookup; observable behavior is unchanged.
function GameMode:OnNPCSpawned( keys )
	local spawned = EntIndexToHScript(keys.entindex)
	-- NPCName intentionally stays global, as in the original implementation.
	NPCName = spawned:GetName()
	local unit_name = spawned:GetUnitName()
	-- Whitelist of marker units; the value is the global variable to assign
	-- (the unit name without its "npc_dota_" prefix).
	local circle_globals = {
		["npc_dota_circle_red"] = "circle_red",
	}
	for i = 1, 8 do
		circle_globals["npc_dota_circle" .. i] = "circle" .. i
		circle_globals["npc_dota_circle" .. i .. "_white"] = "circle" .. i .. "_white"
	end
	local global_name = circle_globals[unit_name]
	if global_name then
		_G[global_name] = spawned
	end
	print("A NPC spawned! Name is: " .. NPCName .. " Name of unit is: " .. unit_name)
end
-- Rolls and spawns loot drops for a dying unit, per GameRules.DropTable.
-- Each drop entry is either a single "Item" or an "ItemSets" table from which
-- one entry is picked at random; "Chance" (percent, default 100) and
-- "Multiple" (independent rolls, default 1) control how many items appear.
-- Fix over original: removed the unused local `drop`.
-- @param unit  the killed NPC; its unit name keys into GameRules.DropTable
function RollDrops(unit)
	local DropInfo = GameRules.DropTable[unit:GetUnitName()]
	if not DropInfo then
		return
	end
	print("Rolling Drops for "..unit:GetUnitName())
	for _, ItemTable in pairs(DropInfo) do
		local item_name
		if ItemTable.ItemSets then
			-- Keys come from KV files, so they are the strings "1".."n",
			-- not an array part; count them by iteration.
			local count = 0
			for _ in pairs(ItemTable.ItemSets) do
				count = count + 1
			end
			local random_i = RandomInt(1, count)
			item_name = ItemTable.ItemSets[tostring(random_i)]
		else
			item_name = ItemTable.Item
		end
		local chance = ItemTable.Chance or 100    -- percent; omitted = guaranteed
		local max_drops = ItemTable.Multiple or 1 -- independent rolls per entry
		for _ = 1, max_drops do
			print("Rolling chance "..chance)
			if RollPercentage(chance) then
				print("Creating "..item_name)
				local item = CreateItem(item_name, nil, nil)
				item:SetPurchaseTime(0)
				local pos = unit:GetAbsOrigin()
				-- Spawn the physical item, then toss it a short random distance.
				CreateItemOnPositionSync( pos, item )
				local pos_launch = pos+RandomVector(RandomFloat(150,200))
				item:LaunchLoot(false, 200, 1, pos_launch)
			end
		end
	end
end
-- Main game-state hook.
--  * State 3 (strategy): force random picks when randomMap is set.
--  * State 4 (hero selection): random-pick for anyone who has not chosen.
--  * State 6 (pre-game): spawn neutrals/loot/camps, start a 30s passive-XP
--    tick, and temporarily disable mobility spells (QoP/AM blink, Slark pounce).
--  * State 8 (in progress): re-enable those spells and drive the entire
--    shrinking-circle schedule plus the red "danger zone" circles.
-- BUGFIX vs. original: circle 5's X range used `circlePoint.x` (the map
-- origin) instead of `circlePoint4.x` — see the marked line below.
function GameMode:OnGameRulesStateChange(keys)
	local newState = GameRules:State_Get()
	print("New Game State entered: "..newState)
	--strategy time
	if newState == 3 then
		if randomMap == true then
			for i = 0, DOTA_MAX_TEAM_PLAYERS do
				if PlayerResource:IsValidPlayer(i) then
					local player = PlayerResource:GetPlayer(i)
					player:MakeRandomHeroSelection()
					player = 0
				end
			end
		end
	end
	if newState == 4 then
		for i = 0, DOTA_MAX_TEAM_PLAYERS do
			if PlayerResource:IsValidPlayer(i) then
				if PlayerResource:HasSelectedHero(i) == false then
					local player = PlayerResource:GetPlayer(i)
					player:MakeRandomHeroSelection()
				end
			end
		end
	elseif newState == 6 then
		spawnNeutrals()
		spawnLoot()
		spawnCreepCamp()
		-- Passive XP: every hero gains 100 XP every 30 seconds.
		Timers:CreateTimer(function()
			allheros = HeroList:GetAllHeroes()
			for _,v in pairs(allheros) do
				if v:IsHero() then
					xp_for_hero = v
					xp_for_hero:AddExperience(100, 0, false, false)
				end
			end
			return 30
		end)
		-- Disable mobility spells during the drop-in phase.
		Timers:CreateTimer({
			endTime = 2,
			callback = function()
				if Entities:FindByName(nil, "npc_dota_hero_queenofpain") ~= nil then
					print("QoP is in game!")
					local qop = Entities:FindByName( nil, "npc_dota_hero_queenofpain")
					local qop_blink = qop:GetAbilityByIndex(1)
					print("deactivating: " .. qop_blink:GetAbilityName())
					qop_blink:SetActivated(false)
					print("success!")
				end
				if Entities:FindByName(nil, "npc_dota_hero_antimage") ~= nil then
					print("Antimage is in game!")
					local anti = Entities:FindByName( nil, "npc_dota_hero_antimage")
					local anti_blink = anti:GetAbilityByIndex(1)
					print("deactivating: " .. anti_blink:GetAbilityName())
					anti_blink:SetActivated(false)
					print("success!")
				end
				if Entities:FindByName(nil, "npc_dota_hero_slark") ~= nil then
					print("Slark is in game!")
					local slark = Entities:FindByName( nil, "npc_dota_hero_slark")
					local slark_jump = slark:GetAbilityByIndex(1)
					print("deactivating: " .. slark_jump:GetAbilityName())
					slark_jump:SetActivated(false)
					print("success!")
				end
			end})
	elseif newState == 8 then
		CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle will arise:", duration=180, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
		-- Re-enable the mobility spells disabled in state 6.
		if Entities:FindByName(nil, "npc_dota_hero_queenofpain") ~= nil then
			print("QoP is in game!")
			local qop = Entities:FindByName( nil, "npc_dota_hero_queenofpain")
			local qop_blink = qop:GetAbilityByIndex(1)
			print("activating: " .. qop_blink:GetAbilityName())
			qop_blink:SetActivated(true)
			print("success!")
		end
		if Entities:FindByName(nil, "npc_dota_hero_antimage") ~= nil then
			print("Antimage is in game!")
			local anti = Entities:FindByName( nil, "npc_dota_hero_antimage")
			local anti_blink = anti:GetAbilityByIndex(1)
			print("activating: " .. anti_blink:GetAbilityName())
			anti_blink:SetActivated(true)
			print("success!")
		end
		if Entities:FindByName(nil, "npc_dota_hero_slark") ~= nil then
			print("Slark is in game!")
			local slark = Entities:FindByName( nil, "npc_dota_hero_slark")
			local slark_jump = slark:GetAbilityByIndex(1)
			print("activating: " .. slark_jump:GetAbilityName())
			slark_jump:SetActivated(true)
			print("success!")
		end
		-- Circle state (globals shared with the timers below):
		--   searchRad        radius of the current safe zone,
		--   circlePointSearch center of the current safe zone,
		--   pro              % of max HP drained per tick outside the zone.
		searchRad = 23000
		circlePoint = Vector(0,0,0)
		circlePointSearch = Vector(0,0,0)
		pro = 0.8
		CreateUnitByName("npc_dota_circle1_white", circlePoint, false, nil, nil, DOTA_TEAM_NOTEAM)
		-- Every second (after 180s grace): drain HP from heroes outside the zone.
		Timers:CreateTimer(180,function()
			print("Damage algorithym running for "..tostring(HeroList:GetHeroCount()).." Players!")
			for i=0,HeroList:GetHeroCount() do
				print("Iteration "..i.."!")
				if PlayerResource:GetSelectedHeroEntity(i) ~= nil then
					print("searching in "..searchRad.."!")
					hero = nil
					hero = Entities:FindByNameWithin(nil,PlayerResource:GetSelectedHeroEntity(i):GetUnitName(),circlePointSearch,searchRad)
					print("hero = "..tostring(hero))
					if hero ~= nil then
						print("Hero "..hero:GetName().." is in the circle!")
						damageHero = nil
					else
						print("Hero is not in the circle!")
						damageHero = PlayerResource:GetSelectedHeroEntity(i)
					end
					print("damageHero = "..tostring(damageHero))
					damage = nil
					if damageHero ~= nil and damageHero ~= "" then
						local damage = (damageHero:GetHealth()-((damageHero:GetMaxHealth()/100)*pro))
						if damageHero:GetHealth() > (damageHero:GetMaxHealth()/100)*pro then
							print("DAMAGE! (set health to:) "..damage.." %="..pro)
							damageHero:SetHealth(damage)
						else
							damageHero:Kill(nil,damageHero)
						end
					end
				end
			end
			return 1
		end)
		-- Every 10s (after 230s): instantly kill heroes inside an active red zone.
		Timers:CreateTimer(230,function()
			print("Running Red-Circle-Killing-Procession!")
			if red_active then
				for i=0,HeroList:GetHeroCount() do
					if PlayerResource:GetSelectedHeroEntity(i) ~= nil then
						red_hero = nil
						red_hero = Entities:FindByNameWithin(nil,PlayerResource:GetSelectedHeroEntity(i):GetUnitName(),circle_red_point,6500)
						if red_hero ~= nil then
							red_damage_hero = PlayerResource:GetSelectedHeroEntity(i)
						else
							red_damage_hero = nil
						end
						if red_damage_hero ~= nil and red_damage_hero ~= "" then
							red_damage_hero:Kill(nil,red_damage_hero)
						end
					end
				end
			end
			return 10
		end)
		Timers:CreateTimer({--Circle 1
			endTime = 180,
			callback = function()
				print("Circle 1 spawned!")
				circle1_white:RemoveSelf()
				circlePoint2 = Vector((RandomInt(-7000, 7000)), (RandomInt(-7000, 7000)), 0)
				CreateUnitByName("npc_dota_circle1", circlePoint, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle2_white", circlePoint2, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=180, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({ --1st red circle
			endTime = 210,
			callback = function()
				print("1st Red Circle spawned!")
				circle_red_point = Vector(RandomInt(-8000,8000),RandomInt(-8000,8000),0)
				CreateUnitByName("npc_dota_circle_red", circle_red_point, false, nil, nil, DOTA_TEAM_NOTEAM)
				red_active = true
			end})
		Timers:CreateTimer({ --remove 1st red circle
			endTime = 330,
			callback = function()
				print("Removing 1st red circle!")
				circle_red:RemoveSelf()
				red_active = false
			end})
		Timers:CreateTimer({--Circle 2
			endTime = 360,
			callback = function()
				print("Circle 2 spawned!")
				searchRad = 12000
				pro = 1.2
				circle1:RemoveSelf()
				circle2_white:RemoveSelf()
				circlePoint3 = Vector((RandomInt(circlePoint2.x - 3750,circlePoint2.x + 3750)),(RandomInt(circlePoint2.y - 3750,circlePoint2.y + 3750)),0)
				circlePointSearch = circlePoint2
				CreateUnitByName("npc_dota_circle2", circlePoint2, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle3_white", circlePoint3, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=150, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 3
			endTime = 510,
			callback = function()
				print("Circle 3 spawned!")
				searchRad = 6500
				pro = 1.6
				circle2:RemoveSelf()
				circle3_white:RemoveSelf()
				circlePoint4 = Vector((RandomInt(circlePoint3.x - 1700,circlePoint3.x + 1700)),(RandomInt(circlePoint3.y - 1700,circlePoint3.y + 1700)),0)
				circlePointSearch = circlePoint3
				CreateUnitByName("npc_dota_circle3", circlePoint3, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle4_white", circlePoint4, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=120, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({ --2nd red circle
			endTime = 440,
			callback = function()
				print("2nd Red Circle spawned!")
				circle_red_point = Vector(RandomInt(-8000,8000),RandomInt(-8000,8000),0)
				CreateUnitByName("npc_dota_circle_red", circle_red_point, false, nil, nil, DOTA_TEAM_NOTEAM)
				Timers:CreateTimer({
					endTime = 20,
					callback = function()
						red_active = true
					end})
			end})
		Timers:CreateTimer({ --remove 2nd red circle
			endTime = 500,
			callback = function()
				print("Removing 2nd red circle!")
				circle_red:RemoveSelf()
				red_active = false
			end})
		Timers:CreateTimer({--Circle 4
			endTime = 630,
			callback = function()
				print("Circle 4 spawned!")
				searchRad = 4000
				pro = 2
				circle3:RemoveSelf()
				circle4_white:RemoveSelf()
				-- BUGFIX: the X upper bound previously read "circlePoint.x + 350"
				-- (the map origin, 0) instead of circlePoint4.x, which skewed
				-- circle 5 toward the map center.
				circlePoint5 = Vector((RandomInt(circlePoint4.x - 350,circlePoint4.x + 350)),(RandomInt(circlePoint4.y - 350,circlePoint4.y + 350)),0)
				circlePointSearch = circlePoint4
				CreateUnitByName("npc_dota_circle4", circlePoint4, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle5_white", circlePoint5, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=120, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 5
			endTime = 750,
			callback = function()
				print("Circle 5 spawned!")
				searchRad = 2000
				pro = 6
				circle4:RemoveSelf()
				circle5_white:RemoveSelf()
				circlePoint6 = Vector((RandomInt(circlePoint5.x - 100,circlePoint5.x + 100)),(RandomInt(circlePoint5.y - 100,circlePoint5.y + 100)),0)
				circlePointSearch = circlePoint5
				CreateUnitByName("npc_dota_circle5", circlePoint5, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle6_white", circlePoint6, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=90, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 6
			endTime = 840,
			callback = function()
				print("Circle 6 spawned!")
				searchRad = 1000
				pro = 10
				circle5:RemoveSelf()
				circle6_white:RemoveSelf()
				circlePoint7 = Vector((RandomInt(circlePoint6.x - 50,circlePoint6.x + 50)),(RandomInt(circlePoint6.y - 50,circlePoint6.y + 50)),0)
				circlePointSearch = circlePoint6
				CreateUnitByName("npc_dota_circle6", circlePoint6, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle7_white", circlePoint7, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=90, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 7
			endTime = 930,
			callback = function()
				print("Circle 7 spawned!")
				searchRad = 500
				pro = 14
				circle6:RemoveSelf()
				circle7_white:RemoveSelf()
				circlePoint8 = Vector((RandomInt(circlePoint7.x - 10,circlePoint7.x + 10)),(RandomInt(circlePoint7.y - 10,circlePoint7.y + 10)),0)
				circlePointSearch = circlePoint7
				CreateUnitByName("npc_dota_circle7", circlePoint7, false, nil, nil, DOTA_TEAM_NOTEAM)
				CreateUnitByName("npc_dota_circle8_white", circlePoint8, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle decreases:", duration=60, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 8
			endTime = 990,
			callback = function()
				print("Circle 8 spawned!")
				searchRad = 250
				pro = 22
				circle7:RemoveSelf()
				circle8_white:RemoveSelf()
				circlePointSearch = circlePoint8
				CreateUnitByName("npc_dota_circle8", circlePoint8, false, nil, nil, DOTA_TEAM_NOTEAM)
				CustomGameEventManager:Send_ServerToAllClients("display_timer", {msg="Time until circle closes down:", duration=195, mode=0, endfade=false, position=2, warning=15, paused=false, sound=false})
			end})
		Timers:CreateTimer({--Circle 8 close-down: zone shrinks to nothing
			endTime = 1170,
			callback = function()
				Timers:CreateTimer({
					endTime = 15,
					callback = function()
						circle8:RemoveSelf()
						searchRad = 0
						pro = 75
					end})
			end})
	end
end
-- Debug alias: forwards all arguments to PrintTable (defined below).
function DebugPrintTable(...)
	PrintTable(...)
end
-- Recursively pretty-prints a table to the console, one "key: value" line per
-- entry, indented by nesting depth. Userdata values are followed into their
-- metatable (__index first). `done` tracks already-visited tables/userdata to
-- avoid infinite recursion on cycles; an `FDesc` sub-table, when present,
-- supplies human-readable field descriptions.
-- NOTE(review): table.sort on mixed-type keys (e.g. numbers and strings in
-- the same table) raises an error in stock Lua -- confirm keys are homogeneous.
-- NOTE(review): the "??????1"/"??????2" prefixes look like leftover debug
-- markers in the output strings; left untouched here.
function PrintTable(t, indent, done)
	if type(t) ~= "table" then return end
	done = done or {}
	done[t] = true
	indent = indent or 0
	-- Collect and sort the keys so output order is deterministic.
	local l = {}
	for k, v in pairs(t) do
		table.insert(l, k)
	end
	table.sort(l)
	for k, v in ipairs(l) do
		-- Ignore FDesc
		if v ~= 'FDesc' then
			local value = t[v]
			if type(value) == "table" and not done[value] then
				done [value] = true
				print("??????1" .. string.rep ("\t", indent)..tostring(v)..":")
				PrintTable (value, indent + 2, done)
			elseif type(value) == "userdata" and not done[value] then
				done [value] = true
				print(string.rep ("\t", indent)..tostring(v)..": "..tostring(value))
				-- Descend into the userdata's metatable chain.
				PrintTable ((getmetatable(value) and getmetatable(value).__index) or getmetatable(value), indent + 2, done)
			else
				if t.FDesc and t.FDesc[v] then
					print(string.rep ("\t", indent)..tostring(t.FDesc[v]))
				else
					print("??????2" .. string.rep ("\t", indent)..tostring(v)..": "..tostring(value))
				end
			end
		end
	end
end
-- Spawns three units, rolled independently from `pool`, at the spawner entity
-- named `spawner_name`. NOTE: `point` deliberately remains a global, exactly
-- as in the original code, in case anything else reads it.
local function spawnNeutralGroup(spawner_name, pool)
	point = Entities:FindByName( nil, spawner_name):GetAbsOrigin()
	for _ = 1, 3 do
		CreateUnitByName(pool[RandomInt(1, #pool)], point, true, nil, nil, DOTA_TEAM_NEUTRALS)
	end
end

-- Populates every neutral camp on the map. Each spawner receives three units
-- rolled from its difficulty pool (easy / medium / large / ancient).
-- Behavior (spawner order, RNG call sequence, pool sizes) matches the
-- original 150-line copy-paste version exactly.
function spawnNeutrals()
	local neutrals_easy = {
		"npc_dota_neutral_kobold",
		"npc_dota_neutral_kobold_tunneler",
		"npc_dota_neutral_forest_troll_berserker",
		"npc_dota_neutral_forest_troll_high_priest",
		"npc_dota_neutral_kobold_taskmaster",
		"npc_dota_neutral_gnoll_assassin",
		"npc_dota_neutral_ghost",
		"npc_dota_neutral_fel_beast",
		"npc_dota_neutral_harpy_scout",
		"npc_dota_neutral_harpy_storm"
	}
	local neutrals_medium = {
		"npc_dota_neutral_centaur_khan",
		"npc_dota_neutral_centaur_outrunner",
		"npc_dota_neutral_alpha_wolf",
		"npc_dota_neutral_giant_wolf",
		"npc_dota_neutral_satyr_trickster",
		"npc_dota_neutral_satyr_soulstealer",
		"npc_dota_neutral_ogre_mauler",
		"npc_dota_neutral_ogre_magi",
		"npc_dota_neutral_mud_golem"
	}
	local neutrals_large = {
		"npc_dota_neutral_satyr_hellcaller",
		"npc_dota_neutral_polar_furbolg_champion",
		"npc_dota_neutral_polar_furbolg_ursa_warrior",
		"npc_dota_neutral_wildkin",
		"npc_dota_neutral_enraged_wildkin",
		"npc_dota_neutral_dark_troll",
		"npc_dota_neutral_dark_troll_warlord"
	}
	local neutrals_ancient = {
		"npc_dota_neutral_black_drake",
		"npc_dota_neutral_black_dragon",
		"npc_dota_neutral_rock_golem",
		"npc_dota_neutral_granite_golem",
		"npc_dota_neutral_big_thunder_lizard",
		"npc_dota_neutral_small_thunder_lizard",
		"npc_dota_neutral_prowler_acolyte",
		"npc_dota_neutral_prowler_shaman"
	}
	-- { spawner entity name, difficulty pool } in original spawn order.
	local camps = {
		--Military
		{ "spawner_military1", neutrals_medium },
		{ "spawner_military2", neutrals_medium },
		{ "spawner_military3", neutrals_medium },
		{ "spawner_military4", neutrals_medium },
		{ "spawner_military5", neutrals_easy },
		{ "spawner_military6", neutrals_easy },
		--RoundHill
		{ "spawner_roundhill1", neutrals_easy },
		{ "spawner_roundhill2", neutrals_medium },
		--bighill
		{ "spawner_bighill1", neutrals_medium },
		{ "spawner_bighill2", neutrals_large },
		--nohill
		{ "spawner_nohill1", neutrals_large },
		{ "spawner_nohill2", neutrals_large },
		--rosh
		{ "spawner_rosh1", neutrals_ancient },
		{ "spawner_rosh2", neutrals_ancient },
		{ "spawner_rosh3", neutrals_ancient },
		--woods
		{ "spawner_woods1", neutrals_medium },
		{ "spawner_woods2", neutrals_medium },
		{ "spawner_woods3", neutrals_medium },
		{ "spawner_woods4", neutrals_large },
		{ "spawner_woods5", neutrals_large },
		{ "spawner_woods6", neutrals_large },
		--triangle cross
		{ "spawner_cross1", neutrals_medium },
		{ "spawner_cross2", neutrals_medium },
		--dead
		{ "spawner_dead1", neutrals_large },
		{ "spawner_dead2", neutrals_large },
	}
	for _, camp in ipairs(camps) do
		spawnNeutralGroup(camp[1], camp[2])
	end
end
-- Scatters loot boxes across the map: 50 common boxes anywhere, plus 10
-- military boxes restricted to the southern military zone.
-- The loot coordinates intentionally stay in globals (loot_x, loot_y, point,
-- lootm_x, lootm_y, pointm), matching the original implementation.
function spawnLoot()
	-- Common loot, uniformly over the whole playable area.
	local spawned = 0
	while spawned < 50 do
		spawned = spawned + 1
		loot_x = RandomInt(-14000, 14000)
		loot_y = RandomInt(-14000, 14000)
		point = Vector(loot_x, loot_y, 0)
		CreateUnitByName("npc_lootbox", point, true, nil, nil, DOTA_TEAM_NEUTRALS)
	end
	-- Military-grade loot, only inside the military zone rectangle.
	local military_spawned = 0
	while military_spawned < 10 do
		military_spawned = military_spawned + 1
		lootm_x = RandomInt(0, 10000)
		lootm_y = RandomInt(-14500, -8500)
		pointm = Vector(lootm_x, lootm_y, 0)
		CreateUnitByName("npc_lootbox_m", pointm, true, nil, nil, DOTA_TEAM_NEUTRALS)
	end
end
-- Spawns the static creep-camp marker units at their fixed map locations.
function spawnCreepCamp()
	local camp_positions = {
		--military
		Vector(1500, -12700, 0),
		Vector(8800, -13000, 0),
		Vector(5000, -11500, 0),
		--rosh
		Vector(-6200, -3500, 0),
		--roundhill
		Vector(11700, -1600, 0),
		--bighill
		Vector(11100, 14000, 0),
		--nohill
		Vector(-13000, 12000, 0),
		--dead
		Vector(-12000, -10000, 0),
		Vector(-6000, -11000, 0),
		--cross
		Vector(3000, 9000, 0),
		--woods
		Vector(4500, -1000, 0),
	}
	for _, camp_pos in ipairs(camp_positions) do
		CreateUnitByName("npc_dota_creepcamp", camp_pos, false, nil, nil, DOTA_TEAM_NOTEAM)
	end
end
-- Sort-merge comparison of two index-based ("array of records") tables.
--
-- Both tables are projected onto the field(s) named in `orderby` (an SQL-like
-- string such as "ORDER BY id, name desc"), sorted, and walked in parallel:
--   * CBdupli(i1, i2) fires for every pair matching on the first field,
--   * CBonly1(i1)     for rows present only in t1,
--   * CBonly2(i2)     for rows present only in t2,
-- where i1/i2 are the rows' original indices in t1/t2.
-- n1/n2 map field names to the actual key names in t1/t2 ({} = identity),
-- fmt1/fmt2 are optional per-value formatters, comparef an optional equality
-- test. Returns the match log (t1 sort position -> list of matched t2 sort
-- positions) and the number of comparisons performed; nil, 0 on bad input.
-- NOTE(review): CBdupli/CBonly1 are invoked unguarded, so passing nil for
-- either raises an error; only the trailing CBonly2 sweep is nil-checked.
function CompareTables(t1,t2,orderby,n1,n2,fmt1,fmt2,comparef,CBdupli,CBonly1,CBonly2)
	-- NOTE(review): the next two lines are no-ops (`x or nil` leaves x unchanged).
	local t1 = t1 or nil
	local t2 = t2 or nil
	if t1[1]==nil then print("The first table is empty or not index based (t1[1]==nil)") return nil, 0 end
	if t2[1]==nil then print("The second table is empty or not index based (t2[1]==nil)") return nil, 0 end
	local comparisonTabLog = {}
	local counter = 0
	-- Split `orderby` on commas into individual field specifications.
	local fieldlist = {}
	local fieldlistlast = orderby:gsub("([^,]*)[,]", function(s) table.insert(fieldlist,s) return "" end )
	table.insert(fieldlist,fieldlistlast)
	for i,v in ipairs(fieldlist) do
		fieldlist[i]={}
		if i==1 then
			-- Strip an optional leading "ORDER BY " / "order by " prefix.
			v = v:gsub("^ORDER BY ",""):gsub("^order by ","")
		end
		fieldlist[i].name = v:gsub("^%s+",""):gsub("%s+$","")
		-- Two words in one spec (e.g. "name desc") mean descending order.
		local _,c = fieldlist[i].name:gsub("%w+","")
		if c>1 then
			fieldlist[i].name = fieldlist[i].name:gsub("%s.*$","")
			fieldlist[i].desc = true
		end
	end
	-- Resolve a field name through an alias map; an empty map means identity.
	local function alias(nn,field)
		if #nn==0 then -- n1=={}
			return field else return nn[field]
		end
	end
	local fmt1 = fmt1 or function(s) return s end
	local fmt2 = fmt2 or function(s) return s end
	-- Build slim working copies holding only the compared (formatted) fields
	-- plus the row's original index in `_i`.
	local t1x = {}
	local t2x = {}
	for i,v in ipairs(t1) do
		t1x[i] = {}
		t1x[i]._i = i
		for j,field in ipairs(fieldlist) do
			t1x[i][field.name] = fmt1( v[ alias(n1,field.name) ] )
		end
	end
	for i,v in ipairs(t2) do
		t2x[i] = {}
		t2x[i]._i=i
		for j,field in ipairs(fieldlist) do
			t2x[i][field.name] = fmt2( v[ alias(n2,field.name) ] )
		end
	end
	-- Multi-field comparator honoring per-field desc flags; ties fall back to
	-- the original index, making the sort stable.
	local sf = function (a,b)
		for i,v in ipairs(fieldlist) do
			if a[v.name] ~= b[v.name] then
				if v.desc then
					return a[v.name] > b[v.name]
				else
					return a[v.name] < b[v.name]
				end
			end
		end
		return a._i < b._i
	end
	table.sort(t1x, sf)
	table.sort(t2x, sf)
	-- Merge walk over the two sorted copies.
	local i2 = 1
	local v1f_previous = ""
	local found = false
	local cfc = comparef or function(a,b) return a==b end
	for i1,v1 in ipairs(t1x) do
		local v1f = v1[fieldlist[1].name]
		-- Repeated key in t1: rewind i2 so duplicates re-match the same t2 run.
		if i1>1 and v1f_previous == v1f and comparisonTabLog[i1-1]~=nil then
			i2 = comparisonTabLog[i1-1][1]
		end
		found = false
		while t2x[i2] do
			counter = counter +1
			local v2 = t2x[i2]
			local v2f = t2x[i2][fieldlist[1].name]
			if cfc(v1f,v2f) then
				found = true
				CBdupli(v1._i,v2._i)
				if not comparisonTabLog[i1] then
					comparisonTabLog[i1] = {}
				end
				table.insert(comparisonTabLog[i1], i2)
				i2 = i2+1
			elseif v2f > v1f then
				break
			elseif v2f < v1f then
				-- Row exists only in t2; report it and advance past it.
				-- NOTE(review): with a custom comparef that reports "not equal"
				-- while neither < nor > holds, this loop would not terminate.
				CBonly2(v2._i)
				i2 = i2+1
			end
		end
		if not found then
			CBonly1(v1._i)
		end
		v1f_previous = v1f
	end
	-- Whatever remains in t2x was never matched against t1.
	if CBonly2~=nil then
		while t2x[i2] do
			CBonly2(t2x[i2]._i)
			i2 = i2+1
		end
	end
	return comparisonTabLog , counter
end<file_sep>if item_fallschirm == nil then
item_fallschirm = class({})
end
-- The parachute is point-targeted: it is cast on a map location.
function item_fallschirm:GetBehavior()
	return DOTA_ABILITY_BEHAVIOR_POINT
end
-- Parachute cast: teleports the caster to the targeted point with
-- blink-dagger effects/sounds, then consumes the caster's items.
function item_fallschirm:OnSpellStart()
	local hCaster = self:GetCaster()
	-- NOTE(review): hTarget and nMaxBlink are computed but never used below.
	local hTarget = false
	if not self:GetCursorTargetingNothing() then
		hTarget = self:GetCursorTarget()
	end
	local vPoint = self:GetCursorPosition()
	local vOrigin = hCaster:GetAbsOrigin()
	local nMaxBlink = 2147483600
	-- Let in-flight projectiles miss the caster, then play blink FX/sound.
	ProjectileManager:ProjectileDodge(hCaster)
	ParticleManager:CreateParticle("particles/items_fx/blink_dagger_start.vpcf", PATTACH_ABSORIGIN, hCaster)
	hCaster:EmitSound("DOTA_Item.BlinkDagger.Activate")
	hCaster:SetAbsOrigin(vPoint)
	FindClearSpaceForUnit(hCaster, vPoint, false)
	-- NOTE(review): this second FindClearSpaceForUnit call on the (already
	-- resolved) current origin looks redundant -- confirm before removing.
	local notstuck = self:GetCaster():GetAbsOrigin()
	FindClearSpaceForUnit(self:GetCaster(), notstuck, false)
	ParticleManager:CreateParticle("particles/items_fx/blink_dagger_end.vpcf", PATTACH_ABSORIGIN, hCaster)
	self:SetCurrentCharges(0)
	-- NOTE(review): this loop removes the items in inventory slots 0-5, i.e.
	-- potentially ALL carried items, not just the parachute; items can also
	-- shift slots during removal and be skipped. Presumably intended to
	-- delete only this item -- verify.
	hero = self:GetCaster()
	for i=0,5,1 do
		local fallschirm = hero:GetItemInSlot(i)
		hero:RemoveItem(fallschirm)
	end
end
end<file_sep>require("GameRules")
require( 'timers' )
-- Precaches every custom unit used by the game mode: the two loot-box types,
-- the creep-camp marker, and the eight circle markers in both their solid and
-- "white" (preview) variants.
function Precache( context )
	local unit_names = {
		"npc_lootbox",
		"npc_lootbox_m",
		"npc_dota_creepcamp",
	}
	for circle_index = 1, 8 do
		table.insert(unit_names, "npc_dota_circle" .. circle_index)
	end
	for circle_index = 1, 8 do
		table.insert(unit_names, "npc_dota_circle" .. circle_index .. "_white")
	end
	for _, unit_name in ipairs(unit_names) do
		PrecacheUnitByNameSync(unit_name, context)
	end
end
-- Engine entry point: called once when the addon is activated.
-- Creates the GameMode singleton on GameRules and runs its initialization.
function Activate()
	GameRules.GameMode = GameMode()
	GameRules.GameMode:InitGameMode()
end<file_sep># DotA Royal
Welcome to the GitHub repository for the DotA 2 mod "DotA Royal"!
To install this mod: start DotA 2, go to "Custom Games" and search for "DotA Royal", **or** visit this [Workshop page](http://steamcommunity.com/sharedfiles/filedetails/?id=1313436042) and subscribe to the mod.
| 150b4ff5e178078145b6bb40cc418062f161ed3b | [
"Markdown",
"Lua"
] | 4 | Lua | chillkroetenstyle/dota_royal | 949acc071948c30aedd27bdb6a60f7ccaaa000d6 | 34ff9b478e36514eae98667669e593e7abdc10d3 |
refs/heads/master | <file_sep>//
// ViewController.swift
// Huizhong
//
// Created by 阮冬雪 on 2019/5/3.
// Copyright © 2019年 lihuizhong. All rights reserved.
//
import UIKit
/// Root view controller; runs two small demo print statements when the view loads.
class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()

        // Demo arithmetic: prints "2" (with a trailing newline).
        let sum = 1 + 1
        print(sum)

        // Prints the digits 0 through 10 on one line, with no separators or newline.
        (0...10).forEach { print("\($0)", terminator: "") }
    }
}
| 37a9e1b4c302b75ebe1f2dcaa150b556a225feca | [
"Swift"
] | 1 | Swift | 824482910/lhzdemo | 827c74a965fd7f9e9206a4f1a8f3125846258977 | 8e9c1aced3f492fb2882ee0322ea08c610cede62 |
refs/heads/master | <repo_name>jesus003/indexestadistica<file_sep>/IndexEstadistica/Objetos/DBClass.cs
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Diagnostics;
using System.Linq;
using System.Web;
namespace IndexEstadistica.Objetos
{
/// <summary>
/// Ad-hoc data-access helper: owns two <see cref="SqlConnection"/> instances built
/// from a hard-coded connection string and exposes one method per stored procedure
/// used by the capture pages. Each INS_* / get* method opens the connection, runs
/// the procedure, closes the reader/connection and returns the procedure's result
/// (or 0 / null on failure, after logging to the Windows event log).
/// </summary>
public class BDClass
{
    public SqlConnection conectar;
    public SqlConnection conectarCheck;
    String tbl = String.Empty;
    String ipDisplay = String.Empty;
    private string connStrng;   // last connection string built by ConexionOpen

    public BDClass()
    {
        ConexionOpen();
    }

    /// <summary>Returns the connection string currently in use.</summary>
    public string getConnStrng()
    {
        return connStrng;
    }

    #region Inicializa Conexion
    /// <summary>
    /// (Re)builds both SqlConnection instances from the hard-coded connection
    /// string. Does not open them; see AbreConexion / AbreConexionCheck.
    /// FIX: removed the dead empty try/catch block the original started with.
    /// </summary>
    public void ConexionOpen()
    {
        try
        {
            // NOTE(review): credentials are hard-coded here; move to configuration.
            string ConnString = "Data source=sql.softcame.net,2302;Initial Catalog=indexestadisticas;User Id=sistemas;Password=<PASSWORD>; MultipleActiveResultSets=True";
            connStrng = ConnString;
            conectar = new SqlConnection(ConnString);
            conectarCheck = new SqlConnection(ConnString);
        }
        catch (SqlException ex)
        {
            EventLog.WriteEntry("BDClass", "CONEXION_OPEN" + ex.Message);
        }
        catch (Exception ex)
        {
            EventLog.WriteEntry("BDClass", "CONEXION_OPEN" + ex.Message);
        }
    }
    #endregion

    #region Abre Conexion
    /// <summary>Opens the main connection if it is not already open. Returns true on success.</summary>
    private bool AbreConexion()
    {
        try
        {
            if (conectar.State != System.Data.ConnectionState.Open)
            {
                ConexionOpen();   // rebuild in case the connection was disposed by CerrarConexion
                conectar.Open();
            }
            return true;
        }
        catch (SqlException ex)
        {
            this.CerrarConexion();
            conectar.Close();
            EventLog.WriteEntry("BDClass", "ABRIR_CONEXION" + ex.Message);
            return false;
        }
        catch (Exception ex)
        {
            EventLog.WriteEntry("BDClass", "ABRIR_CONEXION" + ex.Message);
            return false;
        }
    }

    /// <summary>Opens the secondary ("check") connection if needed. Returns true on success.</summary>
    private bool AbreConexionCheck()
    {
        try
        {
            if (conectarCheck.State != System.Data.ConnectionState.Open)
            {
                conectarCheck.Open();
            }
            return true;
        }
        catch (SqlException ex)
        {
            this.CerrarConexion();
            conectarCheck.Close();
            EventLog.WriteEntry("BDClass", "ABRE_CONEXION_CHECK" + ex.Message);
            return false;
        }
        catch (Exception ex)
        {
            EventLog.WriteEntry("BDClass", "ABRE_CONEXION_CHECK" + ex.Message);
            return false;
        }
    }
    #endregion

    #region Cierra Conexion
    /// <summary>Closes and disposes the main connection. Returns true on success.</summary>
    private bool CerrarConexion()
    {
        try
        {
            if (conectar.State != System.Data.ConnectionState.Closed)
            {
                conectar.Close();
                conectar.Dispose();
            }
            return true;
        }
        catch (SqlException ex)
        {
            // FIX: removed an unused local that parsed ex.StackTrace (it could
            // itself throw on a short stack trace and its result was never used).
            EventLog.WriteEntry("BDClass", "CERRAR_CONEXION" + ex.Message);
            return false;
        }
        catch (Exception ex)
        {
            EventLog.WriteEntry("BDClass", "CERRAR_CONEXION" + ex.Message);
            return false;
        }
    }
    #endregion

    #region captura
    /// <summary>
    /// Inserts one seniority row via SP_INS_ANTIGUEDAD. Returns the decimal the
    /// procedure emits, or 0 when the connection or the call fails.
    /// </summary>
    public decimal INS_ANTIGUEDAD(ObjCaptura _cap)
    {
        decimal valid = 0;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_INS_ANTIGUEDAD", conectar);
                SqlDataReader read;
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@a1_menos3", _cap.a1_menos3);
                cmd.Parameters.AddWithValue("@descripcion", _cap.descripcion);
                cmd.Parameters.AddWithValue("@empresa", _cap.empresa);
                cmd.Parameters.AddWithValue("@folio", _cap.folio);
                cmd.Parameters.AddWithValue("@quiencaptura", _cap.quiencaptura);
                cmd.Parameters.AddWithValue("@a1_1a2", _cap.a1_1a2);
                cmd.Parameters.AddWithValue("@a1_2a3", _cap.a1_2a3);
                cmd.Parameters.AddWithValue("@a1_3a4", _cap.a1_3a4);
                cmd.Parameters.AddWithValue("@a1_4a6", _cap.a1_4a6);
                cmd.Parameters.AddWithValue("@a1_7a12", _cap.a1_7a12);
                cmd.Parameters.AddWithValue("@a1_mas4", _cap.a1_mas4);
                read = cmd.ExecuteReader();
                while (read.Read())
                {
                    valid = read.GetDecimal(0);
                }
                read.Close();
                CerrarConexion();
                return valid;
            }
            catch (SqlException ex)
            {
                // FIX: log tag corrected (original copy-pasted "SP_CMBX_RECICLADORES").
                EventLog.WriteEntry("BDClass", "SP_INS_ANTIGUEDAD" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_INS_ANTIGUEDAD" + ex.Message);
            }
        }
        return 0;
    }

    /// <summary>
    /// Inserts one benefit row via SP_INS_PRESTACIONES. Returns the decimal the
    /// procedure emits, or 0 on failure.
    /// </summary>
    public decimal INS_PRESTACIONES(ObjPrestaciones _cap)
    {
        decimal valid = 0;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_INS_PRESTACIONES", conectar);
                SqlDataReader read;
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@valor", _cap.p1);
                cmd.Parameters.AddWithValue("@prestacion", _cap.pvalor);
                cmd.Parameters.AddWithValue("@folio", _cap.folio);
                Debug.WriteLine("valor:" + _cap.p1);
                Debug.WriteLine("prestacion:" + _cap.pvalor);
                Debug.WriteLine("folio:" + _cap.folio);
                read = cmd.ExecuteReader();
                while (read.Read())
                {
                    valid = read.GetDecimal(0);
                }
                read.Close();
                CerrarConexion();
                return valid;
            }
            catch (SqlException ex)
            {
                // FIX: log tag corrected (original copy-pasted "SP_CMBX_RECICLADORES").
                EventLog.WriteEntry("BDClass", "SP_INS_PRESTACIONES" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_INS_PRESTACIONES" + ex.Message);
            }
        }
        return 0;
    }

    /// <summary>
    /// Inserts one bonus row via SP_INS_BONOS. Returns 1 on success, 0 on failure.
    /// FIX: the original returned 1 immediately after ExecuteReader, making the
    /// read.Close()/CerrarConexion() lines unreachable and leaking the open
    /// reader and connection. Cleanup now runs before returning.
    /// </summary>
    public int INS_BONOS(ObjBonos _cap)
    {
        int valid = 0;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_INS_BONOS", conectar);
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@folio", _cap.folio);
                cmd.Parameters.AddWithValue("@descripcion", _cap.a_Descripcion);
                cmd.Parameters.AddWithValue("@importe", _cap.a_Importe);
                cmd.Parameters.AddWithValue("@periodicidad", _cap.a_Periodicidad);
                cmd.Parameters.AddWithValue("@incidencia", _cap.a_incidencia);
                SqlDataReader read = cmd.ExecuteReader();
                read.Close();
                CerrarConexion();
                valid = 1;
                return valid;
            }
            catch (SqlException ex)
            {
                // FIX: log tag corrected (original copy-pasted "SP_CMBX_RECICLADORES").
                EventLog.WriteEntry("BDClass", "SP_INS_BONOS" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_INS_BONOS" + ex.Message);
            }
        }
        return 0;
    }

    /// <summary>
    /// Looks up a display name via SP_GET_NOMBRE. Returns the name, the empty
    /// string when the procedure returns no rows, or null on failure.
    /// </summary>
    public string getNombre(string id)
    {
        string nombre = string.Empty;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_GET_NOMBRE", conectar);
                SqlDataReader read;
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@id", id);
                read = cmd.ExecuteReader();
                while (read.Read())
                {
                    nombre = read.GetString(0);
                }
                read.Close();
                CerrarConexion();
                return nombre;
            }
            catch (SqlException ex)
            {
                EventLog.WriteEntry("BDClass", "SP_GET_NOMBRE" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_GET_NOMBRE" + ex.Message);
            }
        }
        return null;
    }

    /// <summary>Fetches the next capture folio via SP_GET_FOLIO (0 on failure).</summary>
    public Int64 getFolio()
    {
        Int64 folio = 0;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_GET_FOLIO", conectar);
                SqlDataReader read;
                cmd.CommandType = CommandType.StoredProcedure;
                read = cmd.ExecuteReader();
                while (read.Read())
                {
                    folio = read.GetInt64(0);
                }
                read.Close();
                CerrarConexion();
                return folio;
            }
            catch (SqlException ex)
            {
                EventLog.WriteEntry("BDClass", "SP_GET_FOLIO" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_GET_FOLIO" + ex.Message);
            }
        }
        return 0;
    }

    /// <summary>
    /// Inserts one donation detail row via SP_INS_DONACIONESDETALLE. Returns the
    /// decimal the procedure emits, or 0 on failure. All values are passed as the
    /// raw strings captured by the caller.
    /// </summary>
    public decimal INS_DONACIONDETALLE(string folio, string idMaterial, string boleta_pesaje, string pase_salida, string peso_pase, string peso_compra, string precio_unitario, string unidad_medida, string total)
    {
        decimal valid = 0;
        if (this.AbreConexion())
        {
            try
            {
                SqlCommand cmd = new SqlCommand("SP_INS_DONACIONESDETALLE", conectar);
                SqlDataReader read;
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.AddWithValue("@folio", folio);
                cmd.Parameters.AddWithValue("@idMaterial", idMaterial);
                cmd.Parameters.AddWithValue("@boleta_pesaje", boleta_pesaje);
                cmd.Parameters.AddWithValue("@pase_salida", pase_salida);
                cmd.Parameters.AddWithValue("@peso_pase", peso_pase);
                cmd.Parameters.AddWithValue("@peso_compra", peso_compra);
                cmd.Parameters.AddWithValue("@precio_unitario", precio_unitario);
                cmd.Parameters.AddWithValue("@unidad_medida", unidad_medida);
                cmd.Parameters.AddWithValue("@total", total);
                read = cmd.ExecuteReader();
                while (read.Read())
                {
                    valid = read.GetDecimal(0);
                }
                read.Close();
                CerrarConexion();
                return valid;
            }
            catch (SqlException ex)
            {
                EventLog.WriteEntry("BDClass", "SP_INS_DONACIONESDETALLE" + ex.Message);
            }
            catch (Exception ex)
            {
                EventLog.WriteEntry("BDClass", "SP_INS_DONACIONESDETALLE" + ex.Message);
            }
        }
        return 0;
    }
    #endregion
}
}<file_sep>/IndexEstadistica/Objetos/ObjCaptura.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace IndexEstadistica.Objetos
{
/// <summary>
/// One row of the seniority ("antigüedad") capture grid: seven per-tenure-bucket
/// values (kept as raw strings exactly as captured) plus identification metadata.
/// Consumed by BDClass.INS_ANTIGUEDAD.
/// </summary>
public class ObjCaptura
{
    public string a1_menos3 { get; set; }
    public string a1_4a6 { get; set; }
    public string a1_7a12 { get; set; }
    public string a1_1a2 { get; set; }
    public string a1_2a3 { get; set; }
    public string a1_3a4 { get; set; }
    public string a1_mas4 { get; set; }
    public string descripcion { get; set; }   // concept name for this row
    public string quiencaptura { get; set; }  // who captured the data
    public string empresa { get; set; }       // company display name
    public Int64 folio { get; set; }          // folio grouping all rows of one submission
}
/// <summary>One bonus row captured on the form; consumed by BDClass.INS_BONOS.</summary>
public class ObjBonos {
    public Int64 folio { get; set; }
    public string a_Descripcion { get; set; }
    public string a_Importe { get; set; }
    public string a_Periodicidad { get; set; }
    public string a_incidencia { get; set; }
}
/// <summary>One benefit ("prestación") value/name pair; consumed by BDClass.INS_PRESTACIONES.</summary>
public class ObjPrestaciones
{
    public Int64 folio { get; set; }
    public string p1 { get; set; }      // captured value
    public string pvalor { get; set; }  // benefit name
}
}<file_sep>/IndexEstadistica/Captura.aspx.cs
using IndexEstadistica.Objetos;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
namespace IndexEstadistica
{
/// <summary>
/// Capture page code-behind. On postback it builds three in-memory row lists
/// (seniority grid, bonuses, benefits) from the form controls and inserts each
/// row through BDClass. FIX: the ~400 lines of copy-pasted row construction were
/// decomposed into private helpers; all values, row order and list indices are
/// preserved exactly (suspicious originals are flagged with NOTE(review)).
/// </summary>
public partial class Captura : System.Web.UI.Page
{
    BDClass conn = new BDClass();
    string nombre;   // display name of the logged-in company (loaded on every request)
    Int64 folio;     // capture folio shared by every row inserted in this postback

    protected void Page_Load(object sender, EventArgs e)
    {
        // NOTE(review): Session["user_name"] is assumed non-null here; an
        // unauthenticated request would throw. Confirm an auth guard exists.
        nombre = conn.getNombre(Session["user_name"].ToString());
        folio = conn.getFolio();
        if (!IsPostBack)
        {
            lblFecha.Text = DateTime.Now.ToString();
            lblEmpresa.Text = nombre.ToUpper();
        }
    }

    List<ObjCaptura> listAntiguedad = new List<ObjCaptura>();
    List<ObjPrestaciones> listPrestaciones = new List<ObjPrestaciones>();
    List<ObjBonos> listBonos = new List<ObjBonos>();

    protected void Button1_Click(object sender, EventArgs e)
    {
        setData();
        setBonos();
        setPrestaciones();
        foreach (var item in listAntiguedad)
        {
            conn.INS_ANTIGUEDAD(item);
        }
        foreach (var item in listPrestaciones)
        {
            conn.INS_PRESTACIONES(item);
        }
        foreach (var item in listBonos)
        {
            conn.INS_BONOS(item);
        }
        // NOTE(review): this alert never reaches the browser because the redirect
        // below replaces the response; kept for parity with the original.
        Response.Write("<script>alert('Se ha Registrada Con Exito.');</script>");
        Response.Redirect("Captura");
    }

    /// <summary>
    /// Builds one seniority row (captor/company/folio pre-filled), appends it to
    /// listAntiguedad and returns it. Column order: menos3, 1a2, 2a3, 3a4, 4a6,
    /// 7a12, mas4 — matching the ObjCaptura fields.
    /// </summary>
    private ObjCaptura AgregaFila(string descripcion, string menos3, string de1a2, string de2a3, string de3a4, string de4a6, string de7a12, string mas4)
    {
        ObjCaptura fila = new ObjCaptura();
        fila.quiencaptura = txtQuienCapturo.Value;
        fila.empresa = nombre;
        fila.folio = folio;
        fila.descripcion = descripcion;
        fila.a1_menos3 = menos3;
        fila.a1_1a2 = de1a2;
        fila.a1_2a3 = de2a3;
        fila.a1_3a4 = de3a4;
        fila.a1_4a6 = de4a6;
        fila.a1_7a12 = de7a12;
        fila.a1_mas4 = mas4;
        listAntiguedad.Add(fila);
        return fila;
    }

    /// <summary>
    /// Row whose columns combine rows 11..20 (the computed "T*" rows).
    /// NOTE(review): this reproduces the original behavior, which CONCATENATES the
    /// numeric strings of rows 11..20 instead of adding their values; "Total Anual"
    /// later parses the concatenation. Almost certainly a latent bug in the
    /// original sheet — preserved verbatim pending confirmation.
    /// </summary>
    private ObjCaptura AgregaConcatenado(string descripcion)
    {
        ObjCaptura fila = new ObjCaptura();
        fila.quiencaptura = txtQuienCapturo.Value;
        fila.empresa = nombre;
        fila.folio = folio;
        for (int i = 11; i <= 20; i++)
        {
            fila.a1_menos3 += listAntiguedad[i].a1_menos3;
            fila.a1_1a2 += listAntiguedad[i].a1_1a2;
            fila.a1_2a3 += listAntiguedad[i].a1_2a3;
            fila.a1_3a4 += listAntiguedad[i].a1_3a4;
            fila.a1_4a6 += listAntiguedad[i].a1_4a6;
            fila.a1_7a12 += listAntiguedad[i].a1_7a12;
            fila.a1_mas4 += listAntiguedad[i].a1_mas4;
        }
        fila.descripcion = descripcion;
        listAntiguedad.Add(fila);
        return fila;
    }

    /// <summary>
    /// Fills listAntiguedad with the 27 rows of the seniority grid (indices 0..26,
    /// same order as the original) and returns the first ("poblacion") row.
    /// </summary>
    public ObjCaptura setData()
    {
        // Rows 0..10: raw values captured from the form, one row per concept.
        ObjCaptura cap = AgregaFila("poblacion",
            a1_menor3.Text, a1_1a2.Text, a1_2a3.Text, a1_3a4.Text, a1_4a6.Text, a1_7a12.Text, a1_menos4.Text);
        AgregaFila("Salario_Diario_Base",
            a2_menor3.Text, a2_1a2.Text, a2_2a3.Text, a2_3a4.Text, a2_4a6.Text, a2_7a12.Text, a2_menos4.Text);
        AgregaFila("Dias_de_Vacaciones_al_Año",
            a3_menor3.Text, a3_1a2.Text, a3_2a3.Text, a3_3a4.Text, a3_4a9.Text, a3_7a13.Text, a3_menos4.Text);
        AgregaFila("Aguinaldo",
            a4_menor3.Text, a4_1a2.Text, a4_2a3.Text, a4_3a4.Text, a4_4a6.Text, a4_7a12.Text, a4_menos4.Text);
        AgregaFila("Prima_Vacacional",
            a5_menor3.Text, a5_1a2.Text, a5_2a3.Text, a5_3a4.Text, a5_4a6.Text, a5_7a12.Text, a5_menos4.Text);
        AgregaFila("Fondo_de_Ahorro",
            a6_menor3.Text, a6_1a2.Text, a6_2a3.Text, a6_3a4.Text, a6_4a6.Text, a6_7a12.Text, a6_menos4.Text);
        AgregaFila("Bono_De_Despensa",
            a7_menor3.Text, a7_1a2.Text, a7_2a3.Text, a7_3a4.Text, a7_4a6.Text, a7_7a12.Text, a7_menos4.Text);
        AgregaFila("Premio_de_Puntualidad",
            a8_menor3.Text, a8_1a2.Text, a8_2a3.Text, a8_3a4.Text, a8_4a6.Text, a8_7a12.Text, a8_menos4.Text);
        AgregaFila("Premio_Asistencia",
            a9_menor3.Text, a9_1a2.Text, a9_2a3.Text, a9_3a4.Text, a9_4a6.Text, a9_7a12.Text, a9_menos4.Text);
        AgregaFila("Bono_Sustitudo_de_PTU",
            a10_menor3.Text, a10_1a2.Text, a10_2a3.Text, a10_3a4.Text, a10_4a6.Text, a10_7a12.Text, a10_menos4.Text);
        AgregaFila("Dias Festivos (legales y adicionales)",
            a11_menor3.Text, a11_1a2.Text, a11_2a3.Text, a11_3a4.Text, a11_4a6.Text, a11_7a12.Text, a11_menos4.Text);

        // NOTE(review): the original uses the FIRST tenure bucket's base salary
        // (a2_menor3) as the factor for every column of the computed rows below —
        // probably a copy-paste mistake, preserved verbatim.
        double sdb = Convert.ToDouble(a2_menor3.Text);

        // Row 11: TVacaciones = salario * dias de vacaciones (per column).
        AgregaFila("TVacaciones",
            (sdb * Convert.ToDouble(a3_menor3.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_1a2.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_2a3.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_3a4.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_4a9.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_7a13.Text)).ToString(),
            (sdb * Convert.ToDouble(a3_menos4.Text)).ToString());

        // Rows 12 and 13: the original emits this row TWICE with the same label
        // (and the first column uses a3_menor3 instead of a5_menor3) — preserved.
        double factorPv = Convert.ToDouble(a3_menos4.Text);
        for (int rep = 0; rep < 2; rep++)
        {
            AgregaFila("TPrima Vacacional",
                (sdb * Convert.ToDouble(a3_menor3.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_1a2.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_2a3.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_3a4.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_4a6.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_7a12.Text) * factorPv).ToString(),
                (sdb * Convert.ToDouble(a5_menos4.Text) * factorPv).ToString());
        }

        // Row 14: TDias Festivos = salario * dias festivos.
        AgregaFila("TDias Festivos",
            (sdb * Convert.ToDouble(a11_menor3.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_1a2.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_2a3.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_3a4.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_4a6.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_7a12.Text)).ToString(),
            (sdb * Convert.ToDouble(a11_menos4.Text)).ToString());

        // Row 15: TAguinaldo = salario * dias de aguinaldo.
        AgregaFila("TAguinaldo (Dias)",
            (sdb * Convert.ToDouble(a4_menor3.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_1a2.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_2a3.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_3a4.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_4a6.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_7a12.Text)).ToString(),
            (sdb * Convert.ToDouble(a4_menos4.Text)).ToString());

        // Row 16: TFondo de Ahorro = salario * 365 * porcentaje.
        AgregaFila("TFondo de Ahorro",
            (sdb * 365 * Convert.ToDouble(a6_menor3.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_1a2.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_2a3.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_3a4.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_4a6.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_7a12.Text)).ToString(),
            (sdb * 365 * Convert.ToDouble(a6_menos4.Text)).ToString());

        // Rows 17..19: weekly amounts annualized (* 52 weeks).
        AgregaFila("TBono de Despensa",
            (Convert.ToDouble(a7_menor3.Text) * 52).ToString(),
            (Convert.ToDouble(a7_1a2.Text) * 52).ToString(),
            (Convert.ToDouble(a7_2a3.Text) * 52).ToString(),
            (Convert.ToDouble(a7_3a4.Text) * 52).ToString(),
            (Convert.ToDouble(a7_4a6.Text) * 52).ToString(),
            (Convert.ToDouble(a7_7a12.Text) * 52).ToString(),
            (Convert.ToDouble(a7_menos4.Text) * 52).ToString());
        AgregaFila("TBono de Puntualidad",
            (Convert.ToDouble(a8_menor3.Text) * 52).ToString(),
            (Convert.ToDouble(a8_1a2.Text) * 52).ToString(),
            (Convert.ToDouble(a8_2a3.Text) * 52).ToString(),
            (Convert.ToDouble(a8_3a4.Text) * 52).ToString(),
            (Convert.ToDouble(a8_4a6.Text) * 52).ToString(),
            (Convert.ToDouble(a8_7a12.Text) * 52).ToString(),
            (Convert.ToDouble(a8_menos4.Text) * 52).ToString());
        AgregaFila("TBono de Asistencia",
            (Convert.ToDouble(a9_menor3.Text) * 52).ToString(),
            (Convert.ToDouble(a9_1a2.Text) * 52).ToString(),
            (Convert.ToDouble(a9_2a3.Text) * 52).ToString(),
            (Convert.ToDouble(a9_3a4.Text) * 52).ToString(),
            (Convert.ToDouble(a9_4a6.Text) * 52).ToString(),
            (Convert.ToDouble(a9_7a12.Text) * 52).ToString(),
            (Convert.ToDouble(a9_menos4.Text) * 52).ToString());

        // Rows 20 and 21: identical formulas in the original (salario * a10_*),
        // emitted under two different labels — preserved.
        AgregaFila("TBono Sustituto (Dias por Ano)",
            (sdb * Convert.ToDouble(a10_menor3.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_1a2.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_2a3.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_3a4.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_4a6.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_7a12.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_menos4.Text)).ToString());
        AgregaFila("TCafeteria",
            (sdb * Convert.ToDouble(a10_menor3.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_1a2.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_2a3.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_3a4.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_4a6.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_7a12.Text)).ToString(),
            (sdb * Convert.ToDouble(a10_menos4.Text)).ToString());

        // Row 22: annual base salary per column. NOTE(review): the last column
        // multiplies a3_menos4 by itself (instead of using a2_menos4) — preserved.
        AgregaFila("Salario Base (Pesos por Ano)",
            (Convert.ToDouble(a2_menor3.Text) * (365 - Convert.ToDouble(a3_menor3.Text) - Convert.ToDouble(a11_menor3.Text))).ToString(),
            (Convert.ToDouble(a2_1a2.Text) * (365 - Convert.ToDouble(a3_1a2.Text) - Convert.ToDouble(a11_1a2.Text))).ToString(),
            (Convert.ToDouble(a2_2a3.Text) * (365 - Convert.ToDouble(a3_2a3.Text) - Convert.ToDouble(a11_2a3.Text))).ToString(),
            (Convert.ToDouble(a2_3a4.Text) * (365 - Convert.ToDouble(a3_3a4.Text) - Convert.ToDouble(a11_3a4.Text))).ToString(),
            (Convert.ToDouble(a2_4a6.Text) * (365 - Convert.ToDouble(a3_4a9.Text) - Convert.ToDouble(a11_4a6.Text))).ToString(),
            (Convert.ToDouble(a2_7a12.Text) * (365 - Convert.ToDouble(a3_7a13.Text) - Convert.ToDouble(a11_7a12.Text))).ToString(),
            (Convert.ToDouble(a3_menos4.Text) * (365 - Convert.ToDouble(a3_menos4.Text) - Convert.ToDouble(a3_menos4.Text))).ToString());

        // Rows 23 and 24: the original builds the same concatenated row twice,
        // under two different labels.
        AgregaConcatenado("Salario Base (Pesos por Ano)");
        AgregaConcatenado("Subtotal");

        // Row 25: Total Anual = row 23 (concatenation!) + row 22, per column.
        AgregaFila("Total Anual",
            (Convert.ToDouble(listAntiguedad[23].a1_menos3) + Convert.ToDouble(listAntiguedad[22].a1_menos3)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_1a2) + Convert.ToDouble(listAntiguedad[22].a1_1a2)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_2a3) + Convert.ToDouble(listAntiguedad[22].a1_2a3)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_3a4) + Convert.ToDouble(listAntiguedad[22].a1_3a4)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_4a6) + Convert.ToDouble(listAntiguedad[22].a1_4a6)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_7a12) + Convert.ToDouble(listAntiguedad[22].a1_7a12)).ToString(),
            (Convert.ToDouble(listAntiguedad[23].a1_mas4) + Convert.ToDouble(listAntiguedad[22].a1_mas4)).ToString());

        // Row 26: Costo Total Inc. Fiscal = Total Anual / 85, per column.
        AgregaFila("Costo Total Inc. Fiscal",
            (Convert.ToDouble(listAntiguedad[25].a1_menos3) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_1a2) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_2a3) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_3a4) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_4a6) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_7a12) / 85).ToString(),
            (Convert.ToDouble(listAntiguedad[25].a1_mas4) / 85).ToString());

        return cap;
    }

    /// <summary>Builds one bonus row from the given strings and appends it.</summary>
    private void AgregaBono(string descripcion, string importe, string incidencia, string periodicidad)
    {
        ObjBonos bono = new ObjBonos();
        bono.folio = folio;
        bono.a_Descripcion = descripcion;
        bono.a_Importe = importe;
        bono.a_incidencia = incidencia;
        bono.a_Periodicidad = periodicidad;
        listBonos.Add(bono);
    }

    /// <summary>Fills listBonos from the three active bonus control groups.</summary>
    public void setBonos()
    {
        AgregaBono(a_Descripcion.Text, a_Importe.Text, a_incidencia.Text, a_Periodicidad.Text);
        AgregaBono(b_Descripcion.Text, b_Importe.Text, b_incidencia.Text, b_Periodicidad.Text);
        AgregaBono(c_Descripcion.Text, c_Importe.Text, c_incidencia.Text, c_Periodicidad.Text);
        // The fourth bonus group (d_*) was commented out in the original and is
        // intentionally left out here as well.
    }

    /// <summary>Builds one benefit row from the given strings and appends it.</summary>
    private void AgregaPrestacion(string valor, string prestacion)
    {
        ObjPrestaciones p = new ObjPrestaciones();
        p.folio = folio;
        p.p1 = valor;
        p.pvalor = prestacion;
        listPrestaciones.Add(p);
    }

    /// <summary>Fills listPrestaciones with the eleven fixed benefit concepts.</summary>
    public void setPrestaciones()
    {
        AgregaPrestacion(p1.Text, "Cafeteria");
        AgregaPrestacion(p2.Text, "Festividad_Navideña");
        AgregaPrestacion(p3.Text, "Bono de Transporte Semanal");
        AgregaPrestacion(p4.Text, "Costo de Transporte");
        // NOTE(review): the original reuses p4.Text for every remaining concept;
        // preserved verbatim, but it probably should read p5..p11 controls.
        AgregaPrestacion(p4.Text, "Seguro de Vida");
        AgregaPrestacion(p4.Text, "Ropa de Trabajo");
        AgregaPrestacion(p4.Text, "Medicamentos");
        AgregaPrestacion(p4.Text, "Actividades Sociales");
        AgregaPrestacion(p4.Text, "Actividades Sindicales");
        AgregaPrestacion(p4.Text, "Actividades Deportivas");
        AgregaPrestacion(p4.Text, "Días Festivos Adicionales a los de Ley");
    }

    protected void p11_TextChanged(object sender, EventArgs e)
    {
        // Intentionally empty; the handler is wired from the markup.
    }
}
}<file_sep>/IndexEstadistica/Default.aspx.cs
using IndexEstadistica.Objetos;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
namespace IndexEstadistica
{
public partial class _Default : Page
{
    /// <summary>
    /// Fires after the Login control authenticates the user successfully:
    /// stores the user name in session state and redirects to the capture
    /// page.
    /// </summary>
    protected void Login1_LoggedIn(object sender, EventArgs e)
    {
        // Site.Master reads this session key to decide whether the
        // "Captura" navigation entry is visible.
        Session["user_name"] = Login1.UserName;
        // Removed: previously this handler also fetched the user's roles
        // (Roles.GetRolesForUser) and MembershipUser (Membership.GetUser)
        // into locals that were never read.
        Response.Redirect("Captura");
    }
}
}<file_sep>/IndexEstadistica/Site.Master.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
namespace IndexEstadistica
{
public partial class SiteMaster : MasterPage
{
    /// <summary>
    /// Shows the "captura" navigation control only when a user is logged
    /// in, i.e. when Session["user_name"] has been set by the login page.
    /// </summary>
    protected void Page_Load(object sender, EventArgs e)
    {
        // Single boolean assignment replaces the original if/else toggle;
        // behavior is identical.
        captura.Visible = Session["user_name"] != null;
    }

    /// <summary>
    /// Clears the stored user name on logout so the "captura" entry is
    /// hidden again on the next page load.
    /// </summary>
    protected void LoginStatus1_LoggedOut(object sender, EventArgs e)
    {
        Session.Remove("user_name");
    }
}
} | 75980416e515eb3ad5c75f5995c3756719b4e251 | [
"C#"
] | 5 | C# | jesus003/indexestadistica | bb14f29c4552efc0d36a9f8ac818557356678190 | 52bc788ac387f4a2caf96ed08934b3c21d9253f7 |
refs/heads/master | <repo_name>sujaynr/HackerRank<file_sep>/README.md
# HackerRank
Solutions to Hackerrank Problems by <NAME>
<file_sep>/textwrap.py
def wrap(string, max_width):
a = textwrap.wrap(string, max_width)
return "\n".join(a)<file_sep>/Division.py
from __future__ import division
if _name__ == '__main__':
a = int(raw_input())
b = int(raw_input())
print (a//b)
print(a/b)<file_sep>/Tuples.py
if __name__ == '__main__':
n = (raw_input())
integers = (raw_input())
integer_list = integers.split()
integer_list = map(int, integer_list)
t = tuple(integer_list)
print (hash(t))<file_sep>/IfElse.py
#!/bin/python
# Enter your code here. Read input from STDIN. Print output to STDOUT
N = int(raw_input())
if (N % 2 != 0):
print ("Weird")
if (N % 2 == 0) and (N >= 2) and (N <= 5):
print ("Not Weird")
if (N % 2 == 0) and (N >= 6) and (N <= 20):
print ("Weird")
if (N % 2 == 0) and (N > 20):
print ("Not Weird")
<file_sep>/NoIdea.py
n, m = (map(int, raw_input().split()))
list_of_numbers = (map(int, raw_input().split()))
setA = set(map(int, raw_input().split()))
setB = set(map(int, raw_input().split()))
happ = 0
for elem in list_of_numbers:
if elem in setA:
happ = happ + 1
if elem in setB:
happ = happ - 1
print(happ)<file_sep>/Stringsplitjoin.py
print (raw_input()).replace(" ", "-") <file_sep>/2ndlargestbadbadbad.py
if __name__ == '__main__':
n = int(raw_input())
arr = map(int, raw_input().split())
big_numb = -101
scnd_numb = -101
for elem in arr:
if elem > big_numb:
scnd_numb = big_numb
big_numb = elem
if (elem > scnd_numb) and (elem < big_numb):
scnd_numb = elem
print(scnd_numb)
<file_sep>/idontrememberthisname.py
print (len(set([ raw_input().strip() for _ in range(int(raw_input().strip())) ])))<file_sep>/Input.py
x, k = map(int,input().split())
if eval(input()) == k:
print(True)
else:
print(False)<file_sep>/countingintersections.py
c= int(raw_input())
list_n = raw_input().split()
d= int(raw_input())
list_b = raw_input().split()
def counter(n, b):
count = 0
for elem in n:
if elem in b:
count += 1
return count
print (counter(list_n, list_b))
<file_sep>/Loops.py
if __name__ == '__main__':
n = int(raw_input())
for elem in range(0, n):
print (elem * elem) | cce2f8bc3c735c00efb9052eee2fe6952f35a4df | [
"Markdown",
"Python"
] | 12 | Markdown | sujaynr/HackerRank | 1aeb5e7b6e8269edc156fdc9cff879eb5c24cf6a | f654f3f8fea0f8ee8e2efe425623038790d4f52b |
refs/heads/master | <file_sep>import os
# default config
class BaseConfig(object):
DEBUG = False
# shortened for readability
SECRET_KEY = " <KEY>"
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
print SQLALCHEMY_DATABASE_URI
class DevelopmentConfig(BaseConfig):
DEBUG = True
class ProductionConfig(BaseConfig):
DEBUG = False<file_sep>Flask==0.10.1
Flask-Migrate==1.7.0
Flask-SQLAlchemy==2.1
Flask-Script==2.0.5
Jinja2==2.8
Mako==1.0.3
MarkupSafe==0.23
SQLAlchemy==1.0.11
Werkzeug==0.11.4
alembic==0.8.4
argparse==1.2.1
decorator==4.0.9
gunicorn==19.4.5
ipython-genutils==0.1.0
itsdangerous==0.24
path.py==8.1.2
pexpect==4.0.1
pickleshare==0.6
psycopg2==2.6.1
ptyprocess==0.5.1
python-editor==0.5
simplegeneric==0.8.1
traitlets==4.1.0
wsgiref==0.1.2
<file_sep>echo export APP_SETTINGS="config.DevelopmentConfig"
echo export DATABASE_URL="postgresql://pedro:porto205050@localhost/discover_flask_dev"
| d34985a2ed17262e40b42ca3eab21a9dab720298 | [
"Python",
"Text",
"Shell"
] | 3 | Python | pedrocarvalhodev/flask-intro | 87c7c1e25e779e09fc58c9538af81b694042f181 | 267c5a38c4c2020ccefe574e969b0552d07efb8b |
refs/heads/main | <file_sep>from flask import Flask,render_template, make_response
import keras2onnx
from tensorflow import keras
import tensorflow as tf
import onnx
from google.protobuf.json_format import MessageToJson
import json
from torch.autograd import Variable
import torch.onnx as torch_onnx
from onnx_tf.backend import prepare
import torch
from keras.utils import plot_model
import torch.nn as nn
from torchviz import make_dot
import tensorflowjs as tfjs
dtype = torch.FloatTensor
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.h1 = nn.Linear(6, 10)
self.ol = nn.Linear(6, 1)
self.relu=nn.ReLU()
self.softmax = nn.LogSoftmax()
def forward(self, x):
hidden = self.h1(x)
output = self.ol(x)
activation = self.relu(hidden)
activation1 = self.softmax(output)
return output
net = Net()
app=Flask(__name__)
@app.route('/')
def index():
return render_template('Index.html')
@app.route("/kconversion/",methods=['POST'])
def kconversion():
model=keras.models.load_model('model_keras')
plot_model(model, to_file="model.png",show_shapes=True,show_layer_names=True,rankdir='TB',expand_nested=True,dpi=96)
onnx_model = keras2onnx.convert_keras(model, 'model0.onnx', debug_mode=True)
output_model_path = "./model0.onnx"
# and save the model in ONNX format
keras2onnx.save_model(onnx_model, output_model_path)
onnx_model = onnx.load("model0.onnx")
s = MessageToJson(onnx_model)
onnx_json = json.loads(s)
# Convert JSON to String
onnx_str = json.dumps(onnx_json)
with open("model1.json", "w") as json_file:
json_file.write(onnx_str)
resp = make_response(onnx_str)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route("/tfconversion/",methods=['POST'])
def tfconversion():
model = keras.models.load_model('model_keras')
converter = tf.lite.TFLiteConverter.from_keras_model(model)
tflite_model = converter.convert()
with open('model.tflite', 'wb') as f:
f.write(tflite_model)
tfjs.converters.save_keras_model(model,'/home/vatsal/Desktop/projects/open-api-workflow')
file_name = "/home/vatsal/Desktop/projects/open-api-workflow/model.json"
with open(file_name, 'r') as f:
s1 = json.loads(f.read())
s1_str = json.dumps(s1)
resp = make_response(s1_str)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route("/ptconversion/",methods=['POST'])
def ptconversion():
model=torch.load('entire_model.pt')
x = Variable(torch.randn(1,10,6))
dot= make_dot(model(x), params=dict(model.named_parameters()))
dot.format = 'png'
dot.render('torchviz-sample')
model.eval()
input_shape = (1, 10, 6)
model_onnx_path = "torch_model.onnx"
dummy_input = Variable(torch.randn(1, *input_shape).type(dtype), requires_grad=True)
# plot graph of variable, not of a nn.Module
output = torch_onnx.export(net, dummy_input, model_onnx_path, verbose=True)
# plot graph of variable, not of a nn.Module
print("Export of torch_model.onnx complete!")
onnx_model = onnx.load("torch_model.onnx")
s = MessageToJson(onnx_model)
onnx_json = json.loads(s)
# Convert JSON to String
onnx_str = json.dumps(onnx_json)
with open("model2.json", "w") as json_file:
json_file.write(onnx_str)
resp = make_response(onnx_str)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
@app.route("/tpconversion/",methods=['POST'])
def tpconversion():
onnx_model = onnx.load("torch_model.onnx")
tf_rep = prepare(onnx_model)
tf_rep.export_graph("/home/vatsal/Desktop/projects/open-api-workflow/model.pb")
# Convert the model
converter = tf.lite.TFLiteConverter.from_saved_model("/home/vatsal/Desktop/projects/open-api-workflow/model.pb") # path to the SavedModel directory
tflite_model = converter.convert()
# Save the model
with open('model2.tflite', 'wb') as f:
f.write(tflite_model)
tfjs.converters.convert_tf_saved_model('/home/vatsal/Desktop/projects/open-api-workflow/model.pb','/home/vatsal/Desktop/projects/open-api-workflow/model3.json')
file_name = "/home/vatsal/Desktop/projects/open-api-workflow/model3.json/model.json"
with open(file_name, 'r') as f:
s1 = json.loads(f.read())
s1_str=json.dumps(s1)
resp = make_response(s1_str)
resp.headers['Access-Control-Allow-Origin'] = '*'
return resp
if __name__=='__main__':
app.run(debug=True)
| 8d6833daf1d810c3cfa5018974c314a806c84c4e | [
"Python"
] | 1 | Python | Zeelraj/ml-model-converter | 8bf418fc4a43fa24f58e01840268716a7daecd3e | b78ec903301d9778ed2866a316e1ef70c317fb61 |
refs/heads/master | <repo_name>aqiangtester/xiaozhiyuan.autotest<file_sep>/src/cn/com/digiwin/justsharecloud/initialization/JsonParseBean.java
package cn.com.digiwin.justsharecloud.initialization;
import com.alibaba.fastjson.JSON;
public class JsonParseBean {
private String retCode;
private String retMsg;
private String token;
private String secretNumber;
private String isDisplayVerifyCode;
private MemberInfoBean memberInfo;
public String getRetCode() {
return retCode;
}
public void setRetCode(String retCode) {
this.retCode = retCode;
}
public String getRetMsg() {
return retMsg;
}
public void setRetMsg(String retMsg) {
this.retMsg = retMsg;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public String getSecretNumber() {
return secretNumber;
}
public void setSecretNumber(String secretNumber) {
this.secretNumber = secretNumber;
}
public String getIsDisplayVerifyCode() {
return isDisplayVerifyCode;
}
public void setIsDisplayVerifyCode(String isDisplayVerifyCode) {
this.isDisplayVerifyCode = isDisplayVerifyCode;
}
public MemberInfoBean getMemberInfo() {
return memberInfo;
}
public void setMemberInfo(MemberInfoBean memberInfo) {
this.memberInfo = memberInfo;
}
public static class MemberInfoBean{
private String memberId;
private String nickName;
private String employeeId;
private String employeeCode;
private String employeeName;
private String companyId;
private String companyCode;
private String companyName;
private String companyAbbr;
private String hxAccount;
public String getMemberId() {
return memberId;
}
public void setMemberId(String memberId) {
this.memberId = memberId;
}
public String getNickName() {
return nickName;
}
public void setNickName(String nickName) {
this.nickName = nickName;
}
public String getEmployeeId() {
return employeeId;
}
public void setEmployeeId(String employeeId) {
this.employeeId = employeeId;
}
public String getEmployeeCode() {
return employeeCode;
}
public void setEmployeeCode(String employeeCode) {
this.employeeCode = employeeCode;
}
public String getEmployeeName() {
return employeeName;
}
public void setEmployeeName(String employeeName) {
this.employeeName = employeeName;
}
public String getCompanyId() {
return companyId;
}
public void setCompanyId(String companyId) {
this.companyId = companyId;
}
public String getCompanyCode() {
return companyCode;
}
public void setCompanyCode(String companyCode) {
this.companyCode = companyCode;
}
public String getCompanyName() {
return companyName;
}
public void setCompanyName(String companyName) {
this.companyName = companyName;
}
public String getCompanyAbbr() {
return companyAbbr;
}
public void setCompanyAbbr(String companyAbbr) {
this.companyAbbr = companyAbbr;
}
public String getHxAccount() {
return hxAccount;
}
public void setHxAccount(String hxAccount) {
this.hxAccount = hxAccount;
}
}
public static JsonParseBean httpResponseParse(String response) {
// 解析JSON响应
JsonParseBean jpb = JSON.parseObject(response, JsonParseBean.class);
return jpb;
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getAccountStatus/cases/TestGetAccountStatus.java
package cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.GetAccountStatusDataProvider;
import cn.com.digiwin.justsharecloud.testCases.getAccountStatus.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.getAccountStatus.requestParams.RequestParams;
import org.testng.annotations.BeforeTest;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
/**
* @date 2016年6月30日下午3:26:31
* @author 田亚强
* @description 接口说明:注册完从未登录的账号 , 显示为"未激活" ;登录过一次之后显示为"已激活"
*/
public class TestGetAccountStatus {
private static Logger logger = Logger.getLogger(TestGetAccountStatus.class);
private static String url = Constants.LOCAL_TEST_HOST + Constants.GETACCOUNTSTATUS_LOCATION;
/**
* @date 2016年6月24日下午12:00:01
* @param account
* @param password
* @param dataSource
* @param expectedRetCode
* @return_type void
* @description 注册完之后立刻调用getAccountStatus接口 , token与secretNumber使用体验账号的 , 验证未激活
*/
@Test(dataProvider = "Inactivated", dataProviderClass = GetAccountStatusDataProvider.class , priority=1)
public void Inactivated(String token, String secretNumber, String account, String expectedRetCode) {
String requestParams = RequestParams.requestParams(token, secretNumber, account);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
// 断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getErrorMsg());
Assert.assertNotNull(jpb.getErrorCode());
// 断言响应retCode为01210
Assert.assertEquals(jpb.getErrorCode(), expectedRetCode);
}
/**
* @date 2016年6月30日下午2:34:39
* @param token
* @param secretNumber
* @param account
* @param expectedRetCode
* @return_type void
* @description 先使用该账号登录一次 , 使用该账号的token与secretNumber , 验证已激活
*/
@Test(dataProvider = "Activated", dataProviderClass = GetAccountStatusDataProvider.class , priority=2)
public void Activated(String token, String secretNumber, String account, String expectedRetCode) {
String requestParams = RequestParams.requestParams(token, secretNumber, account);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
// 断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getErrorMsg());
Assert.assertNotNull(jpb.getErrorCode());
// 断言响应retCode为01210
Assert.assertEquals(jpb.getErrorCode(), expectedRetCode);
}
/**
* @date 2016年6月24日下午12:02:19
* @param token
* @param expectedRetCode
* @return_type void
* @description 失败的测试用例
*/
@Test(dataProvider = "fail", dataProviderClass = GetAccountStatusDataProvider.class, enabled = true ,priority=3)
public void testGetAccountStatusFail(String token, String secretNumber, String account, String expectedRetCode) {
String requestParams = RequestParams.requestParams(token, secretNumber, account);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
// 断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getErrorMsg());
Assert.assertNotNull(jpb.getErrorCode());
// 断言响应retCode为01233
Assert.assertEquals(jpb.getErrorCode(), expectedRetCode);
}
@BeforeTest
public void beforeTest() {
}
@AfterTest
public void afterTest() {
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/GetAccountStatusDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.initialization.JsonParseBean;
import cn.com.digiwin.justsharecloud.initialization.Login;
public class GetAccountStatusDataProvider extends Login{
private static String const_token = Constants.CONST_TOKEN;
private static String const_secretNumber = Constants.CONST_SECRETNUMBER;
/**
* @date 2016年6月24日上午11:20:14
* @return
* @return_type Object[][]
* @description 注册完之后立刻调用getAccountStatus接口 , token与secretNumber使用体验账号的 , 验证未激活
*/
@DataProvider(name = "Inactivated" )
public static Object[][] Inactivated(){
return new Object[][]{
{const_token ,const_secretNumber ,account ,"0"}, //注册完之后直接调用该接口 , "0"代表未激活
};
}
/**
* @date 2016年6月30日下午2:30:32
* @return
* @return_type Object[][]
* @description 测试之前先使用该账号登录一次 , 使用该账号的token与secretNumber , 验证已激活
*/
@DataProvider(name = "Activated" )
public static Object[][] Activated(){
JsonParseBean jpb = login();
String token = jpb.getToken();
String secretNumber = jpb.getSecretNumber();
return new Object[][]{
{token ,secretNumber ,account ,"01261"} //注册完之后直接调用该接口 , "01261"代表已激活
};
}
/**
* @date 2016年6月24日上午11:56:07
* @return
* @return_type Object[][]
* @description 异常测试
*/
@DataProvider(name = "fail")
public static Object[][] failData(){
JsonParseBean jpb = login();
String token = jpb.getToken();
String secretNumber = jpb.getSecretNumber();
return new Object[][]{
{token ,secretNumber ,"<PASSWORD>", "01262"}, //错误的account , 01262:该账号不存在
{"5467r<PASSWORD>ghfgh" , secretNumber ,account, "4"}, //错误的token
{token , "<PASSWORD>",account, "4"}, //错误的secretNumber
{"" , secretNumber,account, "1"}, //传入空的token
{token , "",account, "1"}, //传入空的secretNumber
{token , secretNumber,"", "1"}, //传入空的account
};
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/companyRegister/bean/JsonParseBean.java
package cn.com.digiwin.justsharecloud.testCases.companyRegister.bean;
import com.alibaba.fastjson.JSON;
public class JsonParseBean {
private String retCode;
private String retMsg;
private int companyId;
private String companyCode;
public String getRetCode() {
return retCode;
}
public void setRetCode(String retCode) {
this.retCode = retCode;
}
public String getRetMsg() {
return retMsg;
}
public void setRetMsg(String retMsg) {
this.retMsg = retMsg;
}
public int getCompanyId() {
return companyId;
}
public void setCompanyId(int companyId) {
this.companyId = companyId;
}
public String getCompanyCode() {
return companyCode;
}
public void setCompanyCode(String companyCode) {
this.companyCode = companyCode;
}
public static JsonParseBean ResponseParse(String response) {
//解析JSON响应
JsonParseBean jpb = JSON.parseObject(response, JsonParseBean.class);
return jpb;
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/ResendPasswordDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.initialization.Login;
public class ResendPasswordDataProvider extends Login{
private static String token = Constants.CONST_TOKEN;
private static String secretNumber = Constants.CONST_SECRETNUMBER;
private static String emplName = "Beyond";
private static String companyName = "maitian";
/**
* @date 2016年6月24日上午11:20:14
* @return
* @return_type Object[][]
* @description
*/
@DataProvider(name = "resendPassword" )
public static Object[][] resendPassword(){
return new Object[][]{
//正确的测试用例
{account ,"18520820075" ,token ,secretNumber, emplName, companyName, "0"},
//验证各参数的必填性
{"" ,"18520820075" ,token ,secretNumber, emplName, companyName, "1"},
{account ,"" ,token ,secretNumber, emplName, companyName, "1"},
{account ,"18520820075" ,"" ,secretNumber, emplName, companyName, "1"},
{account ,"18520820075" ,token ,"", emplName, companyName, "1"},
{account ,"18520820075" ,token ,secretNumber, "", companyName, "1"},
{account ,"18520820075" ,token ,secretNumber, emplName, "", "1"},
//验证错误的手机号格式
{account ,"18520" ,token ,secretNumber, emplName, companyName, "5"},
//不存在的账号
{"dfuhu87" ,"18520820075" ,token ,secretNumber, emplName, companyName, "01271"},
//错误的token
{account ,"18520820075" ,"ty6r76hrehgthd" ,secretNumber, emplName, companyName, "4"},
//错误的secretNumber
{account ,"18520820075" ,token ,"46bh<PASSWORD>", emplName, companyName, "4"},
};
}
}
<file_sep>/test-output/xiaozhiyun.usersystem.interface_autotest.html
<html xmlns:math="http://exslt.org/math" xmlns:testng="http://testng.org">
<head>
<title>TestNG Results</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8"></meta>
<meta http-equiv="pragma" content="no-cache"></meta>
<meta http-equiv="cache-control" content="max-age=0"></meta>
<meta http-equiv="cache-control" content="no-cache"></meta>
<meta http-equiv="cache-control" content="no-store"></meta>
<LINK rel="stylesheet" href="style.css"></LINK>
<script type="text/javascript" src="main.js"></script>
</head>
<body>
<table width="100%" style="font-size: 16px; margin-bottom: 10px;" cellspacing="1">
<tr>
<td width="100%">
All methods in suite
<b>xiaozhiyun.usersystem.interface_autotest</b>
</td>
<td style="background-color: #FFBBBB; padding: 3px 3px 3px 0;" align="center">
<div style="width: 50px;">0</div>
</td>
<td style="background-color: lightgreen; padding: 3px 3px 3px 0;" align="center">
<div style="width: 50px;">46</div>
</td>
<td style="background-color: #FFFFBB; padding: 3px 3px 3px 0;" align="center">
<div style="width: 50px;">0</div>
</td>
<td align="center" style="background-color: #eaf0f7; padding: 3px 3px 3px 0;">
<div style="width: 50px;">46</div>
</td>
<td align="center"
style="font-weight: bold; background-color: #eaf0f7; padding: 3px 3px 3px 0;">
<div style="width: 50px;">100%</div>
</td>
<td style="background-color: #eaf0f7; padding: 3px 3px 3px 0;" align="center"
nowrap="true">17s
</td>
</tr>
</table>
<div style="width: 200px;">
<label for="groupMethodsCheckBox" style="font-weight: bold; margin: 0;">
<input id="groupMethodsCheckBox" type="checkbox" onclick="switchTestMethodsView(this)"></input>
Group by class
</label>
<br></br>
<label for="methodsFilter_ALL" style="font-weight: bold; margin: 0;">
<input id="methodsFilter_ALL" type="checkbox"
onclick="testMethodsFilterChanged(this, 'ALL')"></input>
All
</label>
</div>
<label for="methodsFilter_FAIL" style="margin-left: 20px;">
<input id="methodsFilter_FAIL" type="checkbox"
onclick="testMethodsFilterChanged(this, 'FAIL')"
checked=""></input>
Failed
</label>
<label for="methodsFilter_PASS">
<input id="methodsFilter_PASS" type="checkbox"
onclick="testMethodsFilterChanged(this, 'PASS')"
checked=""></input>
Passed
</label>
<label for="methodsFilter_SKIP">
<input id="methodsFilter_SKIP" type="checkbox"
onclick="testMethodsFilterChanged(this, 'SKIP')"
checked=""></input>
Skipped
</label>
<label for="methodsFilter_CONF">
<input id="methodsFilter_CONF" type="checkbox"
onclick="testMethodsFilterChanged(this, 'CONF')"></input>
Config
</label>
<br></br>
<div id="testMethodsByStatus">
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS1_details')">all(18520820075
,
01250)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">391 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
01250
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>391 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS2_details')">all(,
01251)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
,
01251
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS3_details')">all(185
,
01254)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">144 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byStatus_passed_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
185
,
01254
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>144 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byStatus_passed_PASS4_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byStatus_passed_PASS4_details')">beforeSuite()</a>
</td>
<td nowrap="true">17:58:11</td>
<td nowrap="true" align="right">9 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byStatus_passed_PASS4_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeSuite
</div>
<div>
<b>Signature:
</b>beforeSuite()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Start time:
</b>17:58:11
</div>
<div>
<b>End time:
</b>17:58:11
</div>
<div>
<b>Duration:
</b>9 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byStatus_passed_PASS5_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byStatus_passed_PASS5_details')">testDataSource(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131)
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">287 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byStatus_passed_PASS5_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testDataSource
</div>
<div>
<b>Signature:
</b>testDataSource(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>287 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byStatus_passed_PASS6_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byStatus_passed_PASS6_details')">testSourceSystem(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131)
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byStatus_passed_PASS6_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testSourceSystem
</div>
<div>
<b>Signature:
</b>testSourceSystem(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS7_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS7_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">3s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS7_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:15
</div>
<div>
<b>Duration:
</b>3s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS8_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS8_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=2, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:15</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS8_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=2, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:15
</div>
<div>
<b>End time:
</b>17:58:16
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS9_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS9_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=3, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:16</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS9_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=3, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:16
</div>
<div>
<b>End time:
</b>17:58:17
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS10_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS10_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=2, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:17</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS10_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=2, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:17
</div>
<div>
<b>End time:
</b>17:58:19
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS11_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS11_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=3, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=android, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:19</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS11_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=3, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=android,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:19
</div>
<div>
<b>End time:
</b>17:58:20
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS12_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS12_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=4, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=ios, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:20</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byStatus_passed_PASS12_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=4, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=ios,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:20
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byStatus_passed_PASS13_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byStatus_passed_PASS13_details')">testRepeatRegisterData(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
1
,
tian123
,
,
,
,
01137)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">277 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byStatus_passed_PASS13_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRepeatRegisterData
</div>
<div>
<b>Signature:
</b>testRepeatRegisterData(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
1
,
tian123
,
,
,
,
01137
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>277 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byStatus_passed_PASS14_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byStatus_passed_PASS14_details')">afterSuite()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byStatus_passed_PASS14_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterSuite
</div>
<div>
<b>Signature:
</b>afterSuite()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:30
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byStatus_passed_PASS15_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byStatus_passed_PASS15_details')">testCompantRegisterSuccess(1
,
1
,
18520820075
,
CHINA
,
TIANYAQIANG
,
1875
,
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01160
,
1)
</a>
</td>
<td nowrap="true">17:58:25</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byStatus_passed_PASS15_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testCompantRegisterSuccess
</div>
<div>
<b>Signature:
</b>testCompantRegisterSuccess(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int,
java.lang.String, java.lang.String, java.lang.String, int)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister@7a82e4d6]
</div>
<div>
<b>Parameters:
</b>
1
,
1
,
18520820075
,
CHINA
,
TIANYAQIANG
,
1875
,
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01160
,
1
</div>
<div>
<b>Start time:
</b>17:58:25
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byStatus_passed_PASS16_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byStatus_passed_PASS16_details')">afterMethod()</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">119 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byStatus_passed_PASS16_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterMethod
</div>
<div>
<b>Signature:
</b>afterMethod()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister@7a82e4d6]
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>119 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byStatus_passed_PASS17_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byStatus_passed_PASS17_details')">testDataSource(tonytian
,
tian123
,
01213)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byStatus_passed_PASS17_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testDataSource
</div>
<div>
<b>Signature:
</b>testDataSource(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
tonytian
,
tian123
,
01213
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS18_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS18_details')">testLoginFail({account=, dataSource=1, expectRetCode=01213, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS18_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=, dataSource=1, expectRetCode=01213, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS19_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS19_details')">testLoginFail({account=tonytian, dataSource=1, expectRetCode=01213, password=, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS19_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01213, password=, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS20_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS20_details')">testLoginFail({account=tonytian, dataSource=3, expectRetCode=01131, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS20_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01131, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS21_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS21_details')">testLoginFail({account=tonytian, dataSource=4, expectRetCode=01131, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS21_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01131, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS22_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS22_details')">testLoginFail({account=yihj454try, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">51 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS22_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>51 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS23_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS23_details')">testLoginFail({account=yihj454try, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS23_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS24_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS24_details')">testLoginFail({account=yihj454try, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS24_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS25_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS25_details')">testLoginFail({account=yihj454try, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS25_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS26_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS26_details')">testLoginFail({account=tonytian, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS26_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS27_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS27_details')">testLoginFail({account=tonytian, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS27_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS28_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS28_details')">testLoginFail({account=tonytian, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS28_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS29_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS29_details')">testLoginFail({account=tonytian, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byStatus_passed_PASS29_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS30_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS30_details')">testLoginSuccess({account=tonytian, dataSource=1, expectRetCode=01210, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">134 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS30_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01210, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>134 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS31_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS31_details')">testLoginSuccess({account=tonytian, dataSource=2, expectRetCode=01210, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">95 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS31_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=2, expectRetCode=01210, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>95 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS32_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS32_details')">testLoginSuccess({account=tonytian, dataSource=3, expectRetCode=01210, password=<PASSWORD>, uniqueCode=android })</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">109 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS32_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01210, password=<PASSWORD>, uniqueCode=android }
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>109 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS33_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS33_details')">testLoginSuccess({account=tonytian, dataSource=4, expectRetCode=01210, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">115 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byStatus_passed_PASS33_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01210, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:25
</div>
<div>
<b>Duration:
</b>115 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS34_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS34_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS34_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS35_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS35_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS35_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS36_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS36_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS36_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS37_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS37_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byStatus_passed_PASS37_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byStatus_passed_PASS38_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byStatus_passed_PASS38_details')">Inactivated(94ab2ec1ac3c0d159ed8b3297a99b0
,
c933f9cb96273829d524dcacd91201
,
tonytian
,
0)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byStatus_passed_PASS38_details"
class="testMethodDetails">
<div>
<b>Name:
</b>Inactivated
</div>
<div>
<b>Signature:
</b>Inactivated(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
94ab2ec1ac3c0d159ed8b3297a99b0
,
c933f9cb96273829d524dcacd91201
,
tonytian
,
0
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byStatus_passed_PASS39_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byStatus_passed_PASS39_details')">Activated(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
01261)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">88 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byStatus_passed_PASS39_details"
class="testMethodDetails">
<div>
<b>Name:
</b>Activated
</div>
<div>
<b>Signature:
</b>Activated(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
01261
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>88 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS40_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS40_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
rertrdgf8443
,
01262)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">93 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS40_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
rertrdgf8443
,
01262
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>93 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS41_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS41_details')">testGetAccountStatusFail(5467rthftghfgh
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
4)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS41_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
5467rthftghfgh
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
4
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS42_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS42_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
dsgdrgr54654654
,
tonytian
,
4)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS42_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
dsgdrgr54654654
,
tonytian
,
4
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS43_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS43_details')">testGetAccountStatusFail(,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">49 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS43_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>49 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS44_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS44_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
,
tonytian
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">51 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS44_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
,
tonytian
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>51 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS45_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS45_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byStatus_passed_PASS45_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS46_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS46_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">2 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS46_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>2 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS47_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS47_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS47_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS48_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS48_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS48_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS49_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS49_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byStatus_passed_PASS49_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS50_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS50_details')">testGetLoginStatusFail(sudfhuhf2343
,
1b77b7111e2980e9001c5f459dc13a5a
,
01221)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS50_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
sudfhuhf2343
,
1b77b7111e2980e9001c5f459dc13a5a
,
01221
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS51_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS51_details')">testGetLoginStatusFail(a20fbc720181498645238ff01908be66
,
34654gfdgdt
,
01221)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">53 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS51_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
34654gfdgdt
,
01221
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>53 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS52_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS52_details')">testGetLoginStatusFail(,
1b77b7111e2980e9001c5f459dc13a5a
,
01223)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS52_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
,
1b77b7111e2980e9001c5f459dc13a5a
,
01223
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS53_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS53_details')">testGetLoginStatusFail(a20fbc720181498645238ff01908be66
,
,
01223)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byStatus_passed_PASS53_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
,
01223
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byStatus_passed_PASS54_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byStatus_passed_PASS54_details')">testGetLoginStatusSuccess(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01220)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">91 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byStatus_passed_PASS54_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusSuccess
</div>
<div>
<b>Signature:
</b>testGetLoginStatusSuccess(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01220
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>91 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS55_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS55_details')">testGetTokenListFail(,
01223)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">54 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS55_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListFail
</div>
<div>
<b>Signature:
</b>testGetTokenListFail(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
,
01223
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>54 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS56_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS56_details')">testGetTokenListFail(dfgh345
,
01232)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">42 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byStatus_passed_PASS56_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListFail
</div>
<div>
<b>Signature:
</b>testGetTokenListFail(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
dfgh345
,
01232
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>42 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byStatus_passed_PASS57_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byStatus_passed_PASS57_details')">testGetTokenListSuccess(a20fbc720181498645238ff01908be66
,
01230)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byStatus_passed_PASS57_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListSuccess
</div>
<div>
<b>Signature:
</b>testGetTokenListSuccess(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
01230
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
</div>
<div id="testMethodsByClass" style="display: none;">
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byClass_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byClass_PASS1_details')">testCompantRegisterSuccess(1
,
1
,
18520820075
,
CHINA
,
TIANYAQIANG
,
1875
,
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01160
,
1)
</a>
</td>
<td nowrap="true">17:58:25</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_testCompantRegisterSuccess_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testCompantRegisterSuccess
</div>
<div>
<b>Signature:
</b>testCompantRegisterSuccess(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, int,
java.lang.String, java.lang.String, java.lang.String, int)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister@7a82e4d6]
</div>
<div>
<b>Parameters:
</b>
1
,
1
,
18520820075
,
CHINA
,
TIANYAQIANG
,
1875
,
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01160
,
1
</div>
<div>
<b>Start time:
</b>17:58:25
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byClass_PASS2_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byClass_PASS2_details')">afterMethod()</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">119 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister_afterMethod_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterMethod
</div>
<div>
<b>Signature:
</b>afterMethod()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.companyRegister.cases.TestCompanyRegister@7a82e4d6]
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>119 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS1_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS1_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS2_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS2_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS3_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS3_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS4_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS4_details')">beforeTest()</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">0 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_beforeTest_byClass_PASS4_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeTest
</div>
<div>
<b>Signature:
</b>beforeTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>0 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byClass_PASS5_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byClass_PASS5_details')">Inactivated(94ab2ec1ac3c0d159ed8b3297a99b0
,
c933f9cb96273829d524dcacd91201
,
tonytian
,
0)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Inactivated_byClass_PASS5_details"
class="testMethodDetails">
<div>
<b>Name:
</b>Inactivated
</div>
<div>
<b>Signature:
</b>Inactivated(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
94ab2ec1ac3c0d159ed8b3297a99b0
,
c933f9cb96273829d524dcacd91201
,
tonytian
,
0
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byClass_PASS6_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byClass_PASS6_details')">Activated(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
01261)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">88 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_Activated_byClass_PASS6_details"
class="testMethodDetails">
<div>
<b>Name:
</b>Activated
</div>
<div>
<b>Signature:
</b>Activated(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
01261
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>88 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS7_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS7_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
rertrdgf8443
,
01262)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">93 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS7_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
rertrdgf8443
,
01262
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>93 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS8_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS8_details')">testGetAccountStatusFail(5467rthftghfgh
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
4)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS8_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
5467rthftghfgh
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
4
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS9_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS9_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
dsgdrgr54654654
,
tonytian
,
4)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS9_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
dsgdrgr54654654
,
tonytian
,
4
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS10_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS10_details')">testGetAccountStatusFail(,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">49 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS10_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
,
1b77b7111e2980e9001c5f459dc13a5a
,
tonytian
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>49 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS11_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS11_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
,
tonytian
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">51 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS11_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
,
tonytian
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>51 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS12_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS12_details')">testGetAccountStatusFail(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
,
1)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_testGetAccountStatusFail_byClass_PASS12_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetAccountStatusFail
</div>
<div>
<b>Signature:
</b>testGetAccountStatusFail(java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
,
1
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS13_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS13_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">2 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS13_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus@3a1adbf]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>2 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS14_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS14_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS14_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS15_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS15_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS15_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS16_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS16_details')">afterTest()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">1 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getAccountStatus.cases.TestGetAccountStatus_afterTest_byClass_PASS16_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterTest
</div>
<div>
<b>Signature:
</b>afterTest()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>1 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS1_details')">testGetLoginStatusFail(sudfhuhf2343
,
1b77b7111e2980e9001c5f459dc13a5a
,
01221)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
sudfhuhf2343
,
1b77b7111e2980e9001c5f459dc13a5a
,
01221
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS2_details')">testGetLoginStatusFail(a20fbc720181498645238ff01908be66
,
34654gfdgdt
,
01221)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">53 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
34654gfdgdt
,
01221
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>53 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS3_details')">testGetLoginStatusFail(,
1b77b7111e2980e9001c5f459dc13a5a
,
01223)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
,
1b77b7111e2980e9001c5f459dc13a5a
,
01223
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS4_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS4_details')">testGetLoginStatusFail(a20fbc720181498645238ff01908be66
,
,
01223)
</a>
</td>
<td nowrap="true">17:58:27</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusFail_byClass_PASS4_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusFail
</div>
<div>
<b>Signature:
</b>testGetLoginStatusFail(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
,
01223
</div>
<div>
<b>Start time:
</b>17:58:27
</div>
<div>
<b>End time:
</b>17:58:27
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byClass_PASS5_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byClass_PASS5_details')">testGetLoginStatusSuccess(a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01220)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">91 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus_testGetLoginStatusSuccess_byClass_PASS5_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetLoginStatusSuccess
</div>
<div>
<b>Signature:
</b>testGetLoginStatusSuccess(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases.TestGetLoginStatus@38135151]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
1b77b7111e2980e9001c5f459dc13a5a
,
01220
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>91 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS1_details')">all(18520820075
,
01250)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">391 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
01250
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>391 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS2_details')">all(,
01251)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
,
01251
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS3_details')">all(185
,
01254)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">144 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode_all_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>all
</div>
<div>
<b>Signature:
</b>all(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases.TestGetSmsVerifyCode@7f6fa3f]
</div>
<div>
<b>Parameters:
</b>
185
,
01254
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>144 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS1_details')">testGetTokenListFail(,
01223)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">54 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListFail
</div>
<div>
<b>Signature:
</b>testGetTokenListFail(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
,
01223
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>54 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS2_details')">testGetTokenListFail(dfgh345
,
01232)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">42 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListFail_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListFail
</div>
<div>
<b>Signature:
</b>testGetTokenListFail(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
dfgh345
,
01232
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>42 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byClass_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byClass_PASS3_details')">testGetTokenListSuccess(a20fbc720181498645238ff01908be66
,
01230)
</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList_testGetTokenListSuccess_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testGetTokenListSuccess
</div>
<div>
<b>Signature:
</b>testGetTokenListSuccess(java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.getTokenList.cases.TestGetTokenList@691ef005]
</div>
<div>
<b>Parameters:
</b>
a20fbc720181498645238ff01908be66
,
01230
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:28
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byClass_PASS1_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byClass_PASS1_details')">testDataSource(tonytian
,
tian123
,
01213)
</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testDataSource_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testDataSource
</div>
<div>
<b>Signature:
</b>testDataSource(java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
tonytian
,
tian123
,
01213
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS2_details')">testLoginFail({account=, dataSource=1, expectRetCode=01213, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=, dataSource=1, expectRetCode=01213, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:23
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS3_details')">testLoginFail({account=tonytian, dataSource=1, expectRetCode=01213, password=, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:23</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01213, password=, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:23
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS4_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS4_details')">testLoginFail({account=tonytian, dataSource=3, expectRetCode=01131, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">47 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS4_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01131, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>47 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS5_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS5_details')">testLoginFail({account=tonytian, dataSource=4, expectRetCode=01131, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">48 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS5_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01131, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>48 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS6_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS6_details')">testLoginFail({account=yihj454try, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">51 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS6_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>51 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS7_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS7_details')">testLoginFail({account=yihj454try, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS7_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS8_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS8_details')">testLoginFail({account=yihj454try, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS8_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS9_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS9_details')">testLoginFail({account=yihj454try, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS9_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=yihj454try, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS10_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS10_details')">testLoginFail({account=tonytian, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS10_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS11_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS11_details')">testLoginFail({account=tonytian, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">43 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS11_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=2, expectRetCode=01211, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>43 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS12_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS12_details')">testLoginFail({account=tonytian, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">44 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS12_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01211, password=<PASSWORD>, uniqueCode=android}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>44 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS13_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS13_details')">testLoginFail({account=tonytian, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">46 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginFail_byClass_PASS13_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginFail
</div>
<div>
<b>Signature:
</b>testLoginFail(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01211, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>46 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS14_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS14_details')">testLoginSuccess({account=tonytian, dataSource=1, expectRetCode=01210, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">134 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS14_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=1, expectRetCode=01210, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>134 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS15_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS15_details')">testLoginSuccess({account=tonytian, dataSource=2, expectRetCode=01210, password=<PASSWORD>, uniqueCode=})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">95 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS15_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=2, expectRetCode=01210, password=<PASSWORD>, uniqueCode=}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>95 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS16_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS16_details')">testLoginSuccess({account=tonytian, dataSource=3, expectRetCode=01210, password=<PASSWORD>, uniqueCode=android })</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">109 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS16_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=3, expectRetCode=01210, password=<PASSWORD>, uniqueCode=android }
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:24
</div>
<div>
<b>Duration:
</b>109 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS17_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS17_details')">testLoginSuccess({account=tonytian, dataSource=4, expectRetCode=01210, password=<PASSWORD>, uniqueCode=ios})</a>
</td>
<td nowrap="true">17:58:24</td>
<td nowrap="true" align="right">115 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin_testLoginSuccess_byClass_PASS17_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testLoginSuccess
</div>
<div>
<b>Signature:
</b>testLoginSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.login.cases.TestLogin@6e200e2d]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, dataSource=4, expectRetCode=01210, password=<PASSWORD>, uniqueCode=ios}
</div>
<div>
<b>Start time:
</b>17:58:24
</div>
<div>
<b>End time:
</b>17:58:25
</div>
<div>
<b>Duration:
</b>115 ms
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
<table class="testMethodsTable" cellpadding="0" cellspacing="0">
<tr>
<td colspan="4">
<h3 style="display: inline;">cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister</h3>
</td>
</tr>
<tr class="methodsTableHeader">
<td width="100%">Name</td>
<td nowrap="true">Started</td>
<td nowrap="true">Duration</td>
<td>Exception</td>
</tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byClass_PASS1_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byClass_PASS1_details')">beforeSuite()</a>
</td>
<td nowrap="true">17:58:11</td>
<td nowrap="true" align="right">9 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_beforeSuite_byClass_PASS1_details"
class="testMethodDetails">
<div>
<b>Name:
</b>beforeSuite
</div>
<div>
<b>Signature:
</b>beforeSuite()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Start time:
</b>17:58:11
</div>
<div>
<b>End time:
</b>17:58:11
</div>
<div>
<b>Duration:
</b>9 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byClass_PASS2_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byClass_PASS2_details')">testDataSource(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131)
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">287 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testDataSource_byClass_PASS2_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testDataSource
</div>
<div>
<b>Signature:
</b>testDataSource(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>287 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byClass_PASS3_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byClass_PASS3_details')">testSourceSystem(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131)
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">45 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testSourceSystem_byClass_PASS3_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testSourceSystem
</div>
<div>
<b>Signature:
</b>testSourceSystem(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
tian123
,
,
,
,
01131
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:12
</div>
<div>
<b>Duration:
</b>45 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS4_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS4_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:12</td>
<td nowrap="true" align="right">3s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS4_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:12
</div>
<div>
<b>End time:
</b>17:58:15
</div>
<div>
<b>Duration:
</b>3s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS5_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS5_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=2, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:15</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS5_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=2, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:15
</div>
<div>
<b>End time:
</b>17:58:16
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS6_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS6_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>23, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=3, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:16</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS6_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=1, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=3, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:16
</div>
<div>
<b>End time:
</b>17:58:17
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS7_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS7_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=2, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:17</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS7_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=2, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:17
</div>
<div>
<b>End time:
</b>17:58:19
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS8_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS8_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=3, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=android, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:19</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS8_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=3, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=android,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:19
</div>
<div>
<b>End time:
</b>17:58:20
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS9_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS9_details')">testRegisterSuccess({account=tonytian, confirmPasswd=<PASSWORD>, dataSource=4, expectRetCode=01130, mobile=18520820075, mobileModel=,
mobileSysVersion=ios, nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=})
</a>
</td>
<td nowrap="true">17:58:20</td>
<td nowrap="true" align="right">1s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRegisterSuccess_byClass_PASS9_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRegisterSuccess
</div>
<div>
<b>Signature:
</b>testRegisterSuccess(java.util.Map)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
{account=tonytian, confirmPasswd=<PASSWORD>, dataSource=4, expectRetCode=01130, mobile=18520820075, mobileModel=, mobileSysVersion=ios,
nickName=tiantony, password=<PASSWORD>, smsVerifyKey=, sourceSystem=1, verifyCode=, verifyKey=}
</div>
<div>
<b>Start time:
</b>17:58:20
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>1s
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byClass_PASS10_row"
class="testMethodStatusPASS">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byClass_PASS10_details')">testRepeatRegisterData(18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
1
,
tian123
,
,
,
,
01137)
</a>
</td>
<td nowrap="true">17:58:22</td>
<td nowrap="true" align="right">277 ms
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_testRepeatRegisterData_byClass_PASS10_details"
class="testMethodDetails">
<div>
<b>Name:
</b>testRepeatRegisterData
</div>
<div>
<b>Signature:
</b>testRepeatRegisterData(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String,
int, int, java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Parameters:
</b>
18520820075
,
tiantony
,
,
tian123
,
tonytian
,
,
1
,
1
,
tian123
,
,
,
,
01137
</div>
<div>
<b>Start time:
</b>17:58:22
</div>
<div>
<b>End time:
</b>17:58:22
</div>
<div>
<b>Duration:
</b>277 ms
</div>
</div>
</td>
</tr>
<tr></tr>
<tr id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byClass_PASS11_row"
class="testMethodStatusPASS testMethodStatusCONF">
<td width="100%" class="firstMethodCell">
<a onclick="toggleDetailsVisibility('cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byClass_PASS11_details')">afterSuite()</a>
</td>
<td nowrap="true">17:58:28</td>
<td nowrap="true" align="right">2s
</td>
<td nowrap="true">
</td>
</tr>
<tr>
<td colspan="4"
style="padding: 0; background-color: white; border-style: none; height: 0px;">
<div id="cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister_afterSuite_byClass_PASS11_details"
class="testMethodDetails">
<div>
<b>Name:
</b>afterSuite
</div>
<div>
<b>Signature:
</b>afterSuite()[pri:0, instance:cn.com.digiwin.justsharecloud.testCases.register.cases.TestRegister@277ddc70]
</div>
<div>
<b>Start time:
</b>17:58:28
</div>
<div>
<b>End time:
</b>17:58:30
</div>
<div>
<b>Duration:
</b>2s
</div>
</div>
</td>
</tr>
<tr></tr>
</table>
<br></br>
</div>
<div style="margin-top: 15px; color: gray; text-align: center; font-size: 9px;">
Generated with
<a href="http://code.google.com/p/testng-xslt/" style="color: #8888aa;"
target="_blank">
TestNG XSLT
</a>
</div>
</body>
</html><file_sep>/test-output/old/test token/test token.properties
[SuiteResult context=test token]<file_sep>/test-output/old/xiaozhiyun.usersystem.autotest/usersystem.autotest.properties
[SuiteResult context=usersystem.autotest]<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getLoginStatus/cases/TestGetLoginStatus.java
package cn.com.digiwin.justsharecloud.testCases.getLoginStatus.cases;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.GetLoginStatusDataProvider;
import cn.com.digiwin.justsharecloud.testCases.getLoginStatus.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.getLoginStatus.requestParams.RequestParams;
import org.testng.annotations.BeforeTest;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
/**
 * TestNG cases for the getLoginStatus interface: posts a token/secretNumber
 * pair and verifies the returned retCode against the data-provider expectation.
 */
public class TestGetLoginStatus{
    private static Logger logger = Logger.getLogger(TestGetLoginStatus.class);
    private static String url = Constants.LOCAL_TEST_HOST + Constants.GETLOGINSTATUS_LOCATION;

    /**
     * Positive case: the provider logs in first to obtain a valid token and
     * secretNumber, then getLoginStatus is expected to answer with the
     * provider-supplied success retCode.
     */
    @Test(dataProvider = "testGetLoginStatusSuccess", dataProviderClass = GetLoginStatusDataProvider.class)
    public void testGetLoginStatusSuccess(String token, String secretNumber, String expectedRetCode) {
        postAndVerify(token, secretNumber, expectedRetCode);
    }

    /**
     * Negative case: malformed or empty token/secretNumber values; the
     * provider supplies the matching error retCode.
     */
    @Test(dataProvider = "testGetLoginStatusFail", dataProviderClass = GetLoginStatusDataProvider.class)
    public void testGetLoginStatusFail(String token, String secretNumber, String expectedRetCode) {
        postAndVerify(token, secretNumber, expectedRetCode);
    }

    /**
     * Shared body of both cases: build the JSON request, POST it, then assert
     * that retCode/retMsg are present and retCode equals the expected value.
     */
    private void postAndVerify(String token, String secretNumber, String expectedRetCode) {
        String body = RequestParams.requestParams(token, secretNumber);
        logger.info(body);
        String response = HttpRequestMethod.sendPost(url, body);
        logger.info(response);
        JsonParseBean parsed = JsonParseBean.httpResponseParse(response);
        Assert.assertNotNull(parsed.getRetMsg());
        Assert.assertNotNull(parsed.getRetCode());
        Assert.assertEquals(parsed.getRetCode(), expectedRetCode);
    }

    @BeforeTest
    public void beforeTest() {
        // no per-test setup required
    }

    @AfterTest
    public void afterTest() {
        // no per-test teardown required
    }
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/resendPassword/requestParams/RequestParams.java
package cn.com.digiwin.justsharecloud.testCases.resendPassword.requestParams;
import net.sf.json.JSONObject;
/**
 * Builds the JSON request body for the resendPassword interface.
 */
public class RequestParams {
    /**
     * Serialises the given fields into a JSON string. Keys are written in
     * this fixed insertion order: account, mobile, token, secretNumber,
     * emplName, companyName.
     */
    public static String requestParams(Object account, Object mobile, Object token, Object secretNumber,
            Object emplName, Object companyName) {
        String[] keys = { "account", "mobile", "token", "secretNumber", "emplName", "companyName" };
        Object[] values = { account, mobile, token, secretNumber, emplName, companyName };
        // table-driven puts keep the key order identical to the hand-written version
        JSONObject body = new JSONObject();
        for (int i = 0; i < keys.length; i++) {
            body.put(keys[i], values[i]);
        }
        return body.toString();
    }
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/GetSmsVerifyCodeDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;
/**
 * Data provider for the getSmsVerifyCode cases.
 */
public class GetSmsVerifyCodeDataProvider{
    /**
     * Rows are {mobile, expectedRetCode}: a well-formed number (01250),
     * an empty number (01251) and a too-short number (01254).
     */
    @DataProvider(name = "all")
    public static Object[][] All(){
        Object[][] rows = {
                { "18520820075", "01250" },
                { "", "01251" },
                { "185", "01254" }
        };
        return rows;
    }
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getAccountStatus/requestParams/RequestParams.java
package cn.com.digiwin.justsharecloud.testCases.getAccountStatus.requestParams;
import net.sf.json.JSONObject;
/**
 * Builds the JSON request body for the getAccountStatus interface.
 */
public class RequestParams {
    /**
     * Serialises token, secretNumber and account (in that insertion order)
     * into a JSON string.
     */
    public static String requestParams(String token, String secretNumber, String account) {
        String[] keys = { "token", "secretNumber", "account" };
        String[] values = { token, secretNumber, account };
        JSONObject body = new JSONObject();
        for (int i = 0; i < keys.length; i++) {
            body.put(keys[i], values[i]);
        }
        return body.toString();
    }
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getSmsVerifyCode/cases/TestGetSmsVerifyCode.java
package cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.cases;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.GetSmsVerifyCodeDataProvider;
import cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.requestParams.RequestParams;
import org.apache.log4j.Logger;
import org.testng.Assert;
/**
* @date 2016年6月30日下午3:26:31
* @author 田亚强
* @description
*/
/**
 * TestNG cases for the getSmsVerifyCode interface (single-user SMS request).
 */
public class TestGetSmsVerifyCode {
    private static Logger logger = Logger.getLogger(TestGetSmsVerifyCode.class);
    private static String url = Constants.LOCAL_TEST_HOST + Constants.GETSMSVERIFYCODE_LOCATION;

    /**
     * Posts a mobile number and asserts that retCode/retMsg are present and
     * that retCode matches the provider-supplied expectation.
     */
    @Test(dataProvider = "all", dataProviderClass = GetSmsVerifyCodeDataProvider.class, priority = 1)
    public void all(String mobile, String expectedRetCode) {
        String body = RequestParams.requestParams(mobile);
        logger.info(body);
        String response = HttpRequestMethod.sendPost(url, body);
        logger.info(response);
        JsonParseBean parsed = JsonParseBean.httpResponseParse(response);
        Assert.assertNotNull(parsed.getRetCode());
        Assert.assertNotNull(parsed.getRetMsg());
        Assert.assertEquals(parsed.getRetCode(), expectedRetCode);
    }
}
<file_sep>/test-output/old/xiaozhiyun.usersystem.autotest/xiaozhiyun.usersystem.autotest.properties
[SuiteResult context=xiaozhiyun.usersystem.autotest]<file_sep>/test-output/old/xiaozhiyun.usersystem.interface_autotest/usersystem.interface_autotest.properties
[SuiteResult context=usersystem.interface_autotest]<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/RegisterDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;
/**
 * Data providers for the register interface cases.
 *
 * Column order appears to be: mobile, nickName, verifyCode?, password,
 * account, smsVerifyKey?, sourceSystem, dataSource, confirmPasswd,
 * verifyKey?, mobileSysVersion, mobileModel?, expectRetCode — the empty
 * columns cannot be told apart from here; confirm against the @Test consumer.
 */
public class RegisterDataProvider {
	/**
	 * @return_type Object[][]
	 * @description Successful-registration data covering sourceSystem 1-3 and
	 *              dataSource 1-4 (android/ios rows carry a mobileSysVersion);
	 *              all rows expect retCode 01130.
	 */
	@DataProvider(name = "testRegisterSuccess")
	public static Object[][] data() {
		return new Object[][]{
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 1 , "tian123" , "" , "" , "" , "01130"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 2 , 1 , "tian123" , "" , "" , "" , "01130"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 3 , 1 , "tian123" , "" , "" , "" , "01130"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 2 , "tian123" , "" , "" , "" , "01130"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 3 , "tian123" , "" , "android" , "" , "01130"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 4 , "tian123" , "" , "ios" , "" , "01130"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description Failed-registration cases: required-field validation for
	 *              each parameter (expect 01131) plus a password/confirmation
	 *              mismatch (expect 01134). Requiredness of dataSource and
	 *              sourceSystem is NOT covered here.
	 */
	@DataProvider(name = "testRegisterFail")
	public static Object[][] failData() {
		return new Object[][]{
			{"" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 1 , "tian123" , "" , "" , "" , "01131"},
			{"18520820075" , "" , "" , "tian123" , "tonytian" , "" , 1 , 1 , "tian123" , "" , "" , "" , "01131"},
			{"18520820075" , "tiantony" , "" , "" , "tonytian" , "" , 1 , 1 , "tian123" , "" , "" , "" , "01131"},
			{"18520820075" , "tiantony" , "" , "tian123" , "" , "" , 1 , 1 , "tian123" , "" , "" , "" , "01131"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 1 , "" , "" , "" , "" , "01131"},
			{"18520820075" , "tiantony" , "" , "tian123" , "tonytian" , "" , 1 , 1 , "tian321" , "" , "" , "" , "01134"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description TODO: additional failure data to test the requiredness of
	 *              sourceSystem (the column is omitted from this row set).
	 */
	@DataProvider(name = "sourceSystemData")
	public static Object[][] sourceSystemData() {
		return new Object[][] {
				{ "18520820075", "tiantony", "", "tian123", "tonytian", "", 1, "tian123", "", "", "", "01131" } };
	}
	/**
	 * @return_type Object[][]
	 * @description TODO: additional failure data to test the requiredness of
	 *              dataSource (the column is omitted from this row set).
	 */
	@DataProvider(name = "dataSourceData")
	public static Object[][] dataSourceData() {
		return new Object[][] {
				{ "18520820075", "tiantony", "", "tian123", "tonytian", "", 1, "tian123", "", "", "", "01131" } };
	}
	/**
	 * @return_type Object[][]
	 * @description Re-registration of an already-registered account; expects
	 *              retCode 01137 ("duplicate" — confirm against server docs).
	 */
	@DataProvider(name = "repeatRegisterData")
	public static Object[][] repeatRegisterData() {
		return new Object[][] {
				{ "18520820075", "tiantony", "", "tian123", "tonytian", "", 1, 1, "tian123", "", "", "", "01137" } };
	}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/companyRegister/requestParams/RequestParams.java
package cn.com.digiwin.justsharecloud.testCases.companyRegister.requestParams;
import com.alibaba.fastjson.JSONArray;
import net.sf.json.JSONObject;
/**
 * Builds the JSON request body for the companyRegister interface.
 *
 * NOTE(review): the two commented-out blocks below are earlier map-based
 * drafts of requestParams, retained for reference.
 */
public class RequestParams {
	/*
	 * public static String requestParams(Map<String, String> data) { JSONObject
	 * jsparams = new JSONObject(); int a = -1; for (Map.Entry<String, String>
	 * aaa : data.entrySet()) { if(aaa.getKey().equals("dataSource")) { if(null
	 * != aaa.getValue() && !"".equals(aaa.getValue())) { a =
	 * ParamsParseInt.paramsParseInt(aaa.getValue()); jsparams.put(aaa.getKey(),
	 * a); continue; } } jsparams.put(aaa.getKey(), aaa.getValue()); } return
	 * jsparams.toString(); }
	 */
	/*
	 * public static String requestParams(Map<String, String> data){ JSONObject
	 * requestParams = new JSONObject(); for (Map.Entry<String, String> d :
	 * data.entrySet()) { if (d.getKey().equals("dataSource")) { int dataSource
	 * = ParamsParseInt.paramsParseInt(d.getValue());
	 * requestParams.put(d.getKey(), dataSource); }else if
	 * (d.getKey().equals("sourceSystem")) { int sourceSystem =
	 * ParamsParseInt.paramsParseInt(d.getValue());
	 * requestParams.put(d.getKey(), sourceSystem); }
	 * requestParams.put(d.getKey(), d.getValue()); } return
	 * requestParams.toString(); }
	 */
	/**
	 * Full-form request builder: assembles commonParam, a one-element fileList,
	 * a one-element industryList and compInfo, then wraps them with token and
	 * secretNumber into the top-level request.
	 *
	 * Required fields (per the inline markers): dataSource, sourceSystem,
	 * mobile, companyName, contactPerson.
	 *
	 * NOTE(review): capitalUnit is accepted but its put is commented out, and
	 * parentId is accepted but never written into the JSON at all — confirm
	 * whether both omissions are intentional.
	 */
	public static String requestParams(String mobileSysVersion, String mobileModel, String sourcePage,
			String dataSource, String sourceSystem, String interfaceVersion, String fileUrl, String fileKey,
			String fileSize, String fileName, String lineNo1, String mCode, String sName, String sCode, String mName,
			String bCode, String bName, String lineNo2, String address, String countryCode, String districtName,
			String bankCardNo, String countryName, String cityName, String cityCode, String districtCode,
			String provinceName, String provinceCode, String tel, String mobile, int businessPattern, int businessRange,
			int capitalUnit, String email, int endBusinessDate, String legalPerson, String licenseNo, String logoFileId,
			String logoUrl, String mainProducts, String openAccountBank, String organizationCode, int parentId,
			int registeredCapital, int setUpDate, int startBusinessDate, String taxPayerNo, int turnover,
			String companyName, String contactPerson, String companyAbbr, int companyScale, int memberId,
			int companyType, int registeredModel, String token, String secretNumber) {
		// common metadata shared by all requests from this client
		JSONObject commonParam = new JSONObject();
		commonParam.put("mobileSysVersion", mobileSysVersion);
		commonParam.put("mobileModel", mobileModel);
		commonParam.put("sourcePage", sourcePage);
		commonParam.put("dataSource", dataSource); // required
		commonParam.put("sourceSystem", sourceSystem); // required
		commonParam.put("interfaceVersion", interfaceVersion);
		// single attachment entry
		JSONObject fileListObject = new JSONObject();
		fileListObject.put("fileUrl", fileUrl);
		fileListObject.put("fileKey", fileKey);
		fileListObject.put("fileSize", fileSize);
		fileListObject.put("fileName", fileName);
		fileListObject.put("lineNo", lineNo1);
		JSONArray fileList = new JSONArray();
		fileList.add(0, fileListObject);
		// single industry-classification entry (big/middle/small code+name)
		JSONObject industryListObject = new JSONObject();
		industryListObject.put("mCode", mCode);
		industryListObject.put("sName", sName);
		industryListObject.put("sCode", sCode);
		industryListObject.put("mName", mName);
		industryListObject.put("bCode", bCode);
		industryListObject.put("bName", bName);
		industryListObject.put("lineNo", lineNo2);
		JSONArray industryList = new JSONArray();
		industryList.add(0, industryListObject);
		// the company profile itself
		JSONObject compInfo = new JSONObject();
		compInfo.put("address", address);
		compInfo.put("countryCode", countryCode);
		compInfo.put("districtName", districtName);
		compInfo.put("bankCardNo", bankCardNo);
		compInfo.put("countryName", countryName);
		compInfo.put("cityName", cityName);
		compInfo.put("cityCode", cityCode);
		compInfo.put("districtCode", districtCode);
		compInfo.put("provinceName", provinceName);
		compInfo.put("provinceCode", provinceCode);
		compInfo.put("tel", tel);
		compInfo.put("mobile", mobile); // required
		compInfo.put("businessPattern", businessPattern); // number
		compInfo.put("businessRange", businessRange); // number
		// compInfo.put("capitalUnit", capitalUnit); // number — intentionally skipped? see NOTE above
		compInfo.put("email", email);
		compInfo.put("endBusinessDate", endBusinessDate); // number
		compInfo.put("fileList", fileList);
		compInfo.put("industryList", industryList);
		compInfo.put("legalPerson", legalPerson);
		compInfo.put("licenseNo", licenseNo);
		compInfo.put("logoFileId", logoFileId);
		compInfo.put("logoUrl", logoUrl);
		compInfo.put("mainProducts", mainProducts);
		compInfo.put("openAccountBank", openAccountBank);
		compInfo.put("organizationCode", organizationCode);
		compInfo.put("registeredCapital", registeredCapital); // number
		compInfo.put("setUpDate", setUpDate); // number
		compInfo.put("startBusinessDate", startBusinessDate); // number
		compInfo.put("taxPayerNo", taxPayerNo);
		compInfo.put("turnover", turnover); // number
		compInfo.put("companyName", companyName); // required
		compInfo.put("contactPerson", contactPerson); // required
		compInfo.put("companyAbbr", companyAbbr);
		compInfo.put("companyScale", companyScale); // number
		compInfo.put("memberId", memberId); // number
		compInfo.put("companyType", companyType); // number
		compInfo.put("registeredModel", registeredModel); // number
		// assemble the top-level JSON request
		JSONObject params = new JSONObject();
		params.put("compInfo", compInfo);
		params.put("commonParam", commonParam);
		params.put("token", token);
		params.put("secretNumber", secretNumber);
		// serialise the JSON request to a String
		String requestParams = params.toString();
		return requestParams;
	}
	/**
	 * Minimal-form request builder: only the required compInfo fields
	 * (mobile, companyName, contactPerson, memberId) plus dataSource and
	 * sourceSystem, wrapped with token and secretNumber.
	 */
	public static String requestParams(String dataSource, String sourceSystem, String mobile , String companyName, String contactPerson,
			int memberId, String token, String secretNumber) {
		JSONObject commonParam = new JSONObject();
		commonParam.put("dataSource", dataSource); // required
		commonParam.put("sourceSystem", sourceSystem); // required
		JSONObject compInfo = new JSONObject();
		compInfo.put("mobile", mobile); // required
		compInfo.put("companyName", companyName); // required
		compInfo.put("contactPerson", contactPerson); // required
		compInfo.put("memberId", memberId); // number
		// assemble the top-level JSON request
		JSONObject params = new JSONObject();
		params.put("compInfo", compInfo);
		params.put("commonParam", commonParam);
		params.put("token", token);
		params.put("secretNumber", secretNumber);
		// serialise the JSON request to a String
		String requestParams = params.toString();
		return requestParams;
	}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/LoginDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import java.util.Iterator;
import org.testng.annotations.DataProvider;
import cn.com.digiwin.justsharecloud.commonfunctions.ExcelDataProvider;
/**
 * Data providers for the login interface cases.
 * Columns: account, password, dataSource, mobileSysVersion, expectedRetCode
 * (mobileSysVersion accompanies dataSource 3 = android and 4 = ios).
 */
public class LoginDataProvider {
	/**
	 * @return_type Object[][]
	 * @description Successful-login data for dataSource 1-4; all rows expect
	 *              retCode 01210.
	 */
	@DataProvider(name = "testLoginSuccess")
	public static Object[][] successData() {
		return new Object[][]{
			{"tonytian" , "tian123" , 1 , "" , "01210"},
			{"tonytian" , "tian123" , 2 , "" , "01210"},
			{"tonytian" , "tian123" , 3 , "android" , "01210"},
			{"tonytian" , "tian123" , 4 , "ios" , "01210"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description Failed-login data: empty account/password (01213), mobile
	 *              dataSource without mobileSysVersion (01131), and unknown
	 *              account or wrong password (01211).
	 */
	@DataProvider(name = "testLoginFail")
	public static Object[][] failData() {
		return new Object[][]{
			{"" , "tian123" , 1 , "" , "01213"},
			{"tonytian" , "" , 1 , "" , "01213"},
			{"tonytian" , "tian123" , 3 , "" , "01131"},
			{"tonytian" , "tian123" , 4 , "" , "01131"},
			{"yihj454try" , "tian123" , 1 , "" , "01211"},
			{"yihj454try" , "tian123" , 2 , "" , "01211"},
			{"yihj454try" , "tian123" , 3 , "android" , "01211"},
			{"yihj454try" , "tian123" , 3 , "ios" , "01211"},
			{"tonytian" , "yihj454try" , 1 , "" , "01211"},
			{"tonytian" , "yihj454try" , 2 , "" , "01211"},
			{"tonytian" , "yihj454try" , 3 , "android" , "01211"},
			{"tonytian" , "yihj454try" , 4 , "ios" , "01211"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description Supplementary failure data verifying that dataSource is
	 *              required (the dataSource column is omitted here).
	 */
	@DataProvider(name = "dataSourceData")
	public static Object[][] dataSourceData() {
		return new Object[][]{
			{"tonytian" , "tian123" ,"01213"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description Row with a mistyped account and two expected retCodes —
	 *              presumably 01211 before and 01213 after a verify-code is
	 *              demanded; confirm against the consuming @Test.
	 */
	@DataProvider(name = "needVerifyData")
	public static Object[][] needVerifyData(){
		return new Object[][]{
			{"tonytia" , "tian123" ,1 , "01211","01213" }
		};
	}
	/**
	 * @return_type Iterator<Object[]>
	 * @description Pressure-test rows read from the "pressureTestData" Excel
	 *              sheet. NOTE(review): the cast assumes ExcelDataProvider
	 *              implements Iterator<Object[]> — confirm in that class.
	 */
	@DataProvider(name = "pressureTestData")
	public static Iterator<Object[]> data() {
		return (Iterator<Object[]>) new ExcelDataProvider("pressureTestData", "pressureTestData");
	}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/CompanyRegisterDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;
import cn.com.digiwin.justsharecloud.commonfunctions.ParamsParseInt;
import cn.com.digiwin.justsharecloud.initialization.JsonParseBean;
import cn.com.digiwin.justsharecloud.initialization.Login;
/**
 * Data provider for the companyRegister cases.
 *
 * NOTE(review): the static initializers below run login() at class-load time,
 * i.e. a real HTTP login is performed as soon as TestNG touches this class —
 * the provider rows reuse that single session's token/secretNumber/memberId.
 */
public class CompanyRegisterDataProvider extends Login {
	// one shared login session for all rows (performed at class load)
	private static JsonParseBean jpb = login();
	private static String token = jpb.getToken();
	private static String secretNumber = jpb.getSecretNumber();
	private static String memberIdResponse = jpb.getMemberInfo().getMemberId();
	// memberId arrives as a String and is converted via the project helper
	private static int memberId = ParamsParseInt.paramsParseInt(memberIdResponse);
	/**
	 * @date 2016-06-24 11:20
	 * @return_type Object[][]
	 * @description Register a company right after login, using the trial
	 *              account's token and secretNumber, and verify the
	 *              not-yet-activated state (expect retCode 01160).
	 */
	@DataProvider(name = "success")
	public static Object[][] Success() {
		return new Object[][] { {"1","1","18520820075" , "CHINA" ,"TIANYAQIANG" , memberId ,token , secretNumber, "01160" , 1} };
	}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/dataProvider/GetLoginStatusDataProvider.java
package cn.com.digiwin.justsharecloud.dataProvider;
import org.testng.annotations.DataProvider;

import cn.com.digiwin.justsharecloud.initialization.JsonParseBean;
import cn.com.digiwin.justsharecloud.initialization.Login;
/**
 * Data providers for the getLoginStatus cases.
 *
 * Fix: previously every field of every row invoked login() separately, which
 * performed several real logins per provider and could pair a token from one
 * session with a secretNumber from a different session. Each provider now
 * logs in exactly once and uses the matched token/secretNumber pair.
 */
public class GetLoginStatusDataProvider extends Login{
	/**
	 * @return_type Object[][]
	 * @description Logs in once to obtain a valid session, then supplies its
	 *              token and secretNumber; getLoginStatus is expected to
	 *              answer retCode 01220.
	 */
	@DataProvider(name = "testGetLoginStatusSuccess")
	public static Object[][] successData(){
		JsonParseBean session = login();
		return new Object[][]{
			{session.getToken(), session.getSecretNumber(), "01220"}
		};
	}
	/**
	 * @return_type Object[][]
	 * @description Negative rows against one real session:
	 *              1) malformed token          -> 01221
	 *              2) wrong secretNumber       -> 01221
	 *              3) empty token              -> 01223 (parameter empty)
	 *              4) empty secretNumber       -> 01223
	 */
	@DataProvider(name = "testGetLoginStatusFail")
	public static Object[][] failData(){
		JsonParseBean session = login();
		return new Object[][]{
			{"sudfhuhf2343", session.getSecretNumber(), "01221"},
			{session.getToken(), "<PASSWORD>", "01221"},
			{"", session.getSecretNumber(), "01223"},
			{session.getToken(), "", "01223"},
		};
	}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getSmsVerifyCode/requestParams/RequestParams.java
package cn.com.digiwin.justsharecloud.testCases.getSmsVerifyCode.requestParams;
import net.sf.json.JSONObject;
public class RequestParams {
public static String requestParams(String mobile) {
// 封装JSON请求参数
JSONObject params = new JSONObject();
params.put("mobile", mobile);
// 将JSON请求参数转为String
String requestParams = params.toString();
return requestParams;
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/commonfunctions/RedisDelete.java
package cn.com.digiwin.justsharecloud.commonfunctions;
import org.apache.log4j.Logger;
import cn.com.digiwin.justsharecloud.constants.Constants;
public class RedisDelete {
private static Logger logger = Logger.getLogger(RedisDelete.class);
private static String url = Constants.LOCAL_TEST_HOST + Constants.LOCAL_TEST_REDISDELETE_LOCATION;
public static void redisDelete(String key){
String param = "key=" + key;
String response = HttpRequestMethod.sendGet(url, param);
logger.info(response);
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/getTokenList/cases/TestGetTokenList.java
package cn.com.digiwin.justsharecloud.testCases.getTokenList.cases;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.GetTokenListDataProvider;
import cn.com.digiwin.justsharecloud.testCases.getTokenList.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.getTokenList.requestParams.RequestParams;
import org.testng.annotations.BeforeTest;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
public class TestGetTokenList {
private static Logger logger = Logger.getLogger(TestGetTokenList.class);
private static String url = Constants.LOCAL_TEST_HOST + Constants.GERTOKENLIST_LOCATION;
/**
* @date 2016年6月24日下午12:00:01
* @param account
* @param password
* @param dataSource
* @param expectedRetCode
* @return_type void
* @description 成功的测试用例 ,先调用Login接口获取token , 再调用getTokenList接口获取响应
*/
@Test(dataProvider="testGetTokenListSuccess" , dataProviderClass=GetTokenListDataProvider.class)
public void testGetTokenListSuccess(String token ,String expectedRetCode) {
String requestParams = RequestParams.requestParams(token);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
//断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getRetMsg());
Assert.assertNotNull(jpb.getRetCode());
//断言响应retCode为01210
Assert.assertEquals(jpb.getRetCode(), expectedRetCode);
}
/**
* @date 2016年6月24日下午12:02:19
* @param token
* @param expectedRetCode
* @return_type void
* @description 失败的测试用例 , token入参传入空字符串
*/
@Test(dataProvider="testGetTokenListFail" , dataProviderClass=GetTokenListDataProvider.class)
public void testGetTokenListFail (String token , String expectedRetCode){
String requestParams = RequestParams.requestParams(token);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
//断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getRetMsg());
Assert.assertNotNull(jpb.getRetCode());
//断言响应retCode为01233
Assert.assertEquals(jpb.getRetCode(), expectedRetCode);
}
@BeforeTest
public void beforeTest() {
}
@AfterTest
public void afterTest() {
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/commonfunctions/ReadExcel.java
package cn.com.digiwin.justsharecloud.commonfunctions;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class ReadExcel {
public static void prmap(Map<String, String> arr) {
Set<String> set = arr.keySet();
Iterator<String> it = set.iterator();
while (it.hasNext()) {
it.next();
}
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/sendSms/requestParams/RequestParams.java
package cn.com.digiwin.justsharecloud.testCases.sendSms.requestParams;
import net.sf.json.JSONObject;
public class RequestParams {
public static String requestParams(String interfaceVersion, String sourceSystem, String[] mobileArrays ,String content) {
// 封装JSON请求参数
JSONObject commonParam = new JSONObject();
commonParam.put("interfaceVersion", interfaceVersion); //可选
commonParam.put("sourceSystem", sourceSystem);
JSONObject params = new JSONObject();
params.put("commonParam", commonParam);
params.put("content", content); //必须,字符长度大于0
params.put("mobiles", mobileArrays); //元素个数大于0,元素为字符串
// 将JSON请求参数转为String
String requestParams = params.toString();
return requestParams;
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/companyRegister/cases/TestCompanyRegister.java
package cn.com.digiwin.justsharecloud.testCases.companyRegister.cases;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.commonfunctions.JDBCMySQL;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.CompanyRegisterDataProvider;
import cn.com.digiwin.justsharecloud.initialization.Login;
import cn.com.digiwin.justsharecloud.testCases.companyRegister.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.companyRegister.requestParams.RequestParams;
/**
* @date 2016年6月20日下午3:59:34
* @author 田亚强
* @description 测试注册接口 , 包括注册成功和注册失败的测试用例
*/
public class TestCompanyRegister {
private static Logger logger = Logger.getLogger(TestCompanyRegister.class);
private static String url = Constants.LOCAL_TEST_HOST + Constants.COMPANYREGISTER_LOCATION;
/**
* @date 2016年6月20日下午3:58:39
* @param data
* @return_type void
* @description 注册成功的测试用例
*/
@Test(dataProvider = "success", dataProviderClass = CompanyRegisterDataProvider.class, enabled = true)
public void testCompantRegisterSuccess(String dataSource, String sourceSystem, String mobile , String companyName, String contactPerson,
int memberId, String token, String secretNumber , String expectRetCode ,int expectBusiness_type) {
String requestParams = RequestParams.requestParams(dataSource, sourceSystem, mobile, companyName, contactPerson, memberId, token, secretNumber);
logger.info(requestParams);
// 发送请求,获得响应数据
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
// 断言返回的数据不为Null
JsonParseBean jpb = JsonParseBean.ResponseParse(response);
Assert.assertNotNull(jpb.getRetMsg());
Assert.assertNotNull(jpb.getRetCode());
Assert.assertNotNull(jpb.getCompanyId());
Assert.assertNotNull(jpb.getCompanyCode());
logger.info("所有返回参数不为空");
// 断言retCode的值为01130
Assert.assertEquals(jpb.getRetCode(), expectRetCode);
String sql = "select * from t_member where id=(select member_id from t_authentication where account='tonytian' limit 1)";
Assert.assertEquals(JDBCMySQL.query(sql), expectBusiness_type);
logger.info("测试用例: " + "测试用例" + " , 通过");
System.out.println("");
}
@AfterMethod
public void afterMethod(){
logger.info(Login.login());
}
}
<file_sep>/src/cn/com/digiwin/justsharecloud/testCases/sendSms/cases/TestSendSms.java
package cn.com.digiwin.justsharecloud.testCases.sendSms.cases;
import org.testng.annotations.Test;
import cn.com.digiwin.justsharecloud.commonfunctions.HttpRequestMethod;
import cn.com.digiwin.justsharecloud.constants.Constants;
import cn.com.digiwin.justsharecloud.dataProvider.SendSmsDataProvider;
import cn.com.digiwin.justsharecloud.testCases.sendSms.bean.JsonParseBean;
import cn.com.digiwin.justsharecloud.testCases.sendSms.requestParams.RequestParams;
import org.apache.log4j.Logger;
import org.testng.Assert;
/**
* @date 2016年6月30日下午3:26:31
* @author 田亚强
* @description
*/
public class TestSendSms {
private static Logger logger = Logger.getLogger(TestSendSms.class);
private static String url = Constants.LOCAL_TEST_HOST + Constants.SENDSMS_LOCATION;
/**
* @date 2016年7月1日上午11:51:48
* @param mobile
* @param expectedRetCode
* @return_type void
* @description 多用户接收短信
*/
/*@Test(dataProvider = "all", dataProviderClass = SendSmsDataProvider.class , priority=1)
public void all(String mobile ,String requestParams , String[] content , String expectedRetCode) {
String requestParams = RequestParams.requestParams(mobile, requestParams, null, requestParams);
logger.info(requestParams);
String response = HttpRequestMethod.sendPost(url, requestParams);
logger.info(response);
// 断言响应retCode和retMsg不为空
JsonParseBean jpb = JsonParseBean.httpResponseParse(response);
Assert.assertNotNull(jpb.getErrorCode());
Assert.assertNotNull(jpb.getErrorMsg());
// 断言响应retCode为01210
Assert.assertEquals(jpb.getErrorCode(), expectedRetCode);
}*/
}
| e2cf357149702a78fe6f35777049458478976eb3 | [
"Java",
"HTML",
"INI"
] | 27 | Java | aqiangtester/xiaozhiyuan.autotest | 124135b0bd0da7e378d03489dc8bc1d82d3d6ff9 | 126fb4a2b2c7eec10b6cfbfd7cb67d7213135d03 |
refs/heads/master | <file_sep><h2> <%= @artist.name %> </h2>
<h2>Songs: </h2>
<% @artist.songs.each do |song| %>
<h3><a href="/songs/<%= song.slug %>"><%= song.name %></a> </h3>
<% end %>
<h2>Genres: </h2>
<% @artist.genres.each do |genre| %>
<h3><a href="/genres/<%= genre.slug %>"> <%= genre.name %> </a> </h3>
<% end %>
<file_sep>class SongsController < ApplicationController
use Rack::Flash
get "/songs" do
@songs=Song.all
erb :"songs/index"
end
get "/songs/:slug" do
@song=Song.find_by_slug(@params[:slug])
erb :"songs/show"
end
get "/songs/new" do
@genres=Genre.all
erb :"songs/new"
end
post "/songs" do
@song=Song.create({name:@params[:song_name]})
if !@params[:artist_name].nil?
@song.artist=Artist.find_or_create_by({name:@params[:artist_name]})
end
#binding.pry
@song.genres << Genre.find_or_create_by({name:@params[:genre_name]})
@song.save
flash[:message] = "Successfully created song."
redirect :"/songs/#{@song.slug}"
end
get "/songs/:slug/edit" do
@song=Song.find_by_slug(@params[:slug])
#binding.pry
erb :"songs/edit"
end
patch "/songs/:slug" do
@song=Song.find_by_slug(@params[:slug])
@song.name=@params[:song][:name]
@song.artist=Artist.find_or_create_by(@params[:artist])
@song.genre_ids=@params[:song][:genre_ids]
@song.save
flash[:message]="Successfully updated song."
redirect "songs/#{@song.slug}"
end
end
| 9c9b1ee4c3764e148ff1575c0b060b8041bc1b99 | [
"Ruby"
] | 2 | Ruby | AD0791/playlister-sinatra-noukod-000 | e5712ceb1571b62635f3c18d2dc166d25e51fd11 | 5b6f990ed4bd7f27b52d4e4dde8b69018c71d01f |
refs/heads/master | <repo_name>salman-automation/Selenium-Automation<file_sep>/src/Objectclasses/Register.java
package Objectclasses;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.PageFactory;
public class Register {
public Register(WebDriver driver) {
PageFactory.initElements(driver, this);
}
}
<file_sep>/src/MainPackage/MainClass.java
package MainPackage;
import java.io.FileInputStream;
import java.io.IOException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import jxl.Sheet;
import jxl.Workbook;
import jxl.read.biff.BiffException;
import Objectclasses.LoginClass;
import Objectclasses.LogoutClass;
import Objectclasses.OrderProcess;
public class MainClass {
public static void main(String[] args) throws IOException, InterruptedException, BiffException {
//From Initializing Excel Sheet
// FileInputStream fs = new FileInputStream("D:\\Selenium Course\\Third Session\\Third Session\\Test.xls");
// Workbook wb = Workbook.getWorkbook(fs);
// Sheet AddressSheet = wb.getSheet("User_Login");
//
//End Excel
System.setProperty("webdriver.chrome.driver", "D:\\Selenium Course\\Java 7 Lib\\chromedriver.exe");
WebDriver driver = new ChromeDriver();
driver.manage().window().maximize();
driver.get("https://demo.opencart.com/index.php?route=account/login");
//Getting usernanme and password from excel sheet
// LoginClass log1 = new LoginClass(driver);
// log1.Login(AddressSheet.getCell(0, 1).getContents(), AddressSheet.getCell(1, 1).getContents());
//End getting username password
//For Loop login and logout
for(int x=0; x<3; x++) {
LoginClass loginobjects = new LoginClass (driver);
loginobjects.Login("<EMAIL>","123456789");
LogoutClass logout = new LogoutClass (driver);
logout.logout();
driver.get("https://demo.opencart.com/index.php?route=account/login");
}
// driver.get("https://demo.opencart.com/index.php?route=account/login");
// LoginClass loginobjects1 = new LoginClass (driver);
// loginobjects1.Login("<EMAIL>","@@sam@@");
// Thread.sleep(2000);
// OrderProcess processorder = new OrderProcess (driver);
// processorder.processorder();
//
Thread.sleep(2000);
// LogoutClass logout1 = new LogoutClass (driver);
// logout1.logout();
//
driver.close();
driver.quit();
}
}
| 21d6a0cf15a9918334fae70edc0e119c7d2f2d1c | [
"Java"
] | 2 | Java | salman-automation/Selenium-Automation | dca550ada69a04f93f608eb8c0eb992e4ba32e87 | 30cc54644bb9be2d99dc4c9d7a1621180ff14d0a |
refs/heads/master | <file_sep>package latihan_uas;
public class Kalkulator {
int jumlah;
public int Tambah(int bil1, int bil2){
jumlah = bil1 + bil2;
return jumlah;
}
public int Kurang(int bil1, int bil2){
jumlah = bil1 - bil2;
return jumlah;
}
public int Kali(int bil1, int bil2){
jumlah = bil1 * bil2;
return jumlah;
}
public float Bagi(int bil1, int bil2){
jumlah = bil1 / bil2;
return jumlah;
}
}
<file_sep>package latihan_uas;
import java.util.Scanner;
public class TestKalkulator {
public static void main(String[] args) {
Kalkulator operasi = new Kalkulator();
Scanner scan = new Scanner(System.in);
System.out.println("Kalkulator Sederhana");
System.out.println("Masukan Bilangan 1 = ");
int bil1 = scan.nextInt();
System.out.println("Masukan Bilangan 2 = ");
int bil2 = scan.nextInt();
System.out.println("\nSilahkan Pilih Operasi");
System.out.println("1.\t Perkalian (*)");
System.out.println("2.\t Pembagian (/)");
System.out.println("3.\t Pertambahan (+)");
System.out.println("4.\t Pengurangan (-)");
int pilih = scan.nextInt();
switch(pilih)
{
case 1:
System.out.println("Hasil \t"+operasi.Kali(bil1, bil2));
break;
case 2:
System.out.println("Hasil \t"+operasi.Bagi(bil1, bil2));
break;
case 3:
System.out.println("Hasil \t"+operasi.Tambah(bil1, bil2));
break;
case 4:
System.out.println("Hasil \t"+operasi.Kurang(bil1, bil2));
break;
}
}
}
<file_sep>package latihan_uas;
public class Latihan_UAS {
public static void main(String[] args) {
//Soal No.3 A
System.out.println("A");
for (int a = 1; a <= 3; a++) {
for (int b = a; b <= 3; b++) {
System.out.print("*");}
System.out.println();}
//Soal No.3 B
System.out.println("B");
for (int a = 1; a<=3 ; a++) {
for (int b = a; b<=3; b++) {
System.out.print(" ");}
for (int c=1; c<=a; c++) {
System.out.print("*");}
System.out.println();}
//Soal No.3 C
System.out.println("C");
for (int a = 3; a >= 1; a--) {
for (int b = a; b <= 3; b++) {
System.out.print(" ");}
for(int c = 1; c <= a; c++){
System.out.print("*");}
System.out.println("");}
}
}
<file_sep>package latihan_uas;
import java.util.Scanner;
public class nomor_1 {
public static void main(String[] args) {
Scanner scan = new Scanner(System.in);
nomor_1_proses[] data = new nomor_1_proses[10];
String Jawab = "";
int i = 0;
do {
System.out.println("NIS Siswa");
String nis = scan.next();
System.out.println("Nama Siswa");
String nama_siswa = scan.next();
System.out.println("Nilai UAS");
double uas = scan.nextDouble();
System.out.println("Nilai UTS");
double uts = scan.nextDouble();
nomor_1_proses masuk = new nomor_1_proses();
masuk.setNama(nama_siswa);
masuk.setNis(nis);
masuk.setUas(uas);
masuk.setUts(uts);
masuk.setNilai_akhir(uts, uas);
data[i] = masuk;
System.out.println("Mau input lagi? ");
Jawab = scan.next();
i++;
} while (Jawab.equalsIgnoreCase("Y"));
System.out.println("====================================================");
System.out.println("No\t NIS\t Nama\t UTS\t UAS\t Nilai Akhir\t Index");
System.out.println("====================================================");
for(int j=0;j<i;j++){
nomor_1_proses s = data[j];
System.out.print((j+1)+"\t");
System.out.print(s.getNis()+"\t");
System.out.print(s.getNama()+"\t");
System.out.print(s.getUts()+"\t");
System.out.print(s.getUas()+"\t");
System.out.print(s.getNilai_akhir()+"\t\t");
if (s.getNilai_akhir() >= 80) {
System.out.println("A\t");
}
else if(s.getNilai_akhir() >= 77.5){
System.out.println("B\t");
}
else{
System.out.println("C\t");
}
}
}
}
| 6d8f932a406ab806f26b08d79dcfef19434eb3dc | [
"Java"
] | 4 | Java | hegaelanda/uas | 8b178507caa81a41bce23c65d5bbe0b5bb84f021 | 0c028041956df2feaeaf33549309f3e504f9d69a |
refs/heads/master | <file_sep>import sys, os, re, PIL
import numpy as np
from scipy.signal import savgol_filter as sf
import matplotlib.pyplot as plt
from chxanalys.chx_generic_functions import show_img, plot1D
from chxanalys.DataGonio import convert_Qmap
def get_base_all_filenames( inDir, base_filename_cut_length = -7 ):
'''YG Sep 26, 2017 @SMI
Get base filenames and their related all filenames
Input:
inDir, str, input data dir
base_filename_cut_length: to which length the base name is unique
Output:
dict: keys, base filename
vales, all realted filename
'''
from os import listdir
from os.path import isfile, join
tifs = np.array( [f for f in listdir(inDir) if isfile(join(inDir, f))] )
tifsc = list(tifs.copy())
utifs = np.sort( np.unique( np.array([ f[:base_filename_cut_length] for f in tifs] ) ) )[::-1]
files = {}
for uf in utifs:
files[uf] = []
i = 0
reName = []
for i in range(len(tifsc)):
if uf in tifsc[i]:
files[uf].append( inDir + tifsc[i] )
reName.append(tifsc[i])
for fn in reName:
tifsc.remove(fn)
return files
def check_stitch_two_imgs( img1, img2, overlap_width, badpixel_width =10 ):
"""YG Check the stitched two imgs
For SMI, overlap_width = 85 #for calibration data--Kevin gives one, 4 degree
overlap_width = 58 #for TWD data, 5 degree step
For SMI:
d0 = np.array( PIL.Image.open(infiles[0]).convert('I') ).T
d1 = np.array( PIL.Image.open(infiles[1]).convert('I') ).T
Example:
img1 = np.array( PIL.Image.open(infiles[0]).convert('I') ).T
img2 = np.array( PIL.Image.open(infiles[1]).convert('I') ).T
img12 = check_stitch_two_imgs( img1, img2, overlap_width=58, badpixel_width =10 )
show_img(img12[200:800, 120:250], logs = False, cmap = cmap_vge_hdr, vmin=0.8*img12.min(),
vmax= 1.2*img12.max(), aspect=1,image_name = 'Check_Overlap')
"""
w = overlap_width
ow = badpixel_width
M,N = img1.shape[0], img1.shape[1]
d0 =img1
d1 = img2
d01 = np.zeros( [ M, N*2 - w*( 2 -1) ] )
d01[:, :N ] = d0
i=1
a1,a2, b1, b2 = N*i - w*(i-1) - ow, N*(i+1) - w*i, w - ow, N
d01[:,a1:a2] = d1[:, b1:b2 ]
return d01
def Correct_Overlap_Images_Intensities( infiles, window_length=101, polyorder=5,
overlap_width=58, badpixel_width =10 ):
"""YG Correct WAXS Images intensities by using overlap area intensity
Image intensity keep same for the first image
Other image intensity is scaled by a pixel-width intensity array, which is averaged in the overlap area and then smoothed by
scipy.signal import savgol_filter with parameters as window_length=101, polyorder=5,
from scipy.signal import savgol_filter as sf
Return: data: array, stitched image with corrected intensity
dataM: dict, each value is the image with correted intensity
scale: scale for each image, the first scale=1 by defination
scale_smooth: smoothed scale
Exampe:
data, dataM, scale,scale_smooth = Correct_Overlap_Images_Intensities( infiles, window_length=101, polyorder=5,
overlap_width=58, badpixel_width =10 )
show_img(data, logs = True, vmin=0.8* data.min(), vmax= 1.2*data.max()
cmap = cmap_vge_hdr, aspect=1, image_name = 'Merged_Sca_Img')
fig = plt.figure()# figsize=[2,8])
for i in range(len(infiles)):
#print(i)
ax = fig.add_subplot(1,8, i+1)
d = process.load( infiles[i] )
show_img( dataM[i], logs = True, show_colorbar= False,show_ticks =False,
ax= [fig, ax], image_name= '%02d'%(i+1), cmap = cmap_vge_hdr, vmin=100, vmax=2e3,
aspect=1, save=True, path=ResDir)
"""
w = overlap_width
ow = badpixel_width
Nf = len(infiles)
dataM = {}
for i in range(len(infiles)):
d = np.array( PIL.Image.open(infiles[i]).convert('I') ).T/1.0
if i ==0:
M,N = d.shape[0], d.shape[1]
data = np.zeros( [ M, N*Nf - w*( Nf -1) ] )
data[:, :N ] = d
scale=np.zeros( [len(infiles), M] )
scale_smooth=np.zeros( [len(infiles), M] )
overlap_int = np.zeros( [ 2* len(infiles) - 1, M] )
overlap_int[0] = np.average( d[:, N - w: N-ow ], axis=1)
scale[0] = 1
scale_smooth[0] = 1
dataM[0] = d
else:
a1,a2, b1, b2 = N*i - w*(i-1) - ow, N*(i+1) - w*i, w - ow, N
overlap_int[2*i-1] = np.average( d[:, 0: w - ow ], axis=1 )
overlap_int[2*i] = np.average( d[:, N - w: N-ow ] , axis=1 )
scale[i] = overlap_int[2*i-2]/overlap_int[2*i-1] *scale[i-1]
scale_smooth[i] = sf( scale[i], window_length=window_length, polyorder= polyorder, deriv=0, delta=1.0, axis=-1,
mode='mirror', cval=0.0)
data[:,a1:a2] = d[:, b1:b2 ] * np.repeat(scale_smooth[i], b2-b1, axis=0).reshape([M,b2-b1])
dataM[i] = np.zeros_like( dataM[i-1])
dataM[i][:,0:w-ow] =dataM[i-1][:,N-w:N-ow]
dataM[i][:,w-ow:] = data[:,a1:a2]
return data, dataM, scale,scale_smooth
def check_overlap_scaling_factor( scale,scale_smooth, i=1, filename=None, save=False ):
"""check_overlap_scaling_factor( scale,scale_smooth, i=1 )"""
fig,ax=plt.subplots()
plot1D( scale[i], m='o', c='k',ax=ax, title='Scale_averaged_line_intensity_%s'%i,
ls='', legend = 'Data')
plot1D( scale_smooth[i], ax=ax, title='Scale_averaged_line_intensity_%s'%i, m='',c='r',
ls='-', legend='Smoothed')
if save:
fig.savefig( filename)
def stitch_WAXS_in_Qspace( dataM, phis, calibration,
dx= 0, dy = 22, dz = 0, dq=0.015, mask = None ):
"""YG Octo 11, 2017 stitch waxs scattering images in qspace
dataM: the data (with corrected intensity), dict format (todolist, make array also avialable)
phis: for SMI, the rotation angle around z-aixs
For SMI
dx= 0 #in pixel unit
dy = 22 #in pixel unit
dz = 0
calibration: class, for calibration
Return: Intensity_map, qxs, qzs
Example:
phis = np.array( [get_phi(infile,
phi_offset=4.649, phi_start=1.0, phi_spacing=5.0,) for infile in infiles] ) # For TWD data
calibration = CalibrationGonio(wavelength_A=0.619920987) # 20.0 keV
#calibration.set_image_size( data.shape[1], data.shape[0] )
calibration.set_image_size(195, height=1475) # Pilatus300kW vertical
calibration.set_pixel_size(pixel_size_um=172.0)
calibration.set_beam_position(97.0, 1314.0)
calibration.set_distance(0.275)
Intensity_map, qxs, qzs = stitch_WAXS_in_Qspace( dataM, phis, calibration)
#Get center of the qmap
bx,by = np.argmin( np.abs(qxs) ), np.argmin( np.abs(qzs) )
print( bx, by )
"""
q_range = get_qmap_range( calibration, phis[0], phis[-1] )
if q_range[0] >0:
q_range[0]= 0
if q_range[2]>0:
q_range[2]= 0
qx_min=q_range[0]
qx_max=q_range[1]
qxs = np.arange(q_range[0], q_range[1], dq)
qzs = np.arange(q_range[2], q_range[3], dq)
QXs, QZs = np.meshgrid(qxs, qzs)
num_qx=len(qxs)
qz_min=q_range[2]
qz_max=q_range[3]
num_qz=len(qzs)
phi = phis[0]
Intensity_map = np.zeros( (len(qzs), len(qxs)) )
count_map = np.zeros( (len(qzs), len(qxs)) )
#Intensity_mapN = np.zeros( (8, len(qzs), len(qxs)) )
for i in range( len( phis ) ):
dM = np.rot90( dataM[i].T )
D = dM.ravel()
phi = phis[i]
calibration.set_angles(det_phi_g=phi, det_theta_g=0.,
offset_x = dx, offset_y = dy, offset_z= dz)
calibration.clear_maps()
QZ = calibration.qz_map().ravel() #[pixel_list]
QX = calibration.qx_map().ravel() #[pixel_list]
bins = [num_qz, num_qx]
rangeq = [ [qz_min,qz_max], [qx_min,qx_max] ]
#Nov 7,2017 using new func to qmap
remesh_data, zbins, xbins = convert_Qmap(dM, QZ, QX, bins=bins, range=rangeq, mask=mask)
# Normalize by the binning
num_per_bin, zbins, xbins = convert_Qmap(np.ones_like(dM), QZ, QX, bins=bins, range=rangeq, mask=mask)
#remesh_data, zbins, xbins = np.histogram2d(QZ, QX, bins=bins, range=rangeq, normed=False, weights=D)
# Normalize by the binning
#num_per_bin, zbins, xbins = np.histogram2d(QZ, QX, bins=bins, range=rangeq, normed=False, weights=None)
Intensity_map += remesh_data
count_map += num_per_bin
#Intensity_mapN[i] = np.nan_to_num( remesh_data/num_per_bin )
Intensity_map = np.nan_to_num( Intensity_map/count_map )
return Intensity_map, qxs, qzs
def plot_qmap_in_folder( inDir ):
'''YG. Sep 27@SMI
Plot Qmap data from inDir, which contains qmap data and extent data
'''
from chxanalys.chx_generic_functions import show_img
from chxanalys.chx_libs import cmap_vge_hdr, plt
import pickle as cpl
fp = get_base_all_filenames(inDir,base_filename_cut_length=-10)
print('There will %s samples and totally %s files to be analyzed.'%( len(fp.keys()), len( np.concatenate( list(fp.values()) ))))
for k in list(fp.keys()):
for s in fp[k]:
if 'npy' in s:
d = np.load(s) #* qmask
if 'pkl' in s:
xs, zs = cpl.load( open( s, 'rb'))
show_img( d, logs= False, show_colorbar= True, show_ticks= True,
xlabel='$q_x \, (\AA^{-1})$', ylabel='$q_z \, (\AA^{-1})$',
cmap=cmap_vge_hdr, #vmin= np.min(d), vmax = np.max(d),
aspect=1, vmin= -1, vmax = np.max(d) * 0.5,
extent=[xs[0], xs[-1], zs[0],zs[-1]], image_name = k[:-1],
path= inDir, save=True)
plt.close('all')
def get_qmap_range( calibration, phi_min, phi_max ):
'''YG Sep 27@SMI
Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap
(only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination)
based on calibration on Sep 22, offset_x= 0, offset_y= 22
Input:
calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio
phi_min: min of phis
phi_max: max of phis
Output:
qrange: np.array([ qx_start, qx_end, qz_start, qz_end ])
'''
calibration.set_angles(det_phi_g= phi_max, det_theta_g=0., offset_x= 0, offset_y= 22 )
calibration._generate_qxyz_maps()
qx_end = np.max(calibration.qx_map_data)
qz_start = np.min(calibration.qz_map_data)
qz_end = np.max(calibration.qz_map_data)
calibration.set_angles(det_phi_g= phi_min, det_theta_g=0., offset_x= 0, offset_y= 22 )
calibration._generate_qxyz_maps()
qx_start = np.min(calibration.qx_map_data)
return np.array([ qx_start, qx_end, qz_start, qz_end ])
def get_phi(filename, phi_offset= 0, phi_start= 4.5, phi_spacing= 4.0, polarity=-1,ext='_WAXS.tif'):
pattern_re='^.+\/?([a-zA-Z0-9_]+_)(\d\d\d\d\d\d)(\%s)$'%ext
#print( pattern_re )
#pattern_re='^.+\/?([a-zA-Z0-9_]+_)(\d\d\d)(\.tif)$'
phi_re = re.compile(pattern_re)
phi_offset = phi_offset
m = phi_re.match(filename)
if m:
idx = float(m.groups()[1])
#print(idx)
#phi_c = polarity*( phi_offset + phi_start + (idx-1)*phi_spacing )
phi_c = polarity*( phi_offset + phi_start + (idx-0)*phi_spacing )
else:
print("ERROR: File {} doesn't match phi_re".format(filename))
phi_c = 0.0
return phi_c
############For CHX beamline
def get_qmap_qxyz_range( calibration, det_theta_g_lim, det_phi_g_lim,
sam_phi_lim, sam_theta_lim, sam_chi_lim,
offset_x= 0, offset_y= 0, offset_z= 0
):
'''YG Nov 8, 2017@CHX
Get q_range, [ qx_start, qx_end, qz_start, qz_end ] for SMI WAXS qmap
(only rotate around z-axis, so det_theta_g=0.,actually being the y-axis for beamline conventional defination)
based on calibration on Sep 22, offset_x= 0, offset_y= 22
Input:
calibration: class, See SciAnalysis.XSAnalysis.DataGonio.CalibrationGonio
phi_min: min of phis
phi_max: max of phis
Output:
qrange: np.array([ qx_start, qx_end, qz_start, qz_end ])
'''
i=0
calibration.set_angles( det_theta_g= det_theta_g_lim[i], det_phi_g= det_phi_g_lim[i],
sam_phi= sam_phi_lim[i], sam_theta = sam_theta_lim[i], sam_chi= sam_chi_lim[i],
offset_x= offset_x, offset_y= offset_y, offset_z= offset_z
)
calibration._generate_qxyz_maps()
qx_start = np.min(calibration.qx_map_data)
qy_start = np.min(calibration.qy_map_data)
qz_start = np.min(calibration.qz_map_data)
i= 1
calibration.set_angles( det_theta_g= det_theta_g_lim[i], det_phi_g= det_phi_g_lim[i],
sam_phi= sam_phi_lim[i], sam_theta = sam_theta_lim[i], sam_chi= sam_chi_lim[i],
offset_x= offset_x, offset_y= offset_y, offset_z= offset_z
)
calibration._generate_qxyz_maps()
qx_end = np.min(calibration.qx_map_data)
qy_end = np.min(calibration.qy_map_data)
qz_end = np.min(calibration.qz_map_data)
return np.array([ qx_start, qx_end]),np.array([ qy_start, qy_end]),np.array([ qz_start, qz_end ])
def stitch_WAXS_in_Qspace_CHX( data, angle_dict, calibration, vary_angle='phi',
qxlim=None, qylim=None, qzlim=None,
det_theta_g= 0, det_phi_g= 0.,
sam_phi= 0, sam_theta= 0, sam_chi=0,
dx= 0, dy = 0, dz = 0, dq=0.0008 ):
"""YG Octo 11, 2017 stitch waxs scattering images in qspace
dataM: the data (with corrected intensity), dict format (todolist, make array also avialable)
phis: for SMI, the rotation angle around z-aixs
For SMI
dx= 0 #in pixel unit
dy = 22 #in pixel unit
dz = 0
calibration: class, for calibration
Return: Intensity_map, qxs, qzs
Example:
phis = np.array( [get_phi(infile,
phi_offset=4.649, phi_start=1.0, phi_spacing=5.0,) for infile in infiles] ) # For TWD data
calibration = CalibrationGonio(wavelength_A=0.619920987) # 20.0 keV
#calibration.set_image_size( data.shape[1], data.shape[0] )
calibration.set_image_size(195, height=1475) # Pilatus300kW vertical
calibration.set_pixel_size(pixel_size_um=172.0)
calibration.set_beam_position(97.0, 1314.0)
calibration.set_distance(0.275)
Intensity_map, qxs, qzs = stitch_WAXS_in_Qspace( dataM, phis, calibration)
#Get center of the qmap
bx,by = np.argmin( np.abs(qxs) ), np.argmin( np.abs(qzs) )
print( bx, by )
"""
qx_min, qx_max = qxlim[0], qxlim[1]
qy_min, qy_max = qylim[0], qylim[1]
qz_min, qz_max = qzlim[0], qzlim[1]
qxs = np.arange(qxlim[0], qxlim[1], dq)
qys = np.arange(qylim[0], qylim[1], dq)
qzs = np.arange(qzlim[0], qzlim[1], dq)
QXs, QYs = np.meshgrid(qxs, qys)
QZs, QYs = np.meshgrid(qzs, qys)
QZs, QXs = np.meshgrid(qzs, qxs)
num_qx=len(qxs)
num_qy=len(qys)
num_qz=len(qzs)
Intensity_map_XY = np.zeros( (len(qxs), len(qys)) )
count_map_XY = np.zeros( (len(qxs), len(qys)) )
Intensity_map_ZY = np.zeros( (len(qzs), len(qys)) )
count_map_ZY = np.zeros( (len(qzs), len(qys)) )
Intensity_map_ZX = np.zeros( (len(qzs), len(qxs)) )
count_map_ZX = np.zeros( (len(qzs), len(qxs)) )
N = len(data)
N = len( angle_dict[vary_angle] )
print(N)
#Intensity_mapN = np.zeros( (8, len(qzs), len(qxs)) )
for i in range( N ):
dM = data[i]
D = dM.ravel()
sam_phi = angle_dict[vary_angle][i]
print(i, sam_phi )
calibration.set_angles(
det_theta_g= det_theta_g, det_phi_g= det_phi_g,
sam_phi= sam_phi, sam_theta = sam_theta, sam_chi= sam_chi,
offset_x = dx, offset_y = dy, offset_z= dz)
calibration.clear_maps()
calibration._generate_qxyz_maps()
QZ = calibration.qz_map_lab_data.ravel() #[pixel_list]
QX = calibration.qx_map_lab_data.ravel() #[pixel_list]
QY = calibration.qy_map_lab_data.ravel() #[pixel_list]
bins_xy = [num_qx, num_qy]
bins_zy = [num_qz, num_qy]
bins_zx = [num_qz, num_qx]
rangeq_xy = [ [qx_min,qx_max], [qy_min,qy_max] ]
rangeq_zy = [ [qz_min,qz_max], [qy_min,qy_max] ]
rangeq_zx = [ [qz_min,qz_max], [qx_min,qx_max] ]
print( rangeq_xy, rangeq_zy, rangeq_zx )
remesh_dataxy, xbins, ybins = np.histogram2d(QX, QY, bins=bins_xy, range=rangeq_xy, normed=False, weights=D)
# Normalize by the binning
num_per_binxy, xbins, ybins = np.histogram2d(QX, QY, bins=bins_xy, range=rangeq_xy, normed=False, weights=None)
Intensity_map_XY += remesh_dataxy
count_map_XY += num_per_binxy
remesh_datazy, zbins, ybins = np.histogram2d(QZ, QY, bins=bins_zy, range=rangeq_zy, normed=False, weights=D)
# Normalize by the binning
num_per_binzy, zbins, ybins = np.histogram2d(QZ, QY, bins=bins_zy, range=rangeq_zy, normed=False, weights=None)
Intensity_map_ZY += remesh_datazy
count_map_ZY += num_per_binzy
remesh_datazx, zbins, xbins = np.histogram2d(QZ, QX, bins=bins_zx, range=rangeq_zx, normed=False, weights=D)
# Normalize by the binning
num_per_binzx, zbins, xbins = np.histogram2d(QZ, QX, bins=bins_zx, range=rangeq_zx, normed=False, weights=None)
Intensity_map_ZX += remesh_datazx
count_map_ZX += num_per_binzx
#Intensity_mapN[i] = np.nan_to_num( remesh_data/num_per_bin )
Intensity_map_XY = np.nan_to_num( Intensity_map_XY/count_map_XY )
Intensity_map_ZY = np.nan_to_num( Intensity_map_ZY/count_map_ZY )
Intensity_map_ZX = np.nan_to_num( Intensity_map_ZX/count_map_ZX )
return Intensity_map_XY,Intensity_map_ZY,Intensity_map_ZX, qxs, qys, qzs
<file_sep>"""
Temporary Function developed during Aug2-Aug5
"""
def create_crop_images(imgs, w, cen, mask):
    """Return a lazy pims pipeline that masks each frame and crops a w x w
    window centered at ``cen`` (given as (row, col), truncated to int)."""
    row_c, col_c = np.int_(cen)
    half = w // 2

    def _mask_and_crop(frame):
        # Apply the mask first, then slice out the square window around the center.
        return (mask * frame)[row_c - half:row_c + half, col_c - half:col_c + half]

    return pims.pipeline(_mask_and_crop)(imgs)
def get_reconstruction_pixel_size( wavelength, num_pixel, dis_det_sam,pixel_size=75.0 ):
    '''Reconstruction pixel size for a ptychography/CDI geometry.

    Parameters
    ----------
    wavelength : float
        X-ray wavelength in Angstrom.
    num_pixel : int
        Number of detector pixels used.
    dis_det_sam : float
        Detector-to-sample distance in meters.
    pixel_size : float
        Detector pixel size in micrometers (default 75.0, Eiger detector).

    Returns
    -------
    float
        Reconstruction pixel size in nanometers.
    '''
    # Numerical aperture subtended by half the used detector width.
    numerical_aperture = (num_pixel * pixel_size * 1e-6) / (2.0 * dis_det_sam)
    # lambda / (2 NA) is in Angstrom; divide by 10 to convert to nm.
    return wavelength / (2.0 * numerical_aperture) / 10.0
def do_reconstruction( uid, data_dir0, mask, probe = None, num_iterations =5 ):
    '''Run an ePIE ptychography reconstruction for one scan.

    Loads the detector frames for `uid` from databroker, crops them around the
    beam center, derives aperture positions from the scan table, runs
    PtychographyReconstruction, and saves probe + reconstruction to disk.

    Parameters
    ----------
    uid : scan identifier used for databroker lookup and output naming.
    data_dir0 : base output directory; results go to data_dir0/<uid>/.
    mask : detector mask array; multiplied by the detector pixel mask in place.
    probe : optional initial probe guess; if None a circular probe is built.
    num_iterations : number of ePIE iterations.

    Returns
    -------
    (reconstruction, aperture) from the PtychographyReconstruction object.

    NOTE(review): relies on module-level globals `effective_det_size`,
    `rec_pix_size` and `probe_radius`, and on `load_data`, `get_table`, `db`,
    `makeCircle`, `create_crop_images`, `PtychographyReconstruction` being in
    scope -- confirm these are defined before calling.
    '''
    #load data
    data_dir = os.path.join(data_dir0, '%s/'%uid)
    os.makedirs(data_dir, exist_ok=True)
    print('Results from this analysis will be stashed in the directory %s' % data_dir)
    uidstr = 'uid=%s'%uid
    #md = get_meta_data( uid )
    imgs = load_data( uid, 'eiger4m_single_image', reverse= True )
    md = imgs.md
    #md.update( imgs.md );Nimg = len(imgs);
    #imgsa = apply_mask( imgs, mask )
    # Beam center: row index is flipped because the images are loaded reversed.
    inc_x0 = imgs[0].shape[0] - md['beam_center_y']
    inc_y0= md['beam_center_x']
    # Combine user mask with the detector's own bad-pixel mask.
    pixel_mask = 1- np.int_( np.array( imgs.md['pixel_mask'], dtype= bool) )
    mask *= pixel_mask
    #create diff_patterns
    imgsc = create_crop_images(imgs, w=effective_det_size, cen=[inc_x0, inc_y0], mask=mask)
    diff_patterns = imgsc # np.array( )
    # Scan pattern and get positions
    tab = get_table(db[uid])
    posx = tab['diff_xh']
    posy = tab['diff_yh']
    positions0 = np.vstack( [posx, posy ]).T
    # Shift positions to start at the origin, convert meters -> reconstruction
    # pixels, then re-center on the middle scan point.
    positions_ = positions0 - np.array( [positions0[:,0].min(),positions0[:,1].min() ] )
    positions = np.int_( positions_ *10**6/ rec_pix_size )
    positions -= positions[positions.shape[0]//2]
    # Get Probe
    if probe is None:
        probe = makeCircle(radius = probe_radius, img_size = effective_det_size,
                       cenx = effective_det_size//2 , ceny = effective_det_size//2)
    ## Create ptychography reconstruction object
    ptyc_rec_obj = PtychographyReconstruction(
        diffraction_pattern_stack = diff_patterns,
        aperture_guess = probe,
        initial_object = None,
        num_iterations = num_iterations,
        aperture_positions = positions,
        reconstructed_pixel_size = 1,
        show_progress = True,
        #diffraction_pattern_type='images'
        )
    # reconstruct ptychography
    reconstruction = ptyc_rec_obj.reconstruct()
    np.save( data_dir + 'uid=%s_probe'%uid, ptyc_rec_obj.aperture)
    np.save( data_dir + 'uid=%s_reconstruction'%uid, reconstruction)
    return reconstruction, ptyc_rec_obj.aperture
def bin_1D(x, y, nx=None, min_x=None, max_x=None):
    """
    Bin the values in y based on their x-coordinates
    Parameters
    ----------
    x : array
        position
    y : array
        intensity
    nx : integer, optional
        number of bins; defaults to int(max_x - min_x)
    min_x : float, optional
        Left edge of first bin; defaults to min(x)
    max_x : float, optional
        Right edge of last bin; defaults to max(x)
    Returns
    -------
    edges : array
        bin edges, length nx + 1
    val : array
        sum of y values falling in each bin, length nx
    count : array
        number of samples in each bin, length nx
    """
    # Fill in any unspecified binning parameters from the data itself.
    if min_x is None:
        min_x = np.min(x)
    if max_x is None:
        max_x = np.max(x)
    if nx is None:
        nx = int(max_x - min_x)
    edges = np.linspace(min_x, max_x, num=nx + 1, endpoint=True)
    # Weighted histogram gives the per-bin sum of y ...
    val, _ = np.histogram(x, bins=edges, weights=y)
    # ... and an unweighted one gives the per-bin sample count.
    count, _ = np.histogram(x, bins=edges)
    return edges, val, count
def circular_average(image, calibrated_center, threshold=0, nx=None,
                     pixel_size=(1, 1), min_x=None, max_x=None, mask=None):
    """Azimuthally averaged radial profile (radial integration) of an image.

    Parameters
    ----------
    image : array
        Image to average as a function of radius.
    calibrated_center : tuple
        Image center in pixel units, ordered (row, col).
    threshold : int, optional
        Bins with this many counts or fewer are dropped (default 0).
    nx : int, optional
        Number of radial bins.
    pixel_size : tuple, optional
        Real-space pixel size, ordered (height, width); default (1, 1).
    min_x, max_x : float, optional
        Left/right edges of the radial binning, in pixels.
    mask : array, optional
        Boolean-convertible mask; only True pixels contribute.

    Returns
    -------
    bin_centers : array
        Radial bin centers, shape (nx,).
    ring_averages : array
        Mean intensity in each radial bin, shape (nx,).
    """
    # Real-space radial distance of every pixel from the calibrated center.
    r_grid = utils.radial_grid(calibrated_center, image.shape, pixel_size)
    if mask is None:
        radii = np.ravel(r_grid)
        values = np.ravel(image)
    else:
        keep = np.array(mask, dtype=bool)
        radii = r_grid[keep]
        values = np.array(image)[keep]
    # Convert the real-space radii back to (average) pixel units before binning.
    radii_px = radii / (np.sqrt(pixel_size[1] * pixel_size[0]))
    bin_edges, sums, counts = bin_1D(radii_px, values, nx=nx,
                                     min_x=min_x, max_x=max_x)
    populated = counts > threshold
    ring_averages = sums[populated] / counts[populated]
    bin_centers = utils.bin_edges_to_centers(bin_edges)[populated]
    return bin_centers, ring_averages
def seek_bad_pixels(FD, uid, badlist, filename='/XF11ID/analysis/Commissioning/badpixel.csv' ):
    '''Locate the hottest pixel in each suspect frame and record it as a bad pixel.

    FD: compressed-file handler exposing .beg/.end and .rdframe(n)
    uid: scan identifier used as the key in the saved dictionary
    badlist: iterable of frame numbers suspected to contain a bad pixel
    filename: CSV file the {uid: [[rows, cols], ...]} dictionary is appended to

    Returns the {uid: coords} dictionary. A coordinate is recorded only when
    the frame maximum is attained at exactly one pixel.
    '''
    try:
        mydict = read_dict_csv( filename )
    except:   # bare except: any read failure (e.g. missing file) falls back to empty dict
        mydict = {}
    bp = {}
    bp[uid] = [] #np.zeros( [len( badlist),2] )
    #i=0
    for n in badlist:
        # Only consider frames inside the compressed file's range.
        if n>= FD.beg and n<=FD.end:
            f = FD.rdframe(n)
            w = np.where( f == f.max())  # pixel(s) at the frame maximum
            if len(w[0])==1:  # only trust an unambiguous single hottest pixel
                bp[uid].append( [ w[0], w[1] ] )
                #i +=1
    # Append to the CSV only if this uid has not been recorded before.
    if uid not in list( mydict.keys() ):
        save_dict_csv( bp, filename, mode='a')
        print( 'save bad pixels to %s'%filename)
    return bp <file_sep>from __future__ import absolute_import, division, print_function
from tqdm import tqdm
import struct
import matplotlib.pyplot as plt
from chxanalys.chx_libs import (np, roi, time, datetime, os, getpass, db, get_images,LogNorm,Figure, RUN_GUI)
#from chxanalys.chx_generic_functions import (get_circular_average)
#from chxanalys.XPCS_SAXS import (get_circular_average)
from chxanalys.chx_libs import ( colors, markers, colors_, markers_)
import os
from chxanalys.chx_generic_functions import ( save_arrays )
from skbeam.core.utils import multi_tau_lags
from skbeam.core.roi import extract_label_indices
from collections import namedtuple
import logging
logger = logging.getLogger(__name__)
from chxanalys.chx_compress import (compress_eigerdata, read_compressed_eigerdata,init_compress_eigerdata,
Multifile,pass_FD,get_avg_imgc,mean_intensityc, get_each_frame_intensityc)
from modest_image import ModestImage, imshow
#from chxanalys.chx_compress import *
def get_time_edge_avg_img(FD, frame_edge,show_progress =True):
    '''YG Dev Nov 14, 2017@CHX
    Average the frames of a compressed file over each [begin, end) window.

    Parameters
    ----------
    FD: Multifile class
        compressed file
    frame_edge: np.array of (begin, end) pairs, e.g. produced by
        create_time_slice( Nimg, slice_num=3, slice_width=1, edges=None ),
        such as np.array([[ 5, 6], [2502, 2503], [4999, 5000]])
    show_progress: forwarded to get_avg_imgc

    Return:
        object array of length len(frame_edge); element k is the image
        averaged over frame_edge[k].
    '''
    n_windows = len(frame_edge)
    avg_imgs = np.zeros(n_windows, dtype=object)
    for k, (beg, end) in enumerate(frame_edge):
        avg_imgs[k] = get_avg_imgc(FD, beg=beg, end=end, sampling=1,
                                   plot_=False, show_progress=show_progress)
    return avg_imgs
def plot_imgs( imgs, image_name=None, *argv, **kwargs):
    # Placeholder / stub: apparently intended to tile a list of images into a
    # roughly square grid of subplots, but never implemented (returns None).
    #NOT WORKing NOW....
    N = len(imgs)
    sx = np.ceil( np.sqrt(N) )  # intended grid dimension; unused in this stub
    pass
def cal_waterfallc(FD, labeled_array, qindex=1,
                   bin_waterfall = False, waterfall_roi_size = None, save=False, *argv,**kwargs):
    """Compute the waterfall (per-pixel intensity vs. frame) for one ROI
    of the compressed file (FD)
    Parameters
    ----------
    FD: Multifile class
        compressed file
    labeled_array : array
        labeled array; 0 is background.
        Each ROI is represented by a nonzero integer. It is not required that
        the ROI labels are contiguous
    qindex : int, qindex=1, give the first ring in SAXS geometry. NOTE: qindex=0 is non-photon pixels.
        The ROI's to use.
    bin_waterfall: if True, will bin the waterfall along y-axis
    waterfall_roi_size: the size of waterfall roi, (x-size, y-size), if bin, will bin along y
    save: save the waterfall to kwargs['path'] with prefix kwargs['uid']
    Returns
    -------
    waterfall : array
        shape (number of frames, number of pixels in the selected ROI)
    """
    sampling =1
    # Binary mask of just the requested ROI label.
    labeled_array_ = np.array( labeled_array == qindex, dtype= np.int64)
    qind, pixelist = roi.extract_label_indices( labeled_array_ )
    if labeled_array_.shape != ( FD.md['ncols'],FD.md['nrows']):
        raise ValueError(
            " `image` shape (%d, %d) in FD is not equal to the labeled_array shape (%d, %d)" %( FD.md['ncols'],FD.md['nrows'], labeled_array_.shape[0], labeled_array_.shape[1]) )
    # pre-allocate an array for performance
    # might be able to use list comprehension to make this faster
    watf = np.zeros( [ int( ( FD.end - FD.beg)/sampling ), len(qind)] )
    #fra_pix = np.zeros_like( pixelist, dtype=np.float64)
    # timg maps a flat detector-pixel index to (1-based) position within the
    # ROI pixel list; zero means "not in this ROI".
    timg = np.zeros( FD.md['ncols'] * FD.md['nrows'] , dtype=np.int32 )
    timg[pixelist] = np.arange( 1, len(pixelist) + 1 )
    #maxqind = max(qind)
    norm = np.bincount( qind )[1:]  # per-label pixel counts; currently unused
    n= 0
    #for i in tqdm(range( FD.beg , FD.end )):
    for i in tqdm(range( FD.beg, FD.end, sampling ), desc= 'Get waterfall for q index=%s'%qindex ):
        # rdrawframe returns the sparse (pixel index, value) pairs of frame i.
        (p,v) = FD.rdrawframe(i)
        w = np.where( timg[p] )[0]          # which recorded pixels fall in the ROI
        pxlist = timg[ p[w] ] -1            # their 0-based ROI-local positions
        watf[n][pxlist] = v[w]
        n +=1
    if bin_waterfall:
        # Sum waterfall_roi_size[1] adjacent column-chunks of width
        # waterfall_roi_size[0], then average, collapsing the ROI's y extent.
        watf_ = watf.copy()
        watf = np.zeros( [ watf_.shape[0], waterfall_roi_size[0] ])
        for i in range(waterfall_roi_size[1] ):
            watf += watf_[:, waterfall_roi_size[0]*i: waterfall_roi_size[0]*(i+1) ]
        watf /= waterfall_roi_size[0]
    if save:
        path = kwargs['path']
        uid = kwargs['uid']
        np.save( path + '%s_waterfall'%uid, watf)
    return watf
def plot_waterfallc(wat, qindex=1, aspect = None,vmax=None, vmin=None, interpolation = 'none',
                    save=False, return_fig=False, cmap='viridis',*argv,**kwargs):
    '''Plot a waterfall (pixel vs. frame) array produced by cal_waterfallc.

    Parameters
    ----------
    wat: 2-D waterfall array, shape (n_frames, n_pixels)
    qindex: ROI label; used only in the plot title
    aspect: plot aspect ratio; defaults to n_frames / n_pixels
    vmax, vmin: color limits; default to wat.max() / wat.min()
    interpolation: image interpolation mode
    save: when True, save the figure as <path>/<uid>_waterfall.png
    return_fig: when True, return (fig, ax, im)
    Optional kwargs: uid (title/file prefix), beg (frame-axis offset), path.
    '''
    if RUN_GUI:
        fig = Figure(figsize=(8,6))
        ax = fig.add_subplot(111)
    else:
        fig, ax = plt.subplots(figsize=(8,6))
    if 'uid' in kwargs:
        uid = kwargs['uid']
    else:
        uid = 'uid'
    ax.set_ylabel('Pixel')
    ax.set_xlabel('Frame')
    ax.set_title('%s_Waterfall_Plot_@qind=%s'%(uid, qindex) )
    if 'beg' in kwargs:
        beg = kwargs['beg']
    else:
        beg=0
    extent = [ beg, len(wat)+beg, 0, len( wat.T) ]
    if vmax is None:
        vmax=wat.max()
    if vmin is None:
        vmin = wat.min()
    if aspect is None:
        aspect = wat.shape[0]/wat.shape[1]
    # BUGFIX: `vmin` was accepted and defaulted but never forwarded, so the
    # lower color limit had no effect; pass it through to imshow.
    im = imshow(ax, wat.T, cmap=cmap, vmin=vmin, vmax=vmax, extent= extent, interpolation = interpolation )
    #im = ax.imshow(wat.T, cmap='viridis', vmax=vmax,extent= extent,interpolation = interpolation )
    fig.colorbar( im )
    ax.set_aspect( aspect)
    if save:
        path = kwargs['path']
        fp = path + "%s_waterfall"%uid + '.png'
        plt.savefig( fp, dpi=fig.dpi)
    #plt.show()
    if return_fig:
        return fig,ax, im
def get_waterfallc(FD, labeled_array, qindex=1, aspect = 1.0,
                   vmax=None, save=False, *argv,**kwargs):
    '''Compute and display the waterfall (pixel intensity vs. frame) for one ROI.

    FD: compressed-file handler (Multifile)
    labeled_array: ROI mask; pixels where labeled_array == qindex are used
    qindex: ROI label to extract
    aspect: aspect ratio of the plot
    vmax: upper color limit of the image
    save: when True, save the figure to kwargs['path'] (kwargs['uid'] optional)
    Returns the waterfall array.
    '''
    wat = cal_waterfallc(FD, labeled_array, qindex=qindex)
    fig, ax = plt.subplots(figsize=(8,6))
    ax.set_ylabel('Pixel')
    ax.set_xlabel('Frame')
    ax.set_title('Waterfall_Plot_@qind=%s'%qindex)
    img = ax.imshow(wat.T, cmap='viridis', vmax=vmax)
    fig.colorbar(img)
    ax.set_aspect(aspect)
    if save:
        out_dir = kwargs['path']
        uid = kwargs.get('uid', 'uid')
        fp = out_dir + "uid=%s--Waterfall-"%uid + '.png'
        fig.savefig(fp, dpi=fig.dpi)
    return wat
def cal_each_ring_mean_intensityc( FD, ring_mask, sampling=1, timeperframe=None, multi_cor= False,
                                 *argv,**kwargs):
    """
    Get the time-dependent mean intensity of each labeled ROI.

    Parameters
    ----------
    FD : Multifile
        Compressed-file handler; frames FD.beg .. FD.end are analyzed.
    ring_mask : np.ndarray
        Labeled ROI mask (0 = background, nonzero integers label the ROIs).
    sampling : int
        Frame sampling stride forwarded to mean_intensityc.
    timeperframe : float or None
        If given, frame indices are converted to time in seconds.
    multi_cor : bool
        Forwarded to mean_intensityc -- presumably a parallel-computation
        flag; confirm against mean_intensityc.

    Returns
    -------
    times : np.ndarray
        Frame index (or seconds when timeperframe is given) per sample.
    mean_int_sets : np.ndarray
        Mean ROI intensities, one column per ROI.
    """
    mean_int_sets, index_list = mean_intensityc(FD, ring_mask, sampling, index=None, multi_cor=multi_cor)
    if timeperframe is None:
        times = np.arange( FD.end - FD.beg ) + FD.beg # get the time for each frame
    else:
        times = ( FD.beg + np.arange( FD.end - FD.beg ) )*timeperframe
    # NOTE: removed a dead `num_rings` computation that had no effect on the result.
    return times, mean_int_sets
def plot_each_ring_mean_intensityc( times, mean_int_sets, xlabel= 'Frame',save=False, *argv,**kwargs):
    """
    Plot the time-dependent mean intensity of each ROI.

    Parameters
    ----------
    times : np.ndarray
        x-axis values (frame indices or seconds), length n_frames.
    mean_int_sets : np.ndarray
        Mean ROI intensities, shape (n_frames, n_rois).
    xlabel : str
        x-axis label.
    save : bool
        When True, save the figure and a data file to kwargs['path']
        (kwargs['uid'] used as the file prefix when present).
    """
    num_rings = mean_int_sets.shape[1]
    fig, ax = plt.subplots(figsize=(8, 8))
    uid = 'uid'
    if 'uid' in kwargs.keys():
        uid = kwargs['uid']
    ax.set_title("%s--Mean intensity of each ROI"%uid)
    for i in range(num_rings):
        # BUGFIX: cycle through the marker/color palettes so that more ROIs
        # than palette entries no longer raises an IndexError.
        ax.plot( times, mean_int_sets[:,i], label="ROI "+str(i+1),
                 marker = markers[i % len(markers)], color=colors[i % len(colors)], ls='-')
    ax.set_xlabel(xlabel)
    ax.set_ylabel("Mean Intensity")
    ax.legend(loc = 'best',fontsize='x-small', fancybox=True, framealpha=0.5)
    if save:
        path = kwargs['path']
        fp = path + "%s_t_ROIs"%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        save_arrays( np.hstack( [times.reshape(len(times),1), mean_int_sets]),
                    label= ['frame']+ ['ROI_%d'%i for i in range( num_rings ) ],
                    filename='%s_t_ROIs'%uid, path= path )
    #plt.show()
def get_each_ring_mean_intensityc( FD, ring_mask, sampling=1, timeperframe=None, plot_ = False, save=False, *argv,**kwargs):
    """
    Get (and optionally plot/save) the time-dependent mean intensity of each ROI.

    FD: compressed-file handler; frames FD.beg .. FD.end are analyzed.
    ring_mask: labeled ROI mask (0 = background, nonzero integers label ROIs).
    sampling: frame stride forwarded to mean_intensityc.
    timeperframe: if given, frame indices are converted to seconds.
    plot_: when True, plot one curve per ROI.
    save: when True (and plot_ is True), save figure and data to kwargs['path'].
    Returns (times, mean_int_sets) where mean_int_sets has one column per ROI.
    """
    mean_int_sets, index_list = mean_intensityc(FD, ring_mask, sampling, index=None)
    if timeperframe is None:
        times = np.arange( FD.end - FD.beg ) + FD.beg # get the time for each frame
    else:
        times = ( FD.beg + np.arange( FD.end - FD.beg ) )*timeperframe
    # Number of distinct nonzero ROI labels.
    num_rings = len( np.unique( ring_mask)[1:] )
    if plot_:
        fig, ax = plt.subplots(figsize=(8, 8))
        uid = 'uid'
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        ax.set_title("%s--Mean intensity of each ROI"%uid)
        for i in range(num_rings):
            ax.plot( times, mean_int_sets[:,i], label="ROI "+str(i+1),marker = 'o', ls='-')
        if timeperframe is not None:
            ax.set_xlabel("Time, sec")
        else:
            ax.set_xlabel("Frame")
        ax.set_ylabel("Mean Intensity")
        ax.legend(loc = 'best',fontsize='x-small')
        # NOTE: saving only happens when plot_ is also True.
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = kwargs['path']
            #fp = path + "uid= %s--Mean intensity of each ring-"%uid + CurTime + '.png'
            fp = path + "%s_Mean_intensity_of_each_ROI"%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            save_arrays( np.hstack( [times.reshape(len(times),1), mean_int_sets]),
                        label= ['frame']+ ['ROI_%d'%i for i in range( num_rings ) ],
                        filename='%s_t_ROIs'%uid, path= path )
        #plt.show()
    return times, mean_int_sets
<file_sep>chxanalys--CHX XPCS Data Analysis Packages
========
Repository for data collection and analysis scripts that are useful at the
CHX beamline at NSLS-II (11-id).
Installation instructions on Linux:
Install miniconda from https://conda.io/miniconda.html. Then create the environment for chxanalys:
conda create --name chxanalys python=3.6 numpy scipy matplotlib
source activate chxanalys
pip install -r https://raw.githubusercontent.com/yugangzhang/chxanalys/master/requirements.txt
pip install git+https://github.com/yugangzhang/chxanalys
<file_sep>"""
Phase Retrieval Codes for NSLS-II Coherent Hard X-ray beamline
Adapted from:
M180G Phase Retrieval
Author: <NAME>
Jianwei (John) Miao Coherent Imaging Group
University of California, Los Angeles
Copyright (c) 2016. All Rights Reserved.
Modified by: <NAME> (2017)
"""
from PIL import Image
import numpy as np
import time
import matplotlib.pyplot as plt
from IPython import display
#from mask_maker import *
# This defines a decorator to time other functions
def timeThisFunction(function):
    """Decorator that prints the wall-clock duration of each call to `function`
    and returns the wrapped function's result unchanged."""
    from time import time
    from functools import wraps

    @wraps(function)  # preserve the wrapped function's name/docstring for introspection
    def inner(*args, **kwargs):
        t0 = time()
        value = function(*args,**kwargs)
        print("Function call to \"{0:s}\" completed in {1:.3f} seconds".format(function.__name__,time()-t0))
        return value
    return inner
class Initializer(object):
    """File loader class for initializing data for reconstruction at CHX.

    NOTE: removed an unused class-body `import h5py`, which was executed at
    class-definition time and made the whole module fail to import when h5py
    was not installed, despite never being used.
    """
    def __init__(self, fdir, px_size, probe_radius, arr_size):
        """
        fdir: data directory (stored, not read here)
        px_size: pixel size (stored, not used here)
        probe_radius: radius of the circular probe guess, in pixels
        arr_size: side length of the (square) reconstruction array
        """
        self.fdir = fdir
        self.px_size = px_size
        self.probe_radius = probe_radius
        self.length = arr_size
        self.cen = int(np.floor(self.length/2))  # array center index
    # def loadPatterns(self):
    # """ import data """
    # self.data =
    # self.length = self.data.shape[0]
    # def loadPositions(self):
    def makeProbeGuess(self):
        # makeCircle is expected to come from mask_maker (its import is
        # currently commented out at module level) -- TODO confirm it is in scope.
        return makeCircle(self.probe_radius, self.length, self.cen, self.cen)
class DiffractionPattern(object):
    "Base class for containing a diffraction pattern. Missing/unmeasured pixels are flagged with -1."
    def __init__(self, data):
        "Build a diffraction pattern object from an existing numpy array representing the diffraction data"
        self.data = data
    @staticmethod # static method means the following is just a regular function inside of the class definition
    def fromTIFF(filename):
        "Load a tiff image as a float array. NOTE: despite the factory-style name, this currently returns the raw array, not a DiffractionPattern."
        from PIL import Image
        data = np.array(Image.open(filename), dtype=float) # load in data
        # return DiffractionPattern(np.array(data,dtype=float)) #return a new DiffractionPattern object
        return data # return the raw array
    @staticmethod
    def fromTIFFstack(directory_name, filestart):
        "Load every '<filestart>*.tiff' file in directory_name into one stacked array."
        import glob
        from PIL import Image
        data = np.array([np.array(Image.open(filename)) for filename in glob.glob(directory_name + filestart + '*.tiff')])
        return data
    @timeThisFunction # adds a timer to the function
    def maskBrightValues(self, threshold_fraction = 0.98):
        # flag pixels at or near the saturation limit as unmeasured (-1)
        self.data [self.data > (threshold_fraction * np.amax(self.data))] = -1
    @timeThisFunction # adds a timer to the function
    def hermitianSymmetrize(self):
        """Enforce Hermitian symmetry (centrosymmetry) on the diffraction pattern"""
        print ("Using the slower version of hermitianSymmetrize , this may take a while....")
        # define center, "//" performs floor division. This idiom works regardless
        # of whether the dimension is odd or even.
        # BUGFIX: np.shape is a function; the original `np.shape[ self.data]`
        # indexed the function object and raised a TypeError.
        dimX, dimY = np.shape(self.data)
        centerX = dimX//2
        centerY = dimY//2
        # Now we want to loop over the array and combine each pixel with its symmetry mate. If
        # the dimension of our array is even, then the pixels at position "0" do not have a symmetry mate,
        # so we must check for that. Otherwise we will get an error for trying to index out of bounds
        if dimX % 2 == 0: # the "%" performs modular division, which gets the remainder
            startX = 1
        else:
            startX = 0
        if dimY % 2 == 0:
            startY = 1
        else:
            startY = 0
        # Now that we have the housekeeping stuff out of the way, we can actually do the loop.
        # We have to keep up with two sets of coordinates -> (X,Y) refers to the
        # position of where the value is located in the array and counts 0, 1, 2, etc.
        # On the other hand,(centeredX, centeredY) gives the coordinates relative to the origin
        # so that we can find the Hermitian symmetry mate, which is at (-centeredX, -centeredY)
        for X in range(startX, dimX):
            for Y in range(startY, dimY):
                # for each pixel X, Y, get the centered coordinate
                centeredX = X - centerX
                centeredY = Y - centerY
                # get the array coordinate of the symmetry mate and shift back by the center
                symmetry_X = (-1 * centeredX) + centerX
                symmetry_Y = (-1 * centeredY) + centerY
                # get the values from the array
                val1 = self.data[X, Y]
                val2 = self.data[symmetry_X, symmetry_Y]
                # if both values exist, take their average. If only one exists, use it for both. If
                # neither exists, then the final value is unknown (so we do nothing)
                if (val1 != -1) and (val2 != -1): #if both exist, take the average
                    self.data[X, Y] = (val1 + val2) / 2
                    self.data[symmetry_X, symmetry_Y] = (val1 + val2) / 2
                elif (val1 == -1): #if val1 does not exist, use val2 for both
                    self.data[X, Y] = val2
                    self.data[symmetry_X, symmetry_Y] = val2
                else: #then val2 must not exist
                    self.data[X, Y] = val1
                    self.data[symmetry_X, symmetry_Y] = val1
        self.data[self.data == 0] = -1
    @timeThisFunction # adds a timer to the function
    def hermitianSymmetrize_express(self):
        """
        Functions the same as hermitianSymmetrize, except is ~10,000 times faster, but more cryptic
        Applies Hermitian symmetry (centrosymmetry) to the diffraction pattern. If one symmetry mate is not equal to the complex conjugate of the other
        their average is taken. If only one of them exists (is nonzero), then the one value is used. If neither exists
        the value remains 0. In terms of implementation, this function produces Hermitian symmetry by adding the object
        to its complex conjugate with the indices reversed. This requires the array to be odd, so there is also a check
        to make the array odd and then take back the original size at the end, if necessary.
        """
        startDims = np.shape(self.data) # initial dimensions
        # remember the initial dimensions for the end
        dimx = startDims[0]
        dimy = startDims[1]
        flag = False # flag to trigger copying to new odd dimensioned array
        #check if any dimension is odd
        if dimx % 2 == 0:
            dimx += 1
            flag = True
        if dimy % 2 == 0:
            dimy += 1
            flag = True
        if flag: # if any dimensions are even, create a new with all odd dimensions and copy array
            newInput = np.zeros((dimx,dimy), dtype=float) #new array
            newInput[:startDims[0], :startDims[1]] = self.data # copy values
            newInput[newInput == -1] = 0
            numberOfValues = (newInput != 0).astype(float) #track number of values for averaging
            newInput = newInput + newInput[::-1, ::-1] # combine Hermitian symmetry mates
            numberOfValues = numberOfValues + numberOfValues[::-1, ::-1] # track number of points included in each sum
            newInput[numberOfValues != 0] = newInput[numberOfValues != 0] / numberOfValues[numberOfValues != 0] # take average where two values existed
            self.data = newInput[:startDims[0], :startDims[1]] # return original dimensions
        else: # otherwise, save yourself the trouble of copying the matrix over. See previous comments for line-by-line
            self.data[self.data == -1] = 0 #temporarily remove flags
            numberOfValues = (self.data != 0).astype(int)
            self.data = self.data + self.data[::-1, ::-1]
            numberOfValues = numberOfValues + numberOfValues[::-1, ::-1]
            self.data[numberOfValues != 0] = self.data[numberOfValues != 0] / numberOfValues[numberOfValues != 0]
            self.data[self.data == 0] = -1 # reflag
    @timeThisFunction # adds a timer to the function
    def correctCenter(self,search_box_half_size = 5):
        "This method optimizes the location of the diffraction pattern's center and shifts it accordingly \
        It does so by searching a range of centers determined by search_box_half_size. For each center, the \
        error between centrosymmetric partners is checked. The optimized center is the position which \
        minimizes this error. NOTE: interactive -- pops up a figure and waits for a click on the center."
        import matplotlib.pyplot as plt
        plt.figure()
        plt.imshow(self.data)
        plt.title('Double-click the center')
        # plot.show()
        center_guess_y, center_guess_x = (plt.ginput(1)[0])
        center_guess_x = int(center_guess_x)
        center_guess_y = int(center_guess_y)
        plt.close()
        dimX, dimY = np.shape(self.data)
        # If guesses for the center aren't provided, use the center of the array as a guess
        originalDimx = dimX
        originalDimy = dimY
        if center_guess_x is None:
            center_guess_x = dimX // 2
        if center_guess_y is None:
            center_guess_y = dimY // 2
        # Pad the array so the clicked center becomes the array center, with an
        # extra search_box_half_size margin on every side.
        bigDimX = max(center_guess_x,originalDimx-center_guess_x-1)
        bigDimY = max(center_guess_y,originalDimy-center_guess_y-1)
        padding_1_x = abs(center_guess_x-bigDimX) + search_box_half_size
        padding_2_x = abs( (originalDimx - center_guess_x - 1) - bigDimX)+ search_box_half_size
        padding_1_y = abs(center_guess_y-bigDimY)+ search_box_half_size
        padding_2_y = abs( (originalDimy - center_guess_y - 1) - bigDimY)+ search_box_half_size
        self.data = np.pad(self.data,((padding_1_x, padding_2_x),(padding_1_y, padding_2_y)),mode='constant')
        dimx, dimy = np.shape(self.data) # initial dimensions
        startDims = (dimx, dimy)
        center_guess_x = dimx//2
        center_guess_y = dimy//2
        flag = False # flag to trigger copying to new odd dimensioned array
        #check if any dimension is odd
        if dimx % 2 == 0:
            dimx += 1
            flag = True
        if dimy % 2 == 0:
            dimy += 1
            flag = True
        if flag: # if any dimensions are even, create a new with all odd dimensions and copy array
            temp_data = np.zeros((dimx,dimy), dtype=float) #new array
            temp_data[:startDims[0], :startDims[1]] = self.data # copy values
            # (removed a dead `input = temp_data` assignment that shadowed the builtin)
        else:
            temp_data = self.data
        # NOTE: when flag is False, temp_data aliases self.data, so the flag
        # removal below also mutates self.data -- preserved from the original.
        temp_data[temp_data == -1 ] = 0 # remove flags
        #initialize minimum error to a large value
        best_error = 1e30
        #initialize the best shifts to be 0
        bestShiftX = 0
        bestShiftY = 0
        #loop over the various center positions
        for xShift in range(-search_box_half_size,search_box_half_size+1):
            for yShift in range(-search_box_half_size,search_box_half_size+1):
                #shift the data
                temp_array = np.roll(temp_data,xShift,axis=0)
                temp_array = np.roll(temp_array,yShift,axis=1)
                temp_array_reversed = temp_array[::-1, ::-1]
                numberOfValues = (temp_array != 0).astype(float)
                numberOfValues = numberOfValues + numberOfValues[::-1, ::-1]
                difference_map = np.abs(temp_array - temp_array_reversed)
                normalization_term = np.sum(abs(temp_array[numberOfValues == 2]))
                error_between_symmetry_mates = np.sum(difference_map[numberOfValues == 2]) / normalization_term
                if error_between_symmetry_mates < best_error:
                    best_error = error_between_symmetry_mates
                    bestShiftX = xShift
                    bestShiftY = yShift
        self.data = np.roll(self.data, bestShiftX, axis=0)
        self.data = np.roll(self.data, bestShiftY, axis=1)
        self.data = self.data[ search_box_half_size : -search_box_half_size, search_box_half_size:-search_box_half_size ]
    @timeThisFunction
    def makeArraySquare(self):
        """ Pad image to square array size that is the nearest even number greater than or equal to the current dimensions"""
        dimx, dimy = np.shape(self.data)
        new_dim = max(dimx,dimy) + (max(dimx,dimy)%2) # Take the ceiling even value above the larger dimension
        padding_x = ((new_dim - dimx)//2, (new_dim - dimx)//2 + (new_dim - dimx)%2 )
        padding_y = ((new_dim - dimy)//2, (new_dim - dimy)//2 + (new_dim - dimy)%2 )
        self.data = np.pad(self.data,(padding_x, padding_y), mode='constant')
        self.data [ self.data ==0] = -1 # padded (and zero) pixels are flagged unmeasured
    @timeThisFunction # adds a timer to the function
    def subtractBackground(self, bg_filename):
        "Subtract a background image (tiff file); non-positive results are flagged unmeasured."
        from PIL import Image
        BG = Image.open(bg_filename)
        BG = np.array(BG,dtype=float)
        self.data -= BG # shorthand for self.data = self.data - BG
        self.data[(self.data <= 0 )] = -1 # any negative values are to be flagged as "missing" with a -1
    @timeThisFunction # adds a timer to the function
    def convertToFourierModulus(self):
        "Convert measured intensities to amplitudes (sqrt), leaving -1 flags untouched."
        self.data[self.data != -1] = np.sqrt(self.data[self.data != -1])
    @timeThisFunction
    def binImage(self, bin_factor_x=1, bin_factor_y=1, fraction_required_to_keep = 0.5):
        # bin an image by bin_factor_x in X and bin_factor_y in Y by averaging all pixels in an bin_factor_x by bin_factor_y rectangle
        # This is accomplished using convolution followed by downsampling, with the downsampling chosen so that the center
        # of the binned image coincides with the center of the original unbinned one.
        from scipy.signal import convolve2d
        self.data [self.data <0 ] = 0 # temporarily remove flags
        numberOfValues = (self.data != 0).astype(int) # record positions that have a value
        binning_kernel = np.ones((bin_factor_x, bin_factor_y), dtype=float) # create binning kernel (all values within this get averaged)
        self.data = convolve2d(self.data, binning_kernel, mode='same') # perform 2D convolution
        numberOfValues = convolve2d(numberOfValues, binning_kernel, mode='same') # do the same with the number of values
        self.data[ numberOfValues > 1 ] = self.data[ numberOfValues > 1 ] / numberOfValues[ numberOfValues > 1 ] # take average, accounting for how many datapoints went into each point
        self.data[ numberOfValues < (bin_factor_x * bin_factor_y * fraction_required_to_keep)] = -1 # if too few values existed for averaging because too many of the pixels were unknown, make the resulting pixel unknown
        dimx, dimy = np.shape(self.data) # get dimensions
        centerX = dimx//2 # get center in X direction
        centerY = dimy//2 # get center in Y direction
        # Now take the smaller array from the smoothed large one to obtain the final binned image. The phrase "centerX % bin_factor_x"
        # is to ensure that the subarray we take includes the exact center of the big array. For example if our original image is
        # 1000x1000 then the central pixel is at position 500 (starting from 0). If we are binning this by 5 we want a 200x200 array
        # where the new central pixel at x=100 corresponds to the old array at x=500, so "centerX % bin_factor_x" ->
        # 500 % 5 = 0, so we would be indexing data[0::5] = [0, 5, 10, 15..., 500, 505...] which is what we want. The same scenario with a
        # 1004x1004 image needs the center of the 200x200 array to be at x=502, and 502 % 5 = 2 and we index
        # data[2::5] = [2, 7, 12..., 502, 507 ...]
        self.data = self.data[ centerX % bin_factor_x :: bin_factor_x, centerY % bin_factor_y :: bin_factor_y ]
class PtychographyReconstruction(object):
''' ptychography reconstruction object for reconstruction using ePIE '''
def __init__(self, diffraction_pattern_stack, aperture_positions, reconstructed_pixel_size = 1, num_iterations = 100, aperture_guess = None, initial_object = None, show_progress = True, diffraction_pattern_type='generator' ):
'''diffraction_pattern_type='generator': diffraction_pattern_stack is a generator, shape will be N, Dimx, Dimy
diffraction_pattern_type='images': diffraction_pattern_stack is a numpy array, shape will be Dimx, Dimy, N
'''
self.diffraction_pattern_type = diffraction_pattern_type
self.diffraction_pattern_stack = diffraction_pattern_stack # NumPy array of dimension N x N x number_of_patterns
self.num_iterations = num_iterations
self.aperture_guess = aperture_guess # initial guess of the aperture
self.show_progress = show_progress
if self.diffraction_pattern_type == 'images':
dp_dimX, dp_dimY, number_of_patterns = np.shape(self.diffraction_pattern_stack)
else:
img = self.diffraction_pattern_stack[0]
dp_dimX, dp_dimY, number_of_patterns = img.shape[1], img.shape[0], len( self.diffraction_pattern_stack)
self.dp_dimX,self.dp_dimY,self.number_of_patterns = dp_dimX, dp_dimY, number_of_patterns
# Adjust the aperture positions. Convert into pixels, center the origin at 0 and
# add an offset of size (dp_dimX, dp_dimY) as a small buffer
aperture_pos_X, aperture_pos_Y = zip(*aperture_positions)
min_x_pos = min(aperture_pos_X) / reconstructed_pixel_size
min_y_pos = min(aperture_pos_Y) / reconstructed_pixel_size
aperture_pos_X = [int(pos/reconstructed_pixel_size - min_x_pos) + dp_dimX for pos in aperture_pos_X]
aperture_pos_Y = [int(pos/reconstructed_pixel_size - min_y_pos) + dp_dimY for pos in aperture_pos_Y]
self.aperture_positions = [pair for pair in zip(aperture_pos_X, aperture_pos_Y)]
self.number_of_apertures = len(self.aperture_positions)
# determine size of the macro reconstruction
big_dim_X, big_dim_Y = max(aperture_pos_X) + dp_dimX, max(aperture_pos_Y)+ dp_dimY
# Initialize array to hold reconstruction
self.reconstruction = np.zeros((big_dim_X, big_dim_Y), dtype=complex)
self.display_results_during_reconstruction = False
if aperture_guess is None:
self.aperture = np.ones((dp_dimX, dp_dimX), dtype=complex)
else:
self.aperture = aperture_guess
# If no initial object was provided, default to zeros
if initial_object is None:
self.initial_object = np.zeros((big_dim_X, big_dim_Y), dtype=complex)
else:
self.initial_object = initial_object
    def reconstruct(self):
        """Run the ePIE-style ptychographic iteration.

        For each iteration, visits every aperture position in random order,
        enforces the measured diffraction amplitudes in Fourier space, and
        updates both the object (`self.reconstruction`) and, after a warm-up
        period, the probe (`self.aperture`).

        Returns the complex-valued object reconstruction array.
        """
        aperture_update_start = 5 # start updating aperture on iteration 5
        # Feedback strengths for the object and probe updates.
        beta_object = 0.9
        beta_aperture = 0.9
        dp_dimX, dp_dimY, number_of_patterns = self.dp_dimX,self.dp_dimY,self.number_of_patterns
        # Index offsets used to crop a diffraction-pattern-sized window out of
        # the (larger) reconstruction, centered on each aperture position.
        x_crop_vector = np.arange(dp_dimX) - dp_dimX//2
        minX,maxX = np.min(x_crop_vector), np.max(x_crop_vector) + 1
        y_crop_vector = np.arange(dp_dimY) - dp_dimY//2
        minY,maxY = np.min(y_crop_vector), np.max(y_crop_vector) + 1
        for iteration in range(self.num_iterations):
            # randomly loop over the apertures each iteration
            for cur_apert_num in np.random.permutation(range(self.number_of_apertures)):
                # crop out the relevant sub-region of the reconstruction
                x_center, y_center = self.aperture_positions[cur_apert_num][0] , self.aperture_positions[cur_apert_num][1]
                r_space = self.reconstruction[ minX+x_center:maxX+x_center, minY+y_center:maxY+y_center ]
                buffer_r_space = np.copy(r_space)
                buffer_exit_wave = r_space * self.aperture
                update_exit_wave = my_fft(np.copy(buffer_exit_wave))
                if self.diffraction_pattern_type == 'images':
                    current_dp = self.diffraction_pattern_stack[:, :, cur_apert_num]
                else:
                    current_dp = self.diffraction_pattern_stack[ cur_apert_num ]
                current_dp = np.sqrt( current_dp ) #from intensity to amplitude
                # Fourier-space constraint: keep the current phase but replace the
                # magnitude by the measurement wherever data exist (-1 marks gaps).
                update_exit_wave[ current_dp != -1 ] = abs(current_dp[current_dp != -1])\
                    * np.exp(1j*np.angle(update_exit_wave[current_dp != -1]))
                update_exit_wave = my_ifft(update_exit_wave)
                # max_ap = np.max(np.abs(self.aperture))
                # norm_factor = beta_object / max_ap**2
                diff_wave = (update_exit_wave - buffer_exit_wave)
                # ePIE object update: scale the exit-wave difference by the
                # conjugate probe, normalized by the probe's peak intensity.
                new_r_space = buffer_r_space + diff_wave * \
                    np.conjugate(self.aperture) * beta_object / np.max(np.abs(self.aperture))**2
                self.reconstruction[ minX+x_center:maxX+x_center, minY+y_center:maxY+y_center ] = new_r_space
                if iteration > aperture_update_start:
                    # norm_factor_apert = beta_aperture / np.max(np.abs(r_space))**2
                    # NOTE(review): this print fires for every aperture once probe
                    # updates begin; looks like leftover debug output.
                    print('Update probe here')
                    # ePIE probe update, symmetric to the object update above.
                    self.aperture = self.aperture + beta_aperture / np.max(np.abs(r_space))**2 * \
                        np.conjugate(buffer_r_space)*diff_wave
            # Progress display every 5 iterations (skips the final iteration).
            # NOTE(review): shown regardless of self.show_progress — confirm intent.
            if iteration % 5 == 0 and iteration != self.num_iterations - 1:
                print("Iteration {}".format(iteration))
                self.displayResult()
        return self.reconstruction
    def displayResult(self):
        """Draw the current reconstruction and aperture (magnitude and phase)
        into figure 101 as a 2x2 panel, redrawing in place so it can be called
        repeatedly during the reconstruction loop."""
        fig = plt.figure(101)
        plt.subplot(221)
        plt.imshow(np.abs(self.reconstruction),origin='lower')
        plt.draw()
        plt.title('Reconstruction Magnitude')
        plt.subplot(222)
        plt.imshow(np.angle(self.reconstruction),origin='lower')
        plt.draw()
        plt.title('Reconstruction Phase')
        plt.subplot(223)
        plt.imshow(np.abs(self.aperture),origin='lower')
        plt.title("Aperture Magnitude")
        plt.draw()
        plt.subplot(224)
        plt.imshow(np.angle(self.aperture),origin='lower')
        plt.title("Aperture Phase")
        plt.draw()
        # Force the canvas to repaint immediately (useful inside loops).
        fig.canvas.draw()
        #fig.tight_layout()
        # display.display(fig)
        # display.clear_output(wait=True)
        # time.sleep(.00001)
def displayResult2(self):
fig = plt.figure()
ax = fig.add_subplot(2,2,1 )
ax.imshow(np.abs(self.reconstruction))
ax.set_title('Reconstruction Magnitude')
ax = fig.add_subplot(2,2,2 )
ax.imshow(np.angle(self.reconstruction))
ax.set_title('Reconstruction Phase')
ax = fig.add_subplot(2,2,3 )
ax.imshow(np.abs(self.aperture))
ax.set_title("Aperture Magnitude")
ax = fig.add_subplot(2,2,4 )
ax.imshow(np.angle(self.aperture))
ax.set_title("Aperture Phase")
fig.tight_layout()
class PostProcesser(object):
    ''' class for visualizing and selecting area of interest for in situ CDI measurements after obtaining the
    ptychography results '''
    def __init__(self, fov, positions, probe_radius, arr_size):
        # fov: complex full-field-of-view reconstruction
        # positions: (N, 2) array of probe positions (pixel coordinates)
        # probe_radius: probe radius in pixels, used only for display circles
        # arr_size: side length of the square ROI to crop
        self.fov = fov
        self.positions = positions
        self.probe_radius = probe_radius
        self.arr_size = arr_size
    def fovPicker(self):
        """Show the field of view with probe positions circled, prompt the user
        (via input()) for a position index, and crop/return the ROI there."""
        fig = plt.figure()
        ax = plt.gca()
        plt.imshow(abs(self.fov))
        for n in np.arange(self.positions.shape[0]):
            circle = plt.Circle(self.positions[n, :], self.probe_radius, color='r', fill=False)
            ax.add_artist(circle)
        plt.title("After closing figure, enter desired position for in situ CDI measurement")
        fig.canvas.draw()
        index = input("Enter desired position for in situ CDI measurement: ")
        #print(index)
        #index = index.astype(int);
        index = int(index)
        # positions are stored (row, col); crop_roi takes (cenx=col, ceny=row).
        self.fov_roi = crop_roi(image = self.fov,
                                crop_size = self.arr_size,
                                cenx = self.positions[index,1],
                                ceny = self.positions[index,0])
        return self.fov_roi
    def defineVerts(self):
        """Interactively collect polygon vertices on the cropped ROI.
        NOTE(review): `LineBuilder` is not defined in this view — presumably
        defined earlier in the module; confirm. `coords` here is unused."""
        coords = []
        fig = plt.figure()
        ax = fig.add_subplot(111)
        plt.imshow(self.fov_roi)
        ax.set_title('click to build line segments')
        line, = ax.plot([], []) # empty line
        LineBuilder(line)
        plt.show()
    def makeStatSupp(self, coords):
        """Rasterize the polygon `coords` into a boolean (arr_size, arr_size)
        static-support mask. NOTE(review): relies on module-level `path`
        (presumably matplotlib.path) imported outside this view."""
        h, w = (self.arr_size, self.arr_size)
        y, x = np.mgrid[:h, :w]
        points = np.transpose((x.ravel(), y.ravel()))
        p = path.Path(coords)
        mask = p.contains_points(points)
        return mask.reshape(h, w)
class InSituCDIReconstruction(object):
    """In situ CDI reconstruction.

    Iteratively recovers a time series of complex objects from a stack of
    diffraction patterns, using a static reference region (`stat_support`)
    that is shared across frames and a known probe.
    """
    def __init__(self,diffraction_patterns, num_iterations = 200, stat_support = None, probe = None, initial_object = None):
        # diffraction_patterns: stack of measured patterns; -1 marks missing pixels.
        # stat_support: boolean mask of the static reference region.
        # probe: complex illumination function.
        self.diffraction_patterns = diffraction_patterns
        self.num_iterations = num_iterations
        self.probe = probe
        self.stat_support = stat_support
        print(np.shape(self.diffraction_patterns))
        # NOTE(review): __init__ reads `.data` but reconstruct() uses the object
        # directly — confirm the expected input type is consistent.
        dimZ, dimY, dimX = np.shape(self.diffraction_patterns.data)
        self.reconstructions = np.zeros((dimZ,dimY,dimX), dtype=float)
        self.display_results_during_reconstruction = True
        if initial_object is None:
            # Random complex start; scaled down in reconstruct().
            self.initial_object = np.array(np.random.rand(dimZ,dimY,dimX), dtype=complex)
            # self.initial_object = np.array(np.zeros((dimZ,dimY,dimX)), dtype=complex)
        else:
            self.initial_object = initial_object
    def reconstruct(self):
        """Run the iterative reconstruction; keeps the lowest-error result in
        self.reconstructions and the per-iteration R-factor in self.errK."""
        # np.seterr(divide='ignore', invalid='ignore')
        print("Reconstructing dynamic information with in situ CDI...")
        # Get dimensions of diffraction patterns
        dimZ, dimY, dimX = np.shape(self.diffraction_patterns)
        # set measured data points mask (-1 marks unmeasured pixels)
        measured_data_mask = self.diffraction_patterns != -1
        ref_data_mask = self.stat_support == 1
        probe = np.copy(self.probe)
        # initialize object
        R = self.initial_object * 1e-10
        # initialize reference area
        ref_area = np.ones((dimY,dimX)) * 1e-6
        kerr_top = np.zeros(dimZ)
        kerr_bottom = np.zeros(dimZ)
        self.errK = np.zeros(self.num_iterations, dtype=float)
        best_error = 1e30
        beta = 0.9
        beta_ref = 0.8
        alpha = 1e-10
        for itr in range(self.num_iterations):
            if itr % 10 == 1:
                print("Iteration{0} \t\t Minimum Error = {1:.2f}".format(itr, best_error))
            # Bug fix: the loop was hard-coded to range(10); iterate over all
            # frames so every slice of R (and the dimZ-sized error accumulators)
            # is actually updated.
            for frame in range(dimZ):
                current_frame = np.copy(R[frame, :, :])
                current_data_mask = measured_data_mask[frame, :, :]
                # Blend the shared static reference into this frame.
                current_frame[ref_data_mask == 1] = beta_ref * ref_area[ref_data_mask == 1] + (1-beta_ref) * current_frame[ref_data_mask == 1]
                current_exit_wave = current_frame * probe
                updated_k = my_fft(current_exit_wave)
                check_k = abs(updated_k)
                measured_k = np.copy(self.diffraction_patterns[frame, :, :])
                # Fourier constraint: impose measured magnitude, keep phase.
                updated_k[current_data_mask] = measured_k[current_data_mask] * np.exp(1j * np.angle(updated_k[current_data_mask]))
                # updated_k[current_data_mask] = measured_k[current_data_mask] * updated_k[current_data_mask] / np.abs(updated_k[current_data_mask])
                updated_exit_wave = my_ifft(updated_k)
                diff_exit_wave = updated_exit_wave - current_exit_wave
                # ePIE-style object update function (alpha avoids divide-by-zero).
                update_fn = (np.abs(probe) / np.amax(abs(probe))) * (np.conj(probe) / (alpha + np.power(abs(probe),2)))
                updated_object = current_frame + update_fn * diff_exit_wave
                ref_area = np.copy(updated_object)
                R[frame, :, :] = np.copy(updated_object)
                kerr_top[frame] = np.sum(abs(measured_k[current_data_mask] - check_k[current_data_mask]))
                kerr_bottom[frame] = np.sum(abs(measured_k[current_data_mask]))
            # check R-factor
            errK = np.sum(kerr_top)/np.sum(kerr_bottom)
            self.errK[itr] = errK
            if errK < best_error:
                best_error = errK
                self.reconstructions = np.copy(R)
            # Idiom fix: use `and` rather than bitwise `&` for boolean logic.
            if self.display_results_during_reconstruction and (itr % 10 == 0):
                self.displayResults()
    def displayResults(self):
        """Show the magnitude of a middle frame and the error history."""
        from matplotlib import pyplot as plt
        plt.figure(2)
        plt.subplot(121)
        plt.imshow(abs(self.reconstructions[5, :, :]), cmap='gray')
        plt.subplot(122)
        plt.plot(range(self.num_iterations), self.errK, 'ko')
        plt.draw()
        plt.pause(1e-12)
def my_fft(arr):
    """Centered forward FFT: un-shift the input, transform, then shift the
    zero-frequency component back to the array center."""
    decentered = np.fft.ifftshift(arr)
    spectrum = np.fft.fftn(decentered)
    return np.fft.fftshift(spectrum)
def my_ifft(arr):
    """Centered inverse FFT: un-shift the input, inverse-transform, then shift
    the origin back to the array center."""
    decentered = np.fft.ifftshift(arr)
    signal = np.fft.ifftn(decentered)
    return np.fft.fftshift(signal)
def combine_images(directory_name, output_filename = None):
    """loop over all tiff images in directory_name and average them"""
    import os
    from PIL import Image
    running_sum = None
    n_images = 0
    for entry in os.listdir(directory_name):
        _, extension = os.path.splitext(entry)
        if extension in (".tif", ".tiff"):
            # print(directory_name + entry)
            frame = np.array(Image.open(directory_name + entry), dtype=float)
            if running_sum is None:
                running_sum = frame
            else:
                running_sum += frame
            n_images += 1
    try:
        # In-place divide; raises TypeError if no image was ever loaded
        # (running_sum is still None).
        running_sum /= n_images
    except TypeError:
        print ("\n\n\n\nNO VALID TIFF IMAGES IN DIRECTORY!\n\n\n")
        raise
    if output_filename is not None:
        np.save(output_filename, running_sum)
    return running_sum
def makeCircle(radius, img_size, cenx, ceny):
    """Make binary circle with specified radius and image size (img_size) centered at
    (cenx,ceny)"""
    half = int(img_size // 2)
    # Grid of distances from the array center.
    row_idx, col_idx = np.mgrid[-half:half, -half:half]
    disk = np.sqrt(row_idx ** 2 + col_idx ** 2) <= radius
    # Translate the centered disk to the requested (cenx, ceny) position.
    disk = np.roll(disk, cenx - half, axis=1)
    return np.roll(disk, ceny - half, axis=0)
def crop_roi(image, crop_size, cenx, ceny):
    """Crop a square region of side `crop_size` from `image`, centered at
    (cenx, ceny); cenx indexes columns and ceny indexes rows."""
    # cenx = int(np.floor(image.shape[0]/2))
    # ceny = int(np.floor(image.shape[1]/2))
    col_center = int(cenx)
    row_center = int(ceny)
    half = int(crop_size // 2)
    if crop_size % 2 == 0:
        return image[row_center - half:row_center + half,
                     col_center - half:col_center + half]
    # Odd sizes extend one extra pixel past the center.
    return image[row_center - half:row_center + half + 1,
                 col_center - half:col_center + half + 1]
def createSupp(arr_size, coords):
    """Rasterize the polygon `coords` into a boolean mask of shape `arr_size`
    (True inside the polygon). Relies on module-level `path` (matplotlib.path)."""
    height, width = arr_size
    grid_y, grid_x = np.mgrid[:height, :width]
    pixel_points = np.transpose((grid_x.ravel(), grid_y.ravel()))
    polygon = path.Path(coords)
    inside = polygon.contains_points(pixel_points)
    return inside.reshape(height, width)
def ptychography_demo():
    """Build a simulated ptychography dataset, display it, then run and show a
    reconstruction.

    NOTE(review): depends on module-level names `Image`, `simulate`, `plt`,
    `np`, and `PtychographyReconstruction`; run from the full module.
    """
    # create simulated ptychography data
    obj = Image.open('images/lena.png')
    obj = np.pad(obj, pad_width=50, mode='constant')
    ptychography_obj = simulate.make_ptychography_data(big_obj=obj, ccd_size=300, scan_dim=6, offset=0.15) # pixels
    positions = ptychography_obj.gridscan()
    probe = ptychography_obj.make_probe(probe_radius=70, dx=4, dy=4, z=-1000, wavelength=.635) # units in microns
    obj_stack = ptychography_obj.make_obj_stack(positions)
    diff_patterns = ptychography_obj.make_dps(obj_stack, probe)
    # display simulation data
    fig = plt.figure(1)
    plt.subplot(221)
    plt.imshow(obj)
    plt.title('Object full field of view')
    plt.subplot(222)
    plt.imshow(abs(probe))
    plt.title('Probe (magnitude)')
    # Step through each scan position, showing the exit wave and its pattern.
    for n in range(max(positions.shape)):
        plt.subplot(223)
        plt.imshow(abs(obj_stack[:, :, n] * probe))
        plt.title('Exit wave at position %d' % (n))
        plt.subplot(224)
        plt.imshow(np.log(diff_patterns[:, :, n]))
        plt.title('Diffraction intensity')
        plt.pause(.1)
        plt.draw()
    plt.pause(1)
    input("<Hit Enter Twice To Close Figure and Continue>")
    plt.close(fig)
    # create ptychography reconstruction object
    ptyc_reconstruction = PtychographyReconstruction(
        diffraction_pattern_stack=diff_patterns,
        aperture_guess=probe,
        initial_object=None,
        num_iterations=100,
        aperture_positions=positions,
        reconstructed_pixel_size=1,
        show_progress=True)
    # reconstruct ptychography
    ptyc_reconstruction.reconstruct()
    # Bug fix: the class defines displayResult(), not displayResults();
    # the original call raised AttributeError.
    ptyc_reconstruction.displayResult()
# Run the ptychography demo only when this module is executed as a script.
if __name__ == '__main__':
    ptychography_demo()
<file_sep>from pyOlog import LogEntry, Attachment,OlogClient
from pyOlog import SimpleOlogClient
from pyOlog.OlogDataTypes import Logbook
def update_olog_uid_with_file( uid, text, filename, append_name='', try_time = 10):
    '''YG developed at July 18, 2017, attached text and file (with filename) to CHX olog
    with entry defined by uid
    uid: string of unique id
    text: string, put into olog book
    filename: string,
    First try to attach olog with the file, if there is already a same file in attached file,
    copy the file with different filename (append append_name), and then attach to olog

    NOTE(review): try_time is accepted but unused; assumes filename ends in a
    4-character extension (e.g. ".pdf") — confirm with callers.
    '''
    atch=[ Attachment(open(filename, 'rb')) ]
    try:
        update_olog_uid( uid= uid, text= text, attachments= atch )
    # Bug fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception so only real upload failures trigger the retry path.
    except Exception:
        from shutil import copyfile
        # Re-attach under a modified filename to dodge the duplicate-name clash.
        npname = filename[:-4] + append_name +'_'+ '.pdf'
        copyfile( filename, npname )
        atch=[ Attachment(open(npname, 'rb')) ]
        print("Append %s to the filename."%append_name)
        update_olog_uid( uid= uid, text= text, attachments= atch )
def update_olog_id( logid, text, attachments):
    '''Update olog book logid entry with text and attachments files
    logid: integer, the log entry id
    text: the text to update, will add this text to the old text
    attachments: add new attachment files
    An example:
    filename1 = '/XF11ID/analysis/2016_2/yuzhang/Results/August/af8f66/Report_uid=af8f66.pdf'
    atch=[ Attachment(open(filename1, 'rb')) ]
    update_olog_id( logid=29327, text='add_test_atch', attachmenents= atch )

    Side effects: talks to the live CHX Olog server; appends `text` to the
    entry's existing text and uploads the attachments.
    '''
    url='https://logbook.nsls2.bnl.gov/Olog-11-ID/Olog'
    # NOTE(review): credentials are hard-coded placeholders here; they should
    # come from configuration, not source.
    olog_client=SimpleOlogClient( url= url,
                username= 'xf11id', password= '<PASSWORD>' )
    client = OlogClient( url='https://logbook.nsls2.bnl.gov/Olog-11-ID/Olog',
                username= 'xf11id', password= '<PASSWORD>' )
    # Fetch the current entry text so the update appends rather than replaces.
    old_text = olog_client.find( id = logid )[0]['text']
    upd = LogEntry( text= old_text + '\n'+text, attachments= attachments,
            logbooks= [Logbook( name = 'Operations', owner=None, active=True)]
                  )
    upL = client.updateLog( logid, upd )
    print( 'The url=%s was successfully updated with %s and with the attachments'%(url, text))
def update_olog_uid( uid, text, attachments):
    '''Update olog book logid entry cotaining uid string with text and attachments files
    uid: string, the uid of a scan or a specficial string (only gives one log entry)
    text: the text to update, will add this text to the old text
    attachments: add new attachment files
    An example:
    filename1 = '/XF11ID/analysis/2016_2/yuzhang/Results/August/af8f66/Report_uid=af8f66.pdf'
    atch=[ Attachment(open(filename1, 'rb')) ]
    update_olog_uid( uid='af8f66', text='Add xpcs pdf report', attachments= atch )

    Resolves the uid to a log-entry id via a server search, then delegates to
    update_olog_id(). Uses only the first search hit.
    '''
    # NOTE(review): hard-coded placeholder credentials; `client` is created but
    # unused here (update_olog_id builds its own).
    olog_client=SimpleOlogClient( url='https://logbook.nsls2.bnl.gov/Olog-11-ID/Olog',
                username= 'xf11id', password= '<PASSWORD>' )
    client = OlogClient( url='https://logbook.nsls2.bnl.gov/Olog-11-ID/Olog',
                username= 'xf11id', password= '<PASSWORD>' )
    logid = olog_client.find( search= uid )[0]['id']
    #print(attachments)
    update_olog_id( logid, text, attachments)
<file_sep>#import sys
import re # Regular expressions
import numpy as np
import pylab as plt
import matplotlib as mpl
#from scipy.optimize import leastsq
#import scipy.special
import PIL # Python Image Library (for opening PNG, etc.)
import sys, os
from skbeam.core.accumulators.binned_statistic import BinnedStatistic2D,BinnedStatistic1D
def convert_Qmap( img, qx_map, qy_map=None, bins=None, rangeq=None, origin=None, mask=None, statistic='mean'):
    """<NAME> 3@CHX
    Convert a scattering image to a qmap by giving qx_map and qy_map
    Return converted qmap, x-coordinates and y-coordinates

    img: 2D detector image
    qx_map / qy_map: per-pixel q coordinates; if qy_map is None, a 1D binning
        over qx_map alone is performed (ybins returned as None).
    bins, rangeq, mask, statistic: forwarded to skbeam's BinnedStatistic.
    origin: accepted for API compatibility; not used here.
    """
    if qy_map is not None:
        if rangeq is None:
            qx_min,qx_max = qx_map.min(), qx_map.max()
            qy_min,qy_max = qy_map.min(), qy_map.max()
            rangeq = [ [qx_min,qx_max], [qy_min,qy_max] ]
            #rangeq = [qx_min,qx_max , qy_min,qy_max]
        if bins is None:
            bins = qx_map.shape
        # Bug fix: the original called mask.ravel() unconditionally, raising
        # AttributeError when mask=None (the 1D branch already guarded this).
        if mask is not None:
            m = mask.ravel()
        else:
            m = None
        b2d = BinnedStatistic2D( qx_map.ravel(), qy_map.ravel(),
                                 statistic=statistic, bins=bins, mask=m,
                                 range=rangeq)
        remesh_data, xbins, ybins = b2d( img.ravel() ), b2d.bin_centers[0], b2d.bin_centers[1]
    else:
        if rangeq is None:
            qx_min,qx_max = qx_map.min(), qx_map.max()
            rangeq = [qx_min,qx_max]
        if bins is None:
            bins = [ qx_map.size ]
        #print( rangeq, bins )
        if mask is not None:
            m = mask.ravel()
        else:
            m = None
        b1d = BinnedStatistic1D( qx_map.ravel(), bins= bins, mask=m )
        remesh_data = b1d( img.ravel() )
        #print('Here')
        xbins= b1d.bin_centers
        ybins=None
    return remesh_data, xbins, ybins
def qphiavg(image, q_map=None, phi_map=None, mask=None, bins= None,
            origin=None, range=None, statistic='mean'):
    ''' Octo 20, 2017 Yugang According to Julien's Suggestion
    Get from https://github.com/CFN-softbio/SciStreams/blob/master/SciStreams/processing/qphiavg.py
    With a small revision --> return three array rather than dict

    Bin `image` onto a (q, phi) grid and return (sqphi, q_centers, phi_centers).
    q_map / phi_map default to radial/angular grids about `origin` (which
    itself defaults to the image center).
    '''
    # TODO : replace with method that takes qphi maps
    # TODO : also return q and phi of this...
    img_shape = image.shape
    if bins is None:
        bins = img_shape
    if origin is None:
        origin = (img_shape[0] - 1) / 2., (img_shape[1] - 1) / 2.
    from skbeam.core.utils import radial_grid, angle_grid
    if q_map is None:
        q_map = radial_grid(origin, img_shape)
    if phi_map is None:
        phi_map = angle_grid(origin, img_shape)
    expected_shape = tuple(img_shape)
    if mask is not None:
        if mask.shape != expected_shape:
            raise ValueError('"mask" has incorrect shape. '
                             ' Expected: ' + str(expected_shape) +
                             ' Received: ' + str(mask.shape))
        mask = mask.reshape(-1)
    stat2d = BinnedStatistic2D(q_map.reshape(-1), phi_map.reshape(-1),
                               statistic=statistic, bins=bins, mask=mask,
                               range=range)
    sqphi = stat2d(image.ravel())
    q_centers = stat2d.bin_centers[0]
    phi_centers = stat2d.bin_centers[1]
    return sqphi, q_centers, phi_centers
def convert_Qmap_old( img, qx_map, qy_map=None, bins=None, rangeq=None):
    """<NAME> 3@CHX
    Convert a scattering image to a qmap by giving qx_map and qy_map
    Return converted qmap, x-coordinates and y-coordinates

    Histogram-based (legacy) version of convert_Qmap: bins the image
    intensities as weights over the q coordinates. If qy_map is None a 1D
    histogram is returned (ybins is None).
    """
    if qy_map is not None:
        if rangeq is None:
            qx_min,qx_max = qx_map.min(), qx_map.max()
            qy_min,qy_max = qy_map.min(), qy_map.max()
            rangeq = [ [qx_min,qx_max], [qy_min,qy_max] ]
        if bins is None:
            bins = qx_map.shape
        # Bug fix: the `normed=False` keyword was removed in NumPy 1.24 and
        # raised TypeError; False was the default, so dropping it is equivalent.
        remesh_data, xbins, ybins = np.histogram2d(qx_map.ravel(), qy_map.ravel(),
                bins=bins, range= rangeq, weights= img.ravel() )
    else:
        if rangeq is None:
            qx_min,qx_max = qx_map.min(), qx_map.max()
            rangeq = [qx_min,qx_max]
        if bins is None:
            bins = qx_map.size
        else:
            if isinstance( bins, list):
                bins = bins[0]
        print( rangeq, bins )
        # Same `normed` removal as above.
        remesh_data, xbins  = np.histogram(qx_map.ravel(),
                bins=bins, range= rangeq, weights= img.ravel() )
        ybins=None
    return remesh_data, xbins, ybins
# Mask
################################################################################
class Mask(object):
    '''Stores the matrix of pixels to be excluded from further analysis.

    Convention: self.data holds 1 for included pixels and 0 for excluded
    pixels; loading multiple masks multiplies them together.
    '''
    def __init__(self, infile=None, format='auto'):
        '''Creates a new mask object, storing a matrix of the pixels to be
        excluded from further analysis.'''
        self.data = None
        if infile is not None:
            self.load(infile, format=format)
    def load(self, infile, format='auto', invert=False):
        '''Loads a mask from a a file. If this object already has some masking
        defined, then the new mask is 'added' to it. Thus, one can load multiple
        masks to exlude various pixels.'''
        if format=='png' or infile[-4:]=='.png':
            self.load_png(infile, invert=invert)
        elif format=='hdf5' or infile[-3:]=='.h5' or infile[-4:]=='.hd5':
            self.load_hdf5(infile, invert=invert)
        else:
            print("Couldn't identify mask format for %s."%(infile))
    def load_blank(self, width, height):
        '''Creates a null mask; i.e. one that doesn't exlude any pixels.'''
        # TODO: Confirm that this is the correct order for x and y.
        self.data = np.ones((height, width))
    def load_png(self, infile, threshold=127, invert=False):
        '''Load a mask from a PNG image file. High values (white) are included,
        low values (black) are exluded.'''
        # Image should be black (0) for excluded pixels, white (255) for included pixels
        img = PIL.Image.open(infile).convert("L") # black-and-white
        # Binarize at `threshold`, then normalize 255 -> 1.
        img2 = img.point(lambda p: p > threshold and 255)
        data = np.asarray(img2)/255
        data = data.astype(int)
        if invert:
            data = -1*(data-1)
        if self.data is None:
            self.data = data
        else:
            self.data *= data
    def load_hdf5(self, infile, invert=False):
        '''Load a mask from the 'mask' dataset of an HDF5 file.'''
        # Bug fix: h5py was never imported at module level, so this method
        # raised NameError; import it locally where it is needed.
        import h5py
        with h5py.File(infile, 'r') as f:
            data = np.asarray( f['mask'] )
        if invert:
            data = -1*(data-1)
        if self.data is None:
            self.data = data
        else:
            self.data *= data
    def invert(self):
        '''Inverts the mask. Can be used if the mask file was written using the
        opposite convention.'''
        self.data = -1*(self.data-1)
########################################
# Calibration
################################################################################
class Calibration(object):
    '''Stores aspects of the experimental setup; especially the calibration
    parameters for a particular detector. That is, the wavelength, detector
    distance, and pixel size that are needed to convert pixel (x,y) into
    reciprocal-space (q) value.
    This class may also store other information about the experimental setup
    (such as beam size and beam divergence).
    '''
    def __init__(self, wavelength_A=None, distance_m=None, pixel_size_um=None):
        self.wavelength_A = wavelength_A
        self.distance_m = distance_m
        self.pixel_size_um = pixel_size_um
        # Data structures will be generated as needed
        # (and preserved to speedup repeated calculations)
        self.clear_maps()
    # Experimental parameters
    ########################################
    def set_wavelength(self, wavelength_A):
        '''Set the experimental x-ray wavelength (in Angstroms).'''
        self.wavelength_A = wavelength_A
    def get_wavelength(self):
        '''Get the x-ray beam wavelength (in Angstroms) for this setup.'''
        return self.wavelength_A
    def set_energy(self, energy_keV):
        '''Set the experimental x-ray beam energy (in keV).'''
        energy_eV = energy_keV*1000.0
        energy_J = energy_eV/6.24150974e18
        h = 6.626068e-34 # m^2 kg / s
        c = 299792458 # m/s
        # lambda = h*c/E
        wavelength_m = (h*c)/energy_J
        self.wavelength_A = wavelength_m*1e+10
    def get_energy(self):
        '''Get the x-ray beam energy (in keV) for this setup.'''
        h = 6.626068e-34 # m^2 kg / s
        c = 299792458 # m/s
        wavelength_m = self.wavelength_A*1e-10 # m
        E = h*c/wavelength_m # Joules
        E *= 6.24150974e18 # electron volts
        E /= 1000.0 # keV
        return E
    def get_k(self):
        '''Get k = 2*pi/lambda for this setup, in units of inverse Angstroms.'''
        return 2.0*np.pi/self.wavelength_A
    def set_distance(self, distance_m):
        '''Sets the experimental detector distance (in meters).'''
        self.distance_m = distance_m
    def set_pixel_size(self, pixel_size_um=None, width_mm=None, num_pixels=None):
        '''Sets the pixel size (in microns) for the detector. Pixels are assumed
        to be square.'''
        if pixel_size_um is not None:
            self.pixel_size_um = pixel_size_um
        else:
            # Derive pixel size from total detector width and pixel count.
            # NOTE(review): requires set_image_size() to have been called first
            # when num_pixels is not given.
            if num_pixels is None:
                num_pixels = self.width
            pixel_size_mm = width_mm*1./num_pixels
            self.pixel_size_um = pixel_size_mm*1000.0
    def set_beam_position(self, x0, y0):
        '''Sets the direct beam position in the detector images (in pixel
        coordinates).'''
        self.x0 = x0
        self.y0 = y0
    def set_image_size(self, width, height=None):
        '''Sets the size of the detector image, in pixels.'''
        self.width = width
        if height is None:
            # Assume a square detector
            self.height = width
        else:
            self.height = height
    def get_q_per_pixel(self):
        '''Gets the delta-q associated with a single pixel. This is computed in
        the small-angle limit, so it should only be considered approximate.
        For instance, wide-angle detectors will have different delta-q across
        the detector face.'''
        if self.q_per_pixel is not None:
            return self.q_per_pixel
        # One pixel subtends an angle arctan(pixel_size / distance).
        c = (self.pixel_size_um/1e6)/self.distance_m
        twotheta = np.arctan(c) # radians
        self.q_per_pixel = 2.0*self.get_k()*np.sin(twotheta/2.0)
        return self.q_per_pixel
    # Maps
    ########################################
    def clear_maps(self):
        '''Reset all cached per-pixel maps so they are regenerated on demand.'''
        self.r_map_data = None
        self.q_per_pixel = None
        self.q_map_data = None
        self.angle_map_data = None
        self.qx_map_data = None
        self.qy_map_data = None
        self.qz_map_data = None
        self.qr_map_data = None
    def r_map(self):
        '''Returns a 2D map of the distance from the origin (in pixel units) for
        each pixel position in the detector image.'''
        if self.r_map_data is not None:
            return self.r_map_data
        x = np.arange(self.width) - self.x0
        y = np.arange(self.height) - self.y0
        X, Y = np.meshgrid(x, y)
        R = np.sqrt(X**2 + Y**2)
        self.r_map_data = R
        return self.r_map_data
    def q_map(self):
        '''Returns a 2D map of the q-value associated with each pixel position
        in the detector image.'''
        if self.q_map_data is not None:
            return self.q_map_data
        c = (self.pixel_size_um/1e6)/self.distance_m
        twotheta = np.arctan(self.r_map()*c) # radians
        self.q_map_data = 2.0*self.get_k()*np.sin(twotheta/2.0)
        return self.q_map_data
    def angle_map(self):
        '''Returns a map of the angle for each pixel (w.r.t. origin).
        0 degrees is vertical, +90 degrees is right, -90 degrees is left.'''
        if self.angle_map_data is not None:
            return self.angle_map_data
        x = (np.arange(self.width) - self.x0)
        y = (np.arange(self.height) - self.y0)
        X,Y = np.meshgrid(x,y)
        #M = np.degrees(np.arctan2(Y, X))
        # Note intentional inversion of the usual (x,y) convention.
        # This is so that 0 degrees is vertical.
        #M = np.degrees(np.arctan2(X, Y))
        # TODO: Lookup some internal parameter to determine direction
        # of normal. (This is what should befine the angle convention.)
        M = np.degrees(np.arctan2(X, -Y))
        self.angle_map_data = M
        return self.angle_map_data
    def qx_map(self):
        '''Returns the per-pixel qx map (generated and cached on first use).'''
        if self.qx_map_data is not None:
            return self.qx_map_data
        self._generate_qxyz_maps()
        return self.qx_map_data
    def qy_map(self):
        '''Returns the per-pixel qy map (generated and cached on first use).'''
        if self.qy_map_data is not None:
            return self.qy_map_data
        self._generate_qxyz_maps()
        return self.qy_map_data
    def qz_map(self):
        '''Returns the per-pixel qz map (generated and cached on first use).'''
        if self.qz_map_data is not None:
            return self.qz_map_data
        self._generate_qxyz_maps()
        return self.qz_map_data
    def qr_map(self):
        '''Returns the per-pixel signed in-plane qr map (cached on first use).'''
        if self.qr_map_data is not None:
            return self.qr_map_data
        self._generate_qxyz_maps()
        return self.qr_map_data
    def _generate_qxyz_maps(self):
        '''Compute and cache qx/qy/qz/qr maps for every pixel, using the
        flat-detector geometry (scattering angles from pixel offsets).'''
        # Conversion factor for pixel coordinates
        # (where sample-detector distance is set to d = 1)
        c = (self.pixel_size_um/1e6)/self.distance_m
        x = np.arange(self.width) - self.x0
        y = np.arange(self.height) - self.y0
        X, Y = np.meshgrid(x, y)
        R = np.sqrt(X**2 + Y**2)
        #twotheta = np.arctan(self.r_map()*c) # radians
        theta_f = np.arctan2( X*c, 1 ) # radians
        #alpha_f_prime = np.arctan2( Y*c, 1 ) # radians
        alpha_f = np.arctan2( Y*c*np.cos(theta_f), 1 ) # radians
        self.qx_map_data = self.get_k()*np.sin(theta_f)*np.cos(alpha_f)
        self.qy_map_data = self.get_k()*( np.cos(theta_f)*np.cos(alpha_f) - 1 ) # TODO: Check sign
        self.qz_map_data = -1.0*self.get_k()*np.sin(alpha_f)
        # Signed in-plane magnitude (sign follows qx).
        self.qr_map_data = np.sign(self.qx_map_data)*np.sqrt(np.square(self.qx_map_data) + np.square(self.qy_map_data))
########################################
# CalibrationGonio
################################################################################
class CalibrationGonio(Calibration):
    """
    Calibration for a detector mounted on a goniometer arm.
    The geometric claculations used here are described:
    http://gisaxs.com/index.php/Geometry:WAXS_3D
    """
    # Experimental parameters
    ########################################
    def set_angles(self, det_phi_g=0., det_theta_g=0.,
                   sam_phi=0, sam_chi=0, sam_theta=0,
                   offset_x = 0, offset_y =0, offset_z=0):
        '''
        YG. Add sample rotation angles that convert qmap from lab frame to sample frame
        All the angles are given in degrees
        sam_phi, rotate along lab-frame x, CHX phi
        sam_chi, rotate along lab-frame z, CHX chi
        sam_theta, rotate along lab-frame y, CHX theta
        YG add offset corrections at Sep 21, 2017
        det_phi_g, rotate along y-axis, delta at CHX
        det_theta_g, away from z-plane, gamma at CHX
        For SMI, because only rotate along y-axis, (det_theta_g=0.), only care about
        offset_x, offset_z '''
        self.det_phi_g = det_phi_g
        self.det_theta_g = det_theta_g
        # Detector offsets are given in pixels; converted to meters when used.
        self.offset_x = offset_x
        self.offset_y = offset_y
        self.offset_z = offset_z
        self.sam_phi = sam_phi
        self.sam_chi = sam_chi
        self.sam_theta = sam_theta
    def rotation_matix(self, sam_phi, sam_theta, sam_chi, degrees=True):
        '''
        Build the combined sample rotation matrix Rx(phi) @ Ry(theta) @ Rz(chi).
        sam_phi, rotate along lab-frame x, CHX phi
        sam_chi, rotate along lab-frame z, CHX chi
        sam_theta, rotate along lab-frame y, CHX theta
        '''
        if degrees:
            sam_phi, sam_chi, sam_theta = np.radians(sam_phi), np.radians(sam_chi), np.radians(sam_theta)
        Rx = np.array( [ [1, 0, 0 ],
                         [0, np.cos( sam_phi ), np.sin( sam_phi ) ],
                         [0, -np.sin( sam_phi ), np.cos( sam_phi ) ]
                       ]
                     )
        Rz = np.array( [ [ np.cos( sam_chi ), np.sin( sam_chi ), 0 ],
                         [-np.sin( sam_chi ), np.cos( sam_chi ), 0 ],
                         [0, 0, 1 ]
                       ]
                     )
        Ry = np.array( [ [np.cos( sam_theta ), 0, np.sin( sam_theta ) ],
                         [0, 1, 0 ],
                         [-np.sin( sam_theta ), 0, np.cos( sam_theta ) ]
                       ]
                     )
        Rxy = np.dot(Rx, Ry)
        return np.dot(Rxy, Rz)
    def _generate_qxyz_map_SF_from_Lab(self, qx, qy, qz,
                                       sam_phi, sam_theta, sam_chi,
                                       degrees=True):
        '''
        Convert qmap from Lab frame to sample frame by applying the sample
        rotation matrix to each (qx, qy, qz) triple.
        '''
        self.Rot = self.rotation_matix( sam_phi, sam_theta, sam_chi, degrees=degrees )
        qsx, qsy, qsz = np.dot(self.Rot, [ np.ravel(qx), np.ravel(qy), np.ravel(qz)] )
        return qsx.reshape( qx.shape), qsy.reshape( qy.shape), qsz.reshape( qz.shape)
    def _generate_qxyz_maps_samFrame(self, degrees=True):
        """
        Generate lab-frame q maps, then rotate them into the sample frame and
        cache the results (qx/qy/qz/qr/q *_lab_data attributes).
        """
        self._generate_qxyz_maps()
        self.qx_map_lab_data, self.qy_map_lab_data, self.qz_map_lab_data = self._generate_qxyz_map_SF_from_Lab(
            self.qx_map_data, self.qy_map_data, self.qz_map_data,
            self.sam_phi, self.sam_theta, self.sam_chi,
            degrees=degrees )
        self.qr_map_lab_data = np.sqrt(np.square(self.qx_map_lab_data) + np.square(self.qy_map_lab_data))
        self.q_map_lab_data = np.sqrt(np.square(self.qx_map_lab_data) +
                                      np.square(self.qy_map_lab_data) +
                                      np.square(self.qz_map_lab_data)
                                      )
    def get_ratioDw(self):
        '''Return the ratio of sample-detector distance to detector width.'''
        width_mm = self.width*self.pixel_size_um/1000.
        return self.distance_m/(width_mm/1000.)
    # Maps
    ########################################
    def q_map(self):
        '''Return the cached per-pixel |q| map, generating it on first use.'''
        if self.q_map_data is None:
            self._generate_qxyz_maps()
        return self.q_map_data
    def angle_map(self):
        '''Return the per-pixel angle map.
        Bug fix: the original tested `is not None` (inverted), which
        regenerated the qxyz maps when a cached value already existed and
        returned None otherwise; moreover _generate_qxyz_maps() never sets
        angle_map_data. Delegate to the base-class implementation, which
        computes and caches the map from the beam center.
        '''
        if self.angle_map_data is None:
            super(CalibrationGonio, self).angle_map()
        return self.angle_map_data
    def _generate_qxyz_maps_no_offest(self):
        """
        Goniometer q maps WITHOUT detector offset corrections.
        The geometric claculations used here are described:
        http://gisaxs.com/index.php/Geometry:WAXS_3D
        NOTE(review): the small-angle block below overwrites qx/qy/qz before
        they are replaced by the goniometer values, but qr_map_data keeps the
        small-angle signed version while qr_c is discarded — confirm intent.
        """
        d = self.distance_m
        pix_size = self.pixel_size_um/1e6
        phi_g = np.radians(self.det_phi_g)
        theta_g = np.radians(self.det_theta_g)
        xs = (np.arange(self.width) - self.x0)*pix_size
        ys = (np.arange(self.height) - self.y0)*pix_size
        #ys = ys[::-1]
        X_c, Y_c = np.meshgrid(xs, ys)
        Dprime = np.sqrt( np.square(d) + np.square(X_c) + np.square(Y_c) )
        k_over_Dprime = self.get_k()/Dprime
        qx_c = k_over_Dprime*( X_c*np.cos(phi_g) - np.sin(phi_g)*(d*np.cos(theta_g) - Y_c*np.sin(theta_g)) )
        qy_c = k_over_Dprime*( X_c*np.sin(phi_g) + np.cos(phi_g)*(d*np.cos(theta_g) - Y_c*np.sin(theta_g)) - Dprime )
        qz_c = -1*k_over_Dprime*( d*np.sin(theta_g) + Y_c*np.cos(theta_g) )
        qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c))
        q_c = np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c))
        # Conversion factor for pixel coordinates
        # (where sample-detector distance is set to d = 1)
        c = (self.pixel_size_um/1e6)/self.distance_m
        x = np.arange(self.width) - self.x0
        y = np.arange(self.height) - self.y0
        X, Y = np.meshgrid(x, y)
        R = np.sqrt(X**2 + Y**2)
        #twotheta = np.arctan(self.r_map()*c) # radians
        theta_f = np.arctan2( X*c, 1 ) # radians
        #alpha_f_prime = np.arctan2( Y*c, 1 ) # radians
        alpha_f = np.arctan2( Y*c*np.cos(theta_f), 1 ) # radians
        self.qx_map_data = self.get_k()*np.sin(theta_f)*np.cos(alpha_f)
        self.qy_map_data = self.get_k()*( np.cos(theta_f)*np.cos(alpha_f) - 1 ) # TODO: Check sign
        self.qz_map_data = -1.0*self.get_k()*np.sin(alpha_f)
        self.qr_map_data = np.sign(self.qx_map_data)*np.sqrt(np.square(self.qx_map_data) + np.square(self.qy_map_data))
        self.qx_map_data = qx_c
        self.qy_map_data = qy_c
        self.qz_map_data = qz_c
        self.q_map_data = q_c
    def _generate_qxyz_maps(self):
        """
        Goniometer q maps WITH detector offset corrections (offsets in pixels,
        converted to meters here).
        The geometric claculations used here are described:
        http://gisaxs.com/index.php/Geometry:WAXS_3D
        YG add offset corrections at Sep 21, 2017
        """
        d = self.distance_m #
        pix_size = self.pixel_size_um/1e6 #in meter
        phi_g = np.radians(self.det_phi_g)
        theta_g = np.radians(self.det_theta_g)
        offset_x = self.offset_x *pix_size #in meter
        offset_y = self.offset_y *pix_size
        offset_z = self.offset_z *pix_size
        xs = (np.arange(self.width) - self.x0)*pix_size
        ys = (np.arange(self.height) - self.y0)*pix_size
        # Shift pixel coordinates and distance by the detector offsets.
        xsprime = xs - offset_x
        dprime = d - offset_y
        ysprime = ys - offset_z
        #ys = ys[::-1]
        X_c, Y_c = np.meshgrid(xsprime, ysprime)
        yprime = dprime*np.cos(theta_g) - Y_c*np.sin(theta_g)
        # Sample-to-pixel distance including the offset terms.
        Dprime = np.sqrt( np.square(dprime) + np.square(X_c) + np.square(Y_c) +
                          offset_x**2 + offset_y**2 + offset_z**2 +
                          2*offset_x*( X_c*np.cos(phi_g) - np.sin(phi_g) * yprime ) +
                          2*offset_y*( X_c*np.sin(phi_g) + np.cos(phi_g) * yprime ) +
                          2*offset_z*( dprime*np.sin(theta_g) + Y_c*np.cos(theta_g) )
                          )
        k_over_Dprime = self.get_k()/Dprime
        qx_c = k_over_Dprime*( X_c*np.cos(phi_g) - np.sin(phi_g) * yprime + offset_x)
        qy_c = k_over_Dprime*( X_c*np.sin(phi_g) + np.cos(phi_g) * yprime + offset_y - Dprime)
        qz_c = -1*k_over_Dprime*( dprime*np.sin(theta_g) + Y_c*np.cos(theta_g) + offset_z )
        qr_c = np.sqrt(np.square(qx_c) + np.square(qy_c))
        q_c = np.sqrt(np.square(qx_c) + np.square(qy_c) + np.square(qz_c))
        self.qx_map_data = qx_c
        self.qy_map_data = qy_c
        self.qz_map_data = qz_c
        self.q_map_data = q_c
        self.qr_map_data = qr_c
        # Cleanup: removed an unreachable `if False:` block that duplicated the
        # small-angle map computation (dead code; behavior unchanged).
<file_sep>#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
import sys
import warnings
import versioneer
from setuptools import setup, find_packages

# Read requirements.txt, dropping VCS (git) requirements -- which
# install_requires cannot handle -- as well as comment and blank lines.
with open('requirements.txt') as f:
    no_git_reqs = [
        r for r in f.read().splitlines()
        if r.strip() != '' and not (r.startswith('git') or r.startswith('#'))
    ]

setup(
    name='chxanalys',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    author='Brookhaven National Laboratory_CHX',
    packages=find_packages(),
    install_requires=no_git_reqs,
    #package_data={'chxtools/X-ray_database' : ['.dat']}, include_package_data=True,
)
<file_sep>#from .chx_libs import *
from .chx_libs import db, get_images, get_table, get_events, get_fields, np, plt , LogNorm
from .chx_generic_functions import show_img
def get_frames_from_dscan( hdr, detector = 'eiger4m_single_image' ):
    '''Extract every frame of `detector` from a d-scan header as one 3D array.

    Parameters
    ----------
    hdr : databroker header of the scan
    detector : str, field name of the detector data

    Returns
    -------
    ndarray of shape (num, shape[1], shape[0]), where num comes from the
    scan's plan_args and shape from the descriptor's data_keys.
    '''
    ev = get_events(hdr, [detector])
    length = int(hdr['start']['plan_args']['num'])
    shape = hdr['descriptors'][0]['data_keys'][detector]['shape'][:2]
    imgs = np.zeros([length, shape[1], shape[0]])
    for i in range(length):
        # Bug fix: the detector field name was previously hard-coded here
        # ('eiger4m_single_image'), silently ignoring the `detector` argument.
        imgs[i] = next(ev)['data'][detector][0]
    return np.array(imgs)
def load_metadata(hdr, name):
    '''Return the metadata dict attached to the first image stack of `name`.'''
    stacks = get_images(hdr, name)
    first_stack = stacks[0]
    return first_stack.md
def load_images(hdr, name):
    '''Load every image stack of `name` from `hdr` as one squeezed ndarray.'''
    stacks = get_images(hdr, name)  # e.g. 1 x 21 x 2167 x 2070
    arr = np.asarray(stacks)
    return np.squeeze(arr)
def test():
    '''Placeholder hook kept for API compatibility; intentionally a no-op.'''
def RemoveHot( img, threshold= 1E7, plot_=True ):
    '''Build a mask that zeroes out hot pixels of an image.

    Parameters
    ----------
    img : 2D array-like image
    threshold : pixels with value >= threshold are flagged as hot
    plot_ : if True, display the resulting mask with show_img

    Returns
    -------
    mask : ndarray (same shape/dtype rules as np.ones_like(img)); 1
        everywhere except 0 at hot pixels
    '''
    arr = np.asarray(img)  # convert once instead of twice as before
    mask = np.ones_like(arr)
    badp = np.where(arr >= threshold)
    if len(badp[0]) != 0:
        mask[badp] = 0
    if plot_:
        show_img(mask)
    return mask
<file_sep>"""
Sep 10 Developed by Y.G.@CHX
<EMAIL>
This module is for the static SAXS analysis, such as fit form factor
"""
#import numpy as np
#from lmfit import Model
#from lmfit import minimize, Parameters, Parameter, report_fit
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#from matplotlib.colors import LogNorm
from chxanalys.chx_libs import *
from chxanalys.chx_generic_functions import show_img, plot1D, find_index
from scipy.special import gamma, gammaln
from scipy.optimize import leastsq
def mono_sphere_form_factor_intensity( x, radius, delta_rho=100,fit_func='G'):
    '''Form-factor intensity of monodisperse spheres.

    Input:
        x/q: in A-1, array or a value
        radius/R: in A
        delta_rho: Scattering Length Density (SLD) difference between solvent
            and the scatterer, A-2
        fit_func: unused here; kept for interface compatibility with the
            polydisperse variant
    Output:
        The form factor intensity of the mono dispersed scatterer
    '''
    qR = x * radius
    sphere_volume = (4.0 / 3.0) * np.pi * radius ** 3
    scale = 36 * np.pi * ((delta_rho * sphere_volume) ** 2) / (4 * np.pi)
    shape_term = (np.sin(qR) - qR * np.cos(qR)) ** 2 / qR ** 6
    intensity = scale * shape_term
    return intensity.real
def gaussion( x, u, sigma):
    '''Normalized Gaussian PDF with mean u and standard deviation sigma.
    (Misspelled name kept for backward compatibility with existing callers.)'''
    norm = sigma * np.sqrt(2 * np.pi)
    z = (x - u) / sigma
    return np.exp(-0.5 * z * z) / norm
def Schultz_Zimm(x,u,sigma):
    '''Schultz-Zimm size distribution.

    http://sasfit.ingobressler.net/manual/Schultz-Zimm
    See also "The size distribution of 'gold standard' nanoparticles",
    Anal Bioanal Chem (2009) 395:1651-1660, DOI 10.1007/s00216-009-3049-5

    x : size variable; u : mean size; sigma : relative width (k = 1/sigma^2)
    '''
    k = 1.0 / sigma ** 2
    ratio = x / u
    return (ratio ** (k - 1)) * (k ** k) * np.exp(-k * ratio) / (gamma(k) * u)
def distribution_func( radius=1.0, sigma=0.1, num_points=20, spread=3, func='G'):
    '''Sample a size distribution centered on `radius`.

    radius : the central radius
    sigma : sqrt root of variance in percent (fraction of radius)
    num_points : number of sample points
    spread : half-width of the sampled interval in units of radius*sigma
    func : 'G' for Gaussian, 'S' for Schultz-Zimm, or a callable f(x, u, s)

    Returns (x, step, weights).
    '''
    # Shrink the spread if it would push the lower edge to zero or below.
    if 1 - spread * sigma <= 0:
        spread = (1 - sigma) / sigma - 1
    half_width = radius * spread * sigma
    x, step = np.linspace(radius - half_width, radius + half_width,
                          num_points, retstep=True)
    if func == 'G':
        func = gaussion
    elif func == 'S':
        func = Schultz_Zimm
    return x, step, func(x, radius, radius * sigma)
def poly_sphere_form_factor_intensity( x, radius, sigma=0.1, delta_rho=100, background=0, num_points=20, spread=5,
                                      fit_func='G'):
    '''Form-factor intensity of polydisperse spheres.

    Input:
        x/q: in A-1, array or a value
        radius/R: in A
        sigma: sqrt root of variance in percent (0 -> monodisperse)
        delta_rho: Scattering Length Density (SLD) difference between solvent
            and the scatterer, A-2
        background: constant added to the result
        num_points, spread: sampling of the size distribution
        fit_func: G: Gaussian; S: Schultz-Zimm distribution
    Output:
        The form factor intensity of the polydispersed scatterer
    '''
    q = x
    if not hasattr(q, '__iter__'):
        q = np.array([q])
    if sigma == 0:
        intensity = mono_sphere_form_factor_intensity(q, radius, delta_rho)
    else:
        # Weighted sum of monodisperse form factors over the size distribution.
        radii, step, weights = distribution_func(radius=radius, sigma=sigma,
                                                 num_points=num_points,
                                                 spread=spread, func=fit_func)
        intensity = np.zeros(len(q))
        for r_i, w_i in zip(radii, weights):
            intensity = intensity + mono_sphere_form_factor_intensity(q, r_i, delta_rho) * w_i * step
    return intensity + background
def poly_sphere_form_factor_intensity_q2( x, radius, sigma=0.1, delta_rho=1, fit_func='G'):#, scale=1, baseline=0):
    '''Kratky-style form factor: I(q) * q^2 for polydisperse spheres.

    Input:
        x/q: in A-1, array or a value
        radius/R: in A
        sigma: sqrt root of variance in percent
        delta_rho: Scattering Length Density (SLD) difference between solvent
            and the scatterer, A-2
        fit_func: G: Gaussian; S: Schultz-Zimm distribution
    Output:
        The form factor intensity of the polydispersed scatterer, times q^2
    '''
    # Bug fix: fit_func was previously passed positionally and therefore
    # landed in the `background` parameter slot of
    # poly_sphere_form_factor_intensity; pass it by keyword instead.
    return poly_sphere_form_factor_intensity(x, radius, sigma, delta_rho,
                                             fit_func=fit_func) * x ** 2
def find_index_old( x, x0, tolerance= None):
    '''Find the index of the element of x closest to x0 within a tolerance.

    x : sequence of values (assumed ascending and roughly evenly spaced)
    x0 : target value
    tolerance : match window; defaults to half the spacing of the first two
        elements of x

    Returns the index of the first element within tolerance of x0; the index
    is clamped to len(x)-1 / 0 when x0 lies above / below the range of x.
    Returns None when x0 is inside the range but no element matches.
    '''
    if tolerance is None:  # idiom fix: was `tolerance == None`
        tolerance = (x[1] - x[0]) / 2.
    # Clamp out-of-range targets to the ends.
    if x0 > max(x):
        return len(x) - 1
    if x0 < min(x):
        return 0
    for i, item in enumerate(x):
        if abs(item - x0) <= tolerance:
            return i
    return None
def get_form_factor_fit( q, iq, guess_values, fit_range=None, fit_variables = None, function='poly_sphere',
                        fit_func='G',
                        *argv, **kwargs):
    '''
    Fit a sphere form factor to I(q) (GUI-oriented helper).

    The supported fitting functions are
        poly_sphere (poly_sphere_form_factor_intensity),
        mono_sphere (mono_sphere_form_factor_intensity)

    Parameters
    ----------
    q : q vector
    iq : form factor
    guess_values : dict with initial guesses for keys
        'radius', 'sigma', 'delta_rho', 'background'
    fit_range : optional (qmin, qmax) limits for the fit
    fit_variables : optional dict {param_name: bool} to freeze/vary parameters
    function : 'poly_sphere' (polydisperse) or 'mono_sphere' (monodisperse)
    fit_func : size-distribution flag; currently not forwarded to the model
        (left for interface compatibility)

    Returns
    -------
    (lmfit ModelResult, q array actually used in the fit)
    '''
    if function == 'poly_sphere':
        mod = Model(poly_sphere_form_factor_intensity)
    elif function == 'mono_sphere':
        mod = Model(mono_sphere_form_factor_intensity)
    else:
        print ("The %s is not supported.The supported functions include poly_sphere and mono_sphere"%function)
    # Restrict the fit to the requested q-range.
    if fit_range is not None:
        x1, x2 = fit_range
        q1 = find_index(q, x1, tolerance=None)
        q2 = find_index(q, x2, tolerance=None)
    else:
        q1 = 0
        q2 = len(q)
    q_ = q[q1:q2]
    iq_ = iq[q1:q2]
    _r = guess_values['radius']
    _sigma = guess_values['sigma']
    _delta_rho = guess_values['delta_rho']
    _background = guess_values['background']
    # Loose bounds (one decade around the guesses) keep the fit stable.
    mod.set_param_hint('radius', min=_r / 10, max=_r * 10)
    mod.set_param_hint('sigma', min=_sigma / 10, max=_sigma * 10)
    pars = mod.make_params(radius=_r, sigma=_sigma, delta_rho=_delta_rho,
                           background=_background)
    if fit_variables is not None:
        for var in list(fit_variables.keys()):
            pars[var].vary = fit_variables[var]
    fit_power = 0  # fit I(q) directly; use 2 for a Kratky-weighted fit
    result = mod.fit(iq_ * q_ ** fit_power, pars, x=q_)
    # NOTE: the original also extracted radius/sigma/delta_rho from
    # result.best_values here but never used them; removed as dead code.
    return result, q_
def plot_form_factor_with_fit(q, iq, q_, result, fit_power=2, res_pargs=None, return_fig=False,
                              *argv, **kwargs):
    '''Plot a measured form factor together with its fit.

    Parameters
    ----------
    q, iq : full measured curve
    q_ : q-range actually used in the fit
    result : lmfit ModelResult from get_form_factor_fit
    fit_power : power of q the fitted data were multiplied by (the plot
        divides it back out)
    res_pargs : optional dict with 'uid' and 'path' (also accepted as kwargs)
    return_fig : if True, return the matplotlib figure

    Saves the figure as <path>/uid=<uid>--form_factor--fit-.png.
    '''
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
    else:
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
    fig, ax = plt.subplots()
    title_qr = 'form_factor_fit'
    plt.title('uid= %s:--->'%uid + title_qr, fontsize=20, y=1.02)
    r = result.best_values['radius']
    sigma = result.best_values['sigma']
    ax.semilogy(q, iq, 'ro', label='Form Factor')
    ax.semilogy(q_, result.best_fit / q_**fit_power, '-b', lw=3, label='Fit')
    # Annotate fitted radius (A -> nm) and sigma on the axes.
    txts = r'radius' + r' = %.2f '%(r / 10.) + r'$ nm$'
    ax.text(x=0.02, y=.35, s=txts, fontsize=14, transform=ax.transAxes)
    txts = r'sigma' + r' = %.3f'%(sigma)
    ax.text(x=0.02, y=.25, s=txts, fontsize=14, transform=ax.transAxes)
    ax.legend(loc='best')
    if 'ylim' in kwargs:
        ax.set_ylim(kwargs['ylim'])
    elif 'vlim' in kwargs:
        vmin, vmax = kwargs['vlim']
        # Bug fix: `y` was previously undefined here (NameError whenever the
        # 'vlim' option was used); derive the limits from the data (iq).
        ax.set_ylim([min(iq) * vmin, max(iq[1:]) * vmax])
    if 'xlim' in kwargs:
        ax.set_xlim(kwargs['xlim'])
    fp = path + 'uid=%s--form_factor--fit-'%(uid) + '.png'
    plt.savefig(fp, dpi=fig.dpi)
    plt.show()
    if return_fig:
        return fig
def fit_form_factor( q, iq, guess_values, fit_range=None, fit_variables = None, res_pargs=None, function='poly_sphere', fit_func='G', return_fig=False, *argv, **kwargs):
    '''
    Fit a sphere form factor to I(q) and plot the result.

    The supported fitting functions are
        poly_sphere (poly_sphere_form_factor_intensity),
        mono_sphere (mono_sphere_form_factor_intensity)

    Parameters
    ----------
    q : q vector
    iq : form factor
    res_pargs : a dict, contains keys such as path, uid...
    guess_values : a dict, contains keys
        radius: the initial guess of sphere central radius
        sigma: the initial guess of sqrt root of variance in percent
    function :
        mono_sphere: fit by mono dispersed sphere model
        poly_sphere: fit by poly dispersed sphere model
    return_fig : if True, the plotting helper returns the figure

    Returns
    -------
    lmfit ModelResult (fit results include radius and sigma)

    an example:
        result = fit_form_factor( q, iq, res_pargs=None, function='poly_sphere')
    '''
    result, q_fit = get_form_factor_fit(q, iq, guess_values,
                                        fit_range=fit_range,
                                        fit_variables=fit_variables,
                                        function=function, fit_func=fit_func)
    plot_form_factor_with_fit(q, iq, q_fit, result, fit_power=0,
                              res_pargs=res_pargs, return_fig=return_fig)
    return result
def fit_form_factor2( q, iq, guess_values, fit_range=None, fit_variables = None, res_pargs=None, function='poly_sphere', fit_func='G',
                     *argv, **kwargs):
    '''
    Fit a sphere form factor to I(q) (self-contained variant that also plots
    and saves the figure).

    The supported fitting functions are
        poly_sphere (poly_sphere_form_factor_intensity),
        mono_sphere (mono_sphere_form_factor_intensity)

    Parameters
    ----------
    q : q vector
    iq : form factor
    res_pargs : a dict, contains keys such as path, uid...
    guess_values : a dict, contains keys
        radius: the initial guess of sphere central radius
        sigma: the initial guess of sqrt root of variance in percent
        delta_rho: the initial guess of the SLD contrast
    function :
        mono_sphere: fit by mono dispersed sphere model
        poly_sphere: fit by poly dispersed sphere model

    Returns
    -------
    dict with keys radius, sigma, delta_rho (the best-fit values)
    '''
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
    else:
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
    if function == 'poly_sphere':
        mod = Model(poly_sphere_form_factor_intensity)
    elif function == 'mono_sphere':
        mod = Model(mono_sphere_form_factor_intensity)
    else:
        print ("The %s is not supported.The supported functions include poly_sphere and mono_sphere"%function)
    # Restrict the fit to the requested q-range.
    if fit_range is not None:
        x1, x2 = fit_range
        q1 = find_index(q, x1, tolerance=None)
        q2 = find_index(q, x2, tolerance=None)
    else:
        q1 = 0
        q2 = len(q)
    q_ = q[q1:q2]
    iq_ = iq[q1:q2]
    _r = guess_values['radius']
    _sigma = guess_values['sigma']
    _delta_rho = guess_values['delta_rho']
    # Loose bounds around the guesses keep the fit stable.
    mod.set_param_hint('radius', min=_r / 10, max=_r * 10)
    mod.set_param_hint('sigma', min=_sigma / 10, max=_sigma * 10)
    mod.set_param_hint('delta_rho', min=_delta_rho / 1E6, max=_delta_rho * 1E6)
    pars = mod.make_params(radius=_r, sigma=_sigma, delta_rho=_delta_rho)
    if fit_variables is not None:
        for var in list(fit_variables.keys()):
            pars[var].vary = fit_variables[var]
    fig = plt.figure(figsize=(8, 6))
    title_qr = 'form_factor_fit'
    plt.title('uid= %s:--->'%uid + title_qr, fontsize=20, y=1.02)
    fit_power = 0  # fit I(q) directly; use 2 for a Kratky-weighted fit
    result = mod.fit(iq_ * q_ ** fit_power, pars, x=q_)
    if function == 'poly_sphere':
        sigma = result.best_values['sigma']
    elif function == 'mono_sphere':
        sigma = 0
    r = result.best_values['radius']
    delta_rho = result.best_values['delta_rho']
    ax = fig.add_subplot(1, 1, 1)
    ax.semilogy(q, iq, 'ro', label='Form Factor')
    ax.semilogy(q_, result.best_fit / q_**fit_power, '-b', lw=3, label='Fit')
    # Annotate fitted radius (A -> nm) and sigma on the axes.
    txts = r'radius' + r' = %.2f '%(r / 10.) + r'$ nm$'
    ax.text(x=0.02, y=.35, s=txts, fontsize=14, transform=ax.transAxes)
    txts = r'sigma' + r' = %.3f'%(sigma)
    ax.text(x=0.02, y=.25, s=txts, fontsize=14, transform=ax.transAxes)
    ax.legend(loc='best')
    if 'ylim' in kwargs:
        ax.set_ylim(kwargs['ylim'])
    elif 'vlim' in kwargs:
        vmin, vmax = kwargs['vlim']
        # Bug fix: `y` was previously undefined here (NameError whenever the
        # 'vlim' option was used); derive the limits from the data (iq).
        ax.set_ylim([min(iq) * vmin, max(iq[1:]) * vmax])
    if 'xlim' in kwargs:
        ax.set_xlim(kwargs['xlim'])
    fp = path + '%s_form_factor_fit'%(uid) + '.png'
    fig.savefig(fp, dpi=fig.dpi)
    fig.tight_layout()
    result = dict(radius=r, sigma=sigma, delta_rho=delta_rho)
    return result
def show_saxs_qmap( img, pargs, width=200,vmin=.1, vmax=300, logs=True,image_name='',
                    show_colorbar=True, file_name='', show_time = False,
                    save=False, show_pixel=False, aspect= 1,save_format='png', cmap='viridis',):
    '''
    Show a SAXS q-map of a region of `img` centered on the beam center.

    Parameter:
        image: the frame
        setup pargs, a dictionary, including
            dpix    #in mm, eiger 4m is 0.075 mm
            lambda_ # wavelegth of the X-rays in Angstroms
            Ldet    # detector to sample distance (mm)
            path    # where to save data
            center: beam center in pixel, center[0] (x), should be image-y, and should be python-x
        width: the showed area centered at center
        show_pixel: if True label axes in pixels, otherwise in q (A^-1)
    Return:
        the `ax` object used for plotting
    '''
    Ldet = pargs['Ldet']
    dpix = pargs['dpix']
    lambda_ = pargs['lambda_']
    center = pargs['center']
    cx,cy = center
    path= pargs['path']
    lx,ly = img.shape
    #center = [ center[1], center[0] ] #due to python conventions
    w= width
    img_ = np.zeros( [w,w] )
    minW, maxW = min( center[0]-w, center[1]-w ), max( center[0]-w, center[1]-w )
    if w < minW:
        # NOTE(review): the second slice is cy+w//2:cy+w//2 (an empty slice);
        # the intent was likely cy-w//2:cy+w//2.  img_ is never used after
        # this point, so this branch is effectively dead code — confirm
        # before relying on it.
        img_ = img[cx-w//2:cx+w//2, cy+w//2:cy+w//2]
    #elif w > maxW:
    #    img_[ cx-w//2:cx+w//2, cy+w//2:cy+w//2 ] =
    # Clip the display window to the image bounds.
    ROI = [ max(0, center[0]-w), min( center[0]+w, lx), max(0, center[1]-w), min( ly, center[1]+w ) ]
    #print( ROI )
    # NOTE(review): plt.subplots() returns a (fig, ax) tuple; the tuple is
    # passed as `ax` to show_img below — confirm show_img accepts this form.
    ax = plt.subplots()
    if not show_pixel:
        #print( 'here' )
        # Convert the ROI edges from pixels to q (A^-1) for axis labeling.
        two_theta = utils.radius_to_twotheta(Ldet ,np.array( [ ( ROI[0] - cx ) * dpix,( ROI[1] - cx ) * dpix,
                        ( ROI[2] - cy ) * dpix,( ROI[3] - cy ) * dpix,
                            ] ))
        qext = utils.twotheta_to_q(two_theta, lambda_)
        #print( two_theta, qext )
        # The 1e-15 floor keeps log scaling well-defined for zero pixels.
        show_img( 1e-15+ img[ ROI[0]:ROI[1], ROI[2]:ROI[3] ], ax=ax,
                 xlabel=r"$q_x$" + '('+r'$\AA^{-1}$'+')',
                 ylabel= r"$q_y$" + '('+r'$\AA^{-1}$'+')', extent=[qext[3],qext[2],qext[0],qext[1]],
                 vmin=vmin, vmax=vmax, logs= logs, image_name= image_name, file_name= file_name,
                 show_time = show_time,
                 save_format=save_format,cmap=cmap, show_colorbar=show_colorbar,
                 save= save, path=path,aspect= aspect)
    else:
        #qext = w
        show_img( 1e-15+ img[ ROI[0]:ROI[1], ROI[2]:ROI[3] ], ax=ax,
                 xlabel= 'pixel', ylabel= 'pixel', extent=[ROI[0],ROI[1],ROI[2],ROI[3]],
                 vmin=vmin, vmax=vmax, logs= logs, image_name= image_name, save_format=save_format,cmap=cmap,
                 show_colorbar=show_colorbar, file_name= file_name, show_time = show_time,
                 save= save, path=path,aspect= aspect)
    return ax
########################
##Fit sphere by scipy.leastsq fit
def fit_sphere_form_factor_func(parameters, ydata, xdata, yerror=None, nonvariables=None):
    '''##Develop by YG at July 28, 2017 @CHX
    Residual function for scipy.optimize.leastsq: polydisperse-sphere form
    factor vs. measured data.

    parameters : (radius, sigma, delta_rho, background)
    ydata, xdata : measured intensity and q values
    yerror, nonvariables : accepted for interface compatibility; unused

    Returns sqrt(|ydata - model|), the per-point residual fed to leastsq.
    '''
    radius, sigma, delta_rho, background = parameters
    model = poly_sphere_form_factor_intensity(xdata, radius=radius, sigma=sigma,
                                              delta_rho=delta_rho,
                                              background=background,
                                              num_points=10, spread=3,
                                              fit_func='G')
    residual = np.abs(ydata - model)
    return np.sqrt(residual)
def fit_sphere_form_factor_by_leastsq( p0, q, pq, fit_range=None, ):
    '''##Develop by YG at July 28, 2017 @CHX
    Fit the form factor of polydisperse spheres with scipy.optimize.leastsq.

    Input:
        p0: initial guess, (radius, sigma, delta_rho, background)
        q, pq: measured q values and intensities
        fit_range: optional (qmin, qmax); defaults to the full range
    Return
        leastsq result tuple; res[0] holds the fitted parameters
    '''
    # Bug fix: q1/q2 were only assigned inside `if fit_range is not None`,
    # raising NameError whenever fit_range was omitted; default to the full
    # q-range instead.
    if fit_range is not None:
        x1, x2 = fit_range
        q1, q2 = find_index(q, x1), find_index(q, x2)
    else:
        q1, q2 = 0, len(q)
    res = leastsq(fit_sphere_form_factor_func, [p0], args=(pq[q1:q2], q[q1:q2],),
                  ftol=1.49012e-38, xtol=1.49012e-38, factor=100,
                  full_output=1)
    return res
def plot_fit_sphere_form_factor( q, pq, res, p0=None,xlim=None, ylim=None ):
    '''##Develop by YG at July 28, 2017 @CHX
    Plot measured P(q), the fitted curve from `res`, and (optionally) the
    curve produced by the initial guess p0.  `res` is the tuple returned by
    fit_sphere_form_factor_by_leastsq.'''
    if p0 is not None:
        r0, s0, d0, b0 = p0
        init_curve = poly_sphere_form_factor_intensity(q, radius=r0, sigma=s0,
                                                       delta_rho=d0,
                                                       background=b0)
    r1, s1, d1, b1 = res[0]
    fit_curve = poly_sphere_form_factor_intensity(q, radius=r1, sigma=s1,
                                                  delta_rho=d1, background=b1)
    fig, ax = plt.subplots()
    if p0 is not None:
        plot1D(x=q, y=init_curve, c='b', m='', ls='-', lw=3, ax=ax, logy=True,
               legend='Init_Fitting')
    plot1D(x=q, y=fit_curve, c='r', m='', ls='-', lw=3, ax=ax, logy=True,
           legend='Fitting')
    plot1D(x=q, y=pq, c='k', m='X', ax=ax, markersize=3, ls='', legend='data',
           xlim=xlim, ylim=ylim, logx=True, xlabel='Q (A-1)', ylabel='P(Q)')
    # Annotate fitted radius (A -> nm) and sigma.
    txts = r'radius' + r' = %.2f ' % (res[0][0] / 10.) + r'$ nm$'
    ax.text(x=0.02, y=.25, s=txts, fontsize=14, transform=ax.transAxes)
    txts = r'sigma' + r' = %.3f' % (res[0][1])
    ax.text(x=0.02, y=.15, s=txts, fontsize=14, transform=ax.transAxes)
def exm_plot():
    # NOTE(review): example/scratch helper — relies on module-level/global
    # variables q, iq and ff being defined elsewhere; it will raise
    # NameError if called without them.
    fig, ax = plt.subplots()
    ax.semilogy( q, iq, 'ro',label='data')
    ax.semilogy( q, ff, '-b',label='fit')
    ax.set_xlim( [0.0001, .01] )
    ax.set_ylim( [1E-2,1E4] )
    ax.legend(loc='best')
    #plot1D( iq, q, logy=True, xlim=[0.0001, .01], ylim=[1E-3,1E4], ax=ax, legend='data')
    #plot1D( ff, q, logy=True, xlim=[0.0001, .01], ax=ax, legend='cal')
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/XPCS_SAXS.py
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/chx_generic_functions.py
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/SAXS.py
<file_sep>"""
Dec 10, 2015 Developed by Y.G.@CHX
<EMAIL>
This module is for the SAXS XPCS analysis
"""
from chxanalys.chx_libs import ( colors, colors_copy, markers, markers_copy,
colors_, markers_, )
from chxanalys.chx_libs import ( Figure, RUN_GUI )
from chxanalys.chx_generic_functions import *
from scipy.special import erf
from chxanalys.chx_compress_analysis import ( compress_eigerdata, read_compressed_eigerdata,
init_compress_eigerdata,
Multifile,get_each_ring_mean_intensityc,get_avg_imgc, mean_intensityc )
from chxanalys.chx_correlationc import ( cal_g2c,Get_Pixel_Arrayc,auto_two_Arrayc,get_pixelist_interp_iq,)
from chxanalys.chx_correlationp import ( cal_g2p)
from pandas import DataFrame
import os
def get_iq_invariant( qt, iqst ):
    '''Scattering invariant: sum of q^2 * I(q, t) over q for each time slice.

    qt: 1D array of q values, shape (Nq,)
    iqst: 2D array of I(q) curves, shape (Ntime, Nq)

    Returns a 1D array of length Ntime.
    '''
    weighted = iqst * np.square(qt)
    return weighted.sum(axis=1)
def plot_time_iq_invariant( time_stamp, invariant, pargs, save=True,):
    '''Plot the scattering invariant versus time; optionally save both the
    figure and a CSV of the data under pargs['path'] using pargs['uid'].'''
    fig, ax = plt.subplots()
    plot1D(x=time_stamp, y=invariant,
           xlabel='time (s)', ylabel='I(q)*Q^2', title='I(q)*Q^2 ~ time',
           m='o', c='b', ax=ax)
    if save:
        path = pargs['path']
        uid = pargs['uid']
        data = np.vstack([time_stamp, np.array(invariant)]).T
        save_arrays(data, label=['time', 'Invariant'],
                    filename='%s_iq_invariant.csv'%uid, path=path)
        #fp = path + 'uid= %s--Iq~t-'%uid + CurTime + '.png'
        fp = path + '%s_iq_invariant'%uid + '.png'
        fig.savefig(fp, dpi=fig.dpi)
def plot_q2_iq( qt, iqst, time_stamp, pargs, ylim=[ -0.001, 0.01] ,
               xlim=[0.007,0.2],legend_size=4, save=True, ):
    '''Kratky-style plot: q^2 * I(q) for every time slice on one axis.

    qt: 1D q values; iqst: 2D array (Ntime, Nq); time_stamp: time labels (s)
    pargs: dict with 'path' and 'uid', used when save=True.'''
    fig, ax = plt.subplots()
    for idx in range(iqst.shape[0]):
        kratky = iqst[idx] * qt ** 2
        label = 'time_%s s' % (round(time_stamp[idx], 4))
        plot1D(x=qt, y=kratky, legend=label, xlabel='Q (A-1)',
               ylabel='I(q)*Q^2', title='I(q)*Q^2 ~ time',
               m=markers[idx], c=colors[idx], ax=ax, ylim=ylim, xlim=xlim,
               legend_size=legend_size)
    if save:
        path = pargs['path']
        uid = pargs['uid']
        fig.savefig(path + '%s_q2_iq'%uid + '.png', dpi=fig.dpi)
def recover_img_from_iq( qp, iq, center, mask):
    '''YG. develop at CHX, 2017 July 18
    Reconstruct a 2D image from a circular average I(q) by interpolating the
    1D curve back onto every pixel, then applying the mask.'''
    interp = get_pixelist_interp_iq(qp, iq, np.ones_like(mask), center)
    recovered = interp.reshape(mask.shape) * mask
    return recovered
def get_cirucular_average_std( img, mask, setup_pargs, img_name='xx' ):
    '''YG. develop at CHX, 2017 July 18
    Standard deviation of the circular average of img:
    image -> I(q) -> recovered mean image -> (image - mean)^2 -> I(q) -> sqrt
    Returns (qp, iq, q, std).'''
    qp, iq, q = get_circular_average(img, mask, pargs=setup_pargs, save=False)
    beam_center = setup_pargs['center']
    residual_sq = (img - recover_img_from_iq(qp, iq, beam_center, mask)) ** 2
    _, iq_var, _ = get_circular_average(residual_sq, mask, pargs=setup_pargs,
                                        save=False)
    std = np.sqrt(iq_var)
    return qp, iq, q, std
def get_delta_img( img, mask, setup_pargs, img_name='xx', plot=False ):
    '''YG. develop at CHX, 2017 July 18
    Difference between img and the image recovered from its circular average
    (scaled to the same mean).  Optionally display the result.'''
    qp, iq, q = get_circular_average(img, mask, pargs=setup_pargs, save=False)
    recovered = recover_img_from_iq(qp, iq, setup_pargs['center'], mask)
    delta = img - recovered * img.mean() / recovered.mean()
    if plot:
        show_img(delta, logs=True, aspect=1, cmap=cmap_albula,
                 vmin=1e-5, vmax=10 ** 1, image_name=img_name)
    return delta
def combine_ring_anglar_mask(ring_mask, ang_mask ):
'''combine ring and anglar mask '''
ring_max = ring_mask.max()
ang_mask_ = np.zeros( ang_mask.shape )
ind = np.where(ang_mask!=0)
ang_mask_[ind ] = ang_mask[ ind ] + 1E9 #add some large number to qr
dumy_ring_mask = np.zeros( ring_mask.shape )
dumy_ring_mask[ring_mask==1] =1
dumy_ring_ang = dumy_ring_mask * ang_mask
real_ang_lab = np.int_( np.unique( dumy_ring_ang )[1:] ) -1
ring_ang = ring_mask * ang_mask_
#print( real_ang_lab )
ura = np.unique( ring_ang )[1:]
ur = np.unique( ring_mask )[1:]
ua = np.unique( ang_mask )[real_ang_lab]
#print( np.unique( ring_mask )[1:], np.unique( ang_mask )[1:], np.unique( ring_ang )[1:] )
ring_ang_ = np.zeros_like( ring_ang )
newl = np.arange( 1, len(ura)+1)
#newl = np.int_( real_ang_lab )
#print( ura, ur, ua )
#print( len(ura) )
for i, label in enumerate(ura):
#print (i, label)
ring_ang_.ravel()[ np.where( ring_ang.ravel() == label)[0] ] = newl[i]
#print( np.unique( ring_ang_ ), len( np.unique( ring_ang_ ) ) )
return np.int_(ring_ang_)
def get_seg_from_ring_mask( inner_angle, outer_angle, num_angles, width_angle, center, ring_mask, qr_center ):
    '''<NAME>, 2017
    A simple wrap function to carve `ring_mask` into angular segments and
    build the matching qval dict.

    Parameter:
        inner_angle, outer_angle, num_angles, width_angle: to define the angle
        center: beam center
        ring_mask: two-d array
        qr_center: radial q values of the rings
    Return:
        (seg_mask, qval_dict) where seg_mask is a two-d array
    '''
    sector_width = (outer_angle - inner_angle) / (num_angles + 0.01)
    ang_mask, ang_center, ang_edges = get_angular_mask(
        ring_mask, inner_angle=inner_angle, outer_angle=outer_angle,
        width=sector_width, num_angles=num_angles, center=center,
        flow_geometry=True)
    seg_mask = combine_ring_anglar_mask(ring_mask, ang_mask)
    qval_dict = get_qval_dict(qr_center=qr_center, qz_center=ang_center)
    return seg_mask, qval_dict
def get_seg_dict_from_ring_mask( inner_angle, outer_angle, num_angles, width_angle, center,
                                ring_mask, qr_center ):
    '''YG. Jan 6, 2017
    A simple wrap function: build an angular mask over the whole detector,
    intersect it with `ring_mask`, and return the combined mask plus a qval
    dict re-indexed to the surviving ROIs.

    Parameter:
        inner_angle, outer_angle, num_angles, width_angle: to define the angle
        center: beam center
        ring_mask: two-d array
        qr_center: radial q values of the rings
    Return:
        (seg_mask, qval_dict) where seg_mask is a two-d array
    '''
    sector_width = (outer_angle - inner_angle) / (num_angles + 0.01)
    ang_mask, ang_center, ang_edges = get_angular_mask(
        np.ones_like(ring_mask), inner_angle=inner_angle,
        outer_angle=outer_angle, width=sector_width,
        num_angles=num_angles, center=center, flow_geometry=True)
    seg_mask, good_ind = combine_two_roi_mask(ring_mask, ang_mask)
    qval_dict = get_qval_dict(qr_center=qr_center, qz_center=ang_center)
    # Keep only the ROIs that survived the intersection, renumbered from 0.
    qval_dict_ = {i: qval_dict[k] for (i, k) in enumerate(good_ind)}
    return seg_mask, qval_dict_
def combine_two_roi_mask( ring_mask, ang_mask, pixel_num_thres=10):
    '''Combine two roi_masks into a new roi_mask, dropping small ROIs.

    pixel_num_thres: integer, the low limit pixel number in each roi of the
        combined mask, i.e., if the pixel number in one roi of the combined
        mask is smaller than pixel_num_thres, that roi will be considered as
        a bad one and be removed.

    e.g., ring_mask is a ring shaped mask, with unique index as (1,2)
    ang_mask is a angular shaped mask, with unique index as (1,2,3,4)
    the new mask will be ( 1,2,3,4 [for first ring];
                           5,6,7,8 [for second ring];
                           ...)

    Returns (new_mask, good_ind - 1), good_ind being the surviving labels.
    '''
    rf = np.ravel(ring_mask)
    ruiq = np.unique(ring_mask)
    auiq = np.unique(ang_mask)
    maxa = np.max(auiq)
    ring_mask_ = np.zeros_like(ring_mask)
    # Bug fix: this zero array was previously created twice in a row.
    new_mask_ = np.zeros_like(ring_mask)
    # Offset each ring label by a multiple of the largest angular label so
    # that ring + angle sums are unique per (ring, angle) pair.
    for i, ind in enumerate(ruiq[1:]):
        ring_mask_.ravel()[np.where(rf == ind)[0]] = maxa * i
    new_mask = ((ring_mask_ + ang_mask) *
                np.array(ring_mask, dtype=bool) *
                np.array(ang_mask, dtype=bool)
                )
    # Count pixels per label and keep only sufficiently populated ROIs.
    qind, pixelist = roi.extract_label_indices(new_mask)
    noqs = len(np.unique(qind))
    nopr = np.bincount(qind, minlength=(noqs + 1))[1:]
    good_ind = np.where(nopr >= pixel_num_thres)[0] + 1
    l = len(good_ind)
    new_ind = np.arange(1, l + 1)
    for i, gi in enumerate(good_ind):
        new_mask_.ravel()[np.where(new_mask.ravel() == gi)[0]] = new_ind[i]
    return new_mask_, good_ind - 1
def bin_1D(x, y, nx=None, min_x=None, max_x=None):
    """
    Bin the values in y based on their x-coordinates

    Parameters
    ----------
    x : array
        position
    y : array
        intensity
    nx : integer, optional
        number of bins to use; defaults to int(max_x - min_x)
    min_x : float, optional
        Left edge of first bin; defaults to minimum value of x
    max_x : float, optional
        Right edge of last bin; defaults to maximum value of x

    Returns
    -------
    edges : array
        edges of bins, length nx + 1
    val : array
        sum of values in each bin, length nx
    count : array
        The number of counts in each bin, length nx
    """
    # Fill in defaults from the data range.
    if min_x is None:
        min_x = np.min(x)
    if max_x is None:
        max_x = np.max(x)
    if nx is None:
        nx = int(max_x - min_x)
    edges = np.linspace(start=min_x, stop=max_x, num=nx + 1, endpoint=True)
    # A weighted histogram gives the per-bin sum; an unweighted one, the count.
    sums, _ = np.histogram(a=x, bins=edges, weights=y)
    counts, _ = np.histogram(a=x, bins=edges)
    return edges, sums, counts
def circular_average(image, calibrated_center, threshold=0, nx=None,
                     pixel_size=(1, 1), min_x=None, max_x=None, mask=None):
    """Circular average of the image data

    The circular average is also known as the radial integration

    Parameters
    ----------
    image : array
        Image to compute the average as a function of radius
    calibrated_center : tuple
        The center of the image in pixel units
        argument order should be (row, col)
    threshold : int, optional
        bins whose pixel count is <= threshold are discarded (default 0)
    nx : int, optional
        number of bins in x
    pixel_size : tuple, optional
        The size of a pixel (in a real unit, like mm).
        argument order should be (pixel_height, pixel_width)
        default is (1, 1)
    min_x, max_x : float, optional
        radial range of the binning, in pixels
    mask : array, optional
        only pixels where mask is nonzero contribute

    Returns
    -------
    bin_centers : array
        The center of each bin in R. shape is (nx, )
    ring_averages : array
        Radial average of the image. shape is (nx, ).
    """
    radii = utils.radial_grid(calibrated_center, image.shape, pixel_size)
    if mask is not None:
        keep = np.array(mask, dtype=bool)
        r_flat = radii[keep]
        img_flat = np.array(image)[keep]
    else:
        r_flat = np.ravel(radii)
        img_flat = np.ravel(image)
    # Express radii in (geometric-mean) pixel units before binning.
    r_pix = r_flat / (np.sqrt(pixel_size[1] * pixel_size[0]))
    bin_edges, sums, counts = bin_1D(r_pix, img_flat, nx=nx,
                                     min_x=min_x, max_x=max_x)
    # Discard bins with too few contributing pixels.
    populated = counts > threshold
    ring_averages = sums[populated] / counts[populated]
    bin_centers = utils.bin_edges_to_centers(bin_edges)[populated]
    return bin_centers, ring_averages
def get_circular_average( avg_img, mask, pargs, show_pixel=True, min_x=None, max_x=None,
                          nx=None, plot_ = False , save=False, *argv,**kwargs):
    """get a circular average of an image
    Parameters
    ----------
    avg_img: 2D-array, the image
    mask: 2D-array
    pargs: a dict, should contains
        center: the beam center in pixel
        Ldet: sample to detector distance
        lambda_: the wavelength
        dpix, the pixel size in mm. For Eiger1m/4m, the size is 75 um (0.075 mm)
        uid: unique id string used in plot titles / filenames
        path: (only needed when plot_ or save) output directory for png/csv
    nx : int, optional
        number of bins in x
        defaults is 1500 bins
    plot_: a boolen type, if True, plot the one-D curve
    show_pixel: a boolen type, if True, the x-axis of the plotted curve is q in pixel; else in real Q
    Returns
    -------
    qp: q in pixel
    iq: intensity of circular average
    q: q in real unit (A-1)
    """
    # unpack geometry parameters needed for the pixel->q conversion
    center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix']
    uid = pargs['uid']
    # radial (circular) average in pixel units; threshold=0 drops empty bins
    qp, iq = circular_average(avg_img,
        center, threshold=0, nx=nx, pixel_size=(dpix, dpix), mask=mask, min_x=min_x, max_x=max_x)
    qp_ = qp * dpix
    # convert bin_centers from r [um] to two_theta and then to q [1/px] (reciprocal space)
    two_theta = utils.radius_to_twotheta(Ldet, qp_)
    q = utils.twotheta_to_q(two_theta, lambda_)
    if plot_:
        if show_pixel:
            fig = plt.figure(figsize=(8, 6))
            ax1 = fig.add_subplot(111)
            #ax2 = ax1.twiny()
            ax1.semilogy(qp, iq, '-o')
            #ax1.semilogy(q, iq , '-o')
            ax1.set_xlabel('q (pixel)')
            #ax1.set_xlabel('q ('r'$\AA^{-1}$)')
            #ax2.cla()
            ax1.set_ylabel('I(q)')
            title = ax1.set_title('uid= %s--Circular Average'%uid)
        else:
            fig = plt.figure(figsize=(8, 6))
            ax1 = fig.add_subplot(111)
            ax1.semilogy(q, iq , '-o')
            ax1.set_xlabel('q ('r'$\AA^{-1}$)')
            ax1.set_ylabel('I(q)')
            title = ax1.set_title('uid= %s--Circular Average'%uid)
            ax2=None
        if 'xlim' in kwargs.keys():
            ax1.set_xlim( kwargs['xlim'] )
            x1,x2 = kwargs['xlim']
            # indices of q values inside the requested x-window (currently unused)
            w = np.where( (q >=x1 )&( q<=x2) )[0]
        if 'ylim' in kwargs.keys():
            ax1.set_ylim( kwargs['ylim'] )
        title.set_y(1.1)
        fig.subplots_adjust(top=0.85)
        path = pargs['path']
        fp = path + '%s_q_Iq'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    if save:
        path = pargs['path']
        save_lists( [q, iq], label=['q_A-1', 'Iq'], filename='%s_q_Iq.csv'%uid, path= path )
    return qp, iq, q
def plot_circular_average( qp, iq, q, pargs, show_pixel= False, loglog=False,
                           save=True,return_fig=False, *argv,**kwargs):
    """Plot a circular-average curve I(q).
    Parameters
    ----------
    qp: 1D array, q in pixel units
    iq: 1D array, circularly averaged intensity
    q: 1D array, q in real units (A-1)
    pargs: dict, must contain 'uid' (and 'path' when save=True)
    show_pixel: if True, x-axis is q in pixels; otherwise q in A-1
    loglog: if True, plot on log-log axes; otherwise semilog-y
    save: if True, save the figure to <path>/<uid>_q_Iq.png
    return_fig: if True, return the figure object
    kwargs: optional 'xlim'/'ylim' axis limits
    Returns
    -------
    fig, only when return_fig is True
    """
    if RUN_GUI:
        fig = Figure()
        ax1 = fig.add_subplot(111)
    else:
        fig, ax1 = plt.subplots()
    uid = pargs['uid']
    if show_pixel:
        if loglog:
            ax1.loglog(qp, iq, '-o')
        else:
            ax1.semilogy(qp, iq, '-o')
        ax1.set_xlabel('q (pixel)')
        ax1.set_ylabel('I(q)')
        title = ax1.set_title('%s_Circular Average'%uid)
    else:
        # Bug fix: this branch labels the x-axis in A-1, so it must plot
        # against q (the loglog path previously plotted qp by mistake).
        if loglog:
            ax1.loglog(q, iq, '-o')
        else:
            ax1.semilogy(q, iq , '-o')
        ax1.set_xlabel('q ('r'$\AA^{-1}$)')
        ax1.set_ylabel('I(q)')
        title = ax1.set_title('%s_Circular Average'%uid)
        ax2=None
    # default axis limits span the full data range
    if 'xlim' in kwargs.keys():
        xlim = kwargs['xlim']
    else:
        xlim=[q.min(), q.max()]
    if 'ylim' in kwargs.keys():
        ylim = kwargs['ylim']
    else:
        ylim=[iq.min(), iq.max()]
    ax1.set_xlim( xlim )
    ax1.set_ylim( ylim )
    title.set_y(1.1)
    fig.subplots_adjust(top=0.85)
    if save:
        path = pargs['path']
        fp = path + '%s_q_Iq'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    if return_fig:
        return fig
def get_angular_average( avg_img, mask, pargs, min_r, max_r,
                         nx=3600, plot_ = False , save=False, *argv,**kwargs):
    """get a angular average of an image
    Parameters
    ----------
    avg_img: 2D-array, the image
    mask: 2D-array
    pargs: a dict, should contains
        center: the beam center in pixel
        Ldet: sample to detector distance
        lambda_: the wavelength
        dpix, the pixel size in mm. For Eiger1m/4m, the size is 75 um (0.075 mm)
    min_r, max_r: inner/outer radius (in pixels) of the annulus averaged over
    nx : int, optional
        number of angular bins, default is 3600
    plot_: a boolen type, if True, plot the one-D curve
    save: if True (and plot_), save the figure to pargs['path']
    Returns
    -------
    angq: angle in degree
    ang: intensity of angular average
    """
    # unpack geometry (only center and dpix are actually used here)
    center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix']
    uid = pargs['uid']
    angq, ang = angular_average( avg_img, calibrated_center=center, pixel_size=(dpix,dpix), nx =nx,
                                 min_r = min_r , max_r = max_r, mask=mask )
    if plot_:
        fig = plt.figure(figsize=(8, 6))
        ax = fig.add_subplot(111)
        ax.plot(angq, ang , '-o')
        ax.set_xlabel("angle (deg)")
        ax.set_ylabel("I(ang)")
        #ax.legend(loc = 'best')
        uid = pargs['uid']
        title = ax.set_title('Uid= %s--t-I(Ang)'%uid)
        title.set_y(1.01)
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = pargs['path']
            uid = pargs['uid']
            #fp = path + 'Uid= %s--Ang-Iq~t-'%uid + CurTime + '.png'
            fp = path + 'uid=%s--Ang-Iq-t-'%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
        #plt.show()
    return angq, ang
def angular_average(image, calibrated_center, threshold=0, nx=1500,
                    pixel_size=(1, 1), min_r=None, max_r=None, min_x=None, max_x=None, mask=None):
    """Angular_average of the the image data
    Parameters
    ----------
    image : array
        Image to compute the average as a function of angle
    calibrated_center : tuple
        The center of the image in pixel units
        argument order should be (row, col)
    threshold : int, optional
        Ignore bins whose pixel count is at or below `threshold`
        default is zero
    nx : int, optional
        number of bins in x
        defaults is 1500 bins
    pixel_size : tuple, optional
        The size of a pixel (in a real unit, like mm).
        argument order should be (pixel_height, pixel_width)
        default is (1, 1)
    min_r: float, optional number of pixels
        The min r, e.g., the starting radius for angule average
    max_r: float, optional number of pixels
        The max r, e.g., the ending radius for angule average
        max_r - min_r gives the width of the angule average
    min_x : float, optional (radians)
        Left edge of first bin defaults to minimum value of x
    max_x : float, optional (radians)
        Right edge of last bin defaults to maximum value of x
    Returns
    -------
    bin_centers : array
        The center of each bin in degree shape is (nx, )
    ring_averages : array
        Angular average of the image. shape is (nx, ).
    """
    angle_val = utils.angle_grid(calibrated_center, image.shape, pixel_size)
    # default annulus: full image from center out to the farthest corner
    if min_r is None:
        min_r=0
    if max_r is None:
        max_r = np.sqrt( (image.shape[0] - calibrated_center[0])**2 + (image.shape[1] - calibrated_center[1])**2 )
    r_mask = make_ring_mask( calibrated_center, image.shape, min_r, max_r )
    if mask is not None:
        #maks = np.ones_like( image )
        # combine user mask with the ring mask; only those pixels are binned
        mask = np.array( mask*r_mask, dtype = bool)
        bina = angle_val[mask]
        image_mask = np.array( image )[mask]
    else:
        bina = np.ravel( angle_val )
        # NOTE(review): with mask=None, pixels outside the ring contribute 0
        # to sums but are still counted, which dilutes the average compared
        # to the masked branch — confirm whether this asymmetry is intended.
        image_mask = np.ravel(image*r_mask)
    bin_edges, sums, counts = utils.bin_1D( bina,
                                            image_mask,
                                            nx,
                                            min_x=min_x,
                                            max_x=max_x)
    #print (counts)
    th_mask = counts > threshold
    ang_averages = sums[th_mask] / counts[th_mask]
    bin_centers = utils.bin_edges_to_centers(bin_edges)[th_mask]
    # angle grid is in radians; convert to degrees for the caller
    return bin_centers*180/np.pi, ang_averages
def get_t_iqc( FD, frame_edge, mask, pargs, nx=1500, plot_ = False , save=False, show_progress=True,
               *argv,**kwargs):
    '''Get t-dependent Iq
    Parameters
    ----------
    FD: a compressed-file data handle (consumed by get_avg_imgc)
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    mask: a image mask
    pargs: dict with geometry info, passed through to get_circular_average
    nx : int, optional
        number of bins in x
        defaults is 1500 bins
    plot_: a boolen type, if True, plot the time~one-D curve with q as x-axis
    save: if True (and plot_), save figure and a csv of all curves
    Returns
    ---------
    qp: q in pixel
    iqs: array of circular-average intensities, one row per frame region
    q: q in real unit (A-1)
    '''
    Nt = len( frame_edge )
    iqs = list( np.zeros( Nt ) )
    for i in range(Nt):
        t1,t2 = frame_edge[i]
        #print (t1,t2)
        # average the frames in [t1, t2) and take its circular average
        avg_img = get_avg_imgc( FD, beg=t1,end=t2, sampling = 1, plot_ = False,show_progress=show_progress )
        qp, iqs[i], q = get_circular_average( avg_img, mask,pargs, nx=nx,
                                              plot_ = False)
    if plot_:
        fig,ax = plt.subplots(figsize=(8, 6))
        for i in range( Nt ):
            t1,t2 = frame_edge[i]
            ax.semilogy(q, iqs[i], label="frame: %s--%s"%( t1,t2) )
        #ax.set_xlabel("q in pixel")
        ax.set_xlabel('Q 'r'($\AA^{-1}$)')
        ax.set_ylabel("I(q)")
        if 'xlim' in kwargs.keys():
            ax.set_xlim( kwargs['xlim'] )
        if 'ylim' in kwargs.keys():
            ax.set_ylim( kwargs['ylim'] )
        ax.legend(loc = 'best', )
        uid = pargs['uid']
        title = ax.set_title('uid= %s--t~I(q)'%uid)
        title.set_y(1.01)
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = pargs['path']
            uid = pargs['uid']
            #fp = path + 'uid= %s--Iq~t-'%uid + CurTime + '.png'
            fp = path + 'uid=%s--Iq-t-'%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            # one csv column per frame region, first column is q
            save_arrays( np.vstack( [q, np.array(iqs)]).T,
                         label= ['q_A-1']+ ['Fram-%s-%s'%(t[0],t[1]) for t in frame_edge],
                         filename='uid=%s-q-Iqt.csv'%uid, path= path )
        #plt.show()
    return qp, np.array( iqs ),q
def plot_t_iqc( q, iqs, frame_edge, pargs, save=True, return_fig=False, legend_size=None, *argv,**kwargs):
    '''Plot t-dependent Iq
    Parameters
    ----------
    q: q in real unit (A-1), one-D array
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ];
        if None, synthetic 'Edge_i' labels are generated
    iqs: intensity of circular average, shape is [len(frame_edge), len(q)]
    pargs: a dict include data path, uid et.al info
    save: if True, save figure (png) and data (csv-like) to pargs['path']
    return_fig: if True, return (fig, ax)
    Returns
    ---------
    None, or (fig, ax) when return_fig is True
    '''
    Nt = iqs.shape[0]
    if frame_edge is None:
        # build placeholder labels when no frame regions were provided
        frame_edge = np.zeros( Nt, dtype=object )
        for i in range(Nt):
            frame_edge[i] = ['Edge_%i'%i, 'Edge_%i'%(i+1) ]
    #Nt = len( frame_edge )
    fig,ax = plt.subplots(figsize=(8, 6))
    for i in range( Nt ):
        t1,t2 = frame_edge[i]
        # skip all-zero curves (e.g. unused placeholder rows)
        if np.any( iqs[i] ):
            ax.semilogy(q, iqs[i], label="frame: %s--%s"%( t1,t2) )
    #ax.set_xlabel("q in pixel")
    ax.set_xlabel('Q 'r'($\AA^{-1}$)')
    ax.set_ylabel("I(q)")
    if 'xlim' in kwargs.keys():
        ax.set_xlim( kwargs['xlim'] )
    if 'ylim' in kwargs.keys():
        ax.set_ylim( kwargs['ylim'] )
    ax.legend(loc = 'best', fontsize = legend_size)
    uid = pargs['uid']
    title = ax.set_title('%s--t~I(q)'%uid)
    title.set_y(1.01)
    if save:
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        path = pargs['path']
        uid = pargs['uid']
        #fp = path + 'uid= %s--Iq~t-'%uid + CurTime + '.png'
        fp = path + '%s_q_Iqt'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        save_arrays( np.vstack( [q, np.array(iqs)]).T,
                     label= ['q_A-1']+ ['Fram-%s-%s'%(t[0],t[1]) for t in frame_edge],
                     filename='%s_q_Iqt'%uid , path= path )
    if return_fig:
        return fig,ax
    #plt.show()
def get_distance(p1,p2):
    '''Euclidean distance between two 2D points p1 and p2 (each indexable as (x, y)).'''
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return np.sqrt( dx**2 + dy**2 )
def calc_q(L,a,wv):
    ''' calc_q(L,a,wv) - calculate the q value for length L, transverse
        distance a and wavelength wv.
        Use this to calculate the speckle size

        L - sample to detector distance (mm)
        a - pixel size transverse length from beam direction (mm)
        wv - wavelength
        Units of L and a should match and resultant q is in inverse units of wv.
    '''
    # full scattering angle subtended by the transverse distance a at distance L
    two_theta = np.arctan2(a, L)
    # standard q = (4 pi / lambda) * sin(theta), with theta = two_theta / 2
    return 4 * np.pi * np.sin(two_theta / 2.) / wv
def get_t_iq( data_series, frame_edge, mask, pargs, nx=1500, plot_ = False , save=False, *argv,**kwargs):
    '''Get t-dependent Iq
    Parameters
    ----------
    data_series: a image series (sliceable by frame index)
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    mask: a image mask
    pargs: dict with geometry info, passed through to get_circular_average
    nx : int, optional
        number of bins in x
        defaults is 1500 bins
    plot_: a boolen type, if True, plot the time~one-D curve with q as x-axis
    Returns
    ---------
    qp: q in pixel
    iqs: array of circular-average intensities, one row per frame region
    q: q in real unit (A-1)
    '''
    Nt = len( frame_edge )
    iqs = list( np.zeros( Nt ) )
    for i in range(Nt):
        t1,t2 = frame_edge[i]
        #print (t1,t2)
        # average the frames in [t1, t2) then take the circular average
        avg_img = get_avg_img( data_series[t1:t2], sampling = 1,
                               plot_ = False )
        qp, iqs[i], q = get_circular_average( avg_img, mask,pargs, nx=nx,
                                              plot_ = False)
    if plot_:
        fig,ax = plt.subplots(figsize=(8, 6))
        for i in range( Nt ):
            t1,t2 = frame_edge[i]
            ax.semilogy(q, iqs[i], label="frame: %s--%s"%( t1,t2) )
        #ax.set_xlabel("q in pixel")
        ax.set_xlabel('Q 'r'($\AA^{-1}$)')
        ax.set_ylabel("I(q)")
        if 'xlim' in kwargs.keys():
            ax.set_xlim( kwargs['xlim'] )
        if 'ylim' in kwargs.keys():
            ax.set_ylim( kwargs['ylim'] )
        ax.legend(loc = 'best')
        uid = pargs['uid']
        title = ax.set_title('uid=%s--t-I(q)'%uid)
        title.set_y(1.01)
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = pargs['path']
            uid = pargs['uid']
            #fp = path + 'Uid= %s--Iq~t-'%uid + CurTime + '.png'
            fp = path + 'uid=%s--Iq-t-'%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
        #plt.show()
    return qp, np.array( iqs ),q
def get_t_ang( data_series, frame_edge, mask, center, pixel_size, min_r, max_r,pargs,
               nx=1500, plot_ = False , save=False, *argv,**kwargs):
    '''Get t-dependent angule intensity
    Parameters
    ----------
    data_series: a image series
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    mask: a image mask
    pixel_size : tuple, optional
        The size of a pixel (in a real unit, like mm).
        argument order should be (pixel_height, pixel_width)
        default is (1, 1)
    center: the beam center in pixel
    min_r: float, optional number of pixels
        The min r, e.g., the starting radius for angule average
    max_r: float, optional number of pixels
        The max r, e.g., the ending radius for angule average
        max_r - min_r gives the width of the angule average
    nx : int, optional
        number of bins in x
        defaults is 1500 bins
    plot_: a boolen type, if True, plot the time~one-D curve
    Returns
    ---------
    qp: angle in degree
    iqs: array of angular-average intensities, one row per frame region
    '''
    Nt = len( frame_edge )
    iqs = list( np.zeros( Nt ) )
    for i in range(Nt):
        t1,t2 = frame_edge[i]
        #print (t1,t2)
        # average the frames in [t1, t2) then take the angular average
        avg_img = get_avg_img( data_series[t1:t2], sampling = 1,
                               plot_ = False )
        qp, iqs[i] = angular_average( avg_img, center, pixel_size=pixel_size,
                                      nx=nx, min_r=min_r, max_r = max_r, mask=mask )
    if plot_:
        fig,ax = plt.subplots(figsize=(8, 8))
        for i in range( Nt ):
            t1,t2 = frame_edge[i]
            #ax.semilogy(qp* 180/np.pi, iqs[i], label="frame: %s--%s"%( t1,t2) )
            ax.plot(qp, iqs[i], label="frame: %s--%s"%( t1,t2) )
        ax.set_xlabel("angle (deg)")
        ax.set_ylabel("I(ang)")
        ax.legend(loc = 'best')
        uid = pargs['uid']
        title = ax.set_title('Uid= %s--t-I(Ang)'%uid)
        title.set_y(1.01)
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = pargs['path']
            uid = pargs['uid']
            #fp = path + 'Uid= %s--Ang-Iq~t-'%uid + CurTime + '.png'
            fp = path + 'uid=%s--Ang-Iq-t-'%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
        #plt.show()
    return qp, np.array( iqs )
def make_ring_mask(center, shape, min_r, max_r ):
    """
    Build a binary annulus (ring) mask.
    Parameters
    ----------
    center : tuple
        point in image where r=0; may be a float giving subpixel precision.
        Order is (rr, cc).
    shape: tuple
        Image shape which is used to determine the maximum extent of output
        pixel coordinates. Order is (rr, cc).
    min_r: float, number of pixels
        inner radius of the ring (exclusive)
    max_r: float, number of pixels
        outer radius of the ring (exclusive); max_r - min_r is the ring width
    Returns
    -------
    ring_mask : int32 array of `shape`, 1 inside the ring, 0 elsewhere
    """
    r_val = utils.radial_grid(center, shape, [1.,1.] )
    inside = (r_val > min_r) & (r_val < max_r)
    ring_mask = np.zeros_like(r_val, dtype=np.int32)
    ring_mask[inside] = 1
    return ring_mask
def _make_roi(coords, edges, shape):
""" Helper function to create ring rois and bar rois
Parameters
----------
coords : array
shape is image shape
edges : list
List of tuples of inner (left or top) and outer (right or bottom)
edges of each roi.
e.g., edges=[(1, 2), (11, 12), (21, 22)]
shape : tuple
Shape of the image in which to create the ROIs
e.g., shape=(512, 512)
Returns
-------
label_array : array
Elements not inside any ROI are zero; elements inside each
ROI are 1, 2, 3, corresponding to the order they are
specified in `edges`.
Has shape=`image shape`
"""
label_array = np.digitize(coords, edges, right=False)
# Even elements of label_array are in the space between rings.
label_array = (np.where(label_array % 2 != 0, label_array, 0) + 1) // 2
return label_array.reshape(shape)
def angulars(edges, center, shape):
    """
    Draw annular (angular-shaped) regions of interest.
    Each wedge is labeled with an integer; pixels outside every wedge are 0.
    Parameters
    ----------
    edges: list
        giving the inner and outer angle in unit of radians
        e.g., [(1, 2), (11, 12), (21, 22)]
    center: tuple
        point in image where r=0; may be a float giving subpixel precision.
        Order is (rr, cc).
    shape: tuple
        Image shape which is used to determine the maximum extent of output
        pixel coordinates. Order is (rr, cc).
    Returns
    -------
    label_array : array
        Elements not inside any ROI are zero; elements inside each
        ROI are 1, 2, 3, corresponding to the order they are specified
        in edges.
    """
    edges = np.atleast_2d(np.asarray(edges)).ravel()
    # each wedge needs an (inner, outer) pair, so the flat list must be even
    if len(edges) % 2 != 0:
        raise ValueError("edges should have an even number of elements, "
                         "giving inner, outer radii for each angular")
    if not np.all( np.diff(edges) > 0):
        raise ValueError("edges are expected to be monotonically increasing, "
                         "giving inner and outer radii of each angular from "
                         "r=0 outward")
    angle_val = utils.angle_grid(center, shape).ravel()
    return _make_roi(angle_val, edges, shape)
def get_angular_mask( mask, inner_angle= 0, outer_angle = 360, width = None, edges = None,
                      num_angles = 12, center = None, dpix=[1,1], flow_geometry=False ):
    '''
    Build a labeled mask of angular (wedge) ROIs.
    mask: 2D-array
    inner_angle # the starting angle in unit of degree
    outer_angle # the ending angle in unit of degree
    width # width of each angle, in degree, default is None, there is no gap between the neighbour angle ROI
    edges: default, None. otherwise, give a customized angle edges
    num_angles # number of angles
    center: the beam center in pixel
    dpix, the pixel size in mm. For Eiger1m/4m, the size is 75 um (0.075 mm)
    flow_geometry: if True, the angle should be between 0 and 180. the map will be a center inverse symmetry
    Returns
    -------
    ang_mask: a ring mask, np.array
    ang_center: ang in unit of degree
    edges: ang edges in degree
    '''
    #center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix']
    #spacing = (outer_radius - inner_radius)/(num_rings-1) - 2 # spacing between rings
    #inner_angle,outer_angle = np.radians(inner_angle), np.radians(outer_angle)
    #if edges is None:
    #    ang_center = np.linspace( inner_angle,outer_angle, num_angles )
    #    edges = np.zeros( [ len(ang_center), 2] )
    #    if width is None:
    #        width = ( -inner_angle + outer_angle)/ float( num_angles -1 + 1e-10 )
    #    else:
    #        width = np.radians( width )
    #    edges[:,0],edges[:,1] = ang_center - width/2, ang_center + width/2
    if flow_geometry:
        if edges is None:
            # flow geometry only warns (does not raise) on out-of-range angles
            if inner_angle<0:
                print('In this flow_geometry, the inner_angle should be larger than 0')
            if outer_angle >180:
                print('In this flow_geometry, the out_angle should be smaller than 180')
    if edges is None:
        if num_angles!=1:
            spacing = (outer_angle - inner_angle - num_angles* width )/(num_angles-1) # spacing between rings
        else:
            spacing = 0
        edges = roi.ring_edges(inner_angle, width, spacing, num_angles)
    #print (edges)
    angs = angulars( np.radians( edges ), center, mask.shape)
    ang_center = np.average(edges, axis=1)
    ang_mask = angs*mask
    ang_mask = np.array(ang_mask, dtype=int)
    if flow_geometry:
        # NOTE(review): if the caller supplied `edges` with flow_geometry=True,
        # `spacing` (and possibly `width`) is undefined here and this branch
        # raises NameError — confirm callers always pass edges=None in that mode.
        # mirror the wedges by 180 deg for the center-symmetric flow map
        outer_angle -= 180
        inner_angle -= 180
        edges2 = roi.ring_edges(inner_angle, width, spacing, num_angles)
        #print (edges)
        angs2 = angulars( np.radians( edges2 ), center, mask.shape)
        ang_mask2 = angs2*mask
        ang_mask2 = np.array(ang_mask2, dtype=int)
        ang_mask += ang_mask2
    else:
        # wrap wedges that straddle the -180 deg branch cut back onto (+180] side
        for i, (al, ah) in enumerate( edges ):
            if al<=-180. and ah >-180:
                #print(i+1, al,ah)
                edge3 = np.array([ [ al + 360, 180 ] ])
                ang3 = angulars( np.radians( edge3 ), center, mask.shape) * mask
                w = np.ravel( ang3 )==1
                #print(w)
                np.ravel( ang_mask )[w] = i+1
    labels, indices = roi.extract_label_indices(ang_mask)
    nopr = np.bincount( np.array(labels, dtype=int) )[1:]
    # warn when any wedge ends up with zero unmasked pixels
    if len( np.where( nopr ==0 )[0] !=0):
        #print (nopr)
        print ("Some angs contain zero pixels. Please redefine the edges.")
    return ang_mask, ang_center, edges
def two_theta_to_radius(dist_sample, two_theta):
    """
    Converts scattering angle (2:math:`2\\theta`) to radius (from the calibrated center)
    with known detector to sample distance.
    Parameters
    ----------
    dist_sample : float
        distance from the sample to the detector (mm)
    two_theta : array
        An array of :math:`2\\theta` values
    Returns
    -------
    radius : array
        The L2 norm of the distance (mm) of each pixel from the calibrated center.
    """
    # r = L * tan(2theta); multiplication order does not affect the result
    return dist_sample * np.tan(two_theta)
def get_ring_mask( mask, inner_radius=40, outer_radius = 762, width = 6, num_rings = 12,
                   edges=None, unit='pixel',pargs=None, return_q_in_pixel=False ):
    #def get_ring_mask( mask, inner_radius= 0.0020, outer_radius = 0.009, width = 0.0002, num_rings = 12,
    #                   edges=None, unit='pixel',pargs=None ):
    '''
    Build a labeled mask of concentric q-rings.
    mask: 2D-array
    inner_radius #radius of the first ring
    outer_radius # radius of the last ring
    width # width of each ring
    num_rings # number of rings
    pargs: a dict, should contains
        center: the beam center in pixel
        Ldet: sample to detector distance
        lambda_: the wavelength, in unit of A
        dpix, the pixel size in mm. For Eiger1m/4m, the size is 75 um (0.075 mm)
    unit: if pixel, all the radius inputs are in unit of pixel
        else: should be in unit of A-1
    return_q_in_pixel: if True (pixel unit only), return ring values in pixels
    Returns
    -------
    ring_mask: a ring mask, np.array
    q_ring_center: q in real unit (A-1)
    q_ring_val: q edges in A-1
    '''
    center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix']
    #spacing =  (outer_radius - inner_radius)/(num_rings-1) - 2    # spacing between rings
    #qc = np.int_( np.linspace( inner_radius,outer_radius, num_rings ) )
    #edges = np.zeros( [ len(qc), 2] )
    #if width%2:
    #    edges[:,0],edges[:,1] = qc - width//2,  qc + width//2 +1
    #else:
    #    edges[:,0],edges[:,1] = qc - width//2,  qc + width//2
    # find the edges of the required rings
    if edges is None:
        if num_rings!=1:
            spacing = (outer_radius - inner_radius - num_rings* width )/(num_rings-1) # spacing between rings
        else:
            spacing = 0
        edges = roi.ring_edges(inner_radius, width, spacing, num_rings)
    if (unit=='pixel') or (unit=='p'):
        if not return_q_in_pixel:
            # pixel edges -> scattering angle -> q (A-1)
            two_theta = utils.radius_to_twotheta(Ldet, edges*dpix)
            q_ring_val = utils.twotheta_to_q(two_theta, lambda_)
        else:
            q_ring_val = edges
        #print(edges)
    else: #in unit of A-1
        two_theta = utils.q_to_twotheta( edges, lambda_)
        q_ring_val = edges
        edges = two_theta_to_radius(Ldet,two_theta)/dpix #converto pixel
    q_ring_center = np.average(q_ring_val, axis=1)
    rings = roi.rings(edges, center, mask.shape)
    ring_mask = rings*mask
    ring_mask = np.array(ring_mask, dtype=int)
    labels, indices = roi.extract_label_indices(ring_mask)
    nopr = np.bincount( np.array(labels, dtype=int) )[1:]
    # warn when any ring ends up with zero unmasked pixels
    if len( np.where( nopr ==0 )[0] !=0):
        print (nopr)
        print ("Some rings contain zero pixels. Please redefine the edges.")
    return ring_mask, q_ring_center, q_ring_val
def get_ring_anglar_mask(ring_mask, ang_mask,
                         q_ring_center, ang_center ):
    '''get ring_anglar mask

    Combine a ring-labeled mask and an angle-labeled mask into a single
    mask whose labels enumerate each (ring, angle) intersection as 1..N.
    Returns (combined label mask, per-label q centers, per-label angle centers).
    '''
    ring_max = ring_mask.max()
    ang_mask_ = np.zeros( ang_mask.shape )
    ind = np.where(ang_mask!=0)
    ang_mask_[ind ] = ang_mask[ ind ] + 1E9 #add some large number to qr
    # use ring #1 to find which angle labels actually intersect the rings
    dumy_ring_mask = np.zeros( ring_mask.shape )
    dumy_ring_mask[ring_mask==1] =1
    dumy_ring_ang = dumy_ring_mask * ang_mask
    real_ang_lab = np.int_( np.unique( dumy_ring_ang )[1:] ) -1
    # product gives a unique value per (ring, shifted-angle) pair
    ring_ang = ring_mask * ang_mask_
    #convert label_array_qzr to [1,2,3,...]
    ura = np.unique( ring_ang )[1:]
    ur = np.unique( ring_mask )[1:]
    ua = np.unique( ang_mask )[real_ang_lab]
    ring_ang_ = np.zeros_like( ring_ang )
    newl = np.arange( 1, len(ura)+1)
    #newl = np.int_( real_ang_lab )
    # per-label centers: ring-major order (all angles of ring 1, then ring 2, ...)
    rc= [ [ q_ring_center[i]]*len( ua ) for i in range(len( ur )) ]
    ac =list( ang_center[ua]) * len( ur )
    #rc =list( q_ring_center) * len( ua )
    #ac= [ [ ang_center[i]]*len( ur ) for i in range(len( ua )) ]
    for i, label in enumerate(ura):
        #print (i, label)
        # relabel each unique product value to the consecutive integer newl[i]
        ring_ang_.ravel()[ np.where( ring_ang.ravel() == label)[0] ] = newl[i]
    return np.int_(ring_ang_), np.concatenate( np.array( rc )), np.array( ac )
def show_ring_ang_roi( data, rois, alpha=0.3, save=False, *argv,**kwargs):
    '''
    May 16, 2016, Y.G.@CHX
    plot a saxs image with rois( a label array)

    Parameters:
        data: 2-D array, a gisaxs image
        rois: 2-D array, a label array
    Options:
        alpha: transparency of the label array on top of data
        save: if True, save the figure; requires kwargs['path'] and
              optionally kwargs['uid']
    Return:
        a plot of a qzr map of a gisaxs image with rois( a label array)
    Examples:
        show_qzr_roi( avg_imgr, box_maskr, inc_x0, ticks)
    '''
    #import matplotlib.pyplot as plt
    #import copy
    #import matplotlib.cm as mcm
    #cmap='viridis'
    #_cmap = copy.copy((mcm.get_cmap(cmap)))
    #_cmap.set_under('w', 0)
    avg_imgr, box_maskr = data, rois
    num_qzr = len(np.unique( box_maskr)) -1
    fig, ax = plt.subplots(figsize=(8,12))
    ax.set_title("ROI--Labeled Array on Data")
    im,im_label = show_label_array_on_image(ax, avg_imgr, box_maskr, imshow_cmap='viridis',
                                            cmap='Paired', alpha=alpha,
                                            vmin=0.01, vmax=30. , origin="lower")
    # annotate each ROI with its label number at its centroid
    for i in range( 1, num_qzr+1 ):
        ind = np.where( box_maskr == i)[1]
        indz = np.where( box_maskr == i)[0]
        c = '%i'%i
        y_val = int( indz.mean() )
        x_val = int( ind.mean() )
        #print (xval, y)
        ax.text(x_val, y_val, c, va='center', ha='center')
        #print (x_val1,x_val2)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    plt.colorbar(im, cax=cax)
    if save:
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        path = kwargs['path']
        if 'uid' in kwargs:
            uid = kwargs['uid']
        else:
            uid = 'uid'
        #fp = path + "uid= %s--Waterfall-"%uid + CurTime + '.png'
        fp = path + "uid=%s--ROI-on-image-"%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    #ax.set_xlabel(r'$q_r$', fontsize=22)
    #ax.set_ylabel(r'$q_z$',fontsize=22)
    #plt.show()
def plot_qIq_with_ROI( q, iq, q_ring_center, logs=True, save=False, return_fig = False, *argv,**kwargs):
    '''Aug 6, 2016, Y.G.@CHX
    plot q~Iq with interested q rings

    q, iq: the circular-average curve; q_ring_center: q values at which
    vertical lines mark the selected rings. kwargs may carry uid, path,
    xlim, ylim.
    '''
    uid = 'uid'
    if 'uid' in kwargs.keys():
        uid = kwargs['uid']
    if RUN_GUI:
        fig = Figure(figsize=(8, 6))
        axes = fig.add_subplot(111)
    else:
        fig, axes = plt.subplots(figsize=(8, 6))
    if logs:
        axes.semilogy(q, iq, '-o')
    else:
        axes.plot(q, iq, '-o')
    axes.set_title('%s--Circular Average with the Q ring values'%uid)
    axes.set_ylabel('I(q)')
    axes.set_xlabel('Q 'r'($\AA^{-1}$)')
    #axes.set_xlim(0, 0.02)
    #axes.set_xlim(-0.00001, 0.1)
    #axes.set_ylim(-0.0001, 10000)
    #axes.set_ylim(0, 100)
    # default axis limits span the full data range
    if 'xlim' in kwargs.keys():
        xlim = kwargs['xlim']
    else:
        xlim=[q.min(), q.max()]
    if 'ylim' in kwargs.keys():
        ylim = kwargs['ylim']
    else:
        ylim=[iq.min(), iq.max()]
    axes.set_xlim( xlim )
    axes.set_ylim( ylim )
    # draw one vertical marker per distinct ring center
    num_rings = len( np.unique( q_ring_center) )
    for i in range(num_rings):
        axes.axvline(q_ring_center[i] )#, linewidth = 5 )
    if save:
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        path = kwargs['path']
        if 'uid' in kwargs:
            uid = kwargs['uid']
        else:
            uid = 'uid'
        #fp = path + "uid= %s--Waterfall-"%uid + CurTime + '.png'
        fp = path + "%s_ROI_on_Iq"%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    #plt.show()
    if return_fig:
        return fig, axes
def get_each_ring_mean_intensity( data_series, ring_mask, sampling, timeperframe, plot_ = True , save=False, *argv,**kwargs):
    """
    get time dependent mean intensity of each ring

    data_series: image series; ring_mask: label array; sampling: take every
    Nth frame for the ROI means; timeperframe: seconds per frame (used only
    to build the returned `times` axis).
    Returns (times, mean_int_sets) where mean_int_sets has one column per ring.
    """
    mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), ring_mask)
    times = np.arange(len(data_series))*timeperframe  # get the time for each frame
    num_rings = len( np.unique( ring_mask)[1:] )
    if plot_:
        fig, ax = plt.subplots(figsize=(8, 8))
        uid = 'uid'
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        ax.set_title("%s--Mean intensity of each ring"%uid)
        for i in range(num_rings):
            ax.plot( mean_int_sets[:,i], label="Ring "+str(i+1),marker = 'o', ls='-')
        ax.set_xlabel("Time")
        ax.set_ylabel("Mean Intensity")
        ax.legend(loc = 'best')
        if save:
            #dt =datetime.now()
            #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = kwargs['path']
            #fp = path + "Uid= %s--Mean intensity of each ring-"%uid + CurTime + '.png'
            fp = path + "%s_Mean_intensity_of_each_ROI"%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
        #plt.show()
    return times, mean_int_sets
#plot g2 results
def plot_saxs_rad_ang_g2( g2, taus, res_pargs=None, master_angle_plot= False,return_fig=False,*argv,**kwargs):
    '''plot g2 results of segments with radius and angle partation ,

    g2: one-time correlation function, shape [len(taus), num_qr*num_qa]
    taus: the time delays
    res_pargs, a dict, can contains
        uid/path/q_ring_center/ang_center
    master_angle_plot: if True, make one figure per angle (q as subplots);
        otherwise one figure per q ring (angles as subplots)
    kwargs: can contains
        vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)]
        ylim/xlim: the limit of y and x

    e.g.
    plot_saxs_rad_ang_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, ang_center=ang_center, vlim=[.99, 1.01] )
    '''
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        q_ring_center= res_pargs[ 'q_ring_center']
        num_qr = len( q_ring_center)
        ang_center = res_pargs[ 'ang_center']
        num_qa = len( ang_center )
    else:
        # fall back to individual kwargs when no res_pargs dict is given
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
        if 'q_ring_center' in kwargs.keys():
            q_ring_center = kwargs[ 'q_ring_center']
            num_qr = len( q_ring_center)
        else:
            print( 'Please give q_ring_center')
        if 'ang_center' in kwargs.keys():
            ang_center = kwargs[ 'ang_center']
            num_qa = len( ang_center)
        else:
            print( 'Please give ang_center')
    # outer loop variable (figures) vs inner loop variable (subplots)
    if master_angle_plot:
        first_var = num_qa
        sec_var = num_qr
    else:
        first_var=num_qr
        sec_var = num_qa
    for qr_ind in range( first_var ):
        if RUN_GUI:
            fig = Figure(figsize=(10, 12))
        else:
            fig = plt.figure(figsize=(10, 12))
        #fig = plt.figure()
        if master_angle_plot:
            title_qr = 'Angle= %.2f'%( ang_center[qr_ind]) + r'$^\circ$'
        else:
            title_qr = ' Qr= %.5f  '%( q_ring_center[qr_ind]) + r'$\AA^{-1}$'
        plt.title('uid= %s:--->'%uid + title_qr,fontsize=20, y =1.1)
        #print (qz_ind,title_qz)
        #if num_qr!=1:plt.axis('off')
        plt.axis('off')
        # near-square grid of subplots for the inner variable
        sx = int(round(np.sqrt( sec_var )) )
        if sec_var%sx == 0:
            sy = int(sec_var/sx)
        else:
            sy=int(sec_var/sx+1)
        for sn in range( sec_var ):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel("g2")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            if master_angle_plot:
                # NOTE(review): column index uses num_qr here while the other
                # branch uses num_qa; verify this matches the qr-major layout
                # of g2's columns for the master_angle_plot case.
                i = sn + qr_ind * num_qr
                title_qa = '%.5f  '%( q_ring_center[sn]) + r'$\AA^{-1}$'
            else:
                i = sn + qr_ind * num_qa
                title_qa = '%.2f'%( ang_center[sn]) + r'$^\circ$' + '( %d )'%(i)
            #title_qa = " Angle= " + '%.2f'%( ang_center[sn]) + r'$^\circ$' + '( %d )'%i
            #title_qa = '%.2f'%( ang_center[sn]) + r'$^\circ$' + '( %d )'%(i)
            #if num_qr==1:
            #    title = 'uid= %s:--->'%uid + title_qr + '__' +  title_qa
            #else:
            #    title = title_qa
            title = title_qa
            ax.set_title( title , y =1.1, fontsize=12)
            y=g2[:, i]
            ax.semilogx(taus, y, '-o', markersize=6)
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                # scale y-limits relative to data extremes (skip y[0] for max)
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        #fp = path + 'g2--uid=%s-qr=%s'%(uid,q_ring_center[qr_ind]) + CurTime + '.png'
        fp = path + 'uid=%s--g2-qr=%s'%(uid, q_ring_center[qr_ind] ) + '-.png'
        plt.savefig( fp, dpi=fig.dpi)
        fig.set_tight_layout(True)
    if return_fig:
        return fig
############################################
##a good func to fit g2 for all types of geogmetries
############################################
def fit_saxs_rad_ang_g2( g2, res_pargs=None,function='simple_exponential', fit_range=None,
            master_angle_plot= False, *argv,**kwargs):
    '''
    Fit one-time correlation function
    The support functions include simple exponential and stretched/compressed exponential
    Parameters
    ----------
    g2: one-time correlation function for fit, with shape as [taus, qs]
    res_pargs: a dict, contains keys
        taus: the time delay, with the same length as g2
        q_ring_center:  the center of q rings, for the title of each sub-plot
        uid: unique id, for the title of plot
    function:
        'simple_exponential': fit by a simple exponential function, defined as
                    beta * np.exp(-2 * relaxation_rate * lags) + baseline
        'streched_exponential': fit by a streched exponential function, defined as
                    beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline
    fit_range: optional [start, end] index pair; only g2[1:][start:end] is fitted
    master_angle_plot: if True, one figure is made per angle (panels iterate q rings);
        if False, one figure per q ring (panels iterate angles)
    #fit_vibration:
    #    if True, will fit the g2 by a dumped sin function due to beamline mechnical oscillation
    Returns
    -------
    fit results:
        a dict, with keys as
        'baseline':
         'beta':
         'relaxation_rate':
    an example:
        result = fit_g2( g2, res_pargs, function = 'simple')
        result = fit_g2( g2, res_pargs, function = 'stretched')
    '''
    # ---- unpack geometry (q rings / angles) and delay times ----
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        q_ring_center= res_pargs[ 'q_ring_center']
        num_qr = len( q_ring_center)
        ang_center = res_pargs[ 'ang_center']
        num_qa = len( ang_center )
        taus=res_pargs['taus']
    else:
        # NOTE(review): in this branch `taus` is never assigned, so the fit below
        # fails unless res_pargs is supplied -- confirm intended usage.
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
        if 'q_ring_center' in kwargs.keys():
            q_ring_center = kwargs[ 'q_ring_center']
            num_qr = len( q_ring_center)
        else:
            print( 'Please give q_ring_center')
        if 'ang_center' in kwargs.keys():
            ang_center = kwargs[ 'ang_center']
            num_qa = len( ang_center)
        else:
            print( 'Please give ang_center')
    # per-ROI arrays holding the best-fit parameters
    num_rings = g2.shape[1]
    beta = np.zeros(   num_rings )  #  contrast factor
    rate = np.zeros(   num_rings )  #  relaxation rate
    alpha = np.zeros(   num_rings )  #  alpha
    baseline = np.zeros(   num_rings )  #  baseline
    freq= np.zeros(   num_rings )  # oscillation frequency (stretched_vibration only)
    if function=='flow_para_function' or  function=='flow_para':
        flow= np.zeros(   num_rings )  #  baseline
    # parameters flagged False in fit_variables are held fixed during the fit
    if 'fit_variables' in kwargs:
        additional_var  = kwargs['fit_variables']
        _vars =[ k for k in list( additional_var.keys()) if additional_var[k] is False]
    else:
        _vars = []
    #print (_vars)
    _guess_val = dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0)
    if 'guess_values' in kwargs:
        guess_values  = kwargs['guess_values']
        _guess_val.update( guess_values )
    # ---- choose the lmfit Model for the requested functional form ----
    if function=='simple_exponential' or function=='simple':
        _vars = np.unique ( _vars + ['alpha'])  # simple exponential: alpha pinned
        mod = Model(stretched_auto_corr_scat_factor)#,  independent_vars= list( _vars)   )
    elif function=='stretched_exponential' or function=='stretched':
        mod = Model(stretched_auto_corr_scat_factor)#,  independent_vars=  _vars)
    elif function=='stretched_vibration':
        mod = Model(stretched_auto_corr_scat_factor_with_vibration)#,  independent_vars=  _vars)
    elif function=='flow_para_function' or  function=='flow_para':
        mod = Model(flow_para_function)#,  independent_vars=  _vars)
    else:
        print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function)
    # physical bounds for the fit parameters
    mod.set_param_hint( 'baseline',   min=0.5, max= 1.5 )
    mod.set_param_hint( 'beta',   min=0.0 )
    mod.set_param_hint( 'alpha',   min=0.0 )
    mod.set_param_hint( 'relaxation_rate',   min=0.0 )
    if function=='flow_para_function' or  function=='flow_para':
        mod.set_param_hint( 'flow_velocity', min=0)
    if function=='stretched_vibration':
        mod.set_param_hint( 'freq', min=0)
        mod.set_param_hint( 'amp', min=0)
    _beta=_guess_val['beta']
    _alpha=_guess_val['alpha']
    _relaxation_rate = _guess_val['relaxation_rate']
    _baseline= _guess_val['baseline']
    pars  = mod.make_params( beta=_beta, alpha=_alpha, relaxation_rate =_relaxation_rate, baseline= _baseline)
    if function=='flow_para_function' or  function=='flow_para':
        _flow_velocity =_guess_val['flow_velocity']
        pars  = mod.make_params( beta=_beta, alpha=_alpha, flow_velocity=_flow_velocity,
                                relaxation_rate =_relaxation_rate, baseline= _baseline)
    if function=='stretched_vibration':
        _freq =_guess_val['freq']
        _amp = _guess_val['amp']
        pars  = mod.make_params( beta=_beta, alpha=_alpha, freq=_freq, amp = _amp,
                                relaxation_rate =_relaxation_rate, baseline= _baseline)
    # freeze the parameters the caller asked not to vary
    for v in _vars:
        pars['%s'%v].vary = False
    # ---- one figure per first_var value, one panel per sec_var value ----
    if master_angle_plot:
        first_var = num_qa
        sec_var = num_qr
    else:
        first_var=num_qr
        sec_var = num_qa
    for qr_ind in range( first_var ):
        #fig = plt.figure(figsize=(10, 12))
        fig = plt.figure(figsize=(14, 8))
        #fig = plt.figure()
        if master_angle_plot:
            title_qr = 'Angle= %.2f'%( ang_center[qr_ind])  +  r'$^\circ$'
        else:
            title_qr = ' Qr= %.5f  '%( q_ring_center[qr_ind]) + r'$\AA^{-1}$'
        #plt.title('uid= %s:--->'%uid + title_qr,fontsize=20, y =1.1)
        plt.axis('off')
        # panel grid: 4 columns, enough rows for sec_var panels
        #sx = int(round(np.sqrt(  sec_var  )) )
        sy=4
        #if sec_var%sx == 0:
        if sec_var%sy == 0:
            #sy = int(sec_var/sx)
            sx = int(sec_var/sy)
        else:
            #sy=int(sec_var/sx+1)
            sx=int(sec_var/sy+1)
        for sn in range( sec_var ):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel(r"$g^($" + r'$^2$' + r'$^)$' + r'$(Q,$' + r'$\tau$' + r'$)$' )
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            if master_angle_plot:
                # NOTE(review): flat index uses num_qr here but num_qa below;
                # verify this matches the column layout of g2.
                i = sn + qr_ind * num_qr
                title_qa = '%.5f  '%( q_ring_center[sn]) + r'$\AA^{-1}$'
            else:
                i = sn + qr_ind * num_qa
                title_qa = '%.2f'%( ang_center[sn]) + r'$^\circ$' + '( %d )'%(i)
            title = title_qa
            ax.set_title( title , y =1.1)
            # restrict the fitted window if requested; first tau is always dropped
            if fit_range is not None:
                y=g2[1:, i][fit_range[0]:fit_range[1]]
                lags=taus[1:][fit_range[0]:fit_range[1]]
            else:
                y=g2[1:, i]
                lags=taus[1:]
            result1 = mod.fit(y, pars, x =lags )
            #print ( result1.best_values)
            rate[i] = result1.best_values['relaxation_rate']
            #rate[i] = 1e-16
            beta[i] = result1.best_values['beta']
            #baseline[i] =  1.0
            baseline[i] =  result1.best_values['baseline']
            #print( result1.best_values['freq']  )
            if function=='simple_exponential' or function=='simple':
                alpha[i] =1.0
            elif function=='stretched_exponential' or function=='stretched':
                alpha[i] = result1.best_values['alpha']
            elif function=='stretched_vibration':
                alpha[i] = result1.best_values['alpha']
                freq[i] = result1.best_values['freq']
            if function=='flow_para_function' or  function=='flow_para':
                flow[i] = result1.best_values['flow_velocity']
            # data (red circles) and best fit (blue line) on a log time axis
            ax.semilogx(taus[1:], g2[1:, i], 'ro')
            ax.semilogx(lags, result1.best_fit, '-b')
            # annotate decay time, alpha and baseline inside the panel
            txts = r'$\gamma$' + r'$ = %.3f$'%(1/rate[i]) +  r'$ s$'
            x=0.25
            y0=0.75
            fontsize = 12
            ax.text(x =x, y= y0, s=txts, fontsize=fontsize, transform=ax.transAxes)
            txts = r'$\alpha$' + r'$ = %.3f$'%(alpha[i])
            #txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$'
            ax.text(x =x, y= y0-.1, s=txts, fontsize=fontsize, transform=ax.transAxes)
            txts = r'$baseline$' + r'$ = %.3f$'%( baseline[i])
            ax.text(x =x, y= y0-.2, s=txts, fontsize=fontsize, transform=ax.transAxes)
            if function=='flow_para_function' or  function=='flow_para':
                txts = r'$flow_v$' + r'$ = %.3f$'%( flow[i])
                ax.text(x =x, y= y0-.3, s=txts, fontsize=fontsize, transform=ax.transAxes)
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin,  max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
        fp = path + 'uid=%s--g2--qr-%s--fit-'%(uid,  q_ring_center[qr_ind] ) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
        #plt.show()
    # ---- collect per-ROI best-fit parameters ----
    result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline )
    if function=='flow_para_function' or  function=='flow_para':
        result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline, flow_velocity=flow )
    if function=='stretched_vibration':
        result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline, freq= freq )
    return result
def save_seg_saxs_g2( g2, res_pargs, time_label=True, *argv,**kwargs):
    '''
    Aug 8, 2016, Y.G.@CHX
    Save a segmented (qz x angle) SAXS g2 result as a CSV table.
    The first column is tau; each remaining column is one (q_ring, angle) ROI.
    res_pargs must contain:
        taus: time delays (same length as g2)
        q_ring_center, ang_center: ROI coordinates used as column labels
        path: output directory
        uid: unique id used in the filename
    time_label: when True, the filename carries a creation timestamp
    '''
    taus = res_pargs[ 'taus']
    qz_center= res_pargs[ 'q_ring_center']
    qr_center = res_pargs[ 'ang_center']
    path = res_pargs['path']
    uid = res_pargs['uid']
    # stack tau as the leading column next to the g2 matrix
    table = np.hstack( [ (taus).reshape( len(g2),1) , g2] )
    df = DataFrame( table )
    # build column labels: 'tau' followed by one [qz, qr] pair per ROI
    labels = ['tau']
    for qz in qz_center:
        labels.extend( [str(qz), str(qr)] for qr in qr_center )
    df.columns = labels
    if time_label:
        dt =datetime.now()
        CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        filename = os.path.join(path, 'g2-%s-%s.csv' %(uid,CurTime))
    else:
        filename = os.path.join(path, 'uid=%s--g2.csv' % (uid))
    df.to_csv(filename)
    print( 'The g2 of uid= %s is saved with filename as %s'%(uid, filename))
def linear_fit( x,y):
    '''Least-squares straight-line fit of y vs x.
    Returns (coeffs, poly): coeffs = [slope, intercept] from np.polyfit,
    poly = the corresponding np.poly1d callable.'''
    coeffs = np.polyfit(x, y, 1)
    line = np.poly1d(coeffs)
    return coeffs, line
def plot_gamma():
    '''not work
    NOTE(review): this function is broken as written -- it references the
    module/notebook globals `uid`, `q_ring_center` and `result`, none of which
    are parameters, so it raises NameError unless those names happen to exist
    in the calling scope. Kept only as a sketch of a log-log Gamma-vs-q plot.
    '''
    fig, ax = plt.subplots()
    ax.set_title('Uid= %s--Beta'%uid)
    ax.set_title('Uid= %s--Gamma'%uid)  # overwrites the previous title
    #ax.plot(  q_ring_center**2 , 1/rate, 'ro', ls='--')
    ax.loglog(  q_ring_center , 1/result['rate'], 'ro', ls='--')
    #ax.set_ylabel('Log( Beta0 'r'$\beta$'"($s^{-1}$)")
    ax.set_ylabel('Log( Gamma )')
    ax.set_xlabel("$Log(q)$"r'($\AA^{-1}$)')
    #plt.show()
def multi_uids_saxs_flow_xpcs_analysis(   uids, md, run_num=1, sub_num=None, good_start=10, good_end= None,
                                     force_compress=False,  fit_vibration = True,
                                   fit = True, compress=True, para_run=False  ):
    ''''Aug 16, 2016, YG@CHX-NSLS2
    Do SAXS-XPCS analysis for multi uid data
    uids: a list of uids to be analyzed
    md: metadata, should at least include
        mask: array, mask data
        data_dir: the path to save data, the result will be saved in data_dir/uid/...
        dpix:
        Ldet:
        lambda:
        timeperframe:
        center
        seg_mask_v / seg_mask_p: vertical / parallel segment ROI masks
        rcen_v, acen_v / rcen_p, acen_p: radial / angular centers per direction
    run_num: the run number
    sub_num: the number in each sub-run
    fit: if fit, do fit for g2 and show/save all fit plots
    compress: apply a compress algorithm
    Save g2/metadata/g2-fit plot/g2 q-rate plot/ of each uid in data_dir/uid/...
    return:
    g2s: a dictionary, {run_num: sub_num: {'v'|'p': g2_of_each_uid}}
    taus,
    use_uids: return the valid uids
    '''
    g2s = {} # g2s[run_number][sub_seq] = g2 of each uid
    lag_steps = [0]
    useful_uids = {}
    if sub_num is None:
        sub_num = len( uids )//run_num
    mask = md['mask']
    data_dir = md['data_dir']
    #ring_mask = md['ring_mask']
    #q_ring_center = md['q_ring_center']
    seg_mask_v = md['seg_mask_v']
    seg_mask_p = md['seg_mask_p']
    # BUGFIX: acen_p was read from md['acen_v'] and rcen_v from md['rcen_p']
    # (copy-paste error); each direction now reads its own keys.
    rcen_p, acen_p = md['rcen_p'], md['acen_p']
    rcen_v, acen_v = md['rcen_v'], md['acen_v']
    lag_steps =[0]
    for run_seq in range(run_num):
        g2s[ run_seq + 1] = {}
        useful_uids[ run_seq + 1] = {}
        i=0
        for sub_seq in range( 0, sub_num ):
            #good_end=good_end
            uid = uids[ sub_seq + run_seq * sub_num  ]
            print( 'The %i--th uid to be analyzed is : %s'%(i, uid) )
            try:
                detector = get_detector( db[uid ] )
                imgs = load_data( uid, detector, reverse= True )
            except:
                print( 'The %i--th uid: %s can not load data'%(i, uid) )
                imgs=0
            data_dir_ = os.path.join( data_dir, '%s/'%uid)
            os.makedirs(data_dir_, exist_ok=True)
            i +=1
            if imgs !=0:
                imgsa = apply_mask( imgs, mask )
                Nimg = len(imgs)
                md_ = imgs.md
                useful_uids[ run_seq + 1][i] = uid
                g2s[run_seq + 1][i] = {}
                #if compress:
                filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%uid
                #update code here to use new pass uid to compress, 2016, Dec 3
                if False:
                    mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md_, filename,
                            force_compress= force_compress, bad_pixel_threshold= 2.4e18,nobytes=4,
                            para_compress=True, num_sub= 100)
                if True:
                    mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(uid, mask, md_, filename,
                             force_compress= False, bad_pixel_threshold= 2.4e18, nobytes=4,
                            para_compress= True, num_sub= 100, dtypes='uid', reverse=True  )
                try:
                    md['Measurement']= db[uid]['start']['Measurement']
                    #md['sample']=db[uid]['start']['sample']
                    #md['sample']= 'PS205000-PMMA-207000-SMMA3'
                    print( md['Measurement'] )
                except:
                    md['Measurement']= 'Measurement'
                    md['sample']='sample'
                dpix = md['x_pixel_size'] * 1000.  #in mm, eiger 4m is 0.075 mm
                lambda_ =md['incident_wavelength']    # wavelegth of the X-rays in Angstroms
                Ldet = md['detector_distance'] * 1000     # detector to sample distance (mm)
                exposuretime= md['count_time']
                acquisition_period = md['frame_time']
                timeperframe = acquisition_period#for g2
                #timeperframe = exposuretime#for visiblitly
                #timeperframe = 2  ## manual overwrite!!!! we apparently writing the wrong metadata....
                center= md['center']
                setup_pargs=dict(uid=uid, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
                    timeperframe=timeperframe, center=center, path= data_dir_)
                md['avg_img'] = avg_img
                #plot1D( y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])],
                #   title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='' )
                min_inten = 10
                #good_start = np.where( np.array(imgsum) > min_inten )[0][0]
                good_start = good_start
                if good_end is None:
                    good_end_ = len(imgs)
                else:
                    good_end_= good_end
                FD = Multifile(filename, good_start, good_end_ )
                good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
                print ('With compression, the good_start frame number is: %s '%good_start)
                print ('The good_end frame number is: %s '%good_end_)
                norm = None
                ###################
                #Do correlaton here
                # nconf 0: vertical segment mask; nconf 1: parallel segment mask
                for nconf, seg_mask in enumerate( [seg_mask_v, seg_mask_p ]):
                    if nconf==0:
                        conf='v'
                    else:
                        conf='p'
                    rcen = md['rcen_%s'%conf]
                    acen = md['acen_%s'%conf]
                    if not para_run:
                        g2, lag_stepsv  =cal_g2( FD,  seg_mask, bad_frame_list,good_start, num_buf = 8,
                                )
                    else:
                        g2, lag_stepsv  =cal_g2p( FD,  seg_mask, bad_frame_list,good_start, num_buf = 8,
                                            imgsum= None, norm=norm )
                    # keep the longest lag-step ladder seen so far
                    if len( lag_steps) < len(lag_stepsv):
                        lag_steps = lag_stepsv
                    taus = lag_steps * timeperframe
                    res_pargs = dict(taus=taus, q_ring_center=np.unique(rcen),
                                     ang_center= np.unique(acen), path= data_dir_, uid=uid +'_1a_mq%s'%conf )
                    save_g2( g2, taus=taus, qr=rcen, qz=acen, uid=uid +'_1a_mq%s'%conf, path= data_dir_ )
                    if nconf==0:
                        g2s[run_seq + 1][i]['v'] = g2  #perpendular
                    else:
                        g2s[run_seq + 1][i]['p'] = g2  #parallel
                    if fit:
                        if False:
                            g2_fit_result, taus_fit, g2_fit = get_g2_fit( g2, res_pargs=res_pargs,
                                        function = 'stretched_vibration',  vlim=[0.95, 1.05],
                                        fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True,
                                                       'freq':fit_vibration, 'amp':True},
                                        fit_range= None,
                                        guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,
                                                      'freq': 60, 'amp':.1})
                        if nconf==0:#for vertical
                            function = 'stretched'
                            g2_fit_result, taus_fit, g2_fit = get_g2_fit( g2, res_pargs=res_pargs,
                                        function = function,  vlim=[0.95, 1.05],
                                        fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True,
                                                       },
                                        fit_range= None,
                                        guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,
                                                      })
                        else:
                            # parallel direction carries the flow term
                            function = 'flow_para'
                            g2_fit_result, taus_fit, g2_fit = get_g2_fit( g2, res_pargs=res_pargs,
                                        function = function,  vlim=[0.99, 1.05], fit_range= None,
                                        fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True,
                                                       'flow_velocity':True,  },
                                        guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,
                                                      'flow_velocity':1,  } )
                        save_g2( g2_fit, taus=taus_fit,qr=rcen, qz=acen,
                                uid=uid +'_1a_mq%s'%conf+'_fit', path= data_dir_ )
                        res_pargs_fit = dict(taus=taus, q_ring_center= np.unique(rcen),
                                             ang_center= [acen[0]], path=data_dir_, uid=uid +'_1a_mq%s'%conf+'_fit' )
                        plot_g2( g2, res_pargs= res_pargs, tau_2 = taus_fit, g2_2 = g2_fit,
                                fit_res= g2_fit_result,  function = function,
                                master_plot = 'qz',vlim=[0.95, 1.05],
                                geometry='ang_saxs', append_name=  conf +'_fit' )
                        dfv = save_g2_fit_para_tocsv(g2_fit_result,
                                    filename= uid +'_1a_mq'+conf+'_fit_para', path=data_dir_ )
                        fit_q_rate(  np.unique(rcen)[:],dfv['relaxation_rate'], power_variable= False,
                                   uid=uid +'_'+conf+'_fit_rate', path= data_dir_ )
                #psave_obj( fit_result, data_dir_ + 'uid=%s-g2-fit-para'%uid )
                psave_obj(  md, data_dir_ + 'uid=%s-md'%uid ) #save the setup parameters
                # release memory before the next uid
                FD=0
                avg_img, imgsum, bad_frame_list = [0,0,0]
                md['avg_img']=0
                imgs=0
            print ('*'*40)
            print()
    return g2s, taus, useful_uids
def multi_uids_saxs_xpcs_analysis(   uids, md, run_num=1, sub_num=None, good_start=10, good_end= None,
                                  force_compress=False,
                                   fit = True, compress=True, para_run=False  ):
    ''''Aug 16, 2016, YG@CHX-NSLS2
    Do SAXS-XPCS analysis for multi uid data
    uids: a list of uids to be analyzed
    md: metadata, should at least include
        mask: array, mask data
        data_dir: the path to save data, the result will be saved in data_dir/uid/...
        dpix:
        Ldet:
        lambda:
        timeperframe:
        center
    run_num: the run number
    sub_num: the number in each sub-run
    fit: if fit, do fit for g2 and show/save all fit plots
    compress: apply a compress algorithm
    Save g2/metadata/g2-fit plot/g2 q-rate plot/ of each uid in data_dir/uid/...
    return:
    g2s: a dictionary, {run_num: sub_num: g2_of_each_uid}
    taus,
    use_uids: return the valid uids
    '''
    g2s = {} # g2s[run_number][sub_seq] = g2 of each uid
    lag_steps = [0]
    useful_uids = {}
    if sub_num is None:
        sub_num = len( uids )//run_num
    mask = md['mask']
    data_dir = md['data_dir']
    ring_mask = md['ring_mask']
    q_ring_center = md['q_ring_center']
    for run_seq in range(run_num):
        g2s[ run_seq + 1] = {}
        useful_uids[ run_seq + 1] = {}
        i=0
        for sub_seq in range( 0, sub_num ):
            #good_end=good_end
            uid = uids[ sub_seq + run_seq * sub_num  ]
            print( 'The %i--th uid to be analyzed is : %s'%(i, uid) )
            try:
                detector = get_detector( db[uid ] )
                imgs = load_data( uid, detector, reverse= True )
            except:
                print( 'The %i--th uid: %s can not load data'%(i, uid) )
                imgs=0
            data_dir_ = os.path.join( data_dir, '%s/'%uid)
            os.makedirs(data_dir_, exist_ok=True)
            i +=1
            if imgs !=0:
                imgsa = apply_mask( imgs, mask )
                Nimg = len(imgs)
                md_ = imgs.md
                useful_uids[ run_seq + 1][i] = uid
                if compress:
                    filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%uid
                    #update code here to use new pass uid to compress, 2016, Dec 3
                    if False:
                        mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md_, filename,
                                force_compress= force_compress, bad_pixel_threshold= 2.4e18,nobytes=4,
                                para_compress=True, num_sub= 100)
                    if True:
                        mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(uid, mask, md_, filename,
                                 force_compress= True, bad_pixel_threshold= 2.4e18, nobytes=4,
                                para_compress= True, num_sub= 100, dtypes='uid', reverse=True  )
                    try:
                        md['Measurement']= db[uid]['start']['Measurement']
                        #md['sample']=db[uid]['start']['sample']
                        #md['sample']= 'PS205000-PMMA-207000-SMMA3'
                        print( md['Measurement'] )
                    except:
                        md['Measurement']= 'Measurement'
                        md['sample']='sample'
                    dpix = md['x_pixel_size'] * 1000.  #in mm, eiger 4m is 0.075 mm
                    lambda_ =md['incident_wavelength']    # wavelegth of the X-rays in Angstroms
                    Ldet = md['detector_distance'] * 1000     # detector to sample distance (mm)
                    exposuretime= md['count_time']
                    acquisition_period = md['frame_time']
                    timeperframe = acquisition_period#for g2
                    #timeperframe = exposuretime#for visiblitly
                    #timeperframe = 2  ## manual overwrite!!!! we apparently writing the wrong metadata....
                    center= md['center']
                    setup_pargs=dict(uid=uid, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
                        timeperframe=timeperframe, center=center, path= data_dir_)
                    md['avg_img'] = avg_img
                    #plot1D( y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])],
                    #   title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='' )
                    min_inten = 10
                    #good_start = np.where( np.array(imgsum) > min_inten )[0][0]
                    good_start = good_start
                    if good_end is None:
                        good_end_ = len(imgs)
                    else:
                        good_end_= good_end
                    FD = Multifile(filename, good_start, good_end_ )
                    good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
                    print ('With compression, the good_start frame number is: %s '%good_start)
                    print ('The good_end frame number is: %s '%good_end_)
                    hmask = create_hot_pixel_mask( avg_img, 1e8)
                    qp, iq, q = get_circular_average( avg_img, mask * hmask, pargs=setup_pargs, nx=None,
                                plot_ = False, show_pixel= True, xlim=[0.001,.05], ylim = [0.0001, 500])
                    norm = get_pixelist_interp_iq( qp, iq, ring_mask, center)
                    if not para_run:
                        g2, lag_steps_  =cal_g2c( FD,  ring_mask, bad_frame_list,good_start, num_buf = 8,
                                            imgsum= None, norm= norm )
                    else:
                        g2, lag_steps_  =cal_g2p( FD,  ring_mask, bad_frame_list,good_start, num_buf = 8,
                                            imgsum= None, norm= norm )
                    # keep the longest lag-step ladder seen so far
                    if len( lag_steps) < len(lag_steps_):
                        lag_steps = lag_steps_
                    # release memory before the next uid
                    FD=0
                    avg_img, imgsum, bad_frame_list = [0,0,0]
                    md['avg_img']=0
                    imgs=0
                else:
                    sampling = 1000  #sampling should be one
                    #good_start = check_shutter_open( imgsra,  min_inten=5, time_edge = [0,10], plot_ = False )
                    good_start = good_start
                    good_series = apply_mask( imgsa[good_start:  ], mask )
                    imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = sampling,
                                                bad_pixel_threshold=1.2e8,  plot_ = False, uid=uid)
                    bad_image_process = False
                    if  len(bad_frame_list):
                        bad_image_process = True
                    print( bad_image_process  )
                    g2, lag_steps_  =cal_g2( good_series,  ring_mask, bad_image_process,
                                       bad_frame_list, good_start, num_buf = 8 )
                    if len( lag_steps) < len(lag_steps_):
                        # BUGFIX: was `lag_steps = lag_step_` (undefined name ->
                        # NameError whenever the uncompressed branch found a
                        # longer lag ladder); the new steps are in `lag_steps_`.
                        lag_steps = lag_steps_
                taus_ = lag_steps_ * timeperframe
                taus = lag_steps * timeperframe
                res_pargs = dict(taus=taus_, q_ring_center=q_ring_center, path=data_dir_, uid=uid        )
                save_saxs_g2(   g2, res_pargs )
                #plot_saxs_g2( g2, taus,  vlim=[0.95, 1.05], res_pargs=res_pargs)
                if fit:
                    fit_result = fit_saxs_g2( g2, res_pargs, function = 'stretched',  vlim=[0.95, 1.05],
                        fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                        guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01})
                    fit_q_rate(  q_ring_center[:], fit_result['rate'][:], power_variable= False,
                           uid=uid, path= data_dir_ )
                    psave_obj( fit_result, data_dir_ + 'uid=%s-g2-fit-para'%uid )
                psave_obj(  md, data_dir_ + 'uid=%s-md'%uid ) #save the setup parameters
                g2s[run_seq + 1][i] = g2
                print ('*'*40)
                print()
    return g2s, taus, useful_uids
def plot_mul_g2( g2s, md ):
    '''
    Plot multi g2 functions generated by multi_uids_saxs_xpcs_analysis
    Will create a large plot with q_number pannels
    Each pannel (for each q) will show a number (run number) of g2 functions,
    one curve per (run, sub-run) uid, labelled by its scan id.
    md must contain: q_ring_center, sids, useful_uids, taus, run_num, sub_num, uid_
    '''
    q_ring_center = md['q_ring_center']
    sids = md['sids']
    useful_uids = md['useful_uids']
    taus =md['taus']
    run_num = md['run_num']
    sub_num = md['sub_num']
    uid_ = md['uid_']
    fig = plt.figure(figsize=(12, 20))
    plt.title('uid= %s:--->'%uid_ ,fontsize=20, y =1.06)
    Nq = len(q_ring_center)
    if Nq!=1:
        plt.axis('off')
    # near-square panel grid for the Nq q-rings
    sx = int(round(np.sqrt( Nq )) )
    if Nq%sx == 0:
        sy = int(Nq/sx)
    else:
        sy=int(Nq/sx+1)
    for sn in range( Nq ):
        ax = fig.add_subplot(sx,sy,sn+1 )
        ax.set_ylabel( r"$g_2$" + '(' + r'$\tau$' + ')' )
        ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
        for run_seq in range(run_num):
            i=0
            for sub_seq in range( 0, sub_num ):
                #print( run_seq, sub_seq )
                uid = useful_uids[run_seq +1][ sub_seq +1 ]
                # NOTE(review): `i` resets for every run_seq, so `sids[i]` repeats
                # across runs -- confirm sids is meant to be indexed per sub-run.
                sid = sids[i]
                if i ==0:
                    title = r'$Q_r= $'+'%.5f  '%( q_ring_center[sn]) + r'$\AA^{-1}$'
                    ax.set_title( title , y =1.1, fontsize=12)
                y=g2s[run_seq+1][sub_seq+1][:, sn]
                # curves may have fewer points than taus; clip to the shorter one
                len_tau = len( taus )
                len_g2 = len( y )
                len_ = min( len_tau, len_g2)
                #print ( len_tau, len(y))
                #ax.semilogx(taus[1:len_], y[1:len_], marker = '%s'%next(markers_), color='%s'%next(colors_),
                #            markersize=6, label = '%s'%sid)
                ax.semilogx(taus[1:len_], y[1:len_], marker = markers[i], color= colors[i],
                            markersize=6, label = '%s'%sid)
                if sn ==0:
                    ax.legend(loc='best', fontsize = 6)
                i = i + 1
    fig.set_tight_layout(True)
<file_sep>'''
Yugang Created at Aug 08, 2016, CHX-NSLS-II
Create a PDF file from XPCS data analysis results, which are generated by CHX data analysis pipeline
How to use:
python Create_Report.py full_file_path uid output_dir (optional)
An example of use:
python Create_Report.py /XF11ID/analysis/2016_2/yuzhang/Results/August/af8f66/ af8f66
python Create_Report.py /XF11ID/analysis/2016_2/yuzhang/Results/August/af8f66/ af8f66 /XF11ID/analysis/2016_2/yuzhang/Results/August/af8f66/test/
'''
def check_dict_keys( dicts, key):
    '''Ensure `key` exists in the dict `dicts` (mutated in place);
    missing keys get the placeholder value 'unknown'.'''
    # setdefault is the idiomatic (and O(1)) form of the previous
    # `if key not in list(dicts.keys())` check
    dicts.setdefault( key, 'unknown' )
import h5py
from reportlab.pdfgen import canvas
from reportlab.lib.units import inch, cm , mm
from reportlab.lib.colors import pink, green, brown, white, black, red, blue
from reportlab.lib.styles import getSampleStyleSheet
#from reportlab.platypus import Image, Paragraph, Table
from reportlab.lib.pagesizes import letter, A4
from chxanalys.chx_generic_functions import (pload_obj )
from PIL import Image
from time import time
from datetime import datetime
import sys,os
import pandas as pds
import numpy as np
def add_one_line_string( c, s, top, left=30, fontsize = 11 ):
    '''Draw string `s` on reportlab canvas `c` at (left, top), shrinking the
    font when (fontsize * character count) would exceed ~1000 points.'''
    if fontsize * len(s) > 1000:
        fontsize = 1000. / len(s)
    c.setFont("Helvetica", fontsize)
    c.drawString(left, top, s)
def add_image_string( c, imgf, data_dir, img_left, img_top, img_height,
                     str1_left, str1_top,str1,
                     str2_left, str2_top, return_ = False ):
    '''Draw the image file `data_dir + imgf` on reportlab canvas `c`, scaled to
    `img_height` with aspect ratio preserved, plus a blue caption (`str1`) and a
    red filename line. If the file does not exist, only the caption and a red
    '-->Not Calculated!' note are drawn.
    When return_ is True and the image exists, returns the drawn width
    (height/ratio); otherwise returns None implicitly.
    NOTE(review): the `else:` below pairs with the os.path.exists check, not
    with `if return_:` -- confirm this dangling-else layout is intended.
    '''
    image = data_dir + imgf
    if os.path.exists(image):
        im = Image.open( image )
        # ratio = pixel height / pixel width of the source image
        ratio = float(im.size[1])/im.size[0]
        height= img_height
        width = height/ratio
        #if width>400:
        #    width = 350
        #    height = width*ratio
        c.drawImage( image, img_left, img_top, width= width,height=height,mask=None)
        c.setFont("Helvetica", 16)
        c.setFillColor( blue )
        c.drawString(str1_left, str1_top,str1 )
        c.setFont("Helvetica", 12)
        c.setFillColor(red)
        c.drawString(str2_left, str2_top, 'filename: %s'%imgf )
        if return_:
            return height/ratio
    else:
        c.setFillColor( blue )
        c.drawString( str1_left, str1_top, str1)
        c.setFillColor(red)
        c.drawString( str1_left, str1_top -40, '-->Not Calculated!' )
class create_pdf_report( object ):
'''Aug 16, YG@CHX-NSLS-II
Create a pdf report by giving data_dir, uid, out_dir
data_dir: the input data directory, including all necessary images
the images names should be:
meta_file = 'uid=%s-md'%uid
avg_img_file = 'uid=%s--img-avg-.png'%uid
ROI_on_img_file = 'uid=%s--ROI-on-Image-.png'%uid
qiq_file = 'uid=%s--Circular-Average-.png'%uid
ROI_on_Iq_file = 'uid=%s--ROI-on-Iq-.png'%uid
Iq_t_file = 'uid=%s--Iq-t-.png'%uid
img_sum_t_file = 'uid=%s--img-sum-t.png'%uid
wat_file= 'uid=%s--Waterfall-.png'%uid
Mean_inten_t_file= 'uid=%s--Mean-intensity-of-each-ROI-.png'%uid
g2_file = 'uid=%s--g2-.png'%uid
g2_fit_file = 'uid=%s--g2--fit-.png'%uid
q_rate_file = 'uid=--%s--Q-Rate--fit-.png'%uid
two_time_file = 'uid=%s--Two-time-.png'%uid
two_g2_file = 'uid=%s--g2--two-g2-.png'%uid
uid: the unique id
out_dir: the output directory
report_type:
'saxs': report saxs results
'gisaxs': report gisaxs results
Output:
A PDF file with name as "XPCS Analysis Report for uid=%s"%uid in out_dir folder
'''
    def __init__( self, data_dir, uid, out_dir=None, filename=None, load=True, user=None,
                 report_type='saxs',md=None ):
        '''Set up the reportlab canvas and output location for one uid.
        data_dir: directory containing the analysis images; out_dir defaults to it.
        filename defaults to "XPCS_Analysis_Report_for_uid=<uid>.pdf".
        When load is True, load_metadata() is called immediately.
        md: optional pre-loaded metadata dict; if None it is read from disk later.
        '''
        self.data_dir = data_dir
        self.uid = uid
        self.md = md
        #print(md)
        if user is None:
            user = 'chx'
        self.user = user
        if out_dir is None:
            out_dir = data_dir
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        self.out_dir=out_dir
        self.styles = getSampleStyleSheet()
        self.width, self.height = letter
        self.report_type = report_type
        # timestamp used in the page footer
        dt =datetime.now()
        CurTime = '%02d/%02d/%s/-%02d/%02d/' % (  dt.month, dt.day, dt.year,dt.hour,dt.minute)
        self.CurTime = CurTime
        if filename is None:
            filename="XPCS_Analysis_Report_for_uid=%s.pdf"%uid
        filename=out_dir + filename
        c = canvas.Canvas( filename, pagesize=letter)
        self.filename= filename
        #c.setTitle("XPCS Analysis Report for uid=%s"%uid)
        c.setTitle(filename)
        self.c = c
        if load:
            self.load_metadata()
def load_metadata(self):
uid=self.uid
data_dir = self.data_dir
#load metadata
meta_file = 'uid=%s_md'%uid
self.metafile = data_dir + meta_file
if self.md is None:
md = pload_obj( data_dir + meta_file )
self.md = md
else:
md = self.md
#print('Get md from giving md')
#print(md)
self.sub_title_num = 0
uid_g2 = None
uid_c12 = None
if 'uid_g2' in list(md.keys()):
uid_g2 = md['uid_g2']
if 'uid_c12' in list(md.keys()):
uid_c12 = md['uid_c12']
'''global definition'''
try:
beg = md['beg']
end= md['end']
uid_ = uid + '_fra_%s_%s'%(beg, end)
except:
uid_ = uid
if beg is None:
uid_ = uid
self.avg_img_file = 'uid=%s_img_avg.png'%uid
self.ROI_on_img_file = 'uid=%s_ROI_on_Image.png'%uid
self.qiq_file = 'uid=%s_q_Iq.png'%uid
self.qiq_fit_file = 'uid=%s_form_factor_fit.png'%uid
#self.qr_1d_file = 'uid=%s_Qr_ROI.png'%uid
if self.report_type =='saxs' or self.report_type =='ang_saxs':
self.ROI_on_Iq_file = 'uid=%s_ROI_on_Iq.png'%uid
elif self.report_type =='gi_saxs':
self.ROI_on_Iq_file = 'uid=%s_Qr_ROI.png'%uid
self.Iq_t_file = 'uid=%s_q_Iqt.png'%uid
self.img_sum_t_file = 'uid=%s_img_sum_t.png'%uid
self.wat_file= 'uid=%s_waterfall.png'%uid
self.Mean_inten_t_file= 'uid=%s_t_ROIs.png'%uid
if uid_g2 is None:
uid_g2 = uid_
self.g2_file = 'uid=%s_g2.png'%uid_g2
self.g2_fit_file = 'uid=%s_g2_fit.png'%uid_g2
#print( self.g2_fit_file )
self.g2_new_page = False
self.g2_fit_new_page = False
jfn = 'uid=%s_g2__joint.png'%uid_g2
if os.path.exists( data_dir + jfn):
self.g2_file = jfn
self.g2_new_page = True
jfn = 'uid=%s_g2_fit__joint.png'%uid_g2
if os.path.exists(data_dir + jfn ):
self.g2_fit_file = jfn
self.g2_fit_new_page = True
self.q_rate_file = 'uid=%s_Q_Rate_fit.png'%uid_g2
#print( self.q_rate_file )
if uid_c12 is None:
uid_c12 = uid_
self.q_rate_two_time_fit_file = 'uid=%s_two_time_Q_Rate_fit.png'%uid_c12
#print( self.q_rate_two_time_fit_file )
self.two_time_file = 'uid=%s_Two_time.png'%uid_c12
self.two_g2_file = 'uid=%s_g2_two_g2.png'%uid_c12
jfn = 'uid=%s_g2_two_g2__joint.png'%uid_c12
self.two_g2_new_page = False
if os.path.exists( data_dir + jfn ):
self.two_g2_file = jfn
self.two_g2_new_page = True
self.four_time_file = 'uid=%s_g4.png'%uid_
self.xsvs_fit_file = 'uid=%s_xsvs_fit.png'%uid_
self.contrast_file = 'uid=%s_contrast.png'%uid_
self.dose_file = 'uid=%s_dose_analysis.png'%uid_
jfn = 'uid=%s_dose_analysis__joint.png'%uid_
self.dose_file_new_page = False
if os.path.exists( data_dir + jfn ):
self.dose_file = jfn
self.dose_file_new_page = True
#print( self.dose_file )
if False:
self.flow_g2v = 'uid=%s_1a_mqv_g2_v_fit.png'%uid_
self.flow_g2p = 'uid=%s_1a_mqp_g2_p_fit.png'%uid_
self.flow_g2v_rate_fit = 'uid=%s_v_fit_rate_Q_Rate_fit.png'%uid_
self.flow_g2p_rate_fit = 'uid=%s_p_fit_rate_Q_Rate_fit.png'%uid_
if True:
self.two_time = 'uid=%s_pv_two_time.png'%uid_
#self.two_time_v = 'uid=%s_pv_two_time.png'%uid_
#self.flow_g2bv = 'uid=%s_g2b_v_fit.png'%uid_
#self.flow_g2bp = 'uid=%s_g2b_p_fit.png'%uid_
self.flow_g2_g2b_p = 'uid=%s_g2_two_g2_p.png'%uid_
self.flow_g2_g2b_v = 'uid=%s_g2_two_g2_v.png'%uid_
self.flow_g2bv_rate_fit = 'uid=%s_vertb_Q_Rate_fit.png'%uid_
self.flow_g2bp_rate_fit = 'uid=%s_parab_Q_Rate_fit.png'%uid_
self.flow_g2v = 'uid=%s_g2_v_fit.png'%uid_
self.flow_g2p = 'uid=%s_g2_p_fit.png'%uid_
self.flow_g2v_rate_fit = 'uid=%s_vert_Q_Rate_fit.png'%uid_
self.flow_g2p_rate_fit = 'uid=%s_para_Q_Rate_fit.png'%uid_
#self.report_header(page=1, top=730, new_page=False)
#self.report_meta(new_page=False)
self.q2Iq_file = 'uid=%s_q2_iq.png'%uid
self.iq_invariant_file = 'uid=%s_iq_invariant.png'%uid
def report_invariant( self, top= 300, new_page=False):
'''create the invariant analysis report
two images:
ROI on average intensity image
ROI on circular average
'''
uid=self.uid
c= self.c
#add sub-title, static images
c.setFillColor(black)
c.setFont("Helvetica", 20)
ds = 230
self.sub_title_num +=1
c.drawString(10, top, "%s. I(q) Invariant Analysis"%self.sub_title_num ) #add title
#add q2Iq
c.setFont("Helvetica", 14)
imgf = self.q2Iq_file
#print( imgf )
label = 'q^2*I(q)'
add_image_string( c, imgf, self.data_dir, img_left= 60, img_top=top - ds*1.15, img_height=180,
str1_left=110, str1_top = top-35,str1=label,
str2_left = 60, str2_top = top -320 )
#add iq_invariant
imgf = self.iq_invariant_file
img_height= 180
img_left,img_top =320, top - ds*1.15
str1_left, str1_top,str1= 420, top- 35, 'I(q) Invariant'
str2_left, str2_top = 350, top- 320
#print ( imgf )
add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
str1_left, str1_top,str1,
str2_left, str2_top )
if new_page:
c.showPage()
c.save()
def report_header(self, page=1, new_page=False):
'''create headers, including title/page number'''
c= self.c
CurTime = self.CurTime
uid=self.uid
user=self.user
c.setFillColor(black)
c.setFont("Helvetica", 14)
#add page number
c.drawString(250, 10, "Page--%s--"%( page ) )
#add time stamp
#c.drawString(350, 10, "Created at %s@CHX-by-%s"%( CurTime,user ) )
s_ = "Created at %s@CHX-By-%s"%( CurTime,user )
add_one_line_string( c, s_, 10, left=350,fontsize = 11 )
#add title
#c.setFont("Helvetica", 22)
title = "XPCS Analysis Report for uid=%s"%uid
c.setFont("Helvetica", 1000/( len(title) ) )
#c.drawString(180,760, "XPCS Report of uid=%s"%uid ) #add title
c.drawString(50,760, "XPCS Analysis Report for uid=%s"%uid ) #add title
#add a line under title
c.setStrokeColor( red )
c.setLineWidth(width=1.5)
c.line( 50, 750, 550, 750 )
if new_page:
c.showPage()
c.save()
    def report_meta(self, top=740, new_page=False):
        '''Write the metadata section of the report (uid, sample,
        acquisition times, wavelength, geometry, mask files, output dirs).

        Parameters
        ----------
        top : int, y coordinate of the section title; lines are stacked
              downward from here in steps of `ds` points
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c=self.c
        #load metadata
        md = self.md
        try:
            uid = md['uid']
        except:
            uid=self.uid
        #add sub-title, metadata
        c.setFont("Helvetica", 20)
        ds = 15
        self.sub_title_num += 1
        c.drawString(10, top, "%s. Metadata"%self.sub_title_num ) #add title
        top = top - 5
        fontsize = 11
        c.setFont("Helvetica", fontsize)
        # ensure every key used below exists in md (check_dict_keys fills/flags
        # missing ones)
        nec_keys = [ 'sample', 'start_time', 'stop_time','Measurement' ,'exposure time' ,'incident_wavelength', 'cam_acquire_t',
                    'frame_time','detector_distance', 'feedback_x', 'feedback_y', 'shutter mode',
                    'beam_center_x', 'beam_center_y', 'beam_refl_center_x', 'beam_refl_center_y','mask_file','bad_frame_list', 'transmission', 'roi_mask_file']
        for key in nec_keys:
            check_dict_keys(md, key)
        try:#try exp time from detector
            exposuretime= md['count_time'] #exposure time in sec
        except:
            exposuretime= md['cam_acquire_time'] #exposure time in sec
        try:#try acq time from detector
            acquisition_period = md['frame_time']
        except:
            try:
                acquisition_period = md['acquire period']
            except:
                # last resort: look the acquire period up in the databroker
                uid = md['uid']
                acquisition_period = float( db[uid]['start']['acquire period'] )
        # build the text lines of the section, one string per printed line
        s = []
        s.append( 'UID: %s'%uid ) ###line 1, for uid
        s.append('Sample: %s'%md['sample'] ) ####line 2 sample
        s.append('Data Acquisition From: %s To: %s'%(md['start_time'], md['stop_time']))####line 3 Data Acquisition time
        s.append( 'Measurement: %s'%md['Measurement'] ) ####line 4 'Measurement
        s.append( 'Wavelength: %s A | Num of Image: %d | Exposure time: %s ms | Acquire period: %s ms'%( md['incident_wavelength'], int(md['number of images']),round(float(exposuretime)*1000,4), round(float( acquisition_period )*1000,4) ) ) ####line 5 'lamda...
        s.append( 'Detector-Sample Distance: %s m| FeedBack Mode: x -> %s & y -> %s| Shutter Mode: %s'%(
                md['detector_distance'], md['feedback_x'], md['feedback_y'], md['shutter mode'] ) ) ####line 6 'Detector-Sample Distance..
        # beam-center line depends on the scattering geometry of the report
        if self.report_type == 'saxs':
            s7= 'Beam Center: [%s, %s] (pixel)'%(md['beam_center_x'], md['beam_center_y'])
        elif self.report_type == 'gi_saxs':
            s7= ('Incident Center: [%s, %s] (pixel)'%(md['beam_center_x'], md['beam_center_y']) +
                 ' || ' +
                 'Reflect Center: [%s, %s] (pixel)'%(md['beam_refl_center_x'], md['beam_refl_center_y']) )
        elif self.report_type == 'ang_saxs' or self.report_type == 'gi_waxs' :
            s7= 'Beam Center: [%s, %s] (pixel)'%(md['beam_center_x'], md['beam_center_y'])
        else:
            s7 = ''
        s7 += ' || ' + 'BadLen: %s'%len(md['bad_frame_list'])
        s7 += ' || ' + 'Transmission: %s'%md['transmission']
        s.append( s7 ) ####line 7 'Beam center...
        m = 'Mask file: %s'%md['mask_file'] + ' || ' + 'ROI mask file: %s'%md['roi_mask_file']
        #s.append( 'Mask file: %s'%md['mask_file'] ) ####line 8 mask filename
        #s.append( ) ####line 8 mask filename
        s.append(m)
        s.append( 'Analysis Results Dir: %s'%self.data_dir ) ####line 9 results folder
        s.append( 'Metadata Dir: %s.csv-&.pkl'%self.metafile ) ####line 10 metadata folder
        try:
            s.append( 'Pipeline notebook: %s'%md['NOTEBOOK_FULL_PATH'] ) ####line 11 notebook folder
        except:
            pass
        #print( 'here' )
        # draw each line, stepping down ds points per line
        line =1
        for s_ in s:
            add_one_line_string( c, s_, top -ds*line , left=30,fontsize = fontsize )
            line += 1
        if new_page:
            c.showPage()
            c.save()
    def report_static( self, top=560, new_page=False, iq_fit=False):
        '''create the static analysis report
        two images:
            average intensity image
            circular average (saxs only; optionally the form-factor fit)

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        iq_fit : bool, if True show the fitted circular average
                 (self.qiq_fit_file) instead of the plain one (saxs only)
        '''
        #add sub-title, static images
        c= self.c
        c.setFont("Helvetica", 20)
        uid=self.uid
        ds = 220
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Static Analysis"%self.sub_title_num ) #add title
        #add average image
        c.setFont("Helvetica", 14)
        imgf = self.avg_img_file
        # image position/label shift depend on geometry: saxs images sit left,
        # all other report types are centered
        if self.report_type == 'saxs':
            ipos = 60
            dshift=0
        elif self.report_type == 'gi_saxs':
            ipos = 200
            dshift= 140
        elif self.report_type == 'ang_saxs':
            ipos = 200
            dshift= 140
        else:
            ipos = 200
            dshift= 140
        add_image_string( c, imgf, self.data_dir, img_left= ipos, img_top=top-ds, img_height=180,
                         str1_left=90 + dshift, str1_top = top-35,str1='Average Intensity Image',
                         str2_left = 80 + dshift, str2_top = top -230 )
        #add q_Iq
        if self.report_type == 'saxs':
            imgf = self.qiq_file
            #print(imgf)
            if iq_fit:
                imgf = self.qiq_fit_file
            label = 'Circular Average'
            lab_pos = 390
            fn_pos = 320
            add_image_string( c, imgf, self.data_dir, img_left=320, img_top=top-ds, img_height=180,
                             str1_left=lab_pos, str1_top = top-35,str1=label,
                             str2_left = fn_pos, str2_top = top -230 )
        else:
            # Qr-1D panel disabled; kept for reference
            if False:
                imgf = self.ROI_on_Iq_file #self.qr_1d_file
                label = 'Qr-1D'
                lab_pos = 420
                fn_pos = 350
                add_image_string( c, imgf, self.data_dir, img_left=320, img_top=top-ds, img_height=180,
                                 str1_left=lab_pos, str1_top = top-35,str1=label,
                                 str2_left = fn_pos, str2_top = top -230 )
        if new_page:
            c.showPage()
            c.save()
    def report_ROI( self, top= 300, new_page=False):
        '''create the ROI-definition section of the report
        two images:
            ROI overlaid on the average intensity image
            ROI overlaid on the circular average / I(q) (not for gi_waxs)

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        uid=self.uid
        c= self.c
        #add sub-title, static images
        c.setFillColor(black)
        c.setFont("Helvetica", 20)
        ds = 230
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Define of ROI"%self.sub_title_num ) #add title
        #add ROI on image
        c.setFont("Helvetica", 14)
        imgf = self.ROI_on_img_file
        label = 'ROI on Image'
        add_image_string( c, imgf, self.data_dir, img_left= 60, img_top=top - ds*1.15, img_height=240,
                         str1_left=110, str1_top = top-35,str1=label,
                         str2_left = 60, str2_top = top -260 )
        #add q_Iq
        if self.report_type == 'saxs' or self.report_type == 'gi_saxs' or self.report_type == 'ang_saxs':
            imgf = self.ROI_on_Iq_file
            img_height=180
            img_left,img_top =320, top - ds
            str1_left, str1_top,str1= 420, top- 35, 'ROI on Iq'
            str2_left, str2_top = 350, top- 260
            #print ( imgf )
            add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                             str1_left, str1_top,str1,
                             str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_time_analysis( self, top= 720,new_page=False):
        '''create the time dependent analysis report
        four images:
            each image total intensity as a function of time
            iq~t (saxs only)
            waterfall (skipped for ang_saxs)
            mean intensity of each ROI as a function of time (skipped for ang_saxs)

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        top1=top
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Time Dependent Plot"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top = top1 - 160
        #add img_sum_t
        # left position of the first panel depends on geometry
        if self.report_type == 'saxs':
            ipos = 80
        elif self.report_type == 'gi_saxs':
            ipos = 200
        elif self.report_type == 'ang_saxs':
            ipos = 200
        else:
            ipos = 200
        imgf = self.img_sum_t_file
        img_height=140
        img_left,img_top = ipos, top
        str1_left, str1_top,str1= ipos + 60, top1 - 20 , 'img sum ~ t'
        str2_left, str2_top = ipos, top- 5
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        #plot iq~t
        if self.report_type == 'saxs':
            imgf = self.Iq_t_file
            image = self.data_dir + imgf
            img_height=140
            img_left,img_top = 350, top
            str1_left, str1_top,str1= 420, top1-20 , 'iq ~ t'
            str2_left, str2_top = 360, top- 5
            add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                             str1_left, str1_top,str1,
                             str2_left, str2_top )
        elif self.report_type == 'gi_saxs':
            pass
        top = top1 - 340
        #add waterfall plot
        imgf = self.wat_file
        img_height=160
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 140, top + img_height, 'waterfall plot'
        str2_left, str2_top = 80, top- 5
        if self.report_type != 'ang_saxs':
            add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                             str1_left, str1_top,str1,
                             str2_left, str2_top )
        else:
            pass
        #add mean-intensity of each roi
        imgf = self.Mean_inten_t_file
        img_height=160
        img_left,img_top = 360, top
        str1_left, str1_top,str1= 330, top + img_height, 'Mean-intensity-of-each-ROI'
        str2_left, str2_top = 310, top- 5
        if self.report_type != 'ang_saxs':
            add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                             str1_left, str1_top,str1,
                             str2_left, str2_top )
        else:
            pass
        if new_page:
            c.showPage()
            c.save()
    def report_one_time( self, top= 350, g2_fit_file=None, q_rate_file=None, new_page=False):
        '''create the one time correlation function report
        Two images:
            One Time Correlation Function with fit
            q-rate fit

        Parameters
        ----------
        top : int, y coordinate of the section title
        g2_fit_file : str or None, filename of the g2-fit image;
                      defaults to self.g2_fit_file
        q_rate_file : str or None, filename of the q-rate fit image;
                      defaults to self.q_rate_file
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, One Time Correlation Function
        c.setFillColor(black)
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. One Time Correlation Function"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        #add g2 plot
        if g2_fit_file is None:
            imgf = self.g2_fit_file
        else:
            imgf = g2_fit_file
        # layout depends on geometry: ang_saxs g2 panels are much taller
        if self.report_type != 'ang_saxs':
            img_height= 300
            top = top - 320
            str2_left, str2_top = 80, top- 0
        else:
            img_height= 550
            top = top - 600
            str2_left, str2_top = 80, top - 400
        #add one_time caculation
        img_left,img_top = 1, top
        # when g2 plots were promoted to a page of their own, enlarge the panel
        if self.g2_fit_new_page or self.g2_new_page:
            img_height= 550
            top = top - 250
            str2_left, str2_top = 80, top - 0
            img_left,img_top = 60, top
        str1_left, str1_top,str1= 150, top + img_height, 'g2 fit plot'
        img_width = add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                                     str1_left, str1_top,str1,
                                     str2_left, str2_top, return_=True )
        #print( imgf,self.data_dir )
        #add g2 plot fit
        top = top + 70 #
        if q_rate_file is None:
            imgf = self.q_rate_file
        else:
            imgf = q_rate_file
        if self.report_type != 'ang_saxs':
            #print(img_width)
            # shrink the q-rate panel when the g2 panel is wide, to avoid overlap
            if img_width > 400:
                img_height = 90
            else:
                img_height= 180
            img_left,img_top = img_width-10, top #350, top
            str2_left, str2_top = img_width + 50, top - 5 #380, top - 5
            str1_left, str1_top,str1= 450, top + 230, 'q-rate fit plot'
        else:
            img_height= 300
            img_left,img_top = 350, top - 150
            str2_left, str2_top = 380, top - 5
            str1_left, str1_top,str1= 450, top + 180, 'q-rate fit plot'
        if self.g2_fit_new_page or self.g2_new_page:
            top = top - 200
            img_height= 180
            img_left,img_top = 350, top
            str2_left, str2_top = 380, top - 5
            str1_left, str1_top,str1= 450, top + 230, 'q-rate fit plot'
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_mulit_one_time( self, top= 720,new_page=False):
        '''create the mulit one time correlation function report
        Two images:
            One Time Correlation Function with fit (falls back to the unfitted
            g2 plot if the fit image does not exist)
            q-rate fit (drawn only if the image file exists)

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, One Time Correlation Function
        c.setFillColor(black)
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. One Time Correlation Function"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        #add g2 plot
        top = top - 320
        imgf = self.g2_fit_file
        image = self.data_dir + imgf
        if not os.path.exists(image):
            # no fit available: fall back to the plain g2 plot
            image = self.data_dir + self.g2_file
        im = Image.open( image )
        # preserve the image aspect ratio at a fixed height
        ratio = float(im.size[1])/im.size[0]
        height= 300
        c.drawImage( image, 1, top, width= height/ratio,height=height, mask= 'auto')
        #c.drawImage( image, 1, top, width= height/ratio,height=height, mask= None )
        c.setFont("Helvetica", 16)
        c.setFillColor( blue)
        c.drawString( 150, top + height , 'g2 fit plot' )
        c.setFont("Helvetica", 12)
        c.setFillColor(red)
        c.drawString( 80, top- 0, 'filename: %s'%imgf )
        #add g2 plot fit
        top = top + 70 #
        imgf = self.q_rate_file
        image = self.data_dir + imgf
        if os.path.exists(image):
            im = Image.open( image )
            ratio = float(im.size[1])/im.size[0]
            height= 180
            c.drawImage( image, 350, top, width= height/ratio,height=height,mask= 'auto')
            c.setFont("Helvetica", 16)
            c.setFillColor( blue)
            c.drawString( 450, top + 230, 'q-rate fit plot' )
            c.setFont("Helvetica", 12)
            c.setFillColor(red)
            c.drawString( 380, top- 5, 'filename: %s'%imgf )
        if new_page:
            c.showPage()
            c.save()
    def report_two_time( self, top= 720, new_page=False):
        '''create the two time correlation function report
        Three images:
            Two Time Correlation Function
            compared one-time correlation functions (multi-tau vs. the
            diagonal of the two-time function)
            q-rate fit derived from the two-time function

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Two Time Correlation Function"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top1=top
        top = top1 - 330
        #add q_Iq_t
        imgf = self.two_time_file
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 180, top + 300, 'two time correlation function'
        str2_left, str2_top = 180, top - 10
        img_width = add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                                     str1_left, str1_top,str1,
                                     str2_left, str2_top, return_=True )
        top = top - 340
        #add q_Iq_t
        imgf = self.two_g2_file
        if True:#not self.two_g2_new_page:
            img_height= 300
            img_left,img_top = 100 -70, top
            str1_left, str1_top,str1= 210-70, top + 310, 'compared g2'
            str2_left, str2_top = 180-70, top - 10
            if self.two_g2_new_page:
                img_left,img_top = 100, top
            img_width = add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                                         str1_left, str1_top,str1,
                                         str2_left, str2_top,return_=True )
        #print(imgf)
        top = top + 50
        imgf = self.q_rate_two_time_fit_file
        # place the q-rate panel to the right; shrink it when the g2 panel is wide
        if img_width < 400:
            img_height= 140
            img_left,img_top = 350, top + 30
            str2_left, str2_top = 380 - 80, top - 5
            str1_left, str1_top,str1= 450 -80 , top + 230, 'q-rate fit from two-time'
        else:
            img_height = 90
            img_left,img_top = img_width-10, top #350, top
            str2_left, str2_top = img_width + 50, top - 5 #380, top - 5
            str1_left, str1_top,str1= 450, top + 230, 'q-rate fit plot'
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_four_time( self, top= 720, new_page=False):
        '''create the four time correlation function report
        One image:
            Four Time Correlation Function

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Four Time Correlation Function"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top1=top
        top = top1 - 330
        #add q_Iq_t
        imgf = self.four_time_file
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 180, top + 300, 'four time correlation function'
        str2_left, str2_top = 180, top - 10
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_dose( self, top= 720, new_page=False):
        '''create the dose analysis report
        One image:
            dose analysis plot (self.dose_file), scaled down if wider than
            450 points

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Dose Analysis"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top1=top
        top = top1 - 530
        #add q_Iq_t
        imgf = self.dose_file
        img_height= 500
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 180, top + 500, 'dose analysis'
        str2_left, str2_top = 180, top - 10
        #print( self.data_dir + self.dose_file)
        if os.path.exists( self.data_dir + imgf):
            #print( self.dose_file)
            im = Image.open( self.data_dir + imgf )
            ratio = float(im.size[1])/im.size[0]
            width = img_height/ratio
            #print(width)
            # cap the rendered width at 450 points, preserving aspect ratio
            if width >450:
                img_height = 450*ratio
        if self.dose_file_new_page:
            #img_left,img_top = 180, top
            img_left,img_top = 100, top
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_flow_pv_g2( self, top= 720, new_page=False):
        '''create the flow one-time analysis report
        Four images:
            g2 for vertical flow and its q-rate fit
            g2 for parallel flow and its q-rate fit

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Flow One Time Analysis"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top1=top
        top = top1 - 330
        #add xsvs fit
        imgf = self.flow_g2v
        image = self.data_dir + imgf
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 210, top + 300, 'XPCS Vertical Flow'
        str2_left, str2_top = 180, top - 10
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        imgf = self.flow_g2v_rate_fit
        img_height= 200
        img_left,img_top = 350, top +50
        str1_left, str1_top,str1= 210, top + 300, ''
        str2_left, str2_top = 350, top - 10 + 50
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        top = top - 340
        #add contrast fit
        imgf = self.flow_g2p
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 210, top + 300, 'XPCS Parallel Flow'
        str2_left, str2_top = 180, top - 10
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        imgf = self.flow_g2p_rate_fit
        img_height= 200
        img_left,img_top = 350, top +50
        str1_left, str1_top,str1= 210, top + 300, ''
        str2_left, str2_top = 350, top - 10 + 50
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_flow_pv_two_time( self, top= 720, new_page=False):
        '''create the flow two-time comparison report
        Four images:
            one-time vs two-time g2 for vertical flow and its q-rate fit
            one-time vs two-time g2 for parallel flow and its q-rate fit

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Flow One &Two Time Comparison"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top1=top
        top = top1 - 330
        #add xsvs fit
        # disabled: raw two-time panel; kept for reference
        if False:
            imgf = self.two_time
            image = self.data_dir + imgf
            img_height= 300
            img_left,img_top = 80, top
            str1_left, str1_top,str1= 210, top + 300, 'Two_time'
            str2_left, str2_top = 180, top - 10
            add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                             str1_left, str1_top,str1,
                             str2_left, str2_top )
        imgf = self.flow_g2_g2b_p
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 210, top + 300, 'XPCS Vertical Flow by two-time'
        str2_left, str2_top = 180, top - 10
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        imgf = self.flow_g2bp_rate_fit
        img_height= 200
        img_left,img_top = 350, top +50
        str1_left, str1_top,str1= 210, top + 300, ''
        str2_left, str2_top = 350, top - 10 + 50
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        top = top - 340
        #add contrast fit
        imgf = self.flow_g2_g2b_v
        img_height= 300
        img_left,img_top = 80, top
        str1_left, str1_top,str1= 210, top + 300, 'XPCS Parallel Flow by two-time'
        str2_left, str2_top = 180, top - 10
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        imgf = self.flow_g2bv_rate_fit
        img_height= 200
        img_left,img_top = 350, top +50
        str1_left, str1_top,str1= 210, top + 300, ''
        str2_left, str2_top = 350, top - 10 + 50
        add_image_string( c, imgf, self.data_dir, img_left, img_top, img_height,
                         str1_left, str1_top,str1,
                         str2_left, str2_top )
        if new_page:
            c.showPage()
            c.save()
    def report_xsvs( self, top= 720, new_page=False):
        '''create the visibility (XSVS) analysis report
        Two images:
            XSVS fit by negative-binomial function
            contrast obtained from xsvs and xpcs

        Parameters
        ----------
        top : int, y coordinate of the section title
        new_page : bool, if True finish this canvas page and flush the pdf
        '''
        c= self.c
        uid=self.uid
        #add sub-title, Time-dependent plot
        c.setFont("Helvetica", 20)
        ds = 20
        self.sub_title_num +=1
        c.drawString(10, top, "%s. Visibility Analysis"%self.sub_title_num ) #add title
        c.setFont("Helvetica", 14)
        top = top - 330
        #add xsvs fit
        imgf = self.xsvs_fit_file
        add_image_string( c, imgf, self.data_dir, img_left=100, img_top=top, img_height= 300,
                         str1_left=210, str1_top = top +300,str1='XSVS_Fit_by_Negtive_Binomal Function',
                         str2_left = 180, str2_top = top -10 )
        #add contrast fit
        top = top -340
        imgf = self.contrast_file
        add_image_string( c, imgf, self.data_dir, img_left=100, img_top=top, img_height= 300,
                         str1_left=210, str1_top = top + 310,str1='contrast get from xsvs and xpcs',
                         str2_left = 180, str2_top = top -10 )
        # disabled: previous manual drawing of the same two panels; kept for reference
        if False:
            top1=top
            top = top1 - 330
            #add xsvs fit
            imgf = self.xsvs_fit_file
            image = self.data_dir + imgf
            im = Image.open( image )
            ratio = float(im.size[1])/im.size[0]
            height= 300
            c.drawImage( image, 100, top, width= height/ratio,height=height,mask=None)
            c.setFont("Helvetica", 16)
            c.setFillColor( blue)
            c.drawString( 210, top + 300 , 'XSVS_Fit_by_Negtive_Binomal Function' )
            c.setFont("Helvetica", 12)
            c.setFillColor(red)
            c.drawString( 180, top- 10, 'filename: %s'%imgf )
            top = top - 340
            #add contrast fit
            imgf = self.contrast_file
            image = self.data_dir + imgf
            im = Image.open( image )
            ratio = float(im.size[1])/im.size[0]
            height= 300
            c.drawImage( image, 100, top, width= height/ratio,height=height,mask=None)
            c.setFont("Helvetica", 16)
            c.setFillColor( blue)
            c.drawString( 210, top + 310, 'contrast get from xsvs and xpcs' )
            c.setFont("Helvetica", 12)
            c.setFillColor(red)
            c.drawString( 180, top- 10, 'filename: %s'%imgf )
        if new_page:
            c.showPage()
            c.save()
def new_page(self):
c=self.c
c.showPage()
def save_page(self):
c=self.c
c.save()
def done(self):
out_dir = self.out_dir
uid=self.uid
print()
print('*'*40)
print ('The pdf report is created with filename as: %s'%(self.filename ))
print('*'*40)
def create_multi_pdf_reports_for_uids( uids, g2, data_dir, report_type='saxs', append_name='' ):
    ''' Aug 16, YG@CHX-NSLS-II
    Create multi pdf reports, one per uid in uids
    uids: a nested dict/list of uids to be reported, indexed as uids[run][sub]
    g2: a dictionary, {run_num: sub_num: g2_of_each_uid}; only its keys are
        used here, to enumerate the runs/sub-runs
    data_dir: parent folder; each uid's images are read from data_dir/<uid>/
    report_type: geometry of the analysis ('saxs', 'gi_saxs', ...)
    append_name: optional suffix appended to the uid in the pdf filename
    Save pdf report in data dir
    '''
    for key in list( g2.keys()):
        i=1
        for sub_key in list( g2[key].keys() ):
            uid_i = uids[key][sub_key]
            data_dir_ = os.path.join( data_dir, '%s/'%uid_i )
            if append_name!='':
                uid_name = uid_i + append_name
            else:
                uid_name = uid_i
            c= create_pdf_report( data_dir_, uid_i,data_dir,
                                report_type=report_type, filename="XPCS_Analysis_Report_for_uid=%s.pdf"%uid_name )
            #Page one: Meta-data/Iq-Q/ROI
            c.report_header(page=1)
            c.report_meta( top=730)
            #c.report_one_time( top= 500 )
            #c.new_page()
            if report_type =='flow':
                c.report_flow_pv_g2( top= 720)
            c.save_page()
            c.done()
def create_one_pdf_reports_for_uids( uids, g2, data_dir, filename='all_in_one', report_type='saxs' ):
    ''' Aug 16, YG@CHX-NSLS-II
    Create one pdf report combining all uids in uids (one page per uid)
    uids: a nested dict/list of uids to be reported, indexed as uids[run][sub]
    g2: a dictionary, {run_num: sub_num: g2_of_each_uid}; only its keys are
        used here, to enumerate the runs/sub-runs
    data_dir: parent folder; each uid's images are read from data_dir/<uid>/
    filename: stem of the combined pdf file
    Save pdf report in data dir
    '''
    c= create_pdf_report( data_dir, uid=filename, out_dir=data_dir, load=False, report_type= report_type)
    page=1
    for key in list( g2.keys()):
        i=1
        for sub_key in list( g2[key].keys() ):
            uid_i = uids[key][sub_key]
            data_dir_ = os.path.join( data_dir, '%s/'%uid_i)
            # repoint the report object at this uid's folder and metadata
            c.uid = uid_i
            c.data_dir = data_dir_
            c.load_metadata()
            #Page one: Meta-data/Iq-Q/ROI
            c.report_header(page=page)
            c.report_meta( top=730)
            c.report_one_time( top= 500 )
            c.new_page()
            page += 1
    c.uid = filename
    c.save_page()
    c.done()
def save_res_h5( full_uid, data_dir, save_two_time=False ):
    '''
    YG. Nov 10, 2016
    save the results to a h5 file

    will save meta data/avg_img/mask/roi (ring_mask or box_mask)/
    will aslo save multi-tau calculated one-time correlation function g2/taus
    will also save two-time derived one-time correlation function /g2b/taus2
    if save_two_time if True, will save two-time correaltion function

    FIXME(review): this function reads `md`, `g2`, `taus`, `g2b`, `taus2`
    (and `g12b` when save_two_time) from the enclosing module/notebook
    namespace rather than taking them as parameters -- it only works when
    those globals exist; confirm before reuse outside the pipeline notebook.
    '''
    with h5py.File(data_dir + '%s.h5'%full_uid, 'w') as hf:
        #write meta data
        # scalar placeholder dataset; the metadata itself is stored as attrs
        meta_data = hf.create_dataset("meta_data", (1,), dtype='i')
        for key in md.keys():
            try:
                meta_data.attrs[key] = md[key]
            except:
                # non-serializable values (e.g. arbitrary objects) are skipped
                pass
        shapes = md['avg_img'].shape
        avg_h5 = hf.create_dataset("avg_img", data = md['avg_img'] )
        mask_h5 = hf.create_dataset("mask", data = md['mask'] )
        roi_h5 = hf.create_dataset("roi", data = md['ring_mask'] )
        g2_h5 = hf.create_dataset("g2", data = g2 )
        taus_h5 = hf.create_dataset("taus", data = taus )
        if save_two_time:
            g12b_h5 = hf.create_dataset("g12b", data = g12b )
        g2b_h5 = hf.create_dataset("g2b", data = g2b )
        taus2_h5 = hf.create_dataset("taus2", data = taus2 )
        # debug helper for hf.visit(); unused by default
        def printname(name):
            print (name)
        #f.visit(printname)
def load_res_h5( full_uid, data_dir ):
    '''YG. Nov 10, 2016
    load results from a h5 file written by save_res_h5

    will load meta data/avg_img/mask/roi (ring_mask or box_mask)/
    will aslo load multi-tau calculated one-time correlation function g2/taus
    will also load two-time derived one-time correlation function /g2b/taus2
    if the file contains the two-time correlation ('g12b'), it is returned
    as a ninth element

    Parameters
    ----------
    full_uid : str, uid used as the h5 filename stem
    data_dir : str, folder containing <full_uid>.h5 (with trailing separator)

    Returns
    -------
    (meta_data, avg_img, mask, roi, g2, taus, g2b, taus2[, g12b])
    '''
    with h5py.File(data_dir + '%s.h5'%full_uid, 'r') as hf:
        meta_data_h5 = hf.get( "meta_data" )
        meta_data = {}
        for att in meta_data_h5.attrs:
            meta_data[att] = meta_data_h5.attrs[att]
        avg_h5 = np.array( hf.get("avg_img" ) )
        mask_h5 = np.array(hf.get("mask" ))
        roi_h5 =np.array( hf.get("roi" ))
        g2_h5 = np.array( hf.get("g2" ))
        taus_h5 = np.array( hf.get("taus" ))
        g2b_h5 = np.array( hf.get("g2b"))
        taus2_h5 = np.array( hf.get("taus2"))
        if 'g12b' in hf:
            g12b_h5 = np.array( hf.get("g12b"))
        if 'g12b' in hf:
            # bug fix: previously returned the undefined name `g12b`,
            # which raised NameError whenever the dataset was present
            return meta_data, avg_h5, mask_h5,roi_h5, g2_h5, taus_h5, g2b_h5, taus2_h5, g12b_h5
        else:
            return meta_data, avg_h5, mask_h5,roi_h5, g2_h5, taus_h5, g2b_h5, taus2_h5
def make_pdf_report( data_dir, uid, pdf_out_dir, pdf_filename, username,
                    run_fit_form, run_one_time, run_two_time, run_four_time, run_xsvs, run_dose=None, report_type='saxs', md=None,report_invariant=False, return_class=False
                   ):
    '''Assemble the full multi-page XPCS pdf report for one uid.

    data_dir: folder holding the analysis images for this uid
    uid: data uid; a leading "uid="/"Uid=" prefix is stripped
    pdf_out_dir / pdf_filename: where the pdf is written and its name
    username: reported in the page header time stamp
    run_fit_form / run_one_time / run_two_time / run_four_time / run_xsvs /
    run_dose / report_invariant: booleans selecting which analysis sections
        (pages) are included
    report_type: geometry ('saxs', 'gi_saxs', 'ang_saxs', 'gi_waxs', or
        'flow', which gets its own two flow pages)
    md: optional metadata dict passed to the report object
    return_class: if True, return the create_pdf_report instance
    '''
    if uid.startswith("uid=") or uid.startswith("Uid="):
        uid = uid[4:]
    c= create_pdf_report( data_dir, uid, pdf_out_dir, filename= pdf_filename, user= username, report_type=report_type, md = md )
    #print( c.md)
    #Page one: Meta-data/Iq-Q/ROI
    c.report_header(page=1)
    c.report_meta( top=730)
    c.report_static( top=540, iq_fit = run_fit_form )
    c.report_ROI( top= 290)
    #Page Two: img~t/iq~t/waterfall/mean~t/g2/rate~q
    c.new_page()
    c.report_header(page=2)
    page = 2
    if c.report_type != 'ang_saxs':
        c.report_time_analysis( top= 720)
        if run_one_time:
            if c.report_type != 'ang_saxs':
                top = 350
            else:
                top = 500
            # oversize g2 plots get a dedicated page
            if c.g2_fit_new_page:
                c.new_page()
                page +=1
                top = 720
            c.report_one_time( top= top )
        #self.two_g2_new_page = True
        #self.g2_fit_new_page = True
        #Page Three: two-time/two g2
        if run_two_time:
            c.new_page()
            page +=1
            c.report_header(page= page)
            c.report_two_time( top= 720 )
        if run_four_time:
            c.new_page()
            page +=1
            c.report_header(page= page)
            c.report_four_time( top= 720 )
        if run_xsvs:
            c.new_page()
            page +=1
            c.report_header(page= page)
            c.report_xsvs( top= 720 )
        if run_dose:
            c.new_page()
            page +=1
            c.report_header(page= page)
            c.report_dose( top = 702)
        if report_invariant:
            c.new_page()
            page +=1
            c.report_header(page= page)
            c.report_invariant( top = 702)
    else:
        # 'ang_saxs'-style reports only get the two flow pages
        c.report_flow_pv_g2( top= 720, new_page= True)
        c.report_flow_pv_two_time( top= 720, new_page= True )
    c.save_page()
    c.done()
    if return_class:
        return c
######################################
###Deal with saving dict to hdf5 file
def save_dict_to_hdf5(dic, filename):
    """
    Write a (possibly nested) dict to a new HDF5 file at `filename`.
    The file is truncated if it exists; nested dicts become groups,
    everything else becomes datasets (see
    recursively_save_dict_contents_to_group).
    """
    with h5py.File(filename, 'w') as h5file:
        recursively_save_dict_contents_to_group(h5file, '/', dic)
def load_dict_from_hdf5(filename):
    """
    Read an HDF5 file written by save_dict_to_hdf5 back into a nested dict
    (groups become dicts, datasets become their values).
    """
    with h5py.File(filename, 'r') as h5file:
        return recursively_load_dict_contents_from_group(h5file, '/')
def recursively_save_dict_contents_to_group( h5file, path, dic):
    """Recursively write `dic` into the open h5py File `h5file` under `path`.

    Scalars/strings are stored as scalar datasets, numpy arrays as array
    datasets (object arrays fall back to a fixed-width byte-string cast),
    and nested dicts as sub-groups.  Each write is read back and verified.

    Raises ValueError on wrong argument types, unsupported value types, or
    a failed round-trip.
    """
    # argument type checking
    if not isinstance(dic, dict):
        raise ValueError("must provide a dictionary")
    if not isinstance(path, str):
        raise ValueError("path must be a string")
    if not isinstance(h5file, h5py._hl.files.File):
        raise ValueError("must be an open h5py file")
    # save items to the hdf5 file
    for key, item in dic.items():
        #print(key,item)
        key = str(key)
        if isinstance(item, list):
            item = np.array(item)
        #print(item)
        if not isinstance(key, str):
            raise ValueError("dict keys must be strings to save to hdf5")
        # save strings and scalar numbers
        # NOTE: the old check used np.float/np.int aliases, which were removed
        # in NumPy 1.24; use the abstract np.integer/np.floating types instead.
        if isinstance(item, (str, int, float, np.integer, np.floating)):
            #print( 'here' )
            h5file[path + key] = item
            # Dataset.value was removed in h5py 3.0; read scalars with [()].
            # h5py 3 returns bytes for stored str -- decode before comparing.
            stored = h5file[path + key][()]
            if isinstance(stored, bytes) and isinstance(item, str):
                stored = stored.decode()
            if not stored == item:
                raise ValueError('The data representation in the HDF5 file does not match the original dict.')
        # save numpy arrays
        elif isinstance(item, np.ndarray):
            try:
                h5file[path + key] = item
            except:
                # object/mixed arrays: cast to fixed-width byte strings
                item = np.array(item).astype('|S9')
                h5file[path + key] = item
            if not np.array_equal(h5file[path + key][()], item):
                raise ValueError('The data representation in the HDF5 file does not match the original dict.')
        # save dictionaries
        elif isinstance(item, dict):
            recursively_save_dict_contents_to_group(h5file, path + key + '/', item)
        # other types cannot be saved and will result in an error
        else:
            #print(item)
            raise ValueError('Cannot save %s type.' % type(item))
def recursively_load_dict_contents_from_group( h5file, path):
    """Inverse of recursively_save_dict_contents_to_group: read the group at
    `path` in the open h5py File `h5file` into a nested dict (sub-groups
    become dicts, datasets become their values)."""
    ans = {}
    for key, item in h5file[path].items():
        if isinstance(item, h5py._hl.dataset.Dataset):
            # Dataset.value was removed in h5py 3.0; [()] reads the data
            ans[key] = item[()]
        elif isinstance(item, h5py._hl.group.Group):
            ans[key] = recursively_load_dict_contents_from_group(h5file, path + key + '/')
    return ans
def export_xpcs_results_to_h5( filename, export_dir, export_dict ):
    '''
    YG. May 10, 2017
    save the results to a h5 file

    filename: the h5 file name
    export_dir: the exported file folder (with trailing separator)
    export_dict: dict, with keys as md, g2, g4 et.al.
        - keys in `dicts` are stored as attrs on placeholder datasets
        - keys in `dict_nest` are stored as nested groups
        - known pandas DataFrames are appended via DataFrame.to_hdf
        - everything else becomes a plain dataset
    '''
    fout = export_dir + filename
    dicts = ['md', 'qval_dict', 'qval_dict_v', 'qval_dict_p']
    dict_nest=['taus_uids', 'g2_uids' ]
    with h5py.File(fout, 'w') as hf:
        for key in list(export_dict.keys()):
            #print( key )
            if key in dicts: #=='md' or key == 'qval_dict':
                md= export_dict[key]
                meta_data = hf.create_dataset( key, (1,), dtype='i')
                for key_ in md.keys():
                    try:
                        meta_data.attrs[str(key_)] = md[key_]
                    except:
                        # non-serializable values are skipped
                        pass
            elif key in dict_nest:
                #print(key)
                try:
                    recursively_save_dict_contents_to_group(hf, '/%s/'%key, export_dict[key] )
                except:
                    print("Can't export the key: %s in this dataset."%key)
            # bug fix: the last list element had been corrupted to '<KEY>';
            # restored to 'qr_1d_pds' to match the pandas-key lists used by
            # the extract_* counterparts of this function
            elif key in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
                export_dict[key].to_hdf( fout, key=key, mode='a', )
            else:
                data = hf.create_dataset(key, data = export_dict[key] )
                #add this fill line at Octo 27, 2017
                # NOTE(review): this assigns a plain attribute on the Dataset
                # object; it does NOT set an HDF5 fill value (that would need
                # create_dataset(..., fillvalue=...)) -- confirm intent
                data.set_fill_value = np.nan
    print( 'The xpcs analysis results are exported to %s with filename as %s'%(export_dir , filename))
def extract_xpcs_results_from_h5_debug( filename, import_dir, onekey=None, exclude_keys=None ):
    '''
    YG. Dec 22, 2016
    extract data from a h5 file

    filename: the h5 file name
    import_dir: the imported file folder (with trailing separator)
    onekey: string, if not None, only extract that key
    exclude_keys: optional list of keys to skip (only used when onekey is None)
    return:
    extact_dict: dict, with keys as md, g2, g4 et.al.
    '''
    import pandas as pds
    import numpy as np
    extract_dict = {}
    fp = import_dir + filename
    pds_type_keys = []
    dicts = ['md', 'qval_dict', 'qval_dict_v', 'qval_dict_p', 'taus_uids', 'g2_uids']
    if exclude_keys is None:
        exclude_keys =[]
    if onekey is None:
        for k in dicts:
            extract_dict[k] = {}
        with h5py.File( fp, 'r') as hf:
            #print (list( hf.keys()) )
            for key in list( hf.keys()):
                if key not in exclude_keys:
                    if key in dicts:
                        extract_dict[key] = recursively_load_dict_contents_from_group(hf, '/' + key + '/')
                    elif key in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
                        # pandas-stored keys are read after the h5py handle closes
                        pds_type_keys.append( key )
                    else:
                        extract_dict[key] = np.array( hf.get( key ))
        for key in pds_type_keys:
            if key not in exclude_keys:
                extract_dict[key] = pds.read_hdf(fp, key= key )
    else:
        if onekey == 'md':
            # bug fix: extract_dict['md'] was never initialized on this
            # branch, so the assignments below raised KeyError
            extract_dict['md'] = {}
            with h5py.File( fp, 'r') as hf:
                md = hf.get('md')
                for key in list(md.attrs):
                    extract_dict['md'][key] = md.attrs[key]
        elif onekey in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
            extract_dict[onekey] = pds.read_hdf(fp, key= onekey )
        else:
            try:
                with h5py.File( fp, 'r') as hf:
                    extract_dict[onekey] = np.array( hf.get( onekey ))
            except:
                print("The %s dosen't have this %s value"%(fp, onekey) )
    return extract_dict
def export_xpcs_results_to_h5_old( filename, export_dir, export_dict ):
    '''
    YG. Dec 22, 2016
    save the results to a h5 file  (legacy version, superseded by
    export_xpcs_results_to_h5)

    filename: the h5 file name
    export_dir: the exported file folder
    export_dict: dict, with keys as md, g2, g4 et.al.
    '''
    import h5py
    fout = export_dir + filename
    dicts = ['md', 'qval_dict', 'qval_dict_v', 'qval_dict_p'] #{k1: { }}
    dict_nest= ['taus_uids', 'g2_uids'] #{k1: {k2:}}
    with h5py.File(fout, 'w') as hf:
        for key in list(export_dict.keys()):
            #print( key )
            if key in dicts: #=='md' or key == 'qval_dict':
                md= export_dict[key]
                meta_data = hf.create_dataset( key, (1,), dtype='i')
                for key_ in md.keys():
                    try:
                        meta_data.attrs[str(key_)] = md[key_]
                    except:
                        # non-serializable values are skipped
                        pass
            elif key in dict_nest:
                # FIXME(review): this branch is broken -- create_dataset is
                # called with the dict `k1` as the dataset *name* (raises),
                # and neither v1 nor v2 stores any values; the newer
                # export_xpcs_results_to_h5 handles nested dicts correctly
                k1 = export_dict[key]
                v1 = hf.create_dataset( key, (1,), dtype='i')
                for k2 in k1.keys():
                    v2 = hf.create_dataset( k1, (1,), dtype='i')
            elif key in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
                export_dict[key].to_hdf( fout, key=key, mode='a', )
            else:
                data = hf.create_dataset(key, data = export_dict[key] )
    print( 'The xpcs analysis results are exported to %s with filename as %s'%(export_dir , filename))
def extract_xpcs_results_from_h5( filename, import_dir, onekey=None, exclude_keys=None ):
    '''
    YG. Dec 22, 2016
    Extract saved XPCS analysis results from a h5 file.
    Parameters:
        filename: the h5 file name
        import_dir: the folder containing the file
        onekey: string; if not None, extract only that single key
        exclude_keys: optional list of keys to skip (full extraction only)
    Return:
        extract_dict: dict, with keys as md, g2, g4 et.al.
    Bug fix: the onekey == 'md' branch previously wrote into
    extract_dict['md'] without creating it first, raising KeyError.
    '''
    import pandas as pds
    import numpy as np
    extract_dict = {}
    fp = import_dir + filename
    pds_type_keys = []
    dicts = ['md', 'qval_dict', 'qval_dict_v', 'qval_dict_p', 'taus_uids', 'g2_uids']
    if exclude_keys is None:
        exclude_keys = []
    if onekey is None:
        # full extraction: pre-create the dict-valued entries
        for k in dicts:
            extract_dict[k] = {}
        with h5py.File( fp, 'r') as hf:
            for key in list( hf.keys()):
                if key not in exclude_keys:
                    if key in dicts:
                        # dict-valued entries were stored as dataset attributes
                        md = hf.get(key)
                        for key_ in list(md.attrs):
                            if key == 'qval_dict':
                                # q labels were stored as stringified ints
                                extract_dict[key][int(key_)] = md.attrs[key_]
                            else:
                                extract_dict[key][key_] = md.attrs[key_]
                    elif key in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
                        # pandas-stored tables must be read with pandas, after the
                        # h5py handle is closed
                        pds_type_keys.append( key )
                    else:
                        extract_dict[key] = np.array( hf.get( key ))
        for key in pds_type_keys:
            if key not in exclude_keys:
                extract_dict[key] = pds.read_hdf(fp, key= key )
    else:
        if onekey == 'md':
            extract_dict['md'] = {}  # bug fix: was never initialized in this branch
            with h5py.File( fp, 'r') as hf:
                md = hf.get('md')
                for key in list(md.attrs):
                    extract_dict['md'][key] = md.attrs[key]
        elif onekey in ['g2_fit_paras','g2b_fit_paras', 'spec_km_pds', 'spec_pds', 'qr_1d_pds']:
            extract_dict[onekey] = pds.read_hdf(fp, key= onekey )
        else:
            try:
                with h5py.File( fp, 'r') as hf:
                    extract_dict[onekey] = np.array( hf.get( onekey ))
            except:
                print("The %s doesn't have this %s value"%(fp, onekey) )
    return extract_dict
def read_contrast_from_multi_csv( uids, path, times=None, unit=20 ):
    '''Y.G. 2016, Dec 23
    Load contrast factors from one CSV file per uid.
    Parameters:
        uids: list of uid strings; each uid has a folder under path holding
              'uid=<uid>--contrast_factorL.csv'
        path: base folder containing the per-uid subfolders
        times: optional time axis; if None, built as [0, 1, 2, 4, ...] * unit
        unit: scale factor for the default time axis
    Return:
        times: 1-D array of times
        contr: 2-D array (len(uids), n_q) of contrast values
    '''
    n_uids = len(uids)
    if times is None:
        times = np.array([0] + [2 ** k for k in range(n_uids)]) * unit
    contr = None
    for row, uid in enumerate(uids):
        csv_path = path + uid + '/uid=%s--contrast_factorL.csv' % uid
        frame = pds.read_csv(csv_path)
        q_values = np.array(frame[frame.columns[0]])
        contrast_col = np.array(frame[frame.columns[1]])
        if row == 0:
            # allocate once the q-axis length is known from the first file
            contr = np.zeros([n_uids, len(q_values)])
        contr[row] = contrast_col
    return times, contr
def read_contrast_from_multi_h5( uids, path, ):
    '''Y.G. 2016, Dec 23
    Load contrast factors and xsvs times from one h5 result file per uid.
    Parameters:
        uids: list of uid strings; each uid has '<uid>_Res.h5' under path/<uid>/
        path: base folder containing the per-uid subfolders
    Return:
        times_xsvs: 1-D array, first xsvs time of each uid
        contr: 2-D array (len(uids), n_q), first contrast column of each uid
    '''
    n_uids = len(uids)
    times_xsvs = np.zeros(n_uids)
    contr = None
    for row, uid in enumerate(uids):
        folder = path + uid + '/'
        t_res = extract_xpcs_results_from_h5(filename='%s_Res.h5' % uid,
                                             import_dir=folder, onekey='times_xsvs')
        times_xsvs[row] = t_res['times_xsvs'][0]
        c_res = extract_xpcs_results_from_h5(filename='%s_Res.h5' % uid,
                                             import_dir=folder, onekey='contrast_factorL')
        if row == 0:
            # allocate once the q-axis length is known from the first uid
            contr = np.zeros([n_uids, c_res['contrast_factorL'].shape[0]])
        contr[row] = c_res['contrast_factorL'][:, 0]
    return times_xsvs, contr
<file_sep>######################################################################################
########Dec 16, 2015, <NAME>, <EMAIL>, CHX, NSLS-II, BNL################
########Time correlation function, include one-time, two-time, four-time##############
########Muli-tau method, array-operation method#######################################
######################################################################################
import numpy as np
import sys
import time
import skbeam.core.roi as roi
from matplotlib import gridspec
from datetime import datetime
from tqdm import tqdm
import itertools
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from chxanalys.chx_libs import ( colors as colors_array, markers as markers_array, markers_copy, lstyles, Figure, RUN_GUI)
#from chxanalys.chx_libs import colors_ as mcolors, markers_ as markers
from chxanalys.chx_libs import mcolors, markers, multi_tau_lags, colors
from modest_image import ModestImage, imshow
def delays( num_lev=3, num_buf=4, time=1 ):
    '''Build the multi-tau delay (lag) array.
    Parameters:
        num_lev: number of multi-tau levels
        num_buf: buffers per level; must be even
        time: scale for the delays (time between frames)
    Return:
        dly: 1-D array of delays, scaled by `time`
        dict_dly: dict level -> delay sub-array for that level
                  (copied before the final time scaling, as in the original)
    '''
    if num_buf % 2 != 0:
        print ("nobuf must be even!!!" )
    half = int(num_buf / 2)
    dly = np.zeros((num_lev + 1) * half + 1)
    dict_dly = {}
    for level in range(1, num_lev + 1):
        # level 1 starts at lag 1; deeper levels start past the overlap region
        lo = 1 if level == 1 else half + 1
        steps = np.arange(lo, num_buf + 1)
        slots = (level - 1) * half + steps
        dly[slots] = steps * 2 ** (level - 1)
        # fancy indexing copies, so these values are NOT scaled by `time` below
        dict_dly[level] = dly[slots - 1]
    dly *= time
    return dly, dict_dly
class Get_Pixel_Array(object):
    '''
    Dec 16, 2015, Y.G.@CHX
    Extract interesting pixels from an image sequence, loading the ROI of
    all images into memory.
    get_data: returns a 2-D array with shape (len(images), len(pixelist))
    Example:
        data_pixel = Get_Pixel_Array( imgsr, pixelist).get_data()
    '''
    def __init__(self, indexable, pixelist):
        '''
        indexable: an image sequence (supports indexing by frame number)
        pixelist: 1-D array of flattened pixel indices to extract
        '''
        self.indexable = indexable
        self.pixelist = pixelist
        try:
            self.length = len(indexable)
        except:
            # some image handlers expose .length instead of __len__
            self.length = indexable.length
    def get_data(self):
        '''
        Gather the ROI pixel values of every frame.
        Return: 2-D array, shape (len(images), len(pixelist))
        '''
        out = np.zeros([self.length, len(self.pixelist)])
        for frame in tqdm(range(self.length)):
            out[frame] = np.ravel(self.indexable[frame])[self.pixelist]
        return out
class Reverse_Coordinate(object):
    '''Obsolete: wrap an image sequence; on item access, optionally apply a
    mask and flip the frame vertically (reverse the row axis).'''
    def __init__(self, indexable, mask):
        self.indexable = indexable
        self.mask = mask
        try:
            self.shape = indexable.shape
        except:
            # plain sequences have no .shape; derive it from the first frame
            self.shape = [len(indexable), indexable[0].shape[0], indexable[0].shape[1]]
        self.length = len(indexable)
    def __getitem__(self, key):
        frame = self.indexable[key]
        if self.mask is not None:
            frame = frame * self.mask
        if len(frame.shape) == 3:
            flipped = frame[:, ::-1, :]
        if len(frame.shape) == 2:
            flipped = frame[::-1, :]
        return flipped
def get_mean_intensity( data_pixel, qind):
    '''
    Dec 16, 2015, Y.G.@CHX
    Mean ROI intensity as a function of time (image number), per q label.
    Parameters:
        data_pixel: 2-D array, shape (len(images), len(qind)),
                    from Get_Pixel_Array( ).get_data( )
        qind: 1-D int array of per-pixel q labels, values from 1 to noqs
    Return:
        mean_inten: dict label -> 1-D array of length len(images)
    Example:
        mean_inten = get_mean_intensity( data_pixel, qind)
    '''
    n_labels = len(np.unique(qind))
    mean_inten = {}
    for label in range(1, n_labels + 1):
        columns = np.where(qind == label)[0]
        # average over the pixels belonging to this q ring, per frame
        mean_inten[label] = data_pixel[:, columns].mean(axis=1)
    return mean_inten
def run_time(t0):
    '''Print the elapsed wall-clock time since t0, in minutes.
    Dec 16, 2015, Y.G.@CHX
    Parameters
    ----------
    t0: float, start time from time.time()
    Usage
    -----
    t0 = time.time()
    ... (the running code)
    run_time(t0)
    '''
    minutes = (time.time() - t0) / 60.
    print ('Total time: %.2f min' % minutes)
def get_each_frame_ROI_intensity( data_pixel,
                        bad_pixel_threshold=1e10,
                        plot_ = False, *argv,**kwargs):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get the total ROI intensity of each frame, and flag bad frames whose
    total intensity exceeds bad_pixel_threshold.
    Parameters:
        data_pixel: 2-D array (len(images), len(pixelist)); one ROI row per frame
        bad_pixel_threshold: frames whose summed intensity exceeds this are flagged
        plot_: if True, plot the per-frame total intensity
    kwargs:
        sampling: int, use every sampling-th frame (default 1)
        uid: label used in the plot title and filename (default 'uid')
        save: if True (with plot_), save the figure under kwargs['path']
        path: output folder for the saved figure
    Return:
        imgsum: 1-D array of per-frame summed ROI intensity
        bad_frame_list: indices (in the sampled series) above the threshold
    Usage:
        imgsum, bad_frame_list = get_each_frame_ROI_intensity( data_pixel,
                           bad_pixel_threshold=1e10, plot_ = True)
    '''
    sampling = kwargs.get('sampling', 1)  # bug fix: 'sampling' was an undefined name
    # bug fix: original iterated over undefined 'data_series'; sum each row of data_pixel
    imgsum = np.array([np.sum(img) for img in data_pixel[::sampling]])
    if plot_:
        uid = kwargs.get('uid', 'uid')
        fig, ax = plt.subplots()
        ax.plot(imgsum, 'bo')
        ax.set_title('uid= %s--imgsum'%uid)
        ax.set_xlabel( 'Frame_bin_%s'%sampling )
        ax.set_ylabel( 'Total_Intensity' )
        if kwargs.get('save', False):  # bug fix: 'save' was an undefined name
            path = kwargs['path']
            fp = path + "uid=%s--imgsum-"%uid + '.png'
            fig.savefig( fp, dpi=fig.dpi)
    bad_frame_list = np.where( np.array(imgsum) > bad_pixel_threshold )[0]
    if len(bad_frame_list):
        print ('Bad frame list are: %s' %bad_frame_list)
    else:
        print ('No bad frames are involved.')
    return imgsum, bad_frame_list
def auto_two_Array( data, rois, data_pixel=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    A numpy-operation method to get the two-time correlation function.
    Parameters:
        data: images sequence, shape as [img[0], img[1], imgs_length]
        rois: 2-D label array, same shape as one image; rings for saxs,
              boxes for gisaxs
    Options:
        data_pixel: if not None, 2-D array, shape (len(images), len(qind)),
                    from Get_Pixel_Array( ).get_data( ); avoids re-reading data
    Return:
        g12b: a 3-D array, shape as ( imgs_length, imgs_length, q)
    One example:
        g12 = auto_two_Array( imgsr, ring_mask, data_pixel = data_pixel )
    '''
    start_time = time.time()
    # per-pixel q labels and the flat indices of all labeled pixels
    qind, pixelist = roi.extract_label_indices(   rois  )
    noqs = len( np.unique(qind) )
    # number of pixels in each q ring (labels start at 1)
    nopr = np.bincount(qind, minlength=(noqs+1))[1:]
    if data_pixel is None:
        data_pixel =   Get_Pixel_Array( data, pixelist).get_data()
    noframes = data_pixel.shape[0]
    g12b = np.zeros(  [noframes, noframes, noqs] )
    # leftovers from an old text progress bar; harmless
    Unitq = (noqs/10)
    proi = 0
    for qi in tqdm(range(1, noqs + 1 )):
        pixelist_qi =  np.where( qind == qi)[0]
        data_pixel_qi =    data_pixel[:,pixelist_qi]
        # per-frame mean intensity of this ring, as a row vector
        sum1 = (np.average( data_pixel_qi, axis=1)).reshape( 1, noframes   )
        sum2 = sum1.T
        # G(t1,t2) = <I(t1) I(t2)> / (<I(t1)> <I(t2)>), normalized by pixel count
        g12b[:,:,qi -1 ] = np.dot(   data_pixel_qi, data_pixel_qi.T)  /sum1 / sum2  / nopr[qi -1]
    elapsed_time = time.time() - start_time
    print ('Total time: %.2f min' %(elapsed_time/60.))
    return g12b
####################################
##Derivation of Two time correlation
#####################################
#####################################
#get one-time @different age
#####################################
def get_qedge2( qstart,qend,qwidth,noqs, return_int = False ):
    ''' DOCUMENT make_qlist( )
    Give qstart, qend, qwidth, noqs; return flat q edges and q centers.
    Parameters:
        qstart, qend: range of the q centers (linspace endpoints)
        qwidth: full width of each q bin
        noqs: number of q bins
        return_int: if True, return integer-truncated edges/centers
    Return:
        qedge: flat 1-D array [lo0, hi0, lo1, hi1, ...], length 2*noqs
        qcenter: 1-D array of bin centers
    Bug fix: the return_int branch used np.int(...) which fails on arrays
    (and np.int was removed from NumPy); use the np.int_ array cast.
    '''
    import numpy as np
    qcenter = np.linspace(qstart,qend,noqs)
    qedge=np.zeros(2*noqs)
    qedge[::2]= ( qcenter- (qwidth/2) )  # even slots: lower edges
    qedge[1::2]= ( qcenter+ qwidth/2)    # odd slots: upper edges
    if not return_int:
        return qedge, qcenter
    else:
        return np.int_(qedge), np.int_(qcenter)
def get_qedge( qstart,qend,qwidth,noqs, return_int = False ):
    ''' DOCUMENT make_qlist( )
    Give qstart, qend, qwidth, noqs; return q edges (one [lo, hi] pair per
    bin) and q centers.
    Parameters:
        qstart, qend: range of the q centers (linspace endpoints)
        qwidth: full width of each q bin
        noqs: number of q bins
        return_int: if True, return integer-truncated edges/centers
    Return:
        qedge: 2-D array, shape [noqs, 2] of [lower, upper] edges
        qcenter: 1-D array of bin centers
    Bug fix: the return_int branch used np.int(...) which fails on arrays
    (and np.int was removed from NumPy); use the np.int_ array cast.
    '''
    import numpy as np
    qcenter = np.linspace(qstart,qend,noqs)
    qedge=np.zeros( [noqs,2])
    qedge[:,0]= ( qcenter- (qwidth/2) )  # lower edges
    qedge[:,1]= ( qcenter+ qwidth/2)     # upper edges
    if not return_int:
        return qedge, qcenter
    else:
        return np.int_(qedge), np.int_(qcenter)
def get_time_edge( tstart, tend, twidth, nots, return_int = False ):
    ''' Get time edges and time centers from tstart, tend, twidth, nots.
    Parameters:
        tstart, tend: range of the time centers (linspace endpoints)
        twidth: full width of each time slice
        nots: number of time slices
        return_int: if True, return integer-truncated edges/centers
    Return:
        tedge: array, [ [ tedge1_start, tedge1_end], [ tedge2_start, tedge2_end], ... ]
        tcenter: array, [tcenter1, tcenter2, ...]
    Bug fix: the return_int branch used np.int(...) which fails on arrays
    (and np.int was removed from NumPy); use the np.int_ array cast.
    '''
    import numpy as np
    tcenter = np.linspace(tstart,tend,nots)
    tedge=np.zeros( [nots,2])
    tedge[:,0]= ( tcenter- (twidth/2) )  # slice starts
    tedge[:,1]= ( tcenter+ twidth/2)     # slice ends
    if not return_int:
        return tedge, tcenter
    else:
        return np.int_(tedge), np.int_(tcenter)
def rotate_g12q_to_rectangle( g12q ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Rotate a one-q two-time correlation function 45 degrees anti-clockwise
    along its diagonal into a masked rectangular array.
    Parameters:
        g12q: 2-D array, shape (imgs_length, imgs_length)
    Return:
        g12qr: masked 2-D array, shape (2*imgs_length - 1, imgs_length)
               x-axis: lag tau, from 0 to imgs_length
               y-axis: age, 0 .. imgs_length (middle) .. 2*imgs_length - 1 (top)
    Example:
        g12qr = rotate_g12q_to_rectangle(g12bm[:,:,0] )
    '''
    n = g12q.shape[1]
    rect = np.ma.empty((2 * n - 1, n))
    rect.mask = True  # entries not written below stay masked
    for tau in range(n):
        # the tau-th diagonal fills every other row of column tau
        rect[tau:(2 * n - 1 - tau):2, tau] = g12q.diagonal(tau)
    return rect
def get_aged_g2_from_g12( g12, age_edge, age_center ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get one-time correlation functions at different ages from a multi-q
    two-time correlation function (mean of the diagonal slices of g12).
    Parameters:
        g12: 3-D array, two-time correlation, shape (imgs_length, imgs_length, noqs)
        age_edge: [[start, end], ...] age slices in frame index,
                  e.g. from create_time_slice( len(imgsa), slice_num=3, ...)
        age_center: centers of the age slices (frame index); may be None
    Return:
        g2_aged: dict keyed by age; each value is a 2-D array (n_taus, noqs)
    Example:
        g2_aged = get_aged_g2_from_g12( g12, age_edge, age_center )
    '''
    m, n, noqs = g12.shape
    g2_aged = {}
    keys = []
    for q in range(noqs):
        g12q = g12[:, :, q]
        # bug fix: get_aged_g2_from_g12q returns (lag_dict, g2_dict); the
        # original bound the whole tuple to one name and crashed on .keys()
        _, g2q_aged = get_aged_g2_from_g12q(g12q, age_edge, age_center)
        if q == 0:
            keys = list(g2q_aged.keys())
            for key in keys:
                g2_aged[key] = np.zeros([len(g2q_aged[key]), noqs])
        for key in keys:
            g2_aged[key][:, q] = g2q_aged[key]
    return g2_aged
def get_aged_g2_from_g12q( g12q, age_edge, age_center =None, timeperframe=1,time_sampling='log', num_bufs=8 ):
    '''
    Revised at Octo 20, 2017, correct age, should be (t1+t2)/2, namely, age_edge will *2, age_center will keep same
    Revised at Sep 28, 2017 add time_sampling='log', num_bufs=8 options
    Dec 16, 2015, Y.G.@CHX
    Revised at April 19, 2017
    Get one-time correlation functions of different ages from a one-q
    two-time correlation function, by averaging rows of the 45-degree
    rotated g12q.
    Parameters:
        g12q: 2-D array, one-q two-time correlation, shape (imgs_length, imgs_length)
        age_edge: list of [start, end] frame slices, e.g. [[0, 500], [2249, 2749], [4500, 5000]];
            can be obtained by:
            age_edge = create_time_slice( len(imgsa), slice_num=3, slice_width=500, edges=None )
    Options:
        timeperframe: seconds per frame; scales both ages and lag times
        age_center: if None, use the midpoint of each age_edge slice
        time_sampling: 'log' for multi-tau log-spaced lags; else linear (delta tau = 1)
        num_bufs: buffer count for the log lag generation (default 8)
    Return:
        lag_dict: dict age -> 1-D array of lag times (in seconds)
        g2_aged: dict age -> 1-D array, one-q one-time correlation at that age
    Example:
        taus_aged, g2_aged = get_aged_g2_from_g12q( g12q, age_edge )
    '''
    arr= rotate_g12q_to_rectangle( g12q )
    m,n = arr.shape  # m should be 2*n - 1 after the rotation
    age_edge = np.int_(age_edge)
    if age_center is None:
        age_center = (age_edge[:,0] + age_edge[:,1] )//2
    # rows of the rotated array are indexed by t1 + t2 = 2 * age, hence * 2
    age_edge_ = age_edge * 2
    age_center_ = age_center * timeperframe
    g2_aged = {}
    lag_dict = {}
    for i,age in enumerate(age_center_):
        age_edges_0, age_edges_1 = age_edge_[i][0], age_edge_[i][1]
        # average the rows belonging to this age slice (masked entries ignored)
        g2i = arr[ age_edges_0: age_edges_1 ].mean( axis =0 )
        g2i_ = np.array( g2i )
        # keep only populated lags (zeros mark masked/unfilled entries)
        g2_aged[age] = g2i_[np.nonzero( g2i_)[0]]
        N = len( g2_aged[age] )
        lag_dict[age] = np.arange( N ) *1.0
        if time_sampling =='log':
            # multi-tau style log-spaced lag selection
            num_levels = int(np.log( N/(num_bufs-1))/np.log(2) +1) +1
            tot_channels, lag_steps, dict_lag = multi_tau_lags(num_levels, num_bufs)
            lag_steps_ = lag_steps[ lag_steps < N ]
            g2_aged[age] = g2_aged[age][lag_steps_]
            lag_dict[age] = lag_steps_ *1.0
        lag_dict[age] *= timeperframe
    return lag_dict, g2_aged
def get_aged_g2_from_g12q2( g12q, slice_num = 6, slice_width=5, slice_start=0, slice_end= 1 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get one-time correlation functions of different ages from a one-q
    two-time correlation function, slicing the rotated g12q by age.
    Parameters:
        g12q: 2-D array, one-q two-time correlation, shape (imgs_length, imgs_length)
    Options:
        slice_num: int, number of slices along the diagonal of g12
        slice_width: int, width of each slice in pixels
        slice_start: int, can start from 0
        slice_end: int, can end at 2*imgs_length - 1
    Return:
        g2_aged: dict age (in pixels) -> 1-D array, one-q one-time correlation
    Example:
        g2_aged = get_aged_g2_from_g12q2( g12q, slice_num=3, slice_width=500,
                    slice_start=4000, slice_end=20000-4000 )
    '''
    arr= rotate_g12q_to_rectangle( g12q )
    m,n = arr.shape  # m should be 2*n - 1
    # bug fix: use get_qedge2, which returns the flat [lo0, hi0, lo1, hi1, ...]
    # edge layout that the age_edge[i*2 : 2*i+2] indexing below requires;
    # get_qedge now returns a [noqs, 2] array and would break this indexing
    age_edge, age_center = get_qedge2( qstart=slice_start,qend= slice_end,
                qwidth = slice_width, noqs =slice_num  )
    age_edge, age_center = np.int_(age_edge), np.int_(age_center)
    g2_aged = {}
    for i,age in enumerate(age_center):
        age_edges_0, age_edges_1 = age_edge[ i*2 : 2*i+2]
        # average the rows of this age slice; drop unfilled (zero) lags
        g2i = arr[ age_edges_0: age_edges_1 ].mean( axis =0 )
        g2i_ = np.array( g2i )
        g2_aged[age] = g2i_[np.nonzero( g2i_)[0]]
    return g2_aged
def show_g12q_aged_g2( g12q, g2_aged, taus_aged = None, slice_width=10, timeperframe=1,vmin= 1, vmax= 1.25,
                      save=True, uid='uid', path='', *argv,**kwargs):
    '''
    Octo 20, 2017, add taus_aged option
    Dec 16, 2015, Y.G.@CHX
    Plot a two-time correlation map (left) with the age-slice cut lines
    overlaid, and the corresponding aged one-time g2 curves (right).
    Parameters:
        g12q: 2-D array, one-q two-time correlation, shape (imgs_length, imgs_length)
        g2_aged: dict age (seconds) -> 1-D g2 array, e.g. from
                 get_aged_g2_from_g12q(...)
        taus_aged: optional dict of lag times keyed like g2_aged;
                   if None, lags are frame index * timeperframe
    Options:
        slice_width: int, slice width in pixels (controls drawn line width)
        timeperframe: float, seconds per frame for axis units
        vmin, vmax: color/y-axis limits
        save: if True, save the figure as '<uid>_aged_g2.png' under path
    Example:
        show_g12q_aged_g2( g12q, g2_aged, timeperframe=1, vmin=1, vmax=1.22 )
    '''
    age_center = np.array( list( sorted( g2_aged.keys() ) ) )
    print ('the cut age centers are: ' + str(age_center) )
    # convert ages (seconds) back to pixel coordinates of the two-time map;
    # factor 2 because a diagonal cut at age t crosses the map at t1+t2 = 2t
    age_center = np.int_(np.array( list( sorted( g2_aged.keys() ) ) )/timeperframe) *2  #in pixel
    M,N = g12q.shape
    figw =10
    figh = 8
    fig = plt.figure(figsize=(figw,figh))
    gs = gridspec.GridSpec(1, 2 )
    ax = plt.subplot(gs[0])
    im = imshow(ax, g12q, origin='lower' , cmap='viridis',
                norm= LogNorm( vmin, vmax ), extent=[0, N,0,N])
    # relabel pixel ticks in seconds
    ticks = np.round( plt.gca().get_xticks() * timeperframe, 2)
    ax.set_xticklabels(ticks )
    ax.set_yticklabels(ticks )
    ax1 = plt.subplot(gs[1])
    # endpoints of the anti-diagonal cut lines (lower/upper edge and center)
    linS1 = [ [0]*len(age_center ), np.int_(age_center - slice_width//2 ) ]
    linS2 = [ [0]*len(age_center ), np.int_(age_center + slice_width//2 ) ]
    linE1 = [ np.int_(age_center - slice_width//2 ), [0]*len(age_center) ]
    linE2 = [ np.int_(age_center + slice_width//2 ), [0]*len(age_center) ]
    linC =  [ [0]*len(age_center ), np.int_(age_center ) ]
    for i in range( len(age_center ) ):
        # clip each cut line so it stays inside the N x N map
        ps = linS1[1][i]
        pe = linE1[0][i]
        if ps>=N:s0=ps - N;s1=N
        else:s0=0;s1=ps
        e0 = s1;e1=s0
        ps = linS2[1][i]
        pe = linE2[0][i]
        if ps>=N:S0=ps - N;S1=N
        else:S0=0;S1=ps
        E0=S1;E1=S0
        ps = linC[1][i]
        if ps>=N:C0=ps - N;C1=N
        else:C0=0;C1=ps
        D0=C1;D1=C0
        lined= slice_width/2.  # slice half-width in data units
        linewidthc= (lined * (figh*72./N)) * 0.5
        linewidth = 1
        # dashed lines mark the slice edges; the center line is drawn invisible
        ax.plot( [s0,e0],[s1,e1], linewidth=linewidth , ls = '--', alpha=1 , color= colors_array[i] )
        ax.plot( [S0,E0],[S1,E1], linewidth=linewidth , ls = '--', alpha=1 , color= colors_array[i] )
        ax.plot( [C0,D0],[C1,D1], linewidth=linewidthc , ls = '-', alpha=.0 , color= colors_array[i] )
    ax.set_title(  "%s_two_time"%uid )
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    ax1.set_title("%s_aged_g2"%uid)
    ki=0
    for i in sorted(g2_aged.keys()):
        if taus_aged is None:
            gx= np.arange(len(g2_aged[i])) * timeperframe
        else:
            gx = taus_aged[i]
        # one curve per age, color-matched to the cut lines on the left panel
        ax1.plot( gx,g2_aged[i], marker = '%s'%markers_array[ki], ls='-', color= colors_array[ki], label=r"$t_a= %.1f s$"%i)
        ki += 1
    ax1.set_ylim( vmin, vmax )
    ax1.set_xlabel(r"$\tau $ $(s)$", fontsize=18)
    ax1.set_ylabel("g2")
    ax1.set_xscale('log')
    ax1.legend(fontsize='small', loc='best' )
    if save:
        fp = path + "%s_aged_g2"%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
def plot_aged_g2( g2_aged, tau=None,timeperframe=1, ylim=None, xlim=None):
    '''Plot g2 curves calculated from a two-time correlation, one subplot
    per age.
    Parameters:
        g2_aged: dict, keys are ages (frame index), values are 1-D g2 arrays
        tau: optional dict keyed like g2_aged with the lag times;
             if None, lags are frame index * timeperframe
        timeperframe: seconds per frame, used for scaling and labels
        ylim, xlim: optional (min, max) axis limits
    '''
    fig = plt.figure(figsize=(8,10))
    age_center = list( sorted( g2_aged.keys() ) )
    gs = gridspec.GridSpec(len(age_center),1 )
    for n,i in enumerate( age_center):
        ax = plt.subplot(gs[n])
        if tau is None:
            gx= np.arange(len(g2_aged[i])) * timeperframe
        else:
            gx=tau[i]
        marker = markers[n]
        c = colors[n]
        ax.plot( gx,g2_aged[i],  '-%s'%marker, c=c, label=r"$age= %.1f s$"%(i*timeperframe))
        ax.set_xscale('log')
        ax.legend(fontsize='large', loc='best' )
        ax.set_xlabel(r"$\tau $ $(s)$", fontsize=18)
        ax.set_ylabel("g2")
        if ylim is not None:
            ax.set_ylim( ylim )
        if xlim is not None:
            ax.set_xlim( xlim )  # bug fix: was ax.set_ylim( xlim )
#####################################
#get fout-time
def get_tau_from_g12q( g12q, slice_num = 6, slice_width=1, slice_start=None, slice_end=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get fixed-lag (tau) lines as a function of age from a one-q two-time
    correlation function, i.e. columns of the rotated g12q.
    Parameters:
        g12q: 2-D array, one-q two-time correlation, shape (imgs_length, imgs_length)
    Options:
        slice_num: int, number of tau slices
        slice_width: int, width of each slice in pixels
        slice_start: int, can start from 0
        slice_end: int, can end at imgs_length - 1
    Return:
        tau: dict tau (slice center, in pixels) -> 1-D array (g2 vs age)
    Example:
        taus = get_tau_from_g12q( g12q, slice_num=3, slice_width=500,
                    slice_start=4000, slice_end=20000-4000 )
    '''
    arr= rotate_g12q_to_rectangle( g12q )
    m,n = arr.shape  # m should be 2*n - 1
    # bug fix: use get_qedge2, which returns the flat [lo0, hi0, lo1, hi1, ...]
    # edge layout that the age_edge[i*2 : 2*i+2] indexing below requires;
    # get_qedge now returns a [noqs, 2] array and would break this indexing
    age_edge, age_center = get_qedge2( qstart=slice_start,qend= slice_end,
                qwidth = slice_width, noqs =slice_num  )
    age_edge, age_center = np.int_(age_edge), np.int_(age_center)
    tau = {}
    for i,age in enumerate(age_center):
        age_edges_0, age_edges_1 = age_edge[ i*2 : 2*i+2]
        # average the columns (fixed lag band) over all ages; drop zeros
        g2i = arr[ :,age_edges_0: age_edges_1 ].mean( axis =1 )
        g2i_ = np.array( g2i )
        tau[age] = g2i_[np.nonzero( g2i_)[0]]
    return tau
def show_g12q_taus( g12q, taus, slice_width=10, timeperframe=1,vmin= 1, vmax= 1.25 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Plot fixed-lag (tau) cut lines over the two-time correlation map (left)
    and the corresponding g2-vs-age curves (right).
    Parameters:
        g12q: 2-D array, one-q two-time correlation, shape (imgs_length, imgs_length)
        taus: dict tau (slice center, pixels) -> 1-D array of g2 vs age,
              e.g. from:
              taus = get_tau_from_g12q( g12b_norm[:,:,0], slice_num=5, slice_width=1,
                          slice_start=3, slice_end=5000-1 )
    Options:
        slice_width: int, slice width in pixels (controls drawn line width)
        timeperframe: float, seconds per frame for axis units
        vmin, vmax: color/y-axis limits
    Example:
        show_g12q_taus( g12b_norm[:,:,0], taus, slice_width=50,
                        timeperframe=1, vmin=1.01, vmax=1.55 )
    '''
    age_center = list( taus.keys() )
    print ('the cut tau centers are: ' +str(age_center) )
    M,N = g12q.shape
    figw =10
    figh = 10
    fig = plt.figure(figsize=(figw,figh))
    gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8],height_ratios=[8,8] )
    ax = plt.subplot(gs[0])
    ax1 = plt.subplot(gs[1])
    im = imshow(ax, g12q, origin='lower' , cmap='viridis',
                norm= LogNorm( vmin= vmin, vmax= vmax ) , extent=[0, N, 0, N ] )
    # start/end points of each tau cut line (parallel to the main diagonal)
    linS = []
    linE=[]
    linS.append( zip( np.int_(age_center) -1, [0]*len(age_center) ))
    linE.append( zip( [N -1]*len(age_center), N - np.int_(age_center) ))
    for i, [ps,pe] in enumerate(zip(linS[0],linE[0])):
        lined= slice_width  # slice width in data units
        linewidth= (lined * (figh*72./N)) * 0.8
        ax.plot( [ps[0],pe[0]],[ps[1],pe[1]], linewidth=linewidth )
    ax.set_title( '%s_frames'%(N) )
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    ax1.set_title("Tau_Cuts_in_G12")
    for i in sorted(taus.keys()):
        gx= np.arange(len(taus[i])) * timeperframe
        # NOTE(review): next(markers) assumes `markers` (from chx_libs) is an
        # iterator; if it is a plain sequence this raises TypeError -- confirm.
        marker = next(markers)
        ax1.plot( gx,taus[i], '-%s'%marker, label=r"$tau= %.1f s$"%(i*timeperframe))
    ax1.set_ylim( vmin,vmax )
    ax1.set_xlabel(r'$t (s)$',fontsize=5)
    ax1.set_ylabel("g2")
    ax1.set_xscale('log')
    ax1.legend(fontsize='small', loc='best' )
def histogram_taus(taus, hisbin=20, plot=True,timeperframe=1):
    '''
    Dec 16, 2015, Y.G.@CHX
    Histogram the g2 values of each tau line, optionally plotting the result.
    Parameters:
        taus: dict tau (slice center, pixels) -> 1-D array of g2 values,
              e.g. from get_tau_from_g12q(...)
    Options:
        hisbin: int, number of histogram bins
        plot: if True, draw the histograms
        timeperframe: float, seconds per frame (for the legend labels)
    Return:
        his: dict tau -> np.histogram result (counts, bin_edges)
    Example:
        his = histogram_taus(taus, hisbin=30, plot=True, timeperframe=timeperframe)
    '''
    his = {key: np.histogram(values, bins=hisbin) for key, values in taus.items()}
    if plot:
        fig, ax1 = plt.subplots(figsize=(8, 8))
        ax1.set_title("Tau_histgram")
        for key in sorted(his.keys()):
            counts, edges = his[key]
            centers = 0.5 * (edges[:-1] + edges[1:])  # bin centers for plotting
            marker = next(markers)
            ax1.plot(centers, counts, '-%s'%marker, label=r"$tau= %.1f s$"%(key*timeperframe) )
        ax1.set_xlim( 1.05,1.35 )
        ax1.set_xlabel(r'$g_2$',fontsize=19)
        ax1.set_ylabel(r"histgram of g2 @ tau",fontsize=15)
        ax1.legend(fontsize='large', loc='best' )
    return his
#####################################
#get one-time
#####################################
def get_one_time_from_two_time_old( g12, norms=None, nopr = None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get the one-time correlation function from a two-time correlation
    function, by averaging each diagonal of g12.
    Parameters:
        g12: 3-D array, two-time correlation, shape (imgs_length, imgs_length, q)
    Options:
        norms: if not None, 2-D array (imgs_length, q) of per-frame
               normalizations, from auto_two_Array_g1_norm(...)
        nopr: if not None, 1-D array [q], pixel count of each q ring
    Return:
        g2f12: 2-D array, shape (imgs_length, q), one-time correlation
    Example:
        g2b_norm = get_one_time_from_two_time_old(g12b_norm, norms=None, nopr=None)
        g2b_not_norm = get_one_time_from_two_time_old(g12b_not_norm, norms=norms, nopr=nopr)
    '''
    nframes, _, nq = g12.shape
    g2f12 = np.zeros((nframes, nq))
    for q in range(nq):
        plane = g12[:, :, q]
        for tau in range(nframes):
            diag_mean = np.nanmean(np.diag(plane, k=int(tau)))
            if norms is None:
                g2f12[tau, q] = diag_mean
            else:
                # normalize by the mean intensities of the two time windows
                # that overlap at lag tau, and by the ring pixel count
                yn = norms[:, q]
                denom = np.average(yn[tau:]) * np.average(yn[:nframes - tau]) * nopr[q]
                g2f12[tau, q] = diag_mean / denom
    return g2f12
def get_one_time_from_two_time( g12, norms=None, nopr = None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get the one-time correlation function from a two-time correlation
    function, by averaging each diagonal of g12.
    Parameters:
        g12: 3-D array, two-time correlation, shape (imgs_length, imgs_length, q)
    Options:
        norms: if not None, 2-D array (imgs_length, q) of per-frame
               normalizations, from auto_two_Array_g1_norm(...)
        nopr: if not None, 1-D array [q], pixel count of each q ring
    Return:
        g2f12: 2-D array, shape (imgs_length, q), one-time correlation
    Example:
        g2b_norm = get_one_time_from_two_time(g12b_norm, norms=None, nopr=None )
        g2b_not_norm = get_one_time_from_two_time(g12b_not_norm, norms=norms, nopr=nopr)
    Bug fix: the norms branch used the loop variable `i` before it was
    defined and assigned a whole array into the scalar slot g2f12[i, q];
    it now fills the full column g2f12[:, q].
    '''
    m, n, noqs = g12.shape
    if norms is None:
        # diagonal(i) has shape (noqs, m - i); mean over the lag axis
        g2f12 = np.array([np.nanmean(g12.diagonal(i), axis=1) for i in range(m)])
    else:
        g2f12 = np.zeros([m, noqs])
        for q in range(noqs):
            yn = norms[:, q]
            # normalize each lag by the mean intensities of the two
            # overlapping time windows and the ring pixel count
            g2f12[:, q] = np.array([np.nanmean(g12[:, :, q].diagonal(i)) /
                (np.average(yn[i:]) * np.average(yn[: m - i]) * nopr[q]) for i in range(m)])
    return g2f12
def get_four_time_from_two_time( g12,g2=None, rois=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get a four-time correlation function from a two-time correlation
    function, as the variance of each diagonal of g12.
    TOBEDONE: deal with bad frames
    Parameters:
        g12: 3-D array, two-time correlation, shape (imgs_length, imgs_length, q)
    Options:
        g2: if not None, 2-D array (imgs_length, q) one-time correlation;
            g4 is normalized by (g2[0] - 1)**2
        rois: if not None, [x-slice-start, x-slice-end, y-slice-start, y-slice-end]
    Return:
        g4f12: 2-D array, shape (imgs_length, q), four-time correlation
    Example:
        s1, s2 = 0, 2000
        g4 = get_four_time_from_two_time( g12bm, g2b, rois=[s1, s2, s1, s2] )
    '''
    nframes = g12.shape[0]
    norm = 1. if g2 is None else (g2[0] - 1) ** 2
    if rois is None:
        source = g12
    else:
        x1, x2, y1, y2 = rois
        source = g12[x1:x2, y1:y2, :]
    # variance across each diagonal (per q), normalized
    return np.array([(np.std(source.diagonal(k), axis=1)) ** 2 / norm
                     for k in range(nframes)])
######
def make_g12_mask( badframes_list, g12_shape):
    '''
    Dec 16, 2015, Y.G.@CHX
    Build a masked-array multiplier for g12 that masks the row and column
    of every bad frame.
    Parameters:
        badframes_list: list of bad frame numbers, e.g. [100, 155, 10000]
        g12_shape: shape (imgs_length, imgs_length) of a one-q g12 plane
    Return:
        g12_mask: masked 2-D array of ones, shape g12_shape
    Example:
        g12_mask = make_g12_mask(bad_frames, g12b[:,:,0].shape)
    '''
    rows, cols = g12_shape
    g12_mask = np.ma.ones((rows, cols))
    g12_mask.mask = False
    for frame in badframes_list:
        # a bad frame invalidates its entire row and column
        g12_mask.mask[:, frame] = True
        g12_mask.mask[frame, :] = True
    return g12_mask
def masked_g12( g12, badframes_list):
    '''
    Dec 16, 2015, Y.G.@CHX
    Apply a bad-frame mask to every q index of a two-time correlation function.
    Parameters:
        g12: a 3-D array, two-time correlation function,
             shape as ( imgs_length, imgs_length, q )
        badframes_list: list, contains the bad frame numbers, like [100, 155, 10000]
    Return:
        g12m: a masked 3-D array with the same shape as g12
    One example:
        g12m = masked_g12( g12b, bad_frames)
    '''
    nframes, _, nq = g12.shape
    g12m = np.ma.empty_like( g12 )
    # one 2-D frame mask shared by all q indices
    frame_mask = make_g12_mask( badframes_list, g12[:, :, 0].shape )
    for iq in range(nq):
        g12m[:, :, iq] = g12[:, :, iq] * frame_mask
    return g12m
def show_one_C12( C12, fig_ax=None, return_fig=False,interpolation = 'none',cmap='viridis',
                 *argv,**kwargs):
    '''
    Plot a single-q two-time correlation function.
    C12: two-time correlation function, shape [ time, time ]
    fig_ax: optional (fig, ax) pair to draw into; a new figure is created otherwise
    kwargs: support
        uid: string used in the title and in the saved filename
        timeperframe: the time interval
        N1: the start frame(time)
        N2: the end frame(time)
        vmin/vmax: color limits for the plot
        title: if True, show the title
        save/path: if save is True, write the figure to path
    e.g.,
        show_C12(g12b, q_ind=1, N1=0, N2=500, vmin=1.05, vmax=1.07, )
    '''
    uid = kwargs.get('uid', 'uid')
    timeperframe = kwargs.get('timeperframe', 1)
    vmin = kwargs.get('vmin', 1)
    vmax = kwargs.get('vmax', 1.05)
    N1 = kwargs.get('N1', 0)
    N2 = kwargs.get('N2', C12.shape[0])
    title = kwargs.get('title', True)
    data = C12[N1:N2, N1:N2]
    if fig_ax is None:
        if RUN_GUI:
            fig = Figure()
            ax = fig.add_subplot(111)
        else:
            fig, ax = plt.subplots()
    else:
        fig, ax = fig_ax
    # axes in real time: frame index times the per-frame interval
    span = data.shape[0] * timeperframe
    im = imshow(ax, data, origin='lower', cmap=cmap,
                norm=LogNorm(vmin, vmax),
                extent=[0, span, 0, span],
                interpolation=interpolation)
    if title:
        tit = '%s-[%s-%s] frames'%(uid, N1, N2)
        ax.set_title(tit)
    else:
        tit = ''
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    if kwargs.get('save', False):
        fp = kwargs['path'] + '%s_Two_time'%(uid) + '.png'
        plt.savefig( fp, dpi=fig.dpi)
    if return_fig:
        return fig, ax, im
def show_C12(C12, fig_ax=None, q_ind=1, return_fig=False, interpolation = 'none', cmap='viridis',
             logs=True, qlabel=None, *argv,**kwargs):
    '''
    plot one-q of two-time correlation function
    C12: two-time correlation function, with shape as [ time, time, qs]
    q_ind: if integer, for a SAXS q, the nth of q to be plotted, starting from 1,
            if a list: for a GiSAXS [qz_ind, qr_ind]
    qlabel: optional list of q values used to annotate the title
    kwargs: support
        timeperframe: the time interval
        N1: the start frame(time), clipped to 0
        N2: the end frame(time), clipped to the number of frames
        vmin/vmax: for plot
        title: if True, show the title
        save/path: if save is True, write the figure to path
    e.g.,
        show_C12(g12b, q_ind=1, N1=0, N2=500, vmin=1.05, vmax=1.07, )
    '''
    uid = kwargs.get('uid', 'uid')
    shape = C12.shape
    if isinstance(q_ind, int):
        # the bounds check only makes sense for a scalar (SAXS) q index; the old
        # code also ran it for list q_ind, which raised a TypeError on comparison
        if (q_ind < 0) or (q_ind > shape[2]-1):
            # bug fix: parenthesize (shape[2]-1); the old code formatted the string
            # first and then subtracted 1 from the *string*, raising a TypeError
            raise Exceptions("Error: qind starts from 0 (corresponding to python array index 0, but in the plot it will show as 1) to the max Q-length of two time funcs-1 %s."%(shape[2]-1))
        C12_num = q_ind  #-1
    else:
        qz_ind, qr_ind = q_ind  #-1
        # NOTE(review): num_qr is expected to be defined at module level -- confirm
        C12_num = qz_ind * num_qr + qr_ind
    timeperframe = kwargs.get('timeperframe', 1)
    vmin = kwargs.get('vmin', 1)
    vmax = kwargs.get('vmax', 1.05)
    # clip the requested frame window to the valid range
    N1 = max(0, kwargs.get('N1', 0))
    N2 = min(shape[0], kwargs.get('N2', shape[0]))
    title = kwargs.get('title', True)
    data = C12[N1:N2, N1:N2, C12_num]
    if fig_ax is None:
        if RUN_GUI:
            fig = Figure()
            ax = fig.add_subplot(111)
        else:
            fig, ax = plt.subplots()
    else:
        fig, ax = fig_ax
    extent = np.array( [N1, N2, N1, N2] ) * timeperframe
    if logs:
        im = imshow(ax, data, origin='lower', cmap=cmap,
                    norm=LogNorm(vmin, vmax), interpolation=interpolation,
                    extent=extent)
    else:
        im = imshow(ax, data, origin='lower', cmap=cmap,
                    vmin=vmin, vmax=vmax, interpolation=interpolation,
                    extent=extent)
    # bug fix: always define qstr for the scalar-q case -- the old code left it
    # undefined when qlabel was None and then crashed building the title
    if isinstance(q_ind, int):
        if qlabel is not None:
            qstr = 'Qth= %s-qval=%s'%(C12_num+1, qlabel[C12_num])
        else:
            qstr = 'Qth= %s'%(C12_num+1)
    if title:
        if isinstance(q_ind, int):
            tit = '%s-[%s-%s] frames--'%(uid, N1, N2) + qstr
        else:
            tit = '%s-[%s-%s] frames--Qzth= %s--Qrth= %s'%(uid, N1, N2, qz_ind, qr_ind)
        ax.set_title( tit )
    else:
        tit = ''
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    if kwargs.get('save', False):
        fp = kwargs['path'] + '%s_Two_time'%(uid) + '.png'
        plt.savefig( fp, dpi=fig.dpi)
    if return_fig:
        return fig, ax, im
class Exceptions(Exception):
pass<file_sep>from databroker import DataBroker as db, get_images, get_table, get_events, get_fields
from filestore.api import register_handler, deregister_handler
#from filestore.retrieve import _h_registry, _HANDLER_CACHE, HandlerBase
from eiger_io.pims_reader import EigerImages
from chxtools import handlers
## Import all the required packages for Data Analysis
#* scikit-beam - data analysis tools for X-ray science
# - https://github.com/scikit-beam/scikit-beam
#* xray-vision - plotting helper functions for X-ray science
# - https://github.com/Nikea/xray-vision
import xray_vision
import xray_vision.mpl_plotting as mpl_plot
from xray_vision.mpl_plotting import speckle
from xray_vision.mask.manual_mask import ManualMask
import skbeam.core.roi as roi
import skbeam.core.correlation as corr
import skbeam.core.utils as utils
import numpy as np
from datetime import datetime
import h5py
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
#%matplotlib notebook
#%matplotlib inline
import scipy.optimize as opt
from numpy.fft import fftshift, fft2, ifft2
from skimage.util import crop, pad
from numpy import log
from .chx_generic_functions import show_img, plot1D
from .Mask_Maker import load_metadata, load_images
def mkbox( mask_seg ):
    ''' Crop the non_zeros pixels of an image to a new image
        and pad with zeros to make double the new image size
        to avoid periodic artifacts from the FFT at correlation lengths.

    Parameters:
        mask_seg: 2-D array; non-zero entries define the region of interest
    Return:
        subpxlst: int32 array of raveled indices of the ROI pixels inside
                  the padded image
        img_pad: int32 2-D array, the cropped ROI zero-padded up to the next
                 power of two of (3 x ROI width) in each dimension
    '''
    pxlst = np.where(mask_seg.ravel())[0]
    dims = mask_seg.shape
    imgwidthy = dims[1]   #dimension in y, but in plot being x
    imgwidthx = dims[0]   #dimension in x, but in plot being y
    #x and y are flipped???
    #matrix notation!!!
    pixely = pxlst%imgwidthy
    pixelx = pxlst//imgwidthy
    minpixelx = np.min(pixelx)
    minpixely = np.min(pixely)
    maxpixelx = np.max(pixelx)
    maxpixely = np.max(pixely)
    # crop down to the bounding box of the non-zero pixels
    img_crop = crop( mask_seg, ((minpixelx, imgwidthx - maxpixelx -1 ),
                                (minpixely, imgwidthy - maxpixely -1 )) )
    widthx = maxpixelx - minpixelx + 1
    widthy = maxpixely - minpixely + 1
    oldimg = np.zeros(dims)
    oldimg.ravel()[pxlst] = 1
    # pad each dimension up to the next power of two of (3 * width);
    # bug fix: use the builtin int() -- the np.int alias was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24
    padx = 2**int( np.ceil( log(widthx*3)/log(2) ) ) - widthx
    pady = 2**int( np.ceil( log(widthy*3)/log(2) ) ) - widthy
    img_pad = pad( img_crop, ((0, padx),(0, pady ) ) ,'constant', constant_values=(0,0))
    subpxlst = np.where(img_pad.ravel() != 0)[0]
    return np.array(subpxlst,dtype = np.int32), np.array(img_pad,dtype = np.int32)
def cross_corr(img1,img2,mask=None):
    '''Compute the cross-correlation of two images via FFT.
    When a mask is supplied, the result is normalized by the
    autocorrelation of the mask (overlap correction).
    input:
        img1: first image
        img2: second image
        mask: a mask array
    output:
        the cross-correlation of the two images (same shape as the inputs)
    '''
    f1 = fft2(img1)
    f2 = fft2(img2)
    # correlation theorem: corr(a, b) = IFFT( FFT(a) * conj(FFT(b)) )
    imgc = fftshift( ifft2( f1 * np.conj(f2) ).real )
    if mask is not None:
        # normalize by the mask overlap, guarding against division by zero
        overlap = cross_corr(mask, mask)
        imgc /= np.maximum( 1, overlap )
    return imgc
def cross_corr_allregion(img1, img2, labeled_array):
    """
    Spatial correlation between all ROI subregions of two images.
    One cross-correlation is computed per distinct label in labeled_array,
    using the boolean mask of that label.
    Returns a list of correlation maps, one per label, in label order.
    """
    # NOTE(review): np.unique includes label 0, which is usually the
    # background of a labeled array -- confirm that is intended
    return [ cross_corr( img1, img2, labeled_array == lab )
             for lab in np.unique(labeled_array) ]
def cross_corr_subregion(img1, img2, mask_seg, center = None, q_pixel=None,sq = None):
    """
    Spatial correlation between a subregion of two images.
    Correctly handle irregular masks by carefully zero-padding
    to avoid periodic artifacts from the FFT at correlation
    lengths.
    input:
        img1: first image
        img2: second image
        mask_seg: a mask array defining the ROI to take from the images
        center, q_pixel, sq: for isotropic samples -- when center is given,
            each pixel is normalized by the interpolated S(q) at its radius
    output:
        imgc: the cross-correlation of the two (padded) subregions
    """
    pxlst = np.where( mask_seg.ravel() )[0]
    nrows, ncols = img1.shape   # matrix notation: rows ~ x, cols ~ y
    # cropped-and-padded ROI geometry
    subpxlst, submask = mkbox( mask_seg )
    subimg1 = np.zeros_like( submask, dtype=float )
    subimg2 = np.zeros_like( submask, dtype=float )
    if center is None:
        subimg1.ravel()[subpxlst] = img1.ravel()[pxlst]
        subimg2.ravel()[subpxlst] = img2.ravel()[pxlst]
    else:
        # isotropic normalization: divide by the interpolated S(q) at each
        # pixel radius, then subtract the mean level
        dy = pxlst % ncols - center[1]
        dx = pxlst // ncols - center[0]
        r = np.int_( np.hypot(dx, dy) + 0.5 )   # +0.5 rounds to the nearest integer radius
        subimg1.ravel()[subpxlst] = img1.ravel()[pxlst] / np.interp( r, q_pixel, sq ) - 1.0
        subimg2.ravel()[subpxlst] = img2.ravel()[pxlst] / np.interp( r, q_pixel, sq ) - 1.0
    return cross_corr( subimg1, subimg2, mask=submask )
import scipy.optimize as opt
def get_cross_plot( imgc, imgc_width = None, imgc_widthy = None,plot_ = False, cx=None, cy=None ):
    '''
    imgc: the cross/auto-correlation of two images
    imgc_width / imgc_widthy: half-widths of the central crop; when
        imgc_width is None the crop is bounded by the first/last indices
        of the "significant" (> 1e-10) pixels
    plot_: if True, plot the horizontal and vertical line cuts through (cx, cy)
    cx, cy: if None, located at the maximum-intensity pixel of the crop
    Return:
        imgc_center_part, cx, cy (the max intensity center)
    '''
    half_x = int(imgc.shape[0] / 2)
    half_y = int(imgc.shape[1] / 2)
    if imgc_width is not None:
        wx = imgc_width
        wy = imgc_width if imgc_widthy is None else imgc_widthy
        imgc_center_part = imgc[half_x - wx:half_x + wx, half_y - wy:half_y + wy]
    else:
        # crop to the bounding indices of the significant pixels
        # NOTE(review): my[0]/my[-1] are not the min/max column because
        # np.where returns indices in row-major order -- confirm intended
        mx, my = np.where( imgc > 1e-10 )
        imgc_center_part = imgc[mx[0]:mx[-1], my[0]:my[-1]]
    if cx is None:
        peaks = np.where( imgc_center_part == imgc_center_part.max() )
        cx, cy = peaks[0][0], peaks[1][0]
    if plot_:
        plot1D( imgc_center_part[cx] )
        plot1D( imgc_center_part[:, cy] )
    return imgc_center_part, cx, cy
def twoD_Gaussian( xy, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
    '''
    Two-D Gaussian Function
    xy: pair (x, y) of coordinate arrays
    amplitude, xo, yo, sigma_x, sigma_y, theta, offset: parameters of a
        rotated 2-D Gaussian (theta in radians)
    Returns the raveled Gaussian evaluated at (x, y).
    '''
    x, y = xy[0], xy[1]
    xo = float(xo)
    yo = float(yo)
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    # standard rotated-Gaussian quadratic-form coefficients
    a = cos_t**2 / (2*sigma_x**2) + sin_t**2 / (2*sigma_y**2)
    b = -np.sin(2*theta) / (4*sigma_x**2) + np.sin(2*theta) / (4*sigma_y**2)
    c = sin_t**2 / (2*sigma_x**2) + cos_t**2 / (2*sigma_y**2)
    dx = x - xo
    dy = y - yo
    g = offset + amplitude * np.exp( -(a*dx**2 + 2*b*dx*dy + c*dy**2) )
    return g.ravel()
def fit_two_Gaussian( imgc_center_part, cx, cy, initial_guess = (1., 30, 30, 2, 2, 0, 0),plot_=False ):
    '''
    Fit image by a two-D gaussian function.
    initial_guess = amplitude, xo, yo, sigma_x, sigma_y, theta, offset

    Parameters:
        imgc_center_part: 2-D array to fit
            (assumes a square array -- the grid is built from shape[0] only;
            TODO confirm for non-square input)
        cx, cy: row/column of the peak, used to extract the two line cuts
        initial_guess: starting parameters for the least-squares fit
        plot_: if True, plot the horizontal and vertical cuts with the fit
    Prints the sum of squared residuals, the fitted parameters and the
    Gaussian "Area"; returns None.
    '''
    # Create x and y indices
    imgc_w = int( imgc_center_part.shape[0]/2 )
    x = np.linspace(0, imgc_w*2-1, imgc_w*2 )
    y = np.linspace(0, imgc_w*2-1, imgc_w*2 )
    x, y = np.meshgrid(x, y)
    xy = np.array ( (x.ravel(),y.ravel()) )
    popt, pcov = opt.curve_fit(twoD_Gaussian, xy, imgc_center_part.ravel(), p0=initial_guess)
    data_fitted = twoD_Gaussian( xy, *popt)
    # goodness of fit: sum of squared residuals
    kai = np.sum( np.square( data_fitted - imgc_center_part.ravel() ) )
    print ( kai, popt)
    print ( "Area = %s"%( popt[0]/( 2*np.pi* popt[3]* popt[4 ]) ) )
    # horizontal cut through the peak: x fixed at cx, y finely sampled
    fx_h, fy_h = np.meshgrid( cx, np.linspace(0, imgc_w*2-1, imgc_w*2 *4 ) )
    fxy_h = np.array ( (fx_h.ravel(),fy_h.ravel()) )
    data_fitted_h = twoD_Gaussian( fxy_h, *popt)
    # vertical cut through the peak: x finely sampled, y fixed at cy
    fx_v, fy_v = np.meshgrid( np.linspace(0, imgc_w*2-1, imgc_w*2 *4 ), cy )
    # bug fix: use fy_v here -- the old code reused fy_h (copy-paste error),
    # which evaluated the "vertical" fit along a diagonal instead of the cut
    fxy_v = np.array ( (fx_v.ravel(), fy_v.ravel()) )
    data_fitted_v = twoD_Gaussian( fxy_v, *popt)
    if plot_:
        fig = plt.figure( )
        ax = fig.add_subplot( 211 )
        ax.set_title('Horzontal Fit')
        plotx = np.linspace(0, imgc_w*2-1, imgc_w*2 *4 )
        ax.plot( imgc_center_part[ cx] , 'bo' )
        ax.plot( plotx, data_fitted_h , 'r-')
        ax = fig.add_subplot( 212 )
        ax.set_title('Vertical Fit')
        plotx = np.linspace(0, imgc_w*2-1, imgc_w*2 *4 )
        ax.plot( imgc_center_part[ :,cy] , 'bo' )
        ax.plot( plotx, data_fitted_v , 'r-')
    return
<file_sep>################################
######Movie_maker###############
################################
def read_imgs( inDir ):
    '''Give image folder: inDir
       Get a pims.sequences,
       e.g. inDir= '/home/.../*.png'
    '''
    # lazy import keeps pims optional until this helper is actually used
    from pims import ImageSequence
    return ImageSequence( inDir )
def select_regoin(img, vert, keep_shape=True, qmask=None, ):
    '''Select a rectangular region from an image.
        verts e.g. xs,xe,ys,ye = vert  #x_start, x_end, y_start, y_end
        (dimy, dimx,) = img.shape
       If keep_shape, return an array the same shape as img, zero outside
       the selected rectangle (pixels excluded by qmask keep the marker
       value 1, matching the historical behavior of this function);
       otherwise return the cropped rectangle itself.
    '''
    import numpy as np
    xs, xe, ys, ye = vert
    if not keep_shape:
        # simply crop; the except branch handles 3-D (e.g. RGB) images
        try:
            return img[ys:ye, xs:xe]
        except:
            return img[ys:ye, xs:xe, :]
    region = np.zeros_like( img )
    try:
        region[ys:ye, xs:xe] = True
    except:
        region[ys:ye, xs:xe, :] = True
    pixellist_ = np.where( region.ravel() )[0]
    if qmask is not None:
        # keep only the region pixels where qmask is False
        unmasked = np.where( qmask.flatten() == False )[0]
        pixellist_ = np.intersect1d( pixellist_, unmasked )
    # copy the selected pixel values over the marker array in place
    flat = region.ravel()
    flat[pixellist_] = img.ravel()[pixellist_]
    return flat.reshape( img.shape )
def save_png_series( imgs, ROI=None, logs=True, outDir=None, uid=None,vmin=None, vmax=None,cmap='viridis',dpi=100):
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.colors import LogNorm
    """
    save a series of images in a format of png

    Parameters
    ----------
    imgs : array
        image data array for the movie
        dimensions are: [num_img][num_rows][num_cols]
    ROI: e.g. xs,xe,ys,ye = vert #x_start, x_end, y_start,y_end
    outDir: the output path
    vmin/vmax: for image contrast
    cmap: the color for plot
    dpi: resolution

    Returns
    -------
    save png files
    """
    if uid is None:
        uid = 'uid'
    num_frame = 0
    for img in imgs:
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
        if ROI is None:
            i0 = img
            asp = 1.0
        else:
            i0 = select_regoin(img, ROI, keep_shape=False,)
            xs, xe, ys, ye = ROI
            asp = (ye-ys)/float( xe - xs )
            ax.set_aspect('equal')
        if not logs:
            im = ax.imshow(i0, origin='lower' ,cmap=cmap,interpolation="nearest", vmin=vmin,vmax=vmax)
        else:
            im = ax.imshow(i0, origin='lower' ,cmap=cmap,
                           interpolation="nearest" , norm=LogNorm(vmin, vmax))
        fname = outDir + 'uid_%s-frame-%s.png'%(uid,num_frame )
        num_frame += 1
        # bug fix: honor the dpi argument (the old code passed dpi=None)
        plt.savefig( fname, dpi=dpi )
        # close each figure to avoid accumulating open figures over a long series
        plt.close(fig)
def movie_maker( imgs, num_frames=None, ROI=None,interval=20, fps=15, real_interval = 1.0,
                movie_name="movie.mp4", outDir=None,
                movie_writer='ffmpeg', logs=True, show_text_on_image=False,
                vmin=None, vmax=None,cmap='viridis',dpi=100):
    import numpy as np
    import matplotlib.pyplot as plt
    import matplotlib.animation as animation
    from matplotlib.colors import LogNorm
    """
    Make a movie by give a image series
    Parameters
    ----------
    imgs : array
        image data array for the movie
        dimensions are: [num_img][num_rows][num_cols]
    ROI: e.g. xs,xe,ys,ye = vert #x_start, x_end, y_start,y_end
    num_frames : int
        number of frames in the array
    interval : int, optional
        delay between frames
    movie_name : str, optional
        name of the movie to save
    movie_writer : str, optional
        movie writer
    fps : int, optional
        Frame rate for movie.
    real_interval:
        the real time interval between each frame in unit of ms
    outDir: the output path
    vmin/vmax: for image contrast
    cmap: the color for plot
    dpi: resolution
    Returns
    -------
    #ani :
    #    movie
    """
    # set up a single figure/axes; the animation mutates the image in place
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    if ROI is None:
        i0=imgs[0]
        asp =1.0
    else:
        # crop the first frame to the ROI and keep the ROI aspect ratio
        i0=select_regoin(imgs[0], ROI, keep_shape=False,)
        xs,xe,ys,ye = ROI
        asp = (ye-ys)/float( xe - xs )
        ax.set_aspect('equal')
    # initial image; either linear or logarithmic color scaling
    if not logs:
        im=ax.imshow(i0, origin='lower' ,cmap=cmap,interpolation="nearest", vmin=vmin,vmax=vmax)
    else:
        im=ax.imshow(i0, origin='lower' ,cmap=cmap,
                interpolation="nearest" , norm=LogNorm(vmin, vmax))
    # text artist used for the optional on-image time stamp
    ttl = ax.text(.75, .2, '', transform = ax.transAxes, va='center', color='white', fontsize=18)
    plt.tight_layout()
    if num_frames is None:num_frames=len(imgs)
    def update_img(n):
        # per-frame callback: swap in frame n (cropped to ROI if requested)
        if ROI is None:ign=imgs[n]
        else:ign=select_regoin(imgs[n], ROI, keep_shape=False,)
        im.set_data(ign)
        if show_text_on_image:
            # display elapsed time; real_interval is in ms, switch units at 10 ms
            if real_interval >=10:
                ttl.set_text('%s s'%(n*real_interval/1000.))
            elif real_interval <10:
                ttl.set_text('%s ms'%(n*real_interval))
    ani = animation.FuncAnimation(fig, update_img, num_frames,
                                  interval=interval)
    writer = animation.writers[movie_writer](fps=fps)
    if outDir is not None:movie_name = outDir + movie_name
    ani.save(movie_name, writer=writer,dpi=dpi)
    #return ani
<file_sep>from chxanalys.chx_packages import *
from chxanalys.chx_libs import markers, colors
#from chxanalys.chx_generic_functions import get_short_long_labels_from_qval_dict
#RUN_GUI = False
#from chxanalys.chx_libs import markers
def get_t_iqc_uids( uid_list, setup_pargs, slice_num= 10, slice_width= 1):
    '''Get Iq at different time edges (defined by slice_num and slice_width) for a list of uids
    Input:
        uid_list: list of string (uid)
        setup_pargs: dict, for caculation of Iq, the key of this dict should include
            'center': beam center
            'dpix': pixel size
            'lambda_': X-ray wavelength
            'Ldet': distance between detector and sample
        slice_num: slice number of the time edge
        slice_width: the width of each time_edge slice
    Output:
        qs: dict, with uid as key, with value as q values
        iqsts: dict, with uid as key, with value as iq values
        tstamp: dict, with uid as key, with value as time values
    '''
    iqsts = {}
    tstamp = {}
    qs = {}
    # note: removed the unused local `label` the old code accumulated
    for uid in uid_list:
        md = get_meta_data( uid )
        luid = md['uid']
        timeperframe = md['cam_acquire_period']
        N = md['cam_num_images']
        filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%luid
        # skip the first few frames, which are often bad at this beamline
        good_start = 5
        FD = Multifile(filename, good_start, N )
        Nimg = FD.end - FD.beg
        time_edge = create_time_slice( Nimg, slice_num= slice_num, slice_width= slice_width, edges = None )
        # shift the edges so they are absolute frame numbers again
        time_edge = np.array( time_edge ) + good_start
        tstamp[uid] = time_edge[:,0] * timeperframe
        qpt, iqsts[uid], qt = get_t_iqc( FD, time_edge, None, pargs=setup_pargs, nx=1500 )
        qs[uid] = qt
    return qs, iqsts, tstamp
def plot_t_iqtMq2(qt, iqst, tstamp, ax=None, perf='' ):
    '''Plot I(q)*q^2 versus q for each time slice (Kratky-style plot).
    qt: q values; iqst: array of I(q) rows, one per time slice;
    tstamp: time stamp (seconds) of each slice; perf: legend prefix.'''
    if ax is None:
        fig, ax = plt.subplots()
    for idx in range(iqst.shape[0]):
        # Kratky weighting: multiply each I(q) curve by q^2
        yi = iqst[idx] * qt**2
        label_i = perf + 'time_%s s'%( round( tstamp[idx], 3) )
        plot1D( x = qt, y = yi, legend= label_i, xlabel='Q (A-1)', ylabel='I(q)*Q^2', title='I(q)*Q^2 ~ time',
               m=markers[idx], c = colors[idx], ax=ax, ylim=[ -0.001, 0.005])
def plot_t_iqc_uids( qs, iqsts, tstamps ):
    '''Overlay the time-resolved I(q)*q^2 plots of every uid on one axis.
    qs/iqsts/tstamps: dicts keyed by uid, as produced by get_t_iqc_uids.'''
    fig, ax = plt.subplots()
    for uid in list(qs.keys()):
        # prefix each legend entry with the uid so curves stay distinguishable
        plot_t_iqtMq2( qs[uid], iqsts[uid], tstamps[uid], ax=ax, perf=uid + '_' )
def plot_entries_from_uids( uid_list, inDir, key= 'g2', qth = 1, legend_size=8,
                           yshift=0.01, ymulti=1, xlim=None, ylim=None,legend=None, uid_length = None):
    '''YG June 9, 2017@CHX
      YG Sep 29, 2017@CHX.
       plot enteries for a list uids

        Input:
            uid_list: list, a list of uid (string)
            inDir: string, imported folder for saved analysis results
            key: string, plot entry, surport
                 'g2' for one-time,
                 'iq' for q~iq
                 'mean_int_sets' for mean intensity of each roi as a function of frame
            qth: integer, the intesrest q number
            yshift: float, values of shift in y direction
            ymulti: scalar or list, per-uid multiplicative factor for the 'iq' plot
            xlim: [x1,x2], for plot x limit
            ylim: [y1,y2], for plot y limit
        Output:
            show the plot
        Example:
        uid_list = ['5492b9', '54c5e0']
        plot_entries_from_uids( uid_list, inDir, yshift = 0.01, key= 'g2', ylim=[1, 1.2])
    '''
    uid_dict = {}
    fig, ax = plt.subplots()
    # bug fix: the default scalar ymulti=1 crashed in the 'iq' branch with
    # ymulti[i]; broadcast a scalar into one factor per uid
    if not isinstance(ymulti, (list, tuple, np.ndarray)):
        ymulti = [ymulti] * len(uid_list)
    # map (possibly shortened) uids to their full uid strings
    for uid in uid_list:
        if uid_length is not None:
            uid_ = uid[:uid_length]
        else:
            uid_ = uid
        uid_dict[uid_] = get_meta_data( uid )['uid']
    for i, u in enumerate( list(uid_list) ):
        if uid_length is not None:
            u = u[:uid_length]
        inDiru = inDir + u + '/'
        total_res = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%uid_dict[u],
                    import_dir = inDiru, exclude_keys = ['g12b'] )
        if key=='g2':
            # skip the first tau point (tau=0) of the one-time function
            d = total_res[key][1:,qth]
            taus = total_res['taus'][1:]
            if legend is None:
                leg = u
            else:
                leg = 'uid=%s-->'%u+legend[i]
            plot1D( x = taus, y=d + yshift*i, c=colors[i], m = markers[i], ax=ax, logx=True, legend= leg,
                   xlabel='t (sec)', ylabel='g2', legend_size=legend_size,)
            title='Q = %s'%(total_res['qval_dict'][qth])
            ax.set_title(title)
        elif key=='imgsum':
            d = total_res[key]
            plot1D( y=d + yshift*i, c=colors[i], m = markers[i], ax=ax, logx=False, legend= u,
                   xlabel='Frame', ylabel='imgsum',)
        elif key == 'iq':
            x = total_res['q_saxs']
            y = total_res['iq_saxs']
            plot1D( x=x, y= y* ymulti[i] + yshift*i, c=colors[i], m = markers[i], ax=ax, logx= False, logy=True,
                   legend= u, xlabel ='Q 'r'($\AA^{-1}$)', ylabel = "I(q)" )
        else:
            d = total_res[key][:,qth]
            plot1D( x = np.arange(len(d)), y= d + yshift*i, c=colors[i], m = markers[i], ax=ax, logx=False, legend= u,
                   xlabel= 'xx', ylabel=key )
    if key=='mean_int_sets':
        ax.set_xlabel( 'frame ')
    if xlim is not None:
        ax.set_xlim(xlim)
    if ylim is not None:
        ax.set_ylim(ylim)
####################################################################################################
##For real time analysis##
#################################################################################################
def get_iq_from_uids( uids, mask, setup_pargs ):
    ''' Y.G. developed July 17, 2017 @CHX
     Get q-Iq of a uids dict; each uid could correspond to one frame or a time series
     uids: dict, key: meaningful description, val: a list of uids
           (the code iterates `for k in uids: for uid in uids[k]`)
     mask: bool-type 2D array
     setup_pargs: dict, at least should contains, the following paramters for calculation of I(q)
             'Ldet': 4917.50495,
            'center': [988, 1120],
            'dpix': 0.075000003562308848,
            'exposuretime': 0.99998999,
            'lambda_': 1.2845441,
             'path': '/XF11ID/analysis/2017_2/yuzhang/Results/Yang_Pressure/',
     Returns:
        pd: table (q column + one I(q) column per uid), also saved as csv
        img_data: dict of averaged images keyed by 'description_uid'
    '''
    # total number of uids across all description keys
    Nuid = len( np.concatenate( np.array( list(uids.values()) ) ) )
    # column labels: 'q' plus one label per uid
    label = np.zeros( [ Nuid+1], dtype=object)
    img_data = {}   #np.zeros( [ Nuid, avg_img.shape[0], avg_img.shape[1]])
    n = 0
    for k in list(uids.keys()):
        for uid in uids[k]:
            uidstr = 'uid=%s'%uid
            sud = get_sid_filenames(db[uid])
            #print(sud)
            md = get_meta_data( uid )
            imgs = load_data( uid, md['detector'], reverse= True )
            md.update( imgs.md );
            Nimg = len(imgs);
            if Nimg !=1:
                # a time series: average via the (possibly cached) compressed data
                filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%sud[1]
                mask0, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
                        force_compress= False, para_compress= True, bad_pixel_threshold = 1e14,
                                bins=1, num_sub= 100, num_max_para_process= 500, with_pickle=True )
            else:
                # a single frame: use it directly
                avg_img = imgs[0]
            show_img( avg_img, vmin=0.00001, vmax= 1e1, logs=True, aspect=1, #save_format='tif',
                     image_name= uidstr + '_img_avg', save=True,
                     path=setup_pargs['path'], cmap = cmap_albula )
            setup_pargs['uid'] = uidstr
            qp_saxs, iq_saxs, q_saxs = get_circular_average( avg_img, mask,
                                        pargs= setup_pargs, save=True )
            if n ==0:
                # allocate the output table once the q grid length is known
                iqs = np.zeros( [ len(q_saxs), Nuid+1])
                iqs[:,0] = q_saxs
                label[0] = 'q'
            img_data[ k + '_'+ uid ] = avg_img
            iqs[:,n+1] = iq_saxs
            label[n+1] = k + '_'+ uid
            n +=1
            plot_circular_average( qp_saxs, iq_saxs, q_saxs, pargs= setup_pargs,
                      xlim=[q_saxs.min(), q_saxs.max()*0.9], ylim = [iq_saxs.min(), iq_saxs.max()] )
    if 'filename' in list(setup_pargs.keys()):
        filename = setup_pargs['filename']
    else:
        filename = 'qIq.csv'
    pd = save_arrays( iqs, label=label, dtype='array', filename= filename,
                     path= setup_pargs['path'], return_res=True)
    return pd, img_data
def wait_func( wait_time = 2 ):
    '''Sleep for wait_time seconds while announcing the wait.
    Parameters:
        wait_time: number, pause length in seconds
    '''
    # typo fix in the user-facing message: "secdons" -> "seconds"
    print( 'Waiting %s seconds for upcoming data...'%wait_time)
    time.sleep( wait_time)
def wait_data_acquistion_finish( uid, wait_time = 2, max_try_num = 3 ):
    '''check the completion of a data uid acquistion
    Parameter:
        uid: string, the uid to poll
        wait_time: the waiting step in unit of second
        max_try_num: the maximum number for waiting
    Return:
        True: completion
        False: not completion (include waiting time exceeds max_try_num * wait_time)
    '''
    FINISH = False
    Fake_FINISH = True
    w = 0
    while not FINISH:
        try:
            # the metadata only becomes retrievable once the acquisition is done
            get_meta_data( uid )
            FINISH = True
            print( 'The data acquistion finished.')
            print( 'Starting to do something here...')
        except Exception:
            # bug fix: narrowed from a bare `except:` so KeyboardInterrupt /
            # SystemExit can still abort the polling loop
            wait_func( wait_time = wait_time )
            w += 1
            print('Try number: %s'%w)
            if w > max_try_num:
                print( 'There could be something going wrong with data acquistion.')
                print( 'Force to terminate after %s tries.'%w)
                FINISH = True
                Fake_FINISH = False
    # (removed the unused `sleep_time` accumulator the old code carried)
    return FINISH * Fake_FINISH
def get_uids_by_range( start_uidth=-1, end_uidth = 0 ):
    '''Y.G. Dec 22, 2016
    A wrap funciton to find uids by giving start and end uid number, i.e. -10, -1
    Return:
        uids: array, uid with 8 character length
        fuids: array, uid with full length
    '''
    hdrs = list([ db[n] for n in range(start_uidth, end_uidth)] )
    if len(hdrs)!=0:
        print ('Totally %s uids are found.'%(len(hdrs)))
    short_ids = []
    full_ids = []
    for hdr in hdrs:
        full_uid = hdr['start']['uid']
        short_ids.append( full_uid[:8] )
        full_ids.append( full_uid )
    # reverse so the oldest run comes first
    return np.array(short_ids[::-1]), np.array(full_ids[::-1])
def get_uids_in_time_period( start_time, stop_time ):
    '''Y.G. Dec 22, 2016
    A wrap funciton to find uids by giving start and end time
    Return:
        uids: array, uid with 8 character length
        fuids: array, uid with full length
    '''
    hdrs = list( db(start_time= start_time, stop_time = stop_time) )
    if len(hdrs)!=0:
        print ('Totally %s uids are found.'%(len(hdrs)))
    short_ids = []
    full_ids = []
    for hdr in hdrs:
        full_uid = hdr['start']['uid']
        short_ids.append( full_uid[:8] )
        full_ids.append( full_uid )
    # reverse so the oldest run comes first
    return np.array(short_ids[::-1]), np.array(full_ids[::-1])
def do_compress_on_line( start_time, stop_time, mask_dict=None, mask=None,
                        wait_time = 2, max_try_num = 3 ):
    '''Y.G. Mar 10, 2017
    Do on-line compress by giving start time and stop time
    Parameters:
        mask_dict: a dict, e.g., {mask1: mask_array1, mask2:mask_array2}
        mask: bool array, used when mask_dict is None
        wait_time: search interval time
        max_try_num: for each found uid, will try max_try_num*wait_time seconds
    Return:
        running time (seconds)
    '''
    t0 = time.time()
    uids, fuids = get_uids_in_time_period(start_time, stop_time)
    print( fuids )
    if len(fuids):
        for uid in fuids:
            print('*'*50)
            print('Do compress for %s now...'%uid)
            # only 'count' plans produce image series worth compressing
            if db[uid]['start']['plan_name'] == 'count':
                finish = wait_data_acquistion_finish( uid, wait_time,max_try_num )
                if finish:
                    try:
                        md = get_meta_data( uid )
                        compress_multi_uids( [ uid ], mask=mask, mask_dict = mask_dict,
                            force_compress=False, para_compress= True, bin_frame_number=1 )
                        update_olog_uid( uid= md['uid'], text='Data are on-line sparsified!',attachments=None)
                    except Exception:
                        # bug fix: narrowed from a bare `except:` so that
                        # KeyboardInterrupt/SystemExit are not swallowed
                        print('There are something wrong with this data: %s...'%uid)
            print('*'*50)
    return time.time() - t0
def realtime_xpcs_analysis( start_time, stop_time, run_pargs, md_update=None,
                           wait_time = 2, max_try_num = 3, emulation=False,clear_plot=False ):
    '''Y.G. Mar 10, 2017
    Do on-line xpcs by giving start time and stop time
    Parameters:
        run_pargs: all the run control parameters, including giving roi_mask
        md_update: if not None, a dict, will update all the found uid metadata by this md_update
            e.g,
            md['beam_center_x'] = 1012
            md['beam_center_y']= 1020
            md['det_distance']= 16718.0
        wait_time: search interval time
        max_try_num: for each found uid, will try max_try_num*wait_time seconds
        emulation: if True, it will only check dataset and not do real analysis
    Return:
        running time (seconds)
    '''
    t0 = time.time()
    uids, fuids = get_uids_in_time_period(start_time, stop_time)
    if len(fuids):
        for uid in fuids:
            print('*'*50)
            print('Starting analysis for %s now...'%uid)
            # only analyze image-series acquisitions (count-style plans)
            if db[uid]['start']['plan_name'] == 'count' or db[uid]['start']['plan_name'] == 'manual_count':
                finish = wait_data_acquistion_finish( uid, wait_time,max_try_num )
                if finish:
                    try:
                        md = get_meta_data( uid )
                        # correct some metadata if an override dict is supplied
                        if md_update is not None:
                            md.update( md_update )
                        if not emulation:
                            run_xpcs_xsvs_single( uid, run_pargs= run_pargs, md_cor = None,
                                                 return_res= False, clear_plot=clear_plot )
                    except Exception:
                        # bug fix: narrowed from a bare `except:` so that real
                        # aborts (KeyboardInterrupt) still propagate
                        print('There are something wrong with this data: %s...'%uid)
            else:
                print('\nThis is not a XPCS series. We will simiply ignore it.')
            print('*'*50)
    return time.time() - t0
####################################################################################################
##compress multi uids, sequential compress for uids, but for each uid, can apply parallel compress##
#################################################################################################
def compress_multi_uids( uids, mask, mask_dict = None, force_compress=False, para_compress= True, bin_frame_number=1 ):
    ''' Compress time series data for a set of uids, one uid at a time.

    Parameters:
        uids: list, a list of uid
        mask: bool array, mask array; used for every uid unless mask_dict is given
        mask_dict: optional dict keyed by detector name; if given, the mask for
              each uid is looked up by that uid's detector and overrides `mask`
        force_compress: default is False, just load the compresssed data;
                       if True, will compress it to overwrite the old compressed data
        para_compress: apply the parallel compress algorithm
        bin_frame_number: int, frame binning factor; values other than 1 are
              encoded into the output filename so binned/unbinned files coexist
    Return:
        None, save the compressed data in, by default, /XF11ID/analysis/Compressed_Data with filename as
              '/uid_%s.cmp' uid is the full uid string

    e.g.,  compress_multi_uids( uids, mask, force_compress= False,  bin_frame_number=1 )
    '''
    for uid in uids:
        print('UID: %s is in processing...'%uid)
        # metadata lookup gives the full uid string and detector name
        md = get_meta_data( uid )
        # binned data gets a distinct filename so it never clobbers the
        # unbinned compressed file
        if bin_frame_number==1:
            filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
        else:
            filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s_bined--%s.cmp'%(md['uid'],bin_frame_number)
        imgs = load_data( uid, md['detector'], reverse= True )
        print( imgs )
        # per-detector mask overrides the common one when a mask_dict is given
        if mask_dict is not None:
            mask = mask_dict[md['detector']]
            print('The detecotr is: %s'% md['detector'])
        md.update( imgs.md )
        # compress_eigerdata performs the actual (optionally parallel) sparsification
        # and also returns the per-frame intensity sum and bad-frame list (unused here)
        mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
            force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
                            bins=bin_frame_number, num_sub= 100, num_max_para_process= 500, with_pickle=True )
    print('Done!')
####################################################################################################
##get_two_time_mulit_uids, sequential cal for uids, but apply parallel for each uid ##
#################################################################################################
def get_two_time_mulit_uids( uids, roi_mask, norm= None, bin_frame_number=1, path=None, force_generate=False,
                            md=None, imgs=None,direct_load_data=False ):
    ''' Calculate two time correlation by using auto_two_Arrayc func for a set of uids,
        if the two-time resutls are already created, by default (force_generate=False), just pass
    Parameters:
        uids: list, a list of uid
        roi_mask: bool array, roi mask array
        norm: the normalization array
        bin_frame_number: int, frame binning factor; must match the binning used
              when the data were compressed (it selects the .cmp filename)
        path: string, where to save the two time
        force_generate: default, False, if the two-time resutls are already created, just pass
                        if True, will force to calculate two-time no matter exist or not
        md, imgs, direct_load_data: if direct_load_data is True, the caller
              supplies md/imgs directly instead of loading them per uid
    Return:
        None, save the two-time in as path + uid + 'uid=%s_g12b'%uid

    e.g.,
        get_two_time_mulit_uids( guids, roi_mask, norm= norm,bin_frame_number=1,
                        path= data_dir,force_generate=False )
    '''
    # qind/pixelist map each ROI pixel to its q-label; shared by all uids
    qind, pixelist = roi.extract_label_indices(roi_mask)
    for uid in uids:
        print('UID: %s is in processing...'%uid)
        if not direct_load_data:
            md = get_meta_data( uid )
            imgs = load_data( uid, md['detector'], reverse= True )
        else:
            pass
        N = len(imgs)
        #print( N )
        # pick the compressed file matching the requested frame binning
        if bin_frame_number==1:
            filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
        else:
            filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s_bined--%s.cmp'%(md['uid'],bin_frame_number)
        FD = Multifile(filename, 0, N//bin_frame_number)
        #print( FD.beg, FD.end)
        os.makedirs(path + uid + '/', exist_ok=True)
        # `filename` is reused here as the OUTPUT path for the g12b array
        filename = path + uid + '/' + 'uid=%s_g12b'%uid
        doit = True
        # skip the (expensive) calculation when a cached result already exists
        if not force_generate:
            if os.path.exists( filename + '.npy'):
                doit=False
                print('The two time correlation function for uid=%s is already calculated. Just pass...'%uid)
        if doit:
            data_pixel = Get_Pixel_Arrayc( FD, pixelist, norm= norm ).get_data()
            g12b = auto_two_Arrayc( data_pixel, roi_mask, index = None )
            np.save( filename, g12b)
            # free the (large) two-time matrix immediately after saving
            del g12b
        print( 'The two time correlation function for uid={} is saved as {}.'.format(uid, filename ))
def get_series_g2_from_g12( g12b, fra_num_by_dose = None, dose_label = None,
                           good_start=0, log_taus = True, num_bufs=8, time_step=1 ):
    '''
    Get a series of one-time function from two-time by giving noframes

    Parameters:
        g12b: a two time function
        good_start: the start frame number
        fra_num_by_dose: a list, correlation number starting from index 0,
              if this number is larger than g12b length, will give a warning message, and
              will use g12b length to replace this number
              by default is None, will = [ g12b.shape[0] ]
        dose_label: the label of each dose, also is the keys of returned g2, lag
        log_taus: if true, will only return a g2 with the correponding tau values
              as calculated by multi-tau defined taus
        num_bufs: multi-tau buffer number used to build the log-spaced lags
        time_step: multiplier applied to the lag steps (e.g. frame period)
    Return:
        lag_steps, the corresponding lags (dict keyed like g2; empty if log_taus is False)
        g2_series, a dict, with keys as dose_label (corrected on if warning message is given)
    '''
    num_frames = g12b.shape[0]
    if fra_num_by_dose is None:
        fra_num_by_dose = [num_frames]
    if dose_label is None:
        dose_label = fra_num_by_dose
    # process doses in increasing order; labels are sorted the same way so
    # they stay paired with their frame counts
    end_frames = sorted( fra_num_by_dose )
    labels = sorted( dose_label )
    g2 = {}
    lag_steps = {}
    for idx, end_frame in enumerate( end_frames ):
        key = round( labels[idx], 3 )
        # clamp any dose that asks for more frames than the data contains
        if end_frame > num_frames:
            warnings.warn("Warning: the dose value is too large, and please check the maxium dose in this data set and give a smaller dose value. We will use the maxium dose of the data.")
            end_frame = num_frames
        sub_g12 = g12b[good_start:end_frame, good_start:end_frame, :]
        if log_taus:
            # keep only the multi-tau lags that fit inside this dose window
            lag_step = get_multi_tau_lag_steps( end_frame, num_bufs )
            lag_step = lag_step[ lag_step < end_frame - good_start ]
            lag_steps[key] = lag_step * time_step
            g2[key] = get_one_time_from_two_time( sub_g12 )[lag_step]
        else:
            g2[key] = get_one_time_from_two_time( sub_g12 )
    return lag_steps, g2
def get_fra_num_by_dose( exp_dose, exp_time, att=1, dead_time =2 ):
    '''
    Calculate the frame number to be correlated by giving a X-ray exposure dose

    Paramters:
        exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation)
        exp_time: float, the exposure time for a xpcs time sereies
        att: attenuation factor the dose was accumulated with
        dead_time: dead time for the fast shutter reponse time, CHX = 2ms
    Return:
        noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time )
    e.g.,
    no_dose_fra = get_fra_num_by_dose(  exp_dose = [ 3.34* 20, 3.34*50, 3.34*100, 3.34*502, 3.34*505 ],
                                       exp_time = 1.34, dead_time = 2)
    --> no_dose_fra will be array([ 20,  50, 100, 502, 504])
    '''
    # effective time per frame = exposure + shutter dead time
    frame_period = exp_time + dead_time
    doses = np.asarray( exp_dose )
    # truncate toward zero to get whole frame counts
    return np.int_( doses / frame_period / att )
def get_series_one_time_mulit_uids( uids, qval_dict, trans = None, good_start=0, path=None,
                                   exposure_dose = None, dead_time = 0,
                                   num_bufs =8, save_g2=True,
                                   md = None, imgs=None, direct_load_data= False ):
    ''' Calculate a dose depedent series of one time correlations from two time

    For each uid the pre-computed two-time matrix (saved by
    get_two_time_mulit_uids as path + uid + '/uid=%s_g12b.npy') is loaded and
    sliced into one-time g2 curves at the frame numbers matching each dose.

    Parameters:
        uids: list, a list of uid
        qval_dict: the dictionary for q values
        trans: list, same length as uids, the transmission list;
               if None, taken from each uid's metadata (fallback: [1])
        good_start: int, first good frame number
        path: string, where to load the two time, if None, ask for it
              the real g12 path is two_time_path + uid + '/'
        exposure_dose: list, a list x-ray exposure dose;
               by default is None, namely, = [ max_frame_number ] of each uid,
               can be [3.34 334, 3340] in unit of ms, in unit of exp_time(ms)*N(fram num)*att( attenuation)
        dead_time: float, fast-shutter dead time added to the exposure time
        num_bufs: int, multi-tau buffer number
        save_g2: if True, also save each dose-cut g2 as csv under the g12 path
        md, imgs, direct_load_data: if direct_load_data is True, use the
               supplied md/imgs instead of loading them per uid
    Return:
        taus_uids, with keys as uid, and
                taus_uids[uid] is also a dict, with keys as dose_frame
        g2_uids, with keys as uid, and
                g2_uids[uid] is also a dict, with keys as dose_frame
        will also save g2 results to the 'path'
    '''
    if path is None:
        print( 'Please calculate two time function first by using get_two_time_mulit_uids function.')
        return
    taus_uids = {}
    g2_uids = {}
    for i, uid in enumerate(uids):
        print('UID: %s is in processing...'%uid)
        if not direct_load_data:
            md = get_meta_data( uid )
            imgs = load_data( uid, md['detector'], reverse= True )
        N = len(imgs)
        # Bug fix: compute the default dose per uid instead of overwriting the
        # shared argument on the first pass; previously every later uid reused
        # the first uid's frame number as its dose.
        if exposure_dose is None:
            exposure_dose_ = [N]
        else:
            exposure_dose_ = exposure_dose
        g2_path = path + uid + '/'
        g12b = np.load( g2_path + 'uid=%s_g12b.npy'%uid)
        # the exposure time key differs between detector configurations
        try:
            exp_time = float( md['cam_acquire_time']) #*1000 #from second to ms
        except:
            exp_time = float( md['exposure time']) #* 1000 #from second to ms
        if trans is None:
            try:
                transi = md['transmission']
            except:
                transi = [1]
        else:
            transi = trans[i]
        fra_num_by_dose = get_fra_num_by_dose( exp_dose = exposure_dose_,
                    exp_time =exp_time, dead_time = dead_time, att = transi )
        print( 'uid: %s--> fra_num_by_dose: %s'%(uid, fra_num_by_dose ) )
        taus_uid, g2_uid = get_series_g2_from_g12( g12b, fra_num_by_dose=fra_num_by_dose,
                                dose_label = exposure_dose_,
                                good_start=good_start, num_bufs=num_bufs,
                                time_step = md['cam_acquire_period'] )
        g2_uids['uid_%03d=%s'%(i,uid)] = g2_uid
        taus_uids['uid_%03d=%s'%(i,uid)] = taus_uid
        if save_g2:
            for k in list( g2_uid.keys()):
                #print(k)
                uid_ = uid + '_fra_%s_%s'%(good_start, k )
                save_g2_general( g2_uid[k], taus=taus_uid[k],qr=np.array( list( qval_dict.values() ) )[:,0],
                                uid=uid_+'_g2.csv', path= g2_path, return_res=False )
    return taus_uids, g2_uids
def plot_dose_g2( taus_uids, g2_uids, qval_dict, qth_interest = None, ylim=[0.95, 1.05], vshift=0.1,
                 fit_res= None, geometry= 'saxs',filename= 'dose'+'_g2', legend_size=None,
                 path= None, function= None, g2_labels=None, ylabel= 'g2_dose', append_name= '_dose',
                 return_fig=False):
    '''Plot a dose-dependent g2.

    If qth_interest is None, all q values are plotted in one multi-panel figure
    via plot_g2_general; otherwise a single-q figure is drawn, with each dose
    curve vertically shifted by vshift for readability.

    taus_uids, dict, with format as {uid1: { dose1: tau_1, dose2: tau_2...}, uid2: ...}
    g2_uids, dict, with format as {uid1: { dose1: g2_1, dose2: g2_2...}, uid2: ...}
    qval_dict: a dict of qvals
    qth_interest: 1-based q index to plot alone; None plots all q values
    vshift: float, vertical shift value of different dose of g2
    return_fig: if True (single-q branch only), return (fig, ax)
    '''
    uids = sorted( list( taus_uids.keys() ) )
    #print( uids )
    # dose values are taken from the first uid; all uids are assumed to share them
    dose = sorted( list( taus_uids[ uids[0] ].keys() ) )
    if qth_interest is None:
        # ---- all-q branch: regroup {uid: {dose: g2}} into {dose_index: [g2 per uid]}
        g2_dict= {}
        taus_dict = {}
        if g2_labels is None:
            g2_labels = []
        for i in range( len( dose )):
            g2_dict[i + 1] = []
            taus_dict[i +1 ] = []
            #print ( i )
            for j in range( len( uids )):
                #print( uids[i] , dose[j])
                # shift each dose curve up by vshift*i so they don't overlap
                g2_dict[i +1 ].append( g2_uids[ uids[j] ][ dose[i] ] + vshift*i )
                taus_dict[i +1 ].append( taus_uids[ uids[j] ][ dose[i] ] )
                if j ==0:
                    g2_labels.append( 'Dose_%s'%dose[i] )
        plot_g2_general( g2_dict, taus_dict,
                        ylim=[ylim[0], ylim[1] + vshift * len(dose)],
                        qval_dict = qval_dict, fit_res= None, geometry= geometry,filename= filename,
                        path= path, function= function, ylabel= ylabel, g2_labels=g2_labels, append_name= append_name )
    else:
        # ---- single-q branch: one axis, one curve per (uid, dose) pair
        fig,ax= plt.subplots()
        q = qval_dict[qth_interest-1][0]
        j = 0
        for uid in uids:
            #uid = uids[0]
            #print( uid )
            dose_list = sorted( list(taus_uids['%s'%uid].keys()) )
            #print( dose_list )
            for i, dose in enumerate(dose_list):
                dose = float(dose)
                # only label the first uid's curves to keep the legend compact
                if j ==0:
                    legend= 'dose_%s'%round(dose,2)
                else:
                    legend = ''
                #print( markers[i], colors[i] )
                # NOTE(review): relies on module-level `markers` and `colors`
                # sequences being long enough for len(dose_list) — confirm
                plot1D(x= taus_uids['%s'%uid][dose_list[i]],
                       y =g2_uids['%s'%uid][dose_list[i]][:,qth_interest] + i*vshift,
                       logx=True, ax=ax, legend= legend, m = markers[i], c= colors[i],
                       lw=3, title='%s_Q=%s'%(uid, q) + r'$\AA^{-1}$', legend_size=legend_size )
                ylabel='g2--Dose (trans*exptime_sec)'
            j +=1
        ax.set_ylabel( r"$%s$"%ylabel + '(' + r'$\tau$' + ')' )
        ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
        ax.set_ylim ( ylim )
        if return_fig:
            return fig, ax
    #return taus_dict, g2_dict
def run_xpcs_xsvs_single( uid, run_pargs, md_cor=None, return_res=False,reverse=True, clear_plot=False ):
'''Y.G. Dec 22, 2016
Run XPCS XSVS analysis for a single uid
Parameters:
uid: unique id
run_pargs: dict, control run type and setup parameters, such as q range et.al.
reverse:,True, revserse the image upside down
Return:
save analysis result to csv/png/h5 files
return_res: if true, return a dict, containing g2,g4,g12,contrast et.al. depending on the run type
An example for the run_pargs:
run_pargs= dict(
scat_geometry = 'gi_saxs' #suport 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropics saxs or flow-xpcs)
force_compress = True,#False,
para_compress = True,
run_fit_form = False,
run_waterfall = True,#False,
run_t_ROI_Inten = True,
#run_fit_g2 = True,
fit_g2_func = 'stretched',
run_one_time = True,#False,
run_two_time = True,#False,
run_four_time = False,
run_xsvs=True,
att_pdf_report = True,
show_plot = False,
CYCLE = '2016_3',
mask_path = '/XF11ID/analysis/2016_3/masks/',
mask_name = 'Nov28_4M_SAXS_mask.npy',
good_start = 5,
uniformq = True,
inner_radius= 0.005, #0.005 for 50 nm, 0.006, #for 10nm/coralpor
outer_radius = 0.04, #0.04 for 50 nm, 0.05, #for 10nm/coralpor
num_rings = 12,
gap_ring_number = 6,
number_rings= 1,
#qcenters = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1
#width = 0.0002
qth_interest = 1, #the intested single qth
use_sqnorm = False,
use_imgsum_norm = True,
pdf_version = '_1' #for pdf report name
)
md_cor: if not None, will update the metadata with md_cor
'''
scat_geometry = run_pargs['scat_geometry']
force_compress = run_pargs['force_compress']
para_compress = run_pargs['para_compress']
run_fit_form = run_pargs['run_fit_form']
run_waterfall = run_pargs['run_waterfall']
run_t_ROI_Inten = run_pargs['run_t_ROI_Inten']
#run_fit_g2 = run_pargs['run_fit_g2'],
fit_g2_func = run_pargs['fit_g2_func']
run_one_time = run_pargs['run_one_time']
run_two_time = run_pargs['run_two_time']
run_four_time = run_pargs['run_four_time']
run_xsvs=run_pargs['run_xsvs']
try:
run_dose = run_pargs['run_dose']
except:
run_dose= False
###############################################################
if scat_geometry =='gi_saxs': #to be done for other types
run_xsvs = False;
###############################################################
###############################################################
if scat_geometry == 'ang_saxs':
run_xsvs= False;run_waterfall=False;run_two_time=False;run_four_time=False;run_t_ROI_Inten=False;
###############################################################
if 'bin_frame' in list( run_pargs.keys() ):
bin_frame = run_pargs['bin_frame']
bin_frame_number= run_pargs['bin_frame_number']
else:
bin_frame = False
if not bin_frame:
bin_frame_number = 1
att_pdf_report = run_pargs['att_pdf_report']
show_plot = run_pargs['show_plot']
CYCLE = run_pargs['CYCLE']
mask_path = run_pargs['mask_path']
mask_name = run_pargs['mask_name']
good_start = run_pargs['good_start']
use_imgsum_norm = run_pargs['use_imgsum_norm']
try:
use_sqnorm = run_pargs['use_sqnorm']
except:
use_sqnorm = False
try:
inc_x0 = run_pargs['inc_x0']
inc_y0 = run_pargs['inc_y0']
except:
inc_x0 = None
inc_y0= None
#for different scattering geogmetry, we only need to change roi_mask
#and qval_dict
qval_dict = run_pargs['qval_dict']
if scat_geometry != 'ang_saxs':
roi_mask = run_pargs['roi_mask']
qind, pixelist = roi.extract_label_indices( roi_mask )
noqs = len(np.unique(qind))
nopr = np.bincount(qind, minlength=(noqs+1))[1:]
else:
roi_mask_p = run_pargs['roi_mask_p']
qval_dict_p = run_pargs['qval_dict_p']
roi_mask_v = run_pargs['roi_mask_v']
qval_dict_v = run_pargs['qval_dict_v']
if scat_geometry == 'gi_saxs':
refl_x0 = run_pargs['refl_x0']
refl_y0 = run_pargs['refl_y0']
Qr, Qz, qr_map, qz_map = run_pargs['Qr'], run_pargs['Qz'], run_pargs['qr_map'], run_pargs['qz_map']
taus=None;g2=None;tausb=None;g2b=None;g12b=None;taus4=None;g4=None;times_xsv=None;contrast_factorL=None;
qth_interest = run_pargs['qth_interest']
pdf_version = run_pargs['pdf_version']
try:
username = run_pargs['username']
except:
username = getpass.getuser()
data_dir0 = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
os.makedirs(data_dir0, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir0)
#uid = (sys.argv)[1]
print ('*'*40)
print ( '*'*5 + 'The processing uid is: %s'%uid + '*'*5)
print ('*'*40)
suid = uid[:6]
data_dir = os.path.join(data_dir0, '%s/'%suid)
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
md = get_meta_data( uid )
uidstr = 'uid=%s'%uid[:6]
imgs = load_data( uid, md['detector'], reverse= reverse )
md.update( imgs.md )
Nimg = len(imgs)
if md_cor is not None:
md.update( md_cor )
if inc_x0 is not None:
md['beam_center_x']= inc_x0
if inc_y0 is not None:
md['beam_center_y']= inc_y0
#print( run_pargs )
#print( run_pargs['inc_x0'],run_pargs['inc_y0'] )
#print( inc_x0, inc_y0 )
center = [ int(md['beam_center_y']),int( md['beam_center_x'] ) ] #beam center [y,x] for python image
pixel_mask = 1- np.int_( np.array( imgs.md['pixel_mask'], dtype= bool) )
print( 'The data are: %s' %imgs )
if False:
print_dict( md, ['suid', 'number of images', 'uid', 'scan_id', 'start_time', 'stop_time', 'sample', 'Measurement',
'acquire period', 'exposure time',
'det_distanc', 'beam_center_x', 'beam_center_y', ] )
## Overwrite Some Metadata if Wrong Input
dpix, lambda_, Ldet, exposuretime, timeperframe, center = check_lost_metadata(
md, Nimg, inc_x0 = inc_x0, inc_y0= inc_y0, pixelsize = 7.5*10*(-5) )
print( 'The beam center is: %s'%center )
timeperframe *= bin_frame_number
setup_pargs=dict(uid=uidstr, dpix= dpix, Ldet=Ldet, lambda_= lambda_, exposuretime=exposuretime,
timeperframe=timeperframe, center=center, path= data_dir)
#print_dict( setup_pargs )
mask = load_mask(mask_path, mask_name, plot_ = False, image_name = uidstr + '_mask', reverse=reverse )
mask *= pixel_mask
if md['detector'] =='eiger4m_single_image':
mask[:,2069] =0 # False #Concluded from the previous results
show_img(mask,image_name = uidstr + '_mask', save=True, path=data_dir)
mask_load=mask.copy()
imgsa = apply_mask( imgs, mask )
img_choice_N = 2
img_samp_index = random.sample( range(len(imgs)), img_choice_N)
avg_img = get_avg_img( imgsa, img_samp_index, plot_ = False, uid =uidstr)
if avg_img.max() == 0:
print('There are no photons recorded for this uid: %s'%uid)
print('The data analysis should be terminated! Please try another uid.')
else:
if scat_geometry !='saxs':
show_img( avg_img, vmin=.1, vmax=np.max(avg_img*.1), logs=True,
image_name= uidstr + '_%s_frames_avg'%img_choice_N, save=True, path=data_dir)
else:
show_saxs_qmap( avg_img, setup_pargs, width=400, show_pixel = False,
vmin=.1, vmax= np.max(avg_img), logs=True, image_name= uidstr + '_%s_frames_avg'%img_choice_N )
compress=True
photon_occ = len( np.where(avg_img)[0] ) / ( imgsa[0].size)
#compress = photon_occ < .4 #if the photon ocupation < 0.5, do compress
print ("The non-zeros photon occupation is %s."%( photon_occ))
print("Will " + 'Always ' + ['NOT', 'DO'][compress] + " apply compress process.")
#good_start = 5 #make the good_start at least 0
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
bins=bin_frame_number, num_sub= 100, num_max_para_process= 500, with_pickle=True )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
uid_ = uidstr + '_fra_%s_%s'%(FD.beg, FD.end)
print( uid_ )
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title =uidstr + '_imgsum', xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
#%system free && sync && echo 3 > /proc/sys/vm/drop_caches && free
## Get bad frame list by a polynominal fit
bad_frame_list = get_bad_frame_list( imgsum, fit=True, plot=True,polyfit_order = 30,
scale= 5.5, good_start = good_start, uid= uidstr, path=data_dir)
print( 'The bad frame list length is: %s'%len(bad_frame_list) )
### Creat new mask by masking the bad pixels and get new avg_img
if False:
mask = mask_exclude_badpixel( bp, mask, md['uid'])
avg_img = get_avg_imgc( FD, sampling = 1, bad_frame_list=bad_frame_list )
show_img( avg_img, vmin=.001, vmax= np.max(avg_img), logs=True, aspect=1, #save_format='tif',
image_name= uidstr + '_img_avg', save=True, path=data_dir, cmap = cmap_albula )
imgsum_y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])]
imgsum_x = np.arange( len( imgsum_y))
save_lists( [imgsum_x, imgsum_y], label=['Frame', 'Total_Intensity'],
filename=uidstr + '_img_sum_t', path= data_dir )
plot1D( y = imgsum_y, title = uidstr + '_img_sum_t', xlabel='Frame',
ylabel='Total_Intensity', legend='imgsum', save=True, path=data_dir)
############for SAXS and ANG_SAXS (Flow_SAXS)
if scat_geometry =='saxs' or scat_geometry =='ang_saxs':
#show_saxs_qmap( avg_img, setup_pargs, width=600, vmin=.1, vmax=np.max(avg_img*.1), logs=True,
# image_name= uidstr + '_img_avg', save=True)
#np.save( data_dir + 'uid=%s--img-avg'%uid, avg_img)
#try:
# hmask = create_hot_pixel_mask( avg_img, threshold = 1000, center=center, center_radius= 600)
#except:
# hmask=1
hmask=1
qp_saxs, iq_saxs, q_saxs = get_circular_average( avg_img, mask * hmask, pargs=setup_pargs, save=True )
plot_circular_average( qp_saxs, iq_saxs, q_saxs, pargs= setup_pargs,
xlim=[q_saxs.min(), q_saxs.max()], ylim = [iq_saxs.min(), iq_saxs.max()] )
#pd = trans_data_to_pd( np.where( hmask !=1),
# label=[md['uid']+'_hmask'+'x', md['uid']+'_hmask'+'y' ], dtype='list')
#pd.to_csv('/XF11ID/analysis/Commissioning/eiger4M_badpixel.csv', mode='a' )
#mask =np.array( mask * hmask, dtype=bool)
#show_img( mask )
if run_fit_form:
form_res = fit_form_factor( q_saxs,iq_saxs, guess_values={'radius': 2500, 'sigma':0.05,
'delta_rho':1E-10 }, fit_range=[0.0001, 0.015], fit_variables={'radius': T, 'sigma':T,
'delta_rho':T}, res_pargs=setup_pargs, xlim=[0.0001, 0.015])
show_ROI_on_image( avg_img, roi_mask, center, label_on = False, rwidth =700, alpha=.9,
save=True, path=data_dir, uid=uidstr, vmin= np.min(avg_img), vmax= np.max(avg_img) )
qr = np.array( [ qval_dict[k][0] for k in list( qval_dict.keys()) ] )
plot_qIq_with_ROI( q_saxs, iq_saxs, qr, logs=True, uid=uidstr, xlim=[q_saxs.min(), q_saxs.max()],
ylim = [iq_saxs.min(), iq_saxs.max()], save=True, path=data_dir)
if scat_geometry != 'ang_saxs':
Nimg = FD.end - FD.beg
time_edge = create_time_slice( N= Nimg, slice_num= 3, slice_width= 1, edges = None )
time_edge = np.array( time_edge ) + good_start
#print( time_edge )
qpt, iqst, qt = get_t_iqc( FD, time_edge, mask, pargs=setup_pargs, nx=1500 )
plot_t_iqc( qt, iqst, time_edge, pargs=setup_pargs, xlim=[qt.min(), qt.max()],
ylim = [iqst.min(), iqst.max()], save=True )
elif scat_geometry == 'gi_waxs':
#roi_mask[badpixel] = 0
qr = np.array( [ qval_dict[k][0] for k in list( qval_dict.keys()) ] )
show_ROI_on_image( avg_img, roi_mask, label_on = True, alpha=.5,save=True, path= data_dir, uid=uidstr)#, vmin=1, vmax=15)
elif scat_geometry == 'gi_saxs':
show_img( avg_img, vmin=.1, vmax=np.max(avg_img*.1),
logs=True, image_name= uidstr + '_img_avg', save=True, path=data_dir)
ticks_ = get_qzr_map( qr_map, qz_map, inc_x0, Nzline=10, Nrline=10 )
ticks = ticks_[:4]
plot_qzr_map( qr_map, qz_map, inc_x0, ticks = ticks_, data= avg_img, uid= uidstr, path = data_dir )
show_qzr_roi( avg_img, roi_mask, inc_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uidstr )
qr_1d_pds = cal_1d_qr( avg_img, Qr, Qz, qr_map, qz_map, inc_x0, setup_pargs=setup_pargs )
plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique( np.array(list( qval_dict.values() ) )[:,0] ),
loglog=False, save=True, uid=uidstr, path = data_dir)
Nimg = FD.end - FD.beg
time_edge = create_time_slice( N= Nimg, slice_num= 3, slice_width= 1, edges = None )
time_edge = np.array( time_edge ) + good_start
qrt_pds = get_t_qrc( FD, time_edge, Qr, Qz, qr_map, qz_map, path=data_dir, uid = uidstr )
plot_qrt_pds( qrt_pds, time_edge, qz_index = 0, uid = uidstr, path = data_dir )
##############################
##the below works for all the geometries
########################################
if scat_geometry !='ang_saxs':
roi_inten = check_ROI_intensity( avg_img, roi_mask, ring_number= qth_interest, uid =uidstr, save=True, path=data_dir )
if scat_geometry =='saxs' or scat_geometry =='gi_saxs' or scat_geometry =='gi_waxs':
if run_waterfall:
wat = cal_waterfallc( FD, roi_mask,
qindex= qth_interest, save = True, path=data_dir,uid=uidstr)
if run_waterfall:
plot_waterfallc( wat, qindex=qth_interest, aspect=None,
vmax= np.max(wat), uid=uidstr, save =True,
path=data_dir, beg= FD.beg)
ring_avg = None
if run_t_ROI_Inten:
times_roi, mean_int_sets = cal_each_ring_mean_intensityc(FD, roi_mask, timeperframe = None, multi_cor=True )
plot_each_ring_mean_intensityc( times_roi, mean_int_sets, uid = uidstr, save=True, path=data_dir )
roi_avg = np.average( mean_int_sets, axis=0)
uid_ = uidstr + '_fra_%s_%s'%(FD.beg, FD.end)
lag_steps = None
if use_sqnorm:
norm = get_pixelist_interp_iq( qp_saxs, iq_saxs, roi_mask, center)
else:
norm=None
define_good_series = False
if define_good_series:
FD = Multifile(filename, beg = good_start, end = Nimg)
uid_ = uidstr + '_fra_%s_%s'%(FD.beg, FD.end)
print( uid_ )
if 'g2_fit_variables' in list( run_pargs.keys() ):
g2_fit_variables = run_pargs['g2_fit_variables']
else:
g2_fit_variables = {'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True}
if 'g2_guess_values' in list( run_pargs.keys() ):
g2_guess_values = run_pargs['g2_guess_values']
else:
g2_guess_values= {'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,}
if 'g2_guess_limits' in list( run_pargs.keys()):
g2_guess_limits = run_pargs['g2_guess_limits']
else:
g2_guess_limits = dict( baseline =[1, 2], alpha=[0, 2], beta = [0, 1], relaxation_rate= [0.001, 5000])
if run_one_time:
if use_imgsum_norm:
imgsum_ = imgsum
else:
imgsum_ = None
if scat_geometry !='ang_saxs':
t0 = time.time()
g2, lag_steps = cal_g2p( FD, roi_mask, bad_frame_list,good_start, num_buf = 8, num_lev= None,
imgsum= imgsum_, norm=norm )
run_time(t0)
taus = lag_steps * timeperframe
g2_pds = save_g2_general( g2, taus=taus,qr=np.array( list( qval_dict.values() ) )[:,0],
uid=uid_+'_g2.csv', path= data_dir, return_res=True )
g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( g2, taus,
function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
fit_variables= g2_fit_variables,
guess_values= g2_guess_values,
guess_limits = g2_guess_limits)
g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename= uid_ +'_g2_fit_paras.csv', path=data_dir )
#if run_one_time:
#plot_g2_general( g2_dict={1:g2}, taus_dict={1:taus},vlim=[0.95, 1.05], qval_dict = qval_dict, fit_res= None,
# geometry='saxs',filename=uid_+'--g2',path= data_dir, ylabel='g2')
plot_g2_general( g2_dict={1:g2, 2:g2_fit}, taus_dict={1:taus, 2:taus_fit},vlim=[0.95, 1.05],
qval_dict = qval_dict, fit_res= g2_fit_result, geometry=scat_geometry,filename=uid_ + '_g2',
path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_fit')
D0, qrate_fit_res = get_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], geometry= scat_geometry )
plot_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], qrate_fit_res,
geometry= scat_geometry,uid=uid_ , path= data_dir )
else:
t0 = time.time()
g2_v, lag_steps_v = cal_g2p( FD, roi_mask_v, bad_frame_list,good_start, num_buf = 8, num_lev= None,
imgsum= imgsum_, norm=norm )
g2_p, lag_steps_p = cal_g2p( FD, roi_mask_p, bad_frame_list,good_start, num_buf = 8, num_lev= None,
imgsum= imgsum_, norm=norm )
run_time(t0)
taus_v = lag_steps_v * timeperframe
g2_pds_v = save_g2_general( g2_v, taus=taus_v,qr=np.array( list( qval_dict_v.values() ) )[:,0],
uid=uid_+'_g2v.csv', path= data_dir, return_res=True )
taus_p = lag_steps_p * timeperframe
g2_pds_p = save_g2_general( g2_p, taus=taus_p,qr=np.array( list( qval_dict_p.values() ) )[:,0],
uid=uid_+'_g2p.csv', path= data_dir, return_res=True )
fit_g2_func_v = 'stretched' #for vertical
g2_fit_result_v, taus_fit_v, g2_fit_v = get_g2_fit_general( g2_v, taus_v,
function = fit_g2_func_v, vlim=[0.95, 1.05], fit_range= None,
fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
g2_fit_paras_v = save_g2_fit_para_tocsv(g2_fit_result_v, filename= uid_ +'_g2_fit_paras_v.csv', path=data_dir )
fit_g2_func_p ='flow_para' #for parallel
g2_fit_result_p, taus_fit_p, g2_fit_p = get_g2_fit_general( g2_p, taus_p,
function = fit_g2_func_p, vlim=[0.95, 1.05], fit_range= None,
fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True,'flow_velocity':True},
guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,'flow_velocity':1})
g2_fit_paras_p = save_g2_fit_para_tocsv(g2_fit_result_p, filename= uid_ +'_g2_fit_paras_p.csv', path=data_dir )
plot_g2_general( g2_dict={1:g2_v, 2:g2_fit_v}, taus_dict={1:taus_v, 2:taus_fit_v},vlim=[0.95, 1.05],
qval_dict = qval_dict_v, fit_res= g2_fit_result_v, geometry=scat_geometry,filename= uid_+'_g2_v',
path= data_dir, function= fit_g2_func_v, ylabel='g2_v', append_name= '_fit')
plot_g2_general( g2_dict={1:g2_p, 2:g2_fit_p}, taus_dict={1:taus_p, 2:taus_fit_p},vlim=[0.95, 1.05],
qval_dict = qval_dict_p, fit_res= g2_fit_result_p, geometry=scat_geometry,filename= uid_+'_g2_p',
path= data_dir, function= fit_g2_func_p, ylabel='g2_p', append_name= '_fit')
combine_images( [data_dir + uid_+'_g2_v_fit.png', data_dir + uid_+'_g2_p_fit.png'], data_dir + uid_+'_g2_fit.png', outsize=(2000, 2400) )
D0_v, qrate_fit_res_v = get_q_rate_fit_general( qval_dict_v, g2_fit_paras_v['relaxation_rate'], geometry= scat_geometry )
plot_q_rate_fit_general( qval_dict_v, g2_fit_paras_v['relaxation_rate'], qrate_fit_res_v,
geometry= scat_geometry,uid=uid_ +'_vert' , path= data_dir )
D0_p, qrate_fit_res_p = get_q_rate_fit_general( qval_dict_p, g2_fit_paras_p['relaxation_rate'], geometry= scat_geometry )
plot_q_rate_fit_general( qval_dict_p, g2_fit_paras_p['relaxation_rate'], qrate_fit_res_p,
geometry= scat_geometry,uid=uid_ +'_para' , path= data_dir )
combine_images( [data_dir + uid_+ '_vert_Q_Rate_fit.png', data_dir + uid_+ '_para_Q_Rate_fit.png'], data_dir + uid_+'_Q_Rate_fit.png', outsize=(2000, 2400) )
# For two-time
data_pixel = None
if run_two_time:
data_pixel = Get_Pixel_Arrayc( FD, pixelist, norm=norm ).get_data()
t0=time.time()
g12b = auto_two_Arrayc( data_pixel, roi_mask, index = None )
if run_dose:
np.save( data_dir + 'uid=%s_g12b'%uid, g12b)
if lag_steps is None:
num_bufs=8
noframes = FD.end - FD.beg
num_levels = int(np.log( noframes/(num_bufs-1))/np.log(2) +1) +1
tot_channels, lag_steps, dict_lag = multi_tau_lags(num_levels, num_bufs)
max_taus= lag_steps.max()
lag_steps = lag_steps[ lag_steps < Nimg - good_start ]
run_time( t0 )
show_C12(g12b, q_ind= qth_interest, N1= FD.beg, N2=min( FD.end,5000), vmin= 0.99, vmax=1.3,
timeperframe=timeperframe,save=True, cmap=cmap_albula,
path= data_dir, uid = uid_ )
#print('here')
#show_C12(g12b, q_ind= 3, N1= 5, N2=min(5000,5000), vmin=.8, vmax=1.31, cmap=cmap_albula,
# timeperframe= timeperframe,save=False, path= data_dir, uid = uid_ +'_' + k)
max_taus = Nimg
t0=time.time()
#g2b = get_one_time_from_two_time(g12b)[:max_taus]
g2b = get_one_time_from_two_time(g12b)[lag_steps]
tausb = lag_steps *timeperframe
run_time(t0)
#tausb = np.arange( g2b.shape[0])[:max_taus] *timeperframe
g2b_pds = save_g2_general( g2b, taus=tausb, qr= np.array( list( qval_dict.values() ) )[:,0],
qz=None, uid=uid_ +'_g2b.csv', path= data_dir, return_res=True )
g2_fit_resultb, taus_fitb, g2_fitb = get_g2_fit_general( g2b, tausb,
function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
fit_variables=g2_fit_variables, guess_values=g2_guess_values, guess_limits =g2_guess_limits)
g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb,
filename= uid_ + '_g2b_fit_paras.csv', path=data_dir )
D0b, qrate_fit_resb = get_q_rate_fit_general( qval_dict, g2b_fit_paras['relaxation_rate'],
fit_range=None, geometry= scat_geometry )
#print( qval_dict, g2b_fit_paras['relaxation_rate'], qrate_fit_resb )
plot_q_rate_fit_general( qval_dict, g2b_fit_paras['relaxation_rate'], qrate_fit_resb,
geometry= scat_geometry,uid=uid_ +'_two_time' , path= data_dir )
plot_g2_general( g2_dict={1:g2b, 2:g2_fitb}, taus_dict={1:tausb, 2:taus_fitb},vlim=[0.95, 1.05],
qval_dict=qval_dict, fit_res= g2_fit_resultb, geometry=scat_geometry,filename=uid_+'_g2',
path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_b_fit')
if run_two_time and run_one_time:
plot_g2_general( g2_dict={1:g2, 2:g2b}, taus_dict={1:taus, 2:tausb},vlim=[0.95, 1.05],
qval_dict=qval_dict, g2_labels=['from_one_time', 'from_two_time'],
geometry=scat_geometry,filename=uid_+'_g2_two_g2', path= data_dir, ylabel='g2', )
# Four Time Correlation
if run_four_time: #have to run one and two first
t0=time.time()
g4 = get_four_time_from_two_time(g12b, g2=g2b)[:max_taus]
run_time(t0)
taus4 = np.arange( g4.shape[0])*timeperframe
g4_pds = save_g2_general( g4, taus=taus4, qr=np.array( list( qval_dict.values() ) )[:,0],
qz=None, uid=uid_ +'_g4.csv', path= data_dir, return_res=True )
plot_g2_general( g2_dict={1:g4}, taus_dict={1:taus4},vlim=[0.95, 1.05], qval_dict=qval_dict, fit_res= None,
geometry=scat_geometry,filename=uid_+'_g4',path= data_dir, ylabel='g4')
if run_dose:
get_two_time_mulit_uids( [uid], roi_mask, norm= norm, bin_frame_number=bin_frame_number,
path= data_dir0, force_generate=False )
N = len(imgs)
try:
tr = md['transmission']
except:
tr = 1
if 'dose_frame' in list(run_pargs.keys()):
dose_frame = run_pargs['dose_frame']
else:
dose_frame = np.int_([ N/8, N/4 ,N/2, 3*N/4, N*0.99 ] )
#N/32, N/16, N/8, N/4 ,N/2, 3*N/4, N*0.99
exposure_dose = tr * exposuretime * dose_frame
taus_uids, g2_uids = get_series_one_time_mulit_uids( [ uid ], qval_dict, good_start=good_start,
path= data_dir0, exposure_dose = exposure_dose, num_bufs =8, save_g2= False,
dead_time = 0, trans = [ tr ] )
plot_dose_g2( taus_uids, g2_uids, ylim=[0.95, 1.2], vshift= 0.00,
qval_dict = qval_dict, fit_res= None, geometry= scat_geometry,
filename= '%s_dose_analysis'%uid_,
path= data_dir, function= None, ylabel='g2_Dose', g2_labels= None, append_name= '' )
# Speckel Visiblity
if run_xsvs:
max_cts = get_max_countc(FD, roi_mask )
qind, pixelist = roi.extract_label_indices( roi_mask )
noqs = len( np.unique(qind) )
nopr = np.bincount(qind, minlength=(noqs+1))[1:]
#time_steps = np.array( utils.geometric_series(2, len(imgs) ) )
time_steps = [0,1] #only run the first two levels
num_times = len(time_steps)
times_xsvs = exposuretime + (2**( np.arange( len(time_steps) ) ) -1 ) *timeperframe
print( 'The max counts are: %s'%max_cts )
### Do historam
if roi_avg is None:
times_roi, mean_int_sets = cal_each_ring_mean_intensityc(FD, roi_mask, timeperframe = None, )
roi_avg = np.average( mean_int_sets, axis=0)
t0=time.time()
spec_bins, spec_his, spec_std = xsvsp( FD, np.int_(roi_mask), norm=None,
max_cts=int(max_cts+2), bad_images=bad_frame_list, only_two_levels=True )
spec_kmean = np.array( [roi_avg * 2**j for j in range( spec_his.shape[0] )] )
run_time(t0)
run_xsvs_all_lags = False
if run_xsvs_all_lags:
times_xsvs = exposuretime + lag_steps * acquisition_period
if data_pixel is None:
data_pixel = Get_Pixel_Arrayc( FD, pixelist, norm=norm ).get_data()
t0=time.time()
spec_bins, spec_his, spec_std, spec_kmean = get_binned_his_std(data_pixel, np.int_(ro_mask), lag_steps )
run_time(t0)
spec_pds = save_bin_his_std( spec_bins, spec_his, spec_std, filename=uid_+'_spec_res.csv', path=data_dir )
ML_val, KL_val,K_ = get_xsvs_fit( spec_his, spec_kmean, spec_std, max_bins=2,varyK= False, )
#print( 'The observed average photon counts are: %s'%np.round(K_mean,4))
#print( 'The fitted average photon counts are: %s'%np.round(K_,4))
print( 'The difference sum of average photon counts between fit and data are: %s'%np.round(
abs(np.sum( spec_kmean[0,:] - K_ )),4))
print( '#'*30)
qth= 10
print( 'The fitted M for Qth= %s are: %s'%(qth, ML_val[qth]) )
print( K_[qth])
print( '#'*30)
plot_xsvs_fit( spec_his, ML_val, KL_val, K_mean = spec_kmean, spec_std=spec_std,
xlim = [0,10], vlim =[.9, 1.1],
uid=uid_, qth= qth_interest, logy= True, times= times_xsvs, q_ring_center=qr, path=data_dir)
plot_xsvs_fit( spec_his, ML_val, KL_val, K_mean = spec_kmean, spec_std = spec_std,
xlim = [0,15], vlim =[.9, 1.1],
uid=uid_, qth= None, logy= True, times= times_xsvs, q_ring_center=qr, path=data_dir )
### Get contrast
contrast_factorL = get_contrast( ML_val)
spec_km_pds = save_KM( spec_kmean, KL_val, ML_val, qs=qr, level_time=times_xsvs, uid=uid_ , path = data_dir )
#print( spec_km_pds )
plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
vlim=[0.8,1.2], qth = qth_interest, uid=uid_,path = data_dir, legend_size=14)
plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
vlim=[0.8,1.2], qth = None, uid=uid_,path = data_dir, legend_size=4)
md['mask_file']= mask_path + mask_name
md['mask'] = mask
md['NOTEBOOK_FULL_PATH'] = None
md['good_start'] = good_start
md['bad_frame_list'] = bad_frame_list
md['avg_img'] = avg_img
md['roi_mask'] = roi_mask
if scat_geometry == 'gi_saxs':
md['Qr'] = Qr
md['Qz'] = Qz
md['qval_dict'] = qval_dict
md['beam_center_x'] = inc_x0
md['beam_center_y']= inc_y0
md['beam_refl_center_x'] = refl_x0
md['beam_refl_center_y'] = refl_y0
elif scat_geometry == 'saxs' or 'gi_waxs':
md['qr']= qr
#md['qr_edge'] = qr_edge
md['qval_dict'] = qval_dict
md['beam_center_x'] = center[1]
md['beam_center_y']= center[0]
elif scat_geometry == 'ang_saxs':
md['qval_dict_v'] = qval_dict_v
md['qval_dict_p'] = qval_dict_p
md['beam_center_x'] = center[1]
md['beam_center_y']= center[0]
md['beg'] = FD.beg
md['end'] = FD.end
md['metadata_file'] = data_dir + 'md.csv-&-md.pkl'
psave_obj( md, data_dir + 'uid=%s_md'%uid[:6] ) #save the setup parameters
save_dict_csv( md, data_dir + 'uid=%s_md.csv'%uid, 'w')
Exdt = {}
if scat_geometry == 'gi_saxs':
for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list', 'qr_1d_pds'],
[md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, bad_frame_list, qr_1d_pds] ):
Exdt[ k ] = v
elif scat_geometry == 'saxs':
for k,v in zip( ['md', 'q_saxs', 'iq_saxs','iqst','qt','roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
[md, q_saxs, iq_saxs, iqst, qt,roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, bad_frame_list] ):
Exdt[ k ] = v
elif scat_geometry == 'gi_waxs':
for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
[md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, bad_frame_list] ):
Exdt[ k ] = v
elif scat_geometry == 'ang_saxs':
for k,v in zip( ['md', 'q_saxs', 'iq_saxs','roi_mask_v','roi_mask_p',
'qval_dict_v','qval_dict_p','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
[md, q_saxs, iq_saxs, roi_mask_v,roi_mask_p,
qval_dict_v,qval_dict_p, avg_img,mask,pixel_mask, imgsum, bad_frame_list] ):
Exdt[ k ] = v
if run_waterfall:Exdt['wat'] = wat
if run_t_ROI_Inten:Exdt['times_roi'] = times_roi;Exdt['mean_int_sets']=mean_int_sets
if run_one_time:
if scat_geometry != 'ang_saxs':
for k,v in zip( ['taus','g2','g2_fit_paras'], [taus,g2,g2_fit_paras] ):Exdt[ k ] = v
else:
for k,v in zip( ['taus_v','g2_v','g2_fit_paras_v'], [taus_v,g2_v,g2_fit_paras_v] ):Exdt[ k ] = v
for k,v in zip( ['taus_p','g2_p','g2_fit_paras_p'], [taus_p,g2_p,g2_fit_paras_p] ):Exdt[ k ] = v
if run_two_time:
for k,v in zip( ['tausb','g2b','g2b_fit_paras', 'g12b'], [tausb,g2b,g2b_fit_paras,g12b] ):Exdt[ k ] = v
if run_four_time:
for k,v in zip( ['taus4','g4'], [taus4,g4] ):Exdt[ k ] = v
if run_xsvs:
for k,v in zip( ['spec_kmean','spec_pds','times_xsvs','spec_km_pds','contrast_factorL'],
[ spec_kmean,spec_pds,times_xsvs,spec_km_pds,contrast_factorL] ):Exdt[ k ] = v
export_xpcs_results_to_h5( 'uid=%s_Res.h5'%md['uid'], data_dir, export_dict = Exdt )
#extract_dict = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%md['uid'], import_dir = data_dir )
# Creat PDF Report
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
pdf_filename = "XPCS_Analysis_Report_for_uid=%s%s.pdf"%(uid,pdf_version)
if run_xsvs:
pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf"%(uid,pdf_version)
#pdf_filename
print( data_dir, uid[:6], pdf_out_dir, pdf_filename, username )
make_pdf_report( data_dir, uid[:6], pdf_out_dir, pdf_filename, username,
run_fit_form, run_one_time, run_two_time, run_four_time, run_xsvs, run_dose=run_dose,
report_type= scat_geometry
)
## Attach the PDF report to Olog
if att_pdf_report:
os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
pname = pdf_out_dir + pdf_filename
atch=[ Attachment(open(pname, 'rb')) ]
try:
update_olog_uid( uid= md['uid'], text='Add XPCS Analysis PDF Report', attachments= atch )
except:
print("I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file."%pname)
if show_plot:
plt.show()
#else:
# plt.close('all')
if clear_plot:
plt.close('all')
if return_res:
res = {}
if scat_geometry == 'saxs':
for k,v in zip( ['md', 'q_saxs', 'iq_saxs','iqst','qt','avg_img','mask', 'imgsum','bad_frame_list','roi_mask', 'qval_dict'],
[ md, q_saxs, iq_saxs, iqst, qt, avg_img,mask,imgsum,bad_frame_list,roi_mask, qval_dict ] ):
res[ k ] = v
elif scat_geometry == 'ang_saxs':
for k,v in zip( [ 'md', 'q_saxs', 'iq_saxs','roi_mask_v','roi_mask_p',
'qval_dict_v','qval_dict_p','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
[ md, q_saxs, iq_saxs, roi_mask_v,roi_mask_p,
qval_dict_v,qval_dict_p, avg_img,mask,pixel_mask, imgsum, bad_frame_list] ):
res[ k ] = v
elif scat_geometry == 'gi_saxs':
for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list', 'qr_1d_pds'],
[md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, bad_frame_list, qr_1d_pds] ):
res[ k ] = v
elif scat_geometry == 'gi_waxs':
for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
[md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, bad_frame_list] ):
res[ k ] = v
if run_waterfall:
res['wat'] = wat
if run_t_ROI_Inten:
res['times_roi'] = times_roi;
res['mean_int_sets']=mean_int_sets
if run_one_time:
if scat_geometry != 'ang_saxs':
res['g2'] = g2
res['taus']=taus
else:
res['g2_p'] = g2_p
res['taus_p']=taus_p
res['g2_v'] = g2_v
res['taus_v']=taus_v
if run_two_time:
res['tausb'] = tausb
res['g12b'] = g12b
res['g2b'] = g2b
if run_four_time:
res['g4']= g4
res['taus4']=taus4
if run_xsvs:
res['spec_kmean']=spec_kmean
res['spec_pds']= spec_pds
res['contrast_factorL'] = contrast_factorL
res['times_xsvs']= times_xsvs
return res
#uid = '3ff4ee'
#run_xpcs_xsvs_single( uid, run_pargs )
<file_sep>from chxanalys.chx_libs import *
#from tqdm import *
from chxanalys.chx_libs import ( colors, markers )
from scipy.special import erf
from skimage.filters import prewitt
from skimage.draw import line_aa, line, polygon, ellipse, circle
from modest_image import ModestImage, imshow
import matplotlib.cm as mcm
import copy, scipy
import PIL
# Plot-marker cycle used by the plotting helpers in this module; the list is
# tiled 100x so any ROI index can pick a marker without running out.
markers = ['o', 'D', 'v', '^', '<', '>', 'p', 's', 'H',
           'h', '*', 'd',
           '8', '1', '3', '2', '4', '+', 'x', '_', '|', ',', '1',]
markers = np.array( markers *100 )
def shrink_image(img, bins):
    """YG Dec 12, 2017 dev@CHX
    Bin a 2D image down by integer factors.

    Parameters
    ----------
    img : 2D array
    bins : [b0, b1], integer binning factors along axis 0 and axis 1

    Returns
    -------
    2D array of shape (img.shape[0]//b0, img.shape[1]//b1); each output
    pixel is the mean of the corresponding b0 x b1 block.  Trailing rows
    and columns that do not fill a whole block are discarded.
    """
    rows, cols = img.shape
    f0, f1 = bins
    n0 = rows // f0
    n1 = cols // f1
    cropped = img[: n0 * f0, : n1 * f1]
    return cropped.reshape(n0, f0, n1, f1).mean(axis=(1, 3))
def get_diff_fv(g2_fit_paras, qval_dict, ang_init=137.2):
    """YG@CHX Nov 9,2017
    Derive a diffusion coefficient and a flow velocity from one-time fit results.

    g2_fit_paras : mapping/DataFrame holding 'relaxation_rate' and 'flow_velocity'
    qval_dict    : {roi: [q, angle_deg, ...]} from the analysis pipeline
    ang_init     : reference flow direction in degrees

    Returns a copy of g2_fit_paras with two extra columns:
        'diff' = relaxation_rate / q**2
        'fv'   = flow_velocity / (|cos(angle - ang_init)| * q)
    """
    out = g2_fit_paras.copy()
    keys = sorted(qval_dict.keys())
    q_mod = np.array([qval_dict[k][0] for k in keys])
    q_ang = np.array([qval_dict[k][1] for k in keys])
    out['diff'] = g2_fit_paras['relaxation_rate'] / q_mod ** 2
    projection = np.abs(np.cos(np.radians(q_ang - ang_init)))
    out['fv'] = g2_fit_paras['flow_velocity'] / projection / q_mod
    return out
def get_echos(dat_arr, min_distance=10):
    """
    Locate local maxima and the absolute minima between them in 1D data,
    e.g. speckle-echo amplitudes.

    Maxima come from skimage.feature.peak_local_max with the given
    min_distance (which returns peaks ordered from the end of the array);
    each minimum is found with np.argmin between two consecutive maxima.

    Returns [max_ind, min_ind]: lists of indices of the local maxima and
    minima, in ascending order.
    by LW 10/23/2018
    """
    from skimage.feature import peak_local_max
    peaks = peak_local_max(dat_arr, min_distance)
    minima = []
    for j in range(len(peaks) - 1):
        lo = peaks[j + 1][0]
        hi = peaks[j][0]
        minima.append(lo + np.argmin(dat_arr[lo:hi]))
    # peak_local_max wraps each index in its own list; flatten before returning
    flat_peaks = [p[0] for p in peaks]
    return [list(reversed(flat_peaks)), list(reversed(minima))]
def pad_length(arr, pad_val=np.nan):
    """
    Pad every row of a ragged 2D structure to the length of its longest row.

    arr     : 2D matrix (rows may have different lengths); mutated in place
    pad_val : value appended to short rows (default np.nan)

    Each row is multiplied by 1. first so that integer rows become float and
    can hold np.nan (works around np.pad failing on int arrays with nan).
    Used to convert generic python objects to HDF5-native format.
    Returns arr.
    by LW 12/30/2017
    """
    widths = [np.size(row) for row in arr]
    target = np.max(widths)
    for idx in range(np.shape(arr)[0]):
        row = arr[idx] * 1.
        arr[idx] = np.pad(row, (0, target - np.size(row)),
                          mode='constant', constant_values=pad_val)
    return arr
def save_array_to_tiff(array, output, verbose=True):
    """Y.G. Nov 1, 2017
    Write a numpy array to a tiff file at `output` (full path) via PIL.
    """
    PIL.Image.fromarray(array).save(output)
    if verbose:
        print( 'The data is save to: %s.'%( output ))
def load_pilatus(filename):
    """Y.G. Nov 1, 2017
    Read a Pilatus 2D image and return it as a numpy array
    (PIL 'I' mode, i.e. 32-bit signed integers).
    """
    image = PIL.Image.open(filename).convert('I')
    return np.array(image)
def ls_dir(inDir, string=None):
'''Y.G. Nov 1, 2017
List all filenames in a filefolder (not include hidden files and subfolders)
inDir: fullpath of the inDir
string: if not None, only retrun filename containing the string
'''
from os import listdir
from os.path import isfile, join
if string is None:
tifs = np.array( [f for f in listdir(inDir) if isfile(join(inDir, f))] )
else:
tifs = np.array( [f for f in listdir(inDir) if (isfile(join(inDir, f)))&(string in f) ] )
return tifs
def re_filename( old_filename, new_filename, inDir=None, verbose=True ):
    '''Y.G. Nov 28, 2017
    Rename old_filename to new_filename inside inDir.

    inDir   : full path of the folder; if None, the filenames must already
              contain their full path
    old_filename / new_filename: string
    verbose : if True, print a confirmation message
    an example:
    re_filename( 'uid=run20_pos1_fra_5_20000_tbins=0.010_ms_g2_two_g2.png',
                'uid=run17_pos1_fra_5_20000_tbins=0.010_ms_g2_two_g2.png',
            '/home/yuzhang/Analysis/Timepix/2017_3/Results/run17/run17_pos1/'
               )
    '''
    if inDir is not None:
        os.rename(inDir + old_filename, inDir+new_filename)
    else:
        os.rename( old_filename, new_filename)
    # bug fix: the message used to print unconditionally; honor the
    # verbose flag that re_filename_dir forwards here
    if verbose:
        print('The file: %s is changed to: %s.'%(old_filename, new_filename))
def re_filename_dir(old_pattern, new_pattern, inDir, verbose=True):
    '''Y.G. Nov 28, 2017
    Apply a substring replacement to every filename in a folder: each file
    in inDir whose name contains old_pattern is renamed with old_pattern
    replaced by new_pattern (delegates to ls_dir / re_filename).
    an example,
    re_filename_dir('20_', '17_', inDir )
    '''
    for name in ls_dir(inDir):
        if old_pattern not in name:
            continue
        renamed = name.replace(old_pattern, new_pattern)
        re_filename(name, renamed, inDir, verbose=verbose)
def get_roi_nr(qdict, q, phi, q_nr=True, phi_nr=False, silent=True):
    """
    Return the roi number matching a given (Q, phi), together with the
    matched values and the lists of all available Qs and phis.

    [roi_nr, Q, phi, Q_list, phi_list] = get_roi_nr(...)
    qdict : qval_dict from analysis pipeline/hdf5 result file
    q     : Q of interest; a value if q_nr=False, a Q-index if q_nr=True
    phi   : phi of interest; a value if phi_nr=False, a phi-index if phi_nr=True
    silent: if False, also print the available Qs/phis and the matched roi
    by LW 20/21/2017
    """
    all_qs = []
    all_phis = []
    for key in qdict.keys():
        all_qs.append(qdict[key][0])
        all_phis.append(qdict[key][1])
    from collections import OrderedDict
    # de-duplicate while preserving first-seen order
    q_list = list(OrderedDict.fromkeys(all_qs))
    phi_list = list(OrderedDict.fromkeys(all_phis))
    q_val = q_list[q] if q_nr else q
    phi_val = phi_list[phi] if phi_nr else phi
    q_hits = [i for i, val in enumerate(all_qs) if val == q_val]
    phi_hits = [i for i, val in enumerate(all_phis) if val == phi_val]
    roi = list(set(q_hits).intersection(phi_hits))[0]
    result = [roi, q_val, phi_val, q_list, phi_list]
    if silent == False:
        print('list of available Qs:')
        print(q_list)
        print('list of available phis:')
        print(phi_list)
        print('Roi number for Q= '+str(result[1])+' and phi= '+str(result[2])+': '+str(result[0]))
    return result
def get_fit_by_two_linear(x,y, mid_xpoint1, mid_xpoint2=None, xrange=None, ):
    '''YG Octo 16,2017 Fit a curve with two linear func, the curve is splitted by mid_xpoint,
    namely, fit the curve in two regions defined by (xmin,mid_xpoint1) and (mid_xpoint2, xmax)
    Input:
        x: 1D np.array
        y: 1D np.array
        mid_xpoint1: float, the end of the first fit region
        mid_xpoint2: float, the start of the second fit region; defaults to mid_xpoint1
        xrange: [x1,x2], overall range; defaults to [min(x), max(x)]
    Return:
        D1, gmfit1, D2, gmfit2 :
            fit parameter (slope, background) of linear fit1
            convinent fit class, gmfit1(x) gives yvale
            fit parameter (slope, background) of linear fit2
            convinent fit class, gmfit2(x) gives yvale
    '''
    # bug fix: `x1, x2 = xrange` previously executed even when xrange was
    # None, raising an error; now the default range is used only when needed
    if xrange is None:
        x1, x2 = min(x), max(x)
    else:
        x1, x2 = xrange
    if mid_xpoint2 is None:
        mid_xpoint2 = mid_xpoint1
    D1, gmfit1 = linear_fit( x,y, xrange= [ x1,mid_xpoint1 ])
    D2, gmfit2 = linear_fit( x,y, xrange= [mid_xpoint2, x2 ])
    return D1, gmfit1, D2, gmfit2
def get_cross_point(x, gmfit1, gmfit2):
    """YG Octo 16,2017
    Return the x value (from the sampled grid `x`) at which the two fitted
    curves gmfit1 and gmfit2 are closest, i.e. their crossing point.
    """
    gap = np.abs(gmfit1(x) - gmfit2(x))
    return x[np.argmin(gap)]
def get_curve_turning_points(x, y, mid_xpoint1, mid_xpoint2=None, xrange=None):
    """YG Octo 16,2017
    Locate the turning point of a curve: fit two straight lines on either
    side of mid_xpoint (get_fit_by_two_linear) and return where they cross.
    """
    fit = get_fit_by_two_linear(x, y, mid_xpoint1, mid_xpoint2, xrange)
    _, left_fit, _, right_fit = fit
    return get_cross_point(x, left_fit, right_fit)
def plot_fit_two_linear_fit(x,y, gmfit1, gmfit2, ax=None ):
    '''YG Octo 16,2017 Plot data with two fitted linear func
    x, y           : the measured curve
    gmfit1, gmfit2 : callables (e.g. np.poly1d) for the two linear fits
    ax             : matplotlib axis; a new figure/axis is created if None
    Returns the axis with the data (black circles) and the two fits
    (red and blue lines) drawn on it.
    '''
    if ax is None:
        fig, ax =plt.subplots()
    # data as black markers, then the two fits as colored lines (plot1D helper)
    plot1D( x = x, y = y, ax =ax, c='k', legend='data', m='o', ls='')#logx=True, logy=True )
    plot1D( x = x, y = gmfit1(x), ax =ax, c='r', m='', ls='-',legend='fit1' )
    plot1D( x = x, y = gmfit2(x), ax =ax, c='b', m='', ls='-',legend='fit2' )
    return ax
def linear_fit(x, y, xrange=None):
    """YG Octo 16,2017 copied from XPCS_SAXS
    Straight-line (first-order polynomial) fit of y versus x.

    xrange : [xmin, xmax]; when given, only the data between the indices
             of xmin and xmax (located via find_index) enter the fit.
    Returns (D0, gmfit): the [slope, intercept] coefficients and the
    corresponding np.poly1d object (gmfit(x) evaluates the fit).
    """
    if xrange is None:
        xs, ys = x, y
    else:
        lo, hi = xrange
        i0 = find_index(x, lo, tolerance=None)
        i1 = find_index(x, hi, tolerance=None)
        xs, ys = x[i0:i1], y[i0:i1]
    coeffs = np.polyfit(xs, ys, 1)
    return coeffs, np.poly1d(coeffs)
def find_index( x,x0,tolerance= None):
    '''YG Octo 16,2017 copied from SAXS
    Find the index of the element of x closest to x0.

    x0 above max(x) maps to the last index, below min(x) to index 0;
    otherwise the nearest-value index is returned.
    `tolerance` is kept for backward compatibility but is unused here
    (see find_index_old for the tolerance-based variant).
    '''
    # removed dead locals N and i that were assigned but never used
    if x0 > max(x):
        position = len(x) - 1
    elif x0 < min(x):
        position = 0
    else:
        position = np.argmin( np.abs( x - x0 ) )
    return position
def find_index_old(x, x0, tolerance=None):
    '''YG Octo 16,2017 copied from SAXS
    Legacy tolerance-based index search: return the index of the first
    element of x within `tolerance` of x0 (default tolerance is half the
    first grid spacing, (x[1]-x[0])/2).  x0 beyond the data range maps to
    the first/last index; if nothing is within tolerance, returns None.
    '''
    if tolerance == None:
        tolerance = (x[1] - x[0]) / 2.
    if x0 > max(x):
        return len(x) - 1
    if x0 < min(x):
        return 0
    position = None
    for idx, item in enumerate(x):
        if abs(item - x0) <= tolerance:
            position = idx
            break
    return position
def sgolay2d( z, window_size, order, derivative=None):
    """YG Octo 16, 2017
    Two-dimensional Savitzky-Golay smoothing / differentiation.
    Modified from http://scipy-cookbook.readthedocs.io/items/SavitzkyGolay.html

    A 2D polynomial of the given `order` is least-squares fitted over a
    square window of window_size x window_size points (window_size must be
    odd); the resulting convolution kernel is applied to `z` after
    mirror-padding all four borders by half the window.

    Parameters
    ----------
    z : 2D array, data to smooth
    window_size : odd int, side length of the square fitting window
    order : int, order of the fitted 2D polynomial
    derivative : None | 'col' | 'row' | 'both'
        None returns the smoothed surface; 'col'/'row' return the
        corresponding partial-derivative surface; 'both' returns the
        (row, col) pair.

    See https://en.wikipedia.org/wiki/Savitzky%E2%80%93Golay_filter
    (two-dimensional convolution coefficients) for the derivation.
    """
    # number of terms in the polynomial expression
    n_terms = ( order + 1 ) * ( order + 2) / 2.0
    if window_size % 2 == 0:
        raise ValueError('window_size must be odd')
    if window_size**2 < n_terms:
        raise ValueError('order is too high for the window size')
    half_size = window_size // 2
    # exponents of the polynomial.
    # p(x,y) = a0 + a1*x + a2*y + a3*x^2 + a4*y^2 + a5*x*y + ...
    # this line gives a list of two item tuple. Each tuple contains
    # the exponents of the k-th term. First element of tuple is for x
    # second element for y.
    # Ex. exps = [(0,0), (1,0), (0,1), (2,0), (1,1), (0,2), ...]
    exps = [ (k-n, n) for k in range(order+1) for n in range(k+1) ]
    # coordinates of points
    ind = np.arange(-half_size, half_size+1, dtype=np.float64)
    dx = np.repeat( ind, window_size )
    dy = np.tile( ind, [window_size, 1]).reshape(window_size**2, )
    # build matrix of system of equation (one column per polynomial term)
    A = np.empty( (window_size**2, len(exps)) )
    for i, exp in enumerate( exps ):
        A[:,i] = (dx**exp[0]) * (dy**exp[1])
    # pad input array with appropriate values at the four borders:
    # each band/corner is reflected and offset so the padding continues the
    # local trend instead of clamping to the edge value
    new_shape = z.shape[0] + 2*half_size, z.shape[1] + 2*half_size
    Z = np.zeros( (new_shape) )
    # top band
    band = z[0, :]
    Z[:half_size, half_size:-half_size] = band -  np.abs( np.flipud( z[1:half_size+1, :] ) - band )
    # bottom band
    band = z[-1, :]
    Z[-half_size:, half_size:-half_size] = band  + np.abs( np.flipud( z[-half_size-1:-1, :] )  -band )
    # left band
    band = np.tile( z[:,0].reshape(-1,1), [1,half_size])
    Z[half_size:-half_size, :half_size] = band - np.abs( np.fliplr( z[:, 1:half_size+1] ) - band )
    # right band
    band = np.tile( z[:,-1].reshape(-1,1), [1,half_size] )
    Z[half_size:-half_size, -half_size:] =  band + np.abs( np.fliplr( z[:, -half_size-1:-1] ) - band )
    # central band
    Z[half_size:-half_size, half_size:-half_size] = z
    # top left corner
    band = z[0,0]
    Z[:half_size,:half_size] = band - np.abs( np.flipud(np.fliplr(z[1:half_size+1,1:half_size+1]) ) - band )
    # bottom right corner
    band = z[-1,-1]
    Z[-half_size:,-half_size:] = band + np.abs( np.flipud(np.fliplr(z[-half_size-1:-1,-half_size-1:-1]) ) - band )
    # top right corner
    band = Z[half_size,-half_size:]
    Z[:half_size,-half_size:] = band - np.abs( np.flipud(Z[half_size+1:2*half_size+1,-half_size:]) - band )
    # bottom left corner
    band = Z[-half_size:,half_size].reshape(-1,1)
    Z[-half_size:,:half_size] = band - np.abs( np.fliplr(Z[-half_size:, half_size+1:2*half_size+1]) - band )
    # solve system and convolve: row k of pinv(A) gives the kernel for the
    # k-th polynomial coefficient (0: value, 1: d/dcol, 2: d/drow)
    if derivative == None:
        m = np.linalg.pinv(A)[0].reshape((window_size, -1))
        return scipy.signal.fftconvolve(Z, m, mode='valid')
    elif derivative == 'col':
        c = np.linalg.pinv(A)[1].reshape((window_size, -1))
        return scipy.signal.fftconvolve(Z, -c, mode='valid')
    elif derivative == 'row':
        r = np.linalg.pinv(A)[2].reshape((window_size, -1))
        return scipy.signal.fftconvolve(Z, -r, mode='valid')
    elif derivative == 'both':
        c = np.linalg.pinv(A)[1].reshape((window_size, -1))
        r = np.linalg.pinv(A)[2].reshape((window_size, -1))
        return scipy.signal.fftconvolve(Z, -r, mode='valid'), scipy.signal.fftconvolve(Z, -c, mode='valid')
def extract_data_from_file( filename, filepath, good_line_pattern, good_cols=None, labels=None,):
    '''YG Develop Octo 17, 2018
    Parse a text file and collect the numeric rows that follow a marker line.

    Input:
        filename: str, filename of the data
        filepath: str, path of the data
        good_line_pattern: str, rows are collected only after a line
            containing this pattern
        good_cols: list of integer, indices of the columns to keep (all if None)
        labels: column names for the returned DataFrame (0..Ncol-1 if None)
    Rows that fail float conversion are silently skipped.
    Return:
        a pds.dataframe
    Example:
    filepath = '/XF11ID/analysis/2017_3/lwiegart/Link_files/Exports/'
    filename = 'ANPES2 15-10-17 16-31-11-84Exported.txt'
    good_cols = [ 1,2,4,6,8,10 ]
    labels = [ 'time', 'temperature', 'force', 'distance', 'stress', 'strain' ]
    good_line_pattern = "Index\tX\tY\tX\tY\tX\tY"
    df = extract_data_from_file( filename, filepath, good_line_pattern, good_cols, labels)
    '''
    import pandas as pds
    with open(filepath + filename, 'r') as fin:
        lines = fin.readlines()
    marker = 1e20
    for num, line in enumerate(lines):
        if good_line_pattern in line:
            marker = num
        if num == marker + 1:
            # first data row (re)initializes the accumulator
            fields = line.split()
            if good_cols is None:
                data = np.array(fields, dtype=float)
            else:
                data = np.array([fields[c] for c in good_cols], dtype=float)
        elif num > marker:
            try:
                fields = line.split()
                if good_cols is None:
                    row = np.array(fields, dtype=float)
                else:
                    row = np.array([fields[c] for c in good_cols], dtype=float)
                data = np.vstack((data, row))
            except:
                pass
    if labels is None:
        labels = np.arange(data.shape[1])
    return pds.DataFrame(data, index=np.arange(data.shape[0]), columns=labels)
def get_print_uids(start_time, stop_time):
    '''YG. Octo 3, 2017@CHX
    Query the databroker between start_time and stop_time; print each run's
    short uid, scan id and Measurement string, and return the uid and
    scan-id arrays.
    '''
    hdrs = list(db(start_time=start_time, stop_time=stop_time))
    uids = np.zeros(len(hdrs), dtype=object)
    sids = np.zeros(len(hdrs), dtype=object)
    for n, hdr in enumerate(hdrs):
        uid = hdr['start']['uid'][:6]
        sid = hdr['start']['scan_id']
        uids[n] = uid
        sids[n] = sid
        try:
            m = hdr['start']['Measurement']
        except:
            m = ''
        print(" uid = '%s' #(scan num: %s (Measurement: %s "%(uid, sid, m))
    return uids, sids
def get_last_uids(n=-1):
    '''YG Sep 26, 2017
    Return a ready-to-paste summary string (uid, scan id, Measurement)
    for the n-th most recent run in the databroker.'''
    start_doc = db[n]['start']
    return " uid = '%s' #(scan num: %s (Measurement: %s "%(
        start_doc['uid'][:6], start_doc['scan_id'], start_doc['Measurement'])
def get_base_all_filenames(inDir, base_filename_cut_length=-7):
    '''YG Sep 26, 2017
    Group all files in a folder by their (truncated) base filename.
    Input:
        inDir: str, input data dir
        base_filename_cut_length: number of trailing characters that are
            NOT part of the unique base name (e.g. -7 drops the last 7)
    Output:
        dict: keys, base filename; values, list of all related filenames
    Bases are processed in reverse-sorted order and matched files are
    consumed, so each file ends up in exactly one group.
    '''
    from os import listdir
    from os.path import isfile, join
    all_files = np.array([f for f in listdir(inDir) if isfile(join(inDir, f))])
    remaining = list(all_files.copy())
    bases = np.sort(np.unique(np.array(
        [f[:base_filename_cut_length] for f in all_files])))[::-1]
    grouped = {}
    for base in bases:
        grouped[base] = []
        matched = []
        for name in remaining:
            if base in name:
                grouped[base].append(name)
                matched.append(name)
        for name in matched:
            remaining.remove(name)
    return grouped
def create_ring_mask(shape, r1, r2, center, mask=None):
    '''YG. Sep 20, 2017 Develop@CHX
    Create a 2D ring (annulus) mask.
    input:
        shape: two integer number list, mask shape, e.g., [100,100]
        r1: the inner radius
        r2: the outer radius
        center: two integer number list, [cx,cy], ring center, e.g., [30,50]
        mask: optional array added on top of the ring
    output:
        2D numpy array, 0,1 type
    '''
    ring = np.zeros(shape, dtype=bool)
    # paint the full outer disk, then carve out the inner disk
    rr, cc = circle(center[1], center[0], r2, shape=shape)
    ring[rr, cc] = 1
    rr, cc = circle(center[1], center[0], r1, shape=shape)
    ring[rr, cc] = 0
    if mask is not None:
        ring += mask
    return ring
def get_image_edge(img):
    '''
    Y.G. Developed at Sep 8, 2017 @CHX
    Get sharp edges of an image via a Prewitt filter.
    img: two-D array, e.g., a roi mask
    Returns an array equal to img on the detected edge pixels and 1
    everywhere else (so it can be used as a multiplicative overlay).
    '''
    grad = prewitt(img / 1.0)
    edge = np.zeros_like(grad)
    hits = np.where(grad > 1e-10)
    edge[hits] = img[hits]
    edge[np.where(edge == 0)] = 1
    return edge
def get_image_with_roi(img, roi_mask, scale_factor=2):
    '''
    Y.G. Developed at Sep 8, 2017 @CHX
    Overlay ROI edges on an image by
        i) extracting the edges of roi_mask with get_image_edge
        ii) multiplying the ROI pixels of img by scale_factor
    img: two-D array for image
    roi_mask: two-D array for ROI
    scale_factor: scaling factor of ROI in image
    '''
    edges = get_image_edge(roi_mask)
    boosted = img.copy()
    inside = np.where(roi_mask)
    boosted[inside] = img[inside] * scale_factor
    return boosted * edges
def get_today_date():
    """Return today's date (UTC) formatted as MM-DD-YYYY."""
    from time import gmtime, strftime
    return strftime("%m-%d-%Y", gmtime())
def move_beamstop(mask, xshift, yshift):
    '''Y.G. Developed at July 18, 2017 @CHX
    Create a new mask by shifting the bad-pixel region of the old one.
    Input
    ---
    mask: 2D numpy array, 0 for bad pixels, 1 for good pixels
    xshift, integer, shift value along x direction
    yshift, integer, shift value along y direction
    Output
    ---
    mask, 2D numpy array; bad pixels are moved by the shift and any pushed
    outside the frame are dropped
    '''
    shifted = np.ones_like(mask)
    nrow, ncol = mask.shape
    bad = np.where(mask == 0)
    rows = bad[0] + int(yshift)
    cols = bad[1] + int(xshift)
    inside = np.where((rows >= 0) & (rows < nrow) & (cols >= 0) & (cols < ncol))
    shifted[rows[inside], cols[inside]] = 0
    return shifted
def validate_uid(uid):
    '''check uid whether be able to load data
    Runs the full load chain (get_sid_filenames via db, get_meta_data,
    load_data) and returns 1 on success, 0 on any failure.  The bare
    except is deliberate: any loading error just flags the uid as bad.
    '''
    try:
        sud = get_sid_filenames(db[uid])
        print(sud)
        md = get_meta_data( uid )
        imgs = load_data( uid, md['detector'], reverse= True )
        print(imgs)
        return 1
    except:
        print("Can't load this uid=%s!"%uid)
        return 0
def validate_uid_dict( uid_dict ):
    ''' Y.G. developed July 17, 2017 @CHX
    Check whether each uid in a dict can load data (via validate_uid).
    uid_dict: dict, key: meaningful decription, value: a list of uids
    Prints the number of bad uids and the list of them.
    '''
    badn = 0
    badlist=[]
    # bug fix: the body previously iterated a global `uids` instead of the
    # `uid_dict` parameter, raising NameError unless such a global existed
    for k in list(uid_dict.keys()):
        for uid in uid_dict[k]:
            flag = validate_uid(uid)
            if not flag:
                badn += 1
                badlist.append( uid )
    print( 'There are %s bad uids:%s in this uid_dict.'%(badn, badlist))
def get_mass_center_one_roi(FD, roi_mask, roi_ind):
    '''Get the mass center (in pixel unit) of one roi in a time series FD
    FD: handler for a compressed time series
    roi_mask: the roi array
    roi_ind: the interest index of the roi
    Returns (cx, cy): per-frame integer center-of-mass coordinates.
    '''
    import scipy
    selection = (roi_mask == roi_ind)
    nframes = int((FD.end - FD.beg) / 1)
    cx = np.zeros(nframes)
    cy = np.zeros(nframes)
    frames = tqdm(range(FD.beg, FD.end, 1),
                  desc='Get mass center of one ROI of each frame')
    for n, i in enumerate(frames):
        masked = FD.rdframe(i) * selection
        com = scipy.ndimage.measurements.center_of_mass(masked)
        cx[n], cy[n] = int(com[0]), int(com[1])
    return cx, cy
def get_current_pipeline_filename(NOTEBOOK_FULL_PATH):
    '''Y.G. April 25, 2017
    Get the current running pipeline filename and path
    Assume the piple is located in /XF11ID/
    Return, path and filename
    '''
    # Removed the dead `if False:` Javascript block and the unconditional
    # `from IPython...` import it needed: this function is pure string
    # handling and should not require (or fail without) IPython.
    print(NOTEBOOK_FULL_PATH)
    filename = NOTEBOOK_FULL_PATH.split('/')[-1]
    # rebuild the path under /XF11ID/, skipping the first three components
    path = '/XF11ID/'
    for s in NOTEBOOK_FULL_PATH.split('/')[3:-1]:
        path += s + '/'
    return path, filename
def get_current_pipeline_fullpath(NOTEBOOK_FULL_PATH):
    '''Y.G. April 25, 2017
    Get the current running pipeline full filepath
    Assume the piple is located in /XF11ID/
    Return, the fullpath (path + filename)
    '''
    folder, name = get_current_pipeline_filename(NOTEBOOK_FULL_PATH)
    return folder + name
def save_current_pipeline(NOTEBOOK_FULL_PATH, outDir):
    '''Y.G. April 25, 2017
    Copy the current running pipeline notebook into outDir, so the saved
    file is a snapshot of the current state.
    '''
    import shutil
    folder, name = get_current_pipeline_filename(NOTEBOOK_FULL_PATH)
    shutil.copyfile(folder + name, outDir + name)
    print('This pipeline: %s is saved in %s.'%(name, outDir))
def plot_g1( taus, g2, g2_fit_paras, qr=None, ylim=[0,1], title=''):
    '''Dev Apr 19, 2017,
    Plot one-time correlation, giving taus, g2, g2_fit
    The field correlation g1 is computed per q as
    sqrt(|g2 - baseline| / beta) using the fitted baseline/beta from
    g2_fit_paras; the first tau point is skipped.
    qr: optional q labels for the legend (defaults to 0..noqs-1)
    NOTE: ylim defaults to a shared mutable list; callers must not mutate it.
    Returns the matplotlib axis.
    '''
    noqs = g2.shape[1]
    fig,ax=plt.subplots()
    if qr is None:
        qr = np.arange(noqs)
    for i in range(noqs):
        b = g2_fit_paras['baseline'][i]
        beta = g2_fit_paras['beta'][i]
        # g1 = sqrt(|g2 - baseline| / beta)
        y= np.sqrt( np.abs(g2[1:,i] - b)/beta )
        plot1D( x = taus[1:], y= y, ax=ax, legend= 'q=%s'%qr[i], ls='-', lw=2,
            m=markers[i], c= colors[i], title=title, ylim=ylim,
            logx=True, legend_size= 8 )
    ax.set_ylabel( r"$g_1$" + '(' + r'$\tau$' + ')' )
    ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
    return ax
def filter_roi_mask( filter_dict, roi_mask, avg_img, filter_type= 'ylim' ):
    '''Remove bad pixels in roi_mask. The bad pixel is defined by the filter_dict,
    if filter_type ='ylim', the filter_dict wit key as q and each value gives a high and low limit thresholds. The value of the pixels in avg_img above or below the limit are considered as bad pixels.
    if filter_type='badpix': the filter_dict wit key as q and each value gives a list of bad pixel.
    avg_img, the averaged image
    roi_mask: two-d array, the same shape as image, the roi mask, value is integer, e.g., 1 ,2 ,...
    filter_dict: keys, as roi_mask integer, value, by default is [None,None], is the limit,
    example, {2:[4,5], 10:[0.1,1.1]}
    NOTE: first q = 1 (not 0)
    Returns a copy of roi_mask with the flagged pixels zeroed.
    '''
    rm = roi_mask.copy()
    # np.ravel on a contiguous copy returns a VIEW, so writing into rf
    # below zeroes pixels directly in rm -- this aliasing is intentional.
    rf = np.ravel(rm)
    for k in list(filter_dict.keys()):
        # pixel values of avg_img inside roi k, in raveled order
        pixel = roi.roi_pixel_values(avg_img, roi_mask, [k] )[0][0]
        #print( np.max(pixel), np.min(pixel) )
        if filter_type == 'ylim':
            xmin,xmax = filter_dict[k]
            badp =np.where( (pixel>= xmax) | ( pixel <= xmin) )[0]
        else:
            # 'badpix' mode: the dict value is already a list of bad indices
            badp = filter_dict[k]
        if len(badp)!=0:
            # positions of roi k in the raveled mask; zero the bad ones
            pls = np.where([rf==k])[1]
            rf[ pls[badp] ] = 0
    return rm
##
#Dev at March 31: create an Eiger chip-gap mask
def create_chip_edges_mask( det='1M' ):
    ''' Create a chip edge mask for Eiger detector
    Masks (sets to 0) the 4-pixel-wide gaps between chip modules of the
    Eiger 1M; all other pixels are 1. Only det='1M' is supported.
    '''
    if det == '1M':
        ny, nx = 1065, 1030
        half = 4 // 2                       # half-width of each masked stripe
        mask = np.ones((ny, nx), dtype=np.int32)
        # vertical chip gaps at quarter positions of the columns
        for col in (nx // 4, 2 * (nx // 4), 3 * (nx // 4)):
            mask[:, col - half:col + half] = 0
        # horizontal chip gaps (hard-coded row centers)
        for row in (808, 257):
            mask[row - half:row + half, :] = 0
    return mask
def create_ellipse_donut( cx, cy , wx_inner, wy_inner, wx_outer, wy_outer, roi_mask, gap=0):
    '''Append a two-ring elliptical "donut" ROI to roi_mask.

    The outer ellipse gets label max+2, an optional gap ring is cleared to 0,
    and the inner ellipse gets label max+1 (labels offset past the current
    maximum label in roi_mask). Modifies and returns roi_mask.
    '''
    label0 = np.max( np.unique( roi_mask ) )
    # paint outer ellipse first so inner/gap regions can overwrite it
    rr_out, cc_out = ellipse( cy, cx, wy_outer, wx_outer )
    roi_mask[rr_out, cc_out] = label0 + 2
    # clear the gap ring between inner and outer
    rr_gap, cc_gap = ellipse( cy, cx, wy_inner + gap, wx_inner + gap )
    roi_mask[rr_gap, cc_gap] = 0
    # inner ellipse on top
    rr_in, cc_in = ellipse( cy, cx, wy_inner, wx_inner )
    roi_mask[rr_in, cc_in] = label0 + 1
    return roi_mask
def create_box( cx, cy, wx, wy, roi_mask):
    '''Append one rectangular ROI per (cx, cy) center to roi_mask.

    Each box is 2*wx by 2*wy; labels continue past the current maximum
    label in roi_mask. Modifies and returns roi_mask.
    '''
    label0 = np.max( np.unique( roi_mask ) )
    for idx, (cx_, cy_) in enumerate(zip(cx, cy)):
        xs = np.array([cx_ - wx, cx_ + wx, cx_ + wx, cx_ - wx])
        ys = np.array([cy_ - wy, cy_ - wy, cy_ + wy, cy_ + wy])
        rr, cc = polygon(ys, xs)
        roi_mask[rr, cc] = label0 + idx + 1
    return roi_mask
def create_folder( base_folder, sub_folder ):
    '''
    Create a subfolder under a base folder (no error if it already exists).
    Input:
        base_folder: full path of the base folder
        sub_folder: sub folder name to be created
    Return:
        full path of the created folder
    '''
    target = os.path.join( base_folder, sub_folder )
    os.makedirs( target, exist_ok=True )
    print('Results from this analysis will be stashed in the directory %s' % target)
    return target
def create_user_folder( CYCLE, username = None ):
    '''
    Create a folder for saving user data analysis results under
    /XF11ID/analysis/<CYCLE>/<username>/Results/.
    Input:
        CYCLE: run cycle
        username: if None, the current login name (via getpass) is used
    Return:
        full path of the created folder
    '''
    user = getpass.getuser() if username is None else username
    results_dir = os.path.join('/XF11ID/analysis/', CYCLE, user, 'Results/')
    os.makedirs(results_dir, exist_ok=True)
    print('Results from this analysis will be stashed in the directory %s' % results_dir)
    return results_dir
##################################
#########For dose analysis #######
##################################
def get_fra_num_by_dose( exp_dose, exp_time, att=1, dead_time =2 ):
    '''
    Calculate the frame number to be correlated for a given X-ray exposure dose.
    Paramters:
        exp_dose: a list, the exposed dose, e.g., in unit of exp_time(ms)*N(fram num)*att( attenuation)
        exp_time: float, the exposure time for a xpcs time sereies
        att: attenuation factor
        dead_time: dead time for the fast shutter reponse time, CHX = 2ms
    Return:
        noframes: the frame number to be correlated, exp_dose/( exp_time + dead_time )
    e.g.,
    no_dose_fra = get_fra_num_by_dose( exp_dose = [ 3.34* 20, 3.34*50, 3.34*100, 3.34*502, 3.34*505 ],
                                       exp_time = 1.34, dead_time = 2)
    --> no_dose_fra will be array([ 20, 50, 100, 502, 504])
    '''
    doses = np.array( exp_dose )
    # frames = dose / (time per frame incl. shutter dead time) / attenuation,
    # truncated to integer
    frames = doses / (exp_time + dead_time) / att
    return np.int_( frames )
def get_multi_tau_lag_steps( fra_max, num_bufs = 8 ):
    '''
    Get taus in log steps ( a multi-taus defined taus ) for a time series with max frame number as fra_max
    Parameters:
        fra_max: integer, the maximun frame number
        num_bufs: buffers per multi-tau level (default=8)
    Return:
        taus_in_log, a list
    e.g.,
    get_multi_tau_lag_steps( 20, 8 ) --> array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16])
    '''
    # number of multi-tau levels needed to cover fra_max lags
    levels = int(np.log(fra_max / (num_bufs - 1)) / np.log(2) + 1) + 1
    tot_channels, lag_steps, dict_lag = multi_tau_lags(levels, num_bufs)
    # drop lags that exceed the available number of frames
    return lag_steps[lag_steps < fra_max]
def get_series_g2_taus( fra_max_list, acq_time=1, max_fra_num=None, log_taus = True,
                        num_bufs = 8):
    '''
    Get taus for dose dependent analysis
    Parameters:
        fra_max_list: a list, a lsit of largest available frame number
        acq_time: acquistion time for each frame
        max_fra_num: optional cap on the frame number; larger requests are clipped
        log_taus: if true, will use the multi-tau defined taus bu using buf_num (default=8),
                  otherwise, use deltau =1
        num_bufs: buffers per multi-tau level (default=8)
    Return:
        tausd, a dict, with keys as taus_max_list items
    e.g.,
    get_series_g2_taus( fra_max_list=[20,30,40], acq_time=1, max_fra_num=None, log_taus = True, num_bufs = 8)
    -->
    {20: array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16]),
     30: array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28]),
     40: array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32])
    }
    '''
    tausd = {}
    for n in fra_max_list:
        if max_fra_num is not None:
            L = max_fra_num
        else:
            # np.infty was removed in NumPy 2.0; np.inf is the canonical name
            L = np.inf
        if n>L:
            warnings.warn("Warning: the dose value is too large, and please"
                          "check the maxium dose in this data set and give a smaller dose value."
                          "We will use the maxium dose of the data.")
            n = L
        if log_taus:
            # multi-tau (logarithmic) lag spacing
            lag_steps = get_multi_tau_lag_steps(n, num_bufs)
        else:
            # linear lag spacing, delta-tau = 1 frame
            lag_steps = np.arange( n )
        tausd[n] = lag_steps * acq_time
    return tausd
def check_lost_metadata(md, Nimg=None, inc_x0 =None, inc_y0= None, pixelsize=7.5000004e-05 ):
    '''Y.G. Dec 31, 2016, check lost metadata
    Parameter:
        md: dict, meta data dictionay
        Nimg: number of frames for this uid metadata
        inc_x0/y0: incident beam center x0/y0, if None, will over-write the md['beam_center_x/y']
        pixelsize: if md don't have ['x_pixel_size'], this pixelsize (in m) will be added
                   (previous default 7.5*10*(-5) evaluated to -375, a typo for 7.5e-5)
    Return:
        dpix: pixelsize, in mm
        lambda_: wavelegth of the X-rays in Angstroms
        Ldet: sample-detector distance in mm
        exposuretime: exposure time in sec
        timeperframe: acquisition time is sec
        center: list, [x,y], incident beam center in pixel
    Will also update md
    '''
    if 'number of images' not in list(md.keys()):
        md['number of images'] = Nimg
    if 'x_pixel_size' not in list(md.keys()):
        # use the pixelsize argument, as documented (was hard-coded before)
        md['x_pixel_size'] = pixelsize
    dpix = md['x_pixel_size'] * 1000.  #in mm, eiger 4m is 0.075 mm
    lambda_ =md['incident_wavelength']    # wavelegth of the X-rays in Angstroms
    try:
        Ldet = md['det_distance']
        if Ldet<=1000:
            Ldet *=1000     # value was in meters; convert to mm
            md['det_distance'] = Ldet
    except KeyError:
        Ldet = md['detector_distance']
        if Ldet<=1000:
            Ldet *=1000
            md['detector_distance'] = Ldet
    try:#try exp time from detector
        exposuretime= md['count_time']     #exposure time in sec
    except KeyError:
        exposuretime= md['cam_acquire_time']     #exposure time in sec
    try:#try acq time from detector
        acquisition_period = md['frame_time']
    except KeyError:
        try:
            acquisition_period = md['acquire period']
        except KeyError:
            # last resort: look the acquire period up from the databroker run start
            uid = md['uid']
            acquisition_period = float( db[uid]['start']['acquire period'] )
    timeperframe = acquisition_period
    # The x/y swap below matches the [y, x] python-image convention used for
    # 'center' at the end. Previously the guard tested inc_x0 while writing
    # inc_y0 (and vice versa), which crashed with None when only one override
    # was given; each guard now matches the value it writes.
    if inc_y0 is not None:
        md['beam_center_x']= inc_y0
        print( 'The metadata: beam_center_x has been changed to %s.'%inc_y0)
    if inc_x0 is not None:
        md['beam_center_y']= inc_x0
        print( 'The metadata: beam_center_y has been changed to %s.'%inc_x0)
    center = [ int(md['beam_center_x']),int( md['beam_center_y'] ) ]  #beam center [y,x] for python image
    center=[center[1], center[0]]
    return dpix, lambda_, Ldet, exposuretime, timeperframe, center
def combine_images( filenames, outputfile, outsize=(2000, 2400)):
    '''Y.G. Dec 31, 2016
    Combine images together to one image using PIL.Image
    Input:
        filenames: list, the images names to be combined
        outputfile: str, the filename to generate
        outsize: the combined image size
    Output:
        save a combined image file
    '''
    N = len( filenames)
    # layout: ny rows x nx columns, roughly square
    # (np.int was removed in NumPy 1.24; the builtin int is equivalent here)
    ny = int( np.ceil( np.sqrt(N)) )
    nx = int( np.ceil( N / float(ny) ) )
    #print(nx,ny)
    result = Image.new("RGB", outsize, color=(255,255,255,0))
    basewidth = int( outsize[0]/nx )   # max cell width
    hsize = int( outsize[1]/ny )       # max cell height
    for index, file in enumerate(filenames):
        path = os.path.expanduser(file)
        img = Image.open(path)
        bands = img.split()
        ratio = img.size[1]/ img.size[0]  #h/w, used to preserve aspect ratio
        # fit the image inside the cell while keeping its aspect ratio
        if hsize > basewidth * ratio:
            basewidth_ = basewidth
            hsize_ = int( basewidth * ratio )
        else:
            basewidth_ = int( hsize/ratio )
            hsize_ = hsize
        #print( index, file, basewidth, hsize )
        size = (basewidth_,hsize_)
        # Image.LINEAR was removed in Pillow 10; BILINEAR is the same filter
        bands = [b.resize(size, Image.BILINEAR) for b in bands]
        # NOTE(review): merging as 'RGBA' assumes the source image has 4 bands
        # -- confirm input images are RGBA
        img = Image.merge('RGBA', bands)
        x = index % nx * basewidth
        y = index // nx * hsize
        w, h = img.size
        #print('pos {0},{1} size {2},{3}'.format(x, y, w, h))
        result.paste(img, (x, y, x + w, y + h ))
    result.save( outputfile,quality=100, optimize=True )
    print( 'The combined image is saved as: %s'%outputfile)
def get_qval_dict( qr_center, qz_center=None, qval_dict = None, multi_qr_for_one_qz= True,
                   one_qz_multi_qr = True):
    '''Y.G. Dec 27, 2016
    Map the roi label array with qr or (qr,qz) or (q//, q|-) values
    Parameters:
        qr_center: list, a list of qr
        qz_center: list, a list of qz,
        multi_qr_for_one_qz: by default=True,
            if one_qz_multi_qr:
                one qz_center corresponds to all qr_center, i.e. len(qr)*len(qz) qs in total
            else:
                one qr_center corresponds to all qz_center,
        else: qr and qz are paired element-wise
        qval_dict: if not None, the new entries are appended after its max key
    Return:
        qval_dict, a dict, each key (a integer) with value as qr or (qr,qz) or (q//, q|-)
    '''
    if qval_dict is None:
        qval_dict = {}
        offset = 0
    else:
        offset = np.max( list( qval_dict.keys() ) ) + 1
    if qz_center is None:
        # pure qr mapping: one entry per qr value
        for i, qr in enumerate(qr_center):
            qval_dict[offset + i] = np.array([qr])
    elif not multi_qr_for_one_qz:
        # element-wise pairing of qr and qz
        for i, (qr, qz) in enumerate(zip(qr_center, qz_center)):
            qval_dict[offset + i] = np.array([qr, qz])
    elif one_qz_multi_qr:
        # full cross product, qz outer / qr inner
        key = offset
        for qz in qz_center:
            for qr in qr_center:
                qval_dict[key] = np.array([qr, qz])
                key += 1
    else:
        # full cross product, qr outer / qz inner
        key = offset
        for qr in qr_center:
            for qz in qz_center:
                qval_dict[key] = np.array([qr, qz])
                key += 1
    return qval_dict
def update_qval_dict( qval_dict1, qval_dict2 ):
    ''' Y.G. Dec 31, 2016
    Merge two qval dicts into a new one.
    Input:
        qval_dict1, a dict, each key (a integer) with value as qr or (qr,qz) or (q//, q|-)
        qval_dict2, a dict, same structure; its keys are shifted past max(qval_dict1.keys())
    Output:
        qval_dict, a dict with all entries of dict1 plus the shifted entries of dict2
    '''
    offset = np.max( list( qval_dict1.keys() ) ) + 1
    merged = {}
    merged.update(qval_dict1)
    for key, value in qval_dict2.items():
        merged[key + offset] = value
    return merged
def update_roi_mask( roi_mask1, roi_mask2 ):
    ''' Y.G. Dec 31, 2016
    Merge roi_mask2 into roi_mask1, offsetting mask2's labels past mask1's maximum.
    Input:
        roi_mask1, 2d-array, label array, same shape as xpcs frame,
        roi_mask2, 2d-array, label array, same shape as xpcs frame,
    Output:
        roi_mask, 2d-array, copy of roi_mask1 with roi_mask2's nonzero labels
        written on top (shifted by max(roi_mask1))
    '''
    combined = roi_mask1.copy()
    nonzero = np.where( roi_mask2 )
    combined[nonzero] = roi_mask2[nonzero] + np.max( combined )
    return combined
def check_bad_uids(uids, mask, img_choice_N = 10, bad_uids_index = None ):
    '''Y.G. Dec 22, 2016
    Find bad uids by checking the average intensity by a selection of the number img_choice_N of frames for the uid. If the average intensity is zeros, the uid will be considered as bad uid.
    Parameters:
        uids: list, a list of uid
        mask: array, bool type numpy.array
        img_choice_N: random select number of the uid
        bad_uids_index: a list of known bad uid list, default is None
    Return:
        guids: list, good uids
        buids, list, bad uids
    '''
    import random
    buids = []
    guids = list( uids )
    #print( guids )
    if bad_uids_index is None:
        bad_uids_index = []
    for i, uid in enumerate(uids):
        #print( i, uid )
        if i not in bad_uids_index:
            # load a random sample of frames and average them under the mask
            detector = get_detector( db[uid ] )
            imgs = load_data( uid, detector )
            img_samp_index = random.sample( range(len(imgs)), img_choice_N)
            imgsa = apply_mask( imgs, mask )
            avg_img = get_avg_img( imgsa, img_samp_index, plot_ = False, uid =uid)
            # a uid whose sampled average is all-zero is classified as bad
            if avg_img.max() == 0:
                buids.append( uid )
                guids.pop( list( np.where( np.array(guids) == uid)[0] )[0] )
                print( 'The bad uid is: %s'%uid )
        else:
            # uid was pre-declared bad via bad_uids_index: move it without loading data
            guids.pop( list( np.where( np.array(guids) == uid)[0] )[0] )
            buids.append( uid )
            print( 'The bad uid is: %s'%uid )
    print( 'The total and bad uids number are %s and %s, repsectively.'%( len(uids), len(buids) ) )
    return guids, buids
def find_uids(start_time, stop_time ):
    '''Y.G. Dec 22, 2016
    A wrap funciton to find uids by giving start and end time
    Return:
        sids: list, scan id
        uids: list, uid with 8 character length
        fuids: list, uid with full length
    '''
    hdrs = db(start_time= start_time, stop_time = stop_time)
    try:
        # NOTE(review): len(list(hdrs)) may consume hdrs if the databroker
        # returns a one-shot generator -- the try/except presumably guards
        # against that; confirm against the databroker version in use.
        print ('Totally %s uids are found.'%(len(list(hdrs))))
    except:
        pass
    sids=[]
    uids=[]
    fuids=[]
    for hdr in hdrs:
        s= get_sid_filenames( hdr)
        #print (s[1][:8])
        sids.append( s[0] )
        uids.append( s[1][:8] )   # short (8-char) uid
        fuids.append( s[1] )      # full-length uid
    # reverse so results run in the opposite order to what db() returned
    # (presumably chronological -- confirm)
    sids=sids[::-1]
    uids=uids[::-1]
    fuids=fuids[::-1]
    return np.array(sids), np.array(uids), np.array(fuids)
def ployfit( y, x=None, order = 20 ):
    '''
    Fit 1D data with a polynomial of the given order and return the fitted
    values evaluated at x (or at the sample indices 0..len(y)-1 if x is None).
    '''
    xs = np.arange(len(y)) if x is None else x
    coeffs = np.polyfit(xs, y, order)
    return np.polyval(coeffs, xs)
def check_bad_data_points( data, fit=True, polyfit_order = 30, legend_size = 12,
                          plot=True, scale=1.0, good_start=None, good_end=None, path=None,
                          return_ylim=False, uid='uid' ):
    '''
    Find indices of bad points in a 1D series by thresholding the deviation
    from a (fitted or flat) mean.

    data: 1D array
    scale: the scale of deviation
    fit: if True, use a ploynominal function to fit the data to get a mean-inten(array), then use the scale to get low and high threshold, it's good to remove bad frames/pixels on top of not-flatten curve
         else: use the mean (a value) of the data and scale to get low and high threshold, it's good to remove bad frames/pixels on top of flatten curve
    good_start/good_end: points outside [good_start, good_end) are always bad
    path: if not None, save the diagnostic figure there
    uid: label used in the saved figure filename (new parameter; previously an
         undefined name, which raised NameError whenever path was given)
    return_ylim: if True, also return the low/high thresholds and the fit
    Return:
        array of bad indices, plus (ymin, ymax, pfit) or pfit depending on return_ylim
    '''
    if good_start is None:
        good_start=0
    if good_end is None:
        good_end = len( data )
    # everything before good_start / after good_end is bad by definition
    bd1 = [i for i in range(0, good_start)]
    bd3 = [i for i in range(good_end,len( data ) )]
    d_ = data[good_start:good_end]
    if fit:
        pfit = ployfit( d_, order = polyfit_order)
        d = d_ - pfit
    else:
        d = d_
        pfit = np.ones_like(d) * data.mean()
    # threshold band around the (detrended) mean
    ymin = d.mean()-scale *d.std()
    ymax = d.mean()+scale *d.std()
    if plot:
        fig = plt.figure( )
        ax = fig.add_subplot(2,1,1 )
        plot1D( d_, ax = ax, color='k', legend='data',legend_size=legend_size )
        plot1D( pfit,ax=ax, color='b', legend='ploy-fit', title='Find Bad Points',legend_size=legend_size )
        ax2 = fig.add_subplot(2,1,2 )
        plot1D( d, ax = ax2,legend='difference',marker='s', color='b', )
        plot1D(x=[0,len(d_)], y=[ymin,ymin], ax = ax2, ls='--',lw= 3, marker='o', color='r', legend='low_thresh', legend_size=legend_size )
        plot1D(x=[0,len(d_)], y=[ymax,ymax], ax = ax2 , ls='--', lw= 3,marker='o', color='r',legend='high_thresh',title='',legend_size=legend_size )
        if path is not None:
            fp = path + '%s'%( uid ) + '_find_bad_points' + '.png'
            plt.savefig( fp, dpi=fig.dpi)
    # points whose deviation exceeds scale * std are bad (indices shifted back
    # to the full-series frame)
    bd2= list( np.where( np.abs(d -d.mean()) > scale *d.std() )[0] + good_start )
    if return_ylim:
        return np.array( bd1 + bd2 + bd3 ), ymin, ymax,pfit
    else:
        return np.array( bd1 + bd2 + bd3 ), pfit
def get_bad_frame_list( imgsum, fit=True, polyfit_order = 30,legend_size = 12,
                       plot=True, scale=1.0, good_start=None, good_end=None, uid='uid',path=None,
                       return_ylim=False):
    '''
    Identify bad frames of a time series from its per-frame summed intensity.

    imgsum: the sum intensity of a time series
    scale: the scale of deviation
    fit: if True, use a ploynominal function to fit the imgsum, to get a mean-inten(array), then use the scale to get low and high threshold, it's good to remove bad frames/pixels on top of not-flatten curve
         else: use the mean (a value) of imgsum and scale to get low and high threshold, it's good to remove bad frames/pixels on top of flatten curve
    good_start/good_end: frames outside [good_start, good_end) are always bad
    Return:
        array of bad frame indices (plus ymin, ymax if return_ylim is True)
    '''
    if good_start is None:
        good_start=0
    if good_end is None:
        good_end = len( imgsum )
    # frames outside the trusted window are bad unconditionally
    head_bad = [i for i in range(0, good_start)]
    tail_bad = [i for i in range(good_end,len( imgsum ) )]
    segment = imgsum[good_start:good_end]
    if fit:
        pfit = ployfit( segment, order = polyfit_order)
        resid = segment - pfit
    else:
        resid = segment
        pfit = np.ones_like(resid) * resid.mean()
    # threshold band around the (detrended) mean
    ymin = resid.mean()-scale *resid.std()
    ymax = resid.mean()+scale *resid.std()
    if plot:
        fig = plt.figure( )
        ax = fig.add_subplot(2,1,1 )
        plot1D( segment, ax = ax, color='k', legend='data',legend_size=legend_size )
        plot1D( pfit,ax=ax, color='b', legend='ploy-fit', title=uid + '_imgsum',legend_size=legend_size )
        ax2 = fig.add_subplot(2,1,2 )
        plot1D( resid, ax = ax2,legend='difference',marker='s', color='b', )
        plot1D(x=[0,len(segment)], y=[ymin,ymin], ax = ax2, ls='--',lw= 3, marker='o', color='r', legend='low_thresh', legend_size=legend_size )
        plot1D(x=[0,len(segment)], y=[ymax,ymax], ax = ax2 , ls='--', lw= 3,marker='o', color='r',legend='high_thresh',title='imgsum_to_find_bad_frame',legend_size=legend_size )
        if path is not None:
            fp = path + '%s'%( uid ) + '_imgsum_analysis' + '.png'
            plt.savefig( fp, dpi=fig.dpi)
    # frames deviating more than scale * std from the mean are bad
    mid_bad = list( np.where( np.abs(resid -resid.mean()) > scale *resid.std() )[0] + good_start )
    if return_ylim:
        return np.array( head_bad + mid_bad + tail_bad ), ymin, ymax
    else:
        return np.array( head_bad + mid_bad + tail_bad )
def save_dict_csv( mydict, filename, mode='w'):
    '''Write a dict to a csv file, one "key,value" row per item.
    mode: file open mode, 'w' to overwrite (default) or 'a' to append.'''
    import csv
    with open(filename, mode) as fh:
        writer = csv.writer(fh)
        writer.writerows(mydict.items())
def read_dict_csv( filename ):
    '''Read a csv of "key,value" rows back into a dict.
    All keys and values come back as strings.'''
    import csv
    with open(filename, 'r') as fh:
        return dict(csv.reader(fh))
def find_bad_pixels( FD, bad_frame_list, uid='uid'):
    '''For each bad frame within FD's frame range, locate the position of its
    single maximum pixel (frames whose maximum is not unique are skipped) and
    return the collected (x, y) positions as a pandas table.'''
    xs = []
    ys = []
    for frame_idx in bad_frame_list:
        if FD.beg <= frame_idx <= FD.end:
            frame = FD.rdframe(frame_idx)
            peak = np.where( frame == frame.max())
            # only record frames with a unique hottest pixel
            if len(peak[0]) == 1:
                xs.append( peak[0][0] )
                ys.append( peak[1][0] )
    return trans_data_to_pd( [xs, ys], label=[ uid+'_x', uid +'_y' ], dtype='list')
def mask_exclude_badpixel( bp, mask, uid ):
    '''Zero out the bad-pixel positions listed in table bp (its first two
    columns hold the row/column indices) in the given mask; returns the
    modified mask. uid is accepted for interface compatibility but unused.'''
    row_col, col_col = bp.columns[0], bp.columns[1]
    for i in range( len(bp)):
        mask[ int( bp[row_col][i] ), int( bp[col_col][i] )] = 0
    return mask
def print_dict( dicts, keys=None):
    '''
    Print "key--> value" lines for the selected keys of a dict.
    if keys is None: print all the keys; keys that cannot be printed
    (e.g. missing from the dict) are silently skipped.
    '''
    selected = list(dicts.keys()) if keys is None else keys
    for key in selected:
        try:
            print('%s--> %s' % (key, dicts[key]))
        except:
            pass
def get_meta_data( uid,*argv,**kwargs ):
    '''
    Y.G. Dev Dec 8, 2016
    Get metadata from a uid
    Parameters:
        uid: the unique data acquisition id
        kwargs: overwrite the meta data, for example
            get_meta_data( uid = uid, sample = 'test') --> will overwrtie the meta's sample to test
    return:
        meta data of the uid: a dictionay
        with keys:
            detector
            suid: the simple given uid
            uid: full uid
            filename: the full path of the data
            start_time: the data acquisition starting time in a human readable manner
        And all the input metadata
    '''
    import time
    md ={}
    md['detector'] = get_detector( db[uid ] )
    md['suid'] = uid #short uid
    # full path of the first data file for this scan
    md['filename'] = get_sid_filenames(db[uid])[2][0]
    #print( md )
    # pull the first (unfilled) event to reach the descriptor's configuration
    ev, = get_events(db[uid], [md['detector']], fill= False)
    dec = list( ev['descriptor']['configuration'].keys() )[0]
    # copy detector configuration values, stripping the "<device>_" prefix
    # from each key
    for k,v in ev['descriptor']['configuration'][dec]['data'].items():
        md[ k[len(dec)+1:] ]= v
    # copy run-start metadata, except the bulky plan_args entry
    for k,v in ev['descriptor']['run_start'].items():
        if k!= 'plan_args':
            md[k]= v
    md['start_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(md['time']))
    md['stop_time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime( ev['time'] ))
    # image shape stored as (nx, ny, ...) in data_keys; reversed to (rows, cols)
    md['img_shape'] = ev['descriptor']['data_keys'][md['detector']]['shape'][:2][::-1]
    # caller-supplied keyword arguments override anything collected above
    for k,v in kwargs.items():
        md[k] =v
    return md
def get_max_countc(FD, labeled_array ):
    """YG. 2016, Nov 18
    Compute the max intensity of ROIs in the compressed file (FD)

    Parameters
    ----------
    FD: Multifile class
        compressed file
    labeled_array : array
        labeled array; 0 is background.
        Each ROI is represented by a nonzero integer. It is not required that
        the ROI labels are contiguous
    index : int, list, optional
        The ROI's to use. If None, this function will extract averages for all
        ROIs

    Returns
    -------
    max_intensity : a float
    index : list
        The labels for each element of the `mean_intensity` list
    """
    qind, pixelist = roi.extract_label_indices( labeled_array )
    # lookup table over the flat detector: nonzero where the pixel belongs
    # to any ROI
    timg = np.zeros( FD.md['ncols'] * FD.md['nrows'] , dtype=np.int32 )
    timg[pixelist] = np.arange( 1, len(pixelist) + 1 )
    # NOTE(review): this compares labeled_array.shape against
    # (ncols, nrows) -- verify the row/col ordering matches the Multifile
    # metadata convention; the error message prints the same ordering.
    if labeled_array.shape != ( FD.md['ncols'],FD.md['nrows']):
        raise ValueError(
            " `image` shape (%d, %d) in FD is not equal to the labeled_array shape (%d, %d)" %( FD.md['ncols'],FD.md['nrows'], labeled_array.shape[0], labeled_array.shape[1]) )
    max_inten =0
    for i in tqdm(range( FD.beg, FD.end, 1 ), desc= 'Get max intensity of ROIs in all frames' ):
        try:
            # sparse frame: p = flat pixel positions, v = their values
            (p,v) = FD.rdrawframe(i)
            # keep only pixels inside some ROI
            w = np.where( timg[p] )[0]
            max_inten = max( max_inten, np.max(v[w]) )
        except:
            # frames that fail to read (or have no ROI pixels) are skipped
            pass
    return max_inten
def create_polygon_mask( image, xcorners, ycorners ):
    '''
    Give image and x/y coners to create a polygon mask
    image: 2d array
    xcorners, list, points of x coners
    ycorners, list, points of y coners
    Return:
    the polygon mask: 2d array, the polygon pixels with values 1 and others with 0
    '''
    from skimage.draw import polygon
    mask = np.zeros(image.shape, dtype=bool)
    rr, cc = polygon(ycorners, xcorners)
    mask[rr, cc] = 1
    return mask
def create_rectangle_mask( image, xcorners, ycorners ):
    '''
    Give image and x/y coners to create a rectangle mask
    (implemented as a general polygon fill, identical to create_polygon_mask)
    image: 2d array
    xcorners, list, points of x coners
    ycorners, list, points of y coners
    Return:
    the polygon mask: 2d array, the polygon pixels with values 1 and others with 0
    '''
    from skimage.draw import polygon
    mask = np.zeros(image.shape, dtype=bool)
    rr, cc = polygon(ycorners, xcorners)
    mask[rr, cc] = 1
    return mask
def create_multi_rotated_rectangle_mask( image, center=None, length=100, width=50, angles=[0] ):
    ''' Developed at July 10, 2017 by Y.G.@CHX, NSLS2
    Create multi rectangle-shaped mask by rotating a rectangle with a list of angles
    The original rectangle is defined by four corners, i.e.,
        [ (center[1] - width//2, center[0]),
          (center[1] + width//2, center[0]),
          (center[1] + width//2, center[0] + length),
          (center[1] - width//2, center[0] + length)
        ]

    Parameters:
        image: 2D numpy array, to give mask shape
        center: integer list (cx, cy); if None, the center of the image is used
                (previously passing None raised TypeError despite being documented)
        length: integer, the length of the non-ratoted rectangle
        width: integer, the width of the non-ratoted rectangle
        angles: integer list, a list of rotated angles

    Return:
        mask: 2D bool-type numpy array (True outside all rotated rectangles)
    '''
    from skimage.draw import polygon
    from skimage.transform import rotate
    imy, imx = image.shape
    if center is None:
        # documented default: center of the image (x = column, y = row)
        center = (imx // 2, imy // 2)
    cx,cy = center
    mask = np.zeros( image.shape, dtype = bool)
    wy = length
    wx = width
    # base (un-rotated) rectangle, clipped to the image bounds
    x = np.array( [ max(0, cx - wx//2), min(imx, cx+wx//2), min(imx, cx+wx//2), max(0,cx-wx//2 ) ])
    y = np.array( [ cy, cy, min( imy, cy + wy) , min(imy, cy + wy) ])
    rr, cc = polygon( y,x)
    mask[rr,cc] =1
    # union of the rectangle rotated by each requested angle
    mask_rot= np.zeros( image.shape, dtype = bool)
    for angle in angles:
        mask_rot += np.array( rotate( mask, angle, center= center ), dtype=bool) #, preserve_range=True)
    # invert: masked-out region becomes False
    return ~mask_rot
def create_wedge( image, center, radius, wcors, acute_angle=True) :
    '''YG develop at June 18, 2017, @CHX
    Create a wedge by a combination of circle and a triangle defined by center and wcors
    wcors: [ [x1,x2,x3...], [y1,y2,y3..] ]
    acute_angle: if True, return the intersection of circle and polygon;
                 otherwise return the circle minus the polygon.
    '''
    from skimage.draw import line_aa, line, polygon, circle
    imy, imx = image.shape
    cy,cx = center
    # polygon vertices: the beam center plus the supplied wedge corners
    x = [cx] + list(wcors[0])
    y = [cy] + list(wcors[1])
    # circular part of the wedge
    maskc = np.zeros_like( image , dtype = bool)
    rr, cc = circle( cy, cx, radius, shape = image.shape)
    maskc[rr,cc] =1
    # triangular/polygonal part of the wedge
    maskp = np.zeros_like( image , dtype = bool)
    x = np.array( x )
    y = np.array( y )
    # (removed leftover debug print of the polygon vertices)
    rr, cc = polygon( y,x)
    maskp[rr,cc] =1
    if acute_angle:
        return maskc*maskp
    else:
        return maskc*~maskp
def create_cross_mask( image, center, wy_left=4, wy_right=4, wx_up=4, wx_down=4,
                      center_circle = True, center_radius=10
                      ):
    '''
    Give image and the beam center to create a cross-shaped mask
    wy_left: the width of left h-line
    wy_right: the width of rigth h-line
    wx_up: the width of up v-line
    wx_down: the width of down v-line
    center_circle: if True, also mask a circle with center and center_radius
                   (previously this flag was ignored and the circle was drawn
                   whenever center_radius != 0)
    Return:
    the cross mask (False on the cross/circle, True elsewhere)
    '''
    from skimage.draw import line_aa, line, polygon, circle
    imy, imx = image.shape
    cx,cy = center
    bst_mask = np.zeros_like( image , dtype = bool)
    ###
    #for right part
    wy = wy_right
    x = np.array( [ cx, imx, imx, cx ])
    y = np.array( [ cy-wy, cy-wy, cy + wy, cy + wy])
    rr, cc = polygon( y,x)
    bst_mask[rr,cc] =1
    ###
    #for left part
    wy = wy_left
    x = np.array( [0, cx, cx,0 ])
    y = np.array( [ cy-wy, cy-wy, cy + wy, cy + wy])
    rr, cc = polygon( y,x)
    bst_mask[rr,cc] =1
    ###
    #for up part
    wx = wx_up
    x = np.array( [ cx-wx, cx + wx, cx+wx, cx-wx ])
    y = np.array( [ cy, cy, imy, imy])
    rr, cc = polygon( y,x)
    bst_mask[rr,cc] =1
    ###
    #for low part
    wx = wx_down
    x = np.array( [ cx-wx, cx + wx, cx+wx, cx-wx ])
    y = np.array( [ 0,0, cy, cy])
    rr, cc = polygon( y,x)
    bst_mask[rr,cc] =1
    # honor the center_circle flag, as documented
    if center_circle and center_radius!=0:
        rr, cc = circle( cy, cx, center_radius, shape = bst_mask.shape)
        bst_mask[rr,cc] =1
    full_mask= ~bst_mask
    return full_mask
def generate_edge( centers, width):
    '''YG. 10/14/2016
    Given ROI centers and a (scalar or per-center) width, return an
    Nx2 float array of [lower, upper] edges: [center - width, center + width].'''
    edges = np.empty( (len(centers), 2) )
    edges[:, 0] = centers - width
    edges[:, 1] = centers + width
    return edges
def export_scan_scalar( uid, x='dcm_b', y= ['xray_eye1_stats1_total'],
                       path='/XF11ID/analysis/2016_3/commissioning/Results/' ):
    '''YG. 10/17/2016
    export uid data to a txt file
    uid: unique scan id
    x: the x-col
    y: the y-cols
    path: save path
    Example:
        data = export_scan_scalar( uid, x='dcm_b', y= ['xray_eye1_stats1_total'],
                       path='/XF11ID/analysis/2016_3/commissioning/Results/exported/' )
    A plot for the data:
        d.plot(x='dcm_b', y = 'xray_eye1_stats1_total', marker='o', ls='-', color='r')
    Returns the exported data as a pandas DataFrame (also written to
    <path>/uid=<uid>.csv).
    '''
    # local imports: databroker accessors and the project's DataFrame helper
    from databroker import DataBroker as db, get_images, get_table, get_events, get_fields
    from chxanalys.chx_generic_functions import trans_data_to_pd
    hdr = db[uid]
    print( get_fields( hdr ) )
    data = get_table( db[uid] )
    xp = data[x]
    # assemble [x, y1, y2, ...] columns into one 2D array
    datap = np.zeros( [len(xp), len(y)+1])
    datap[:,0] = xp
    for i, yi in enumerate(y):
        datap[:,i+1] = data[yi]
    datap = trans_data_to_pd( datap, label=[x] + [yi for yi in y])
    datap.to_csv( path + 'uid=%s.csv'%uid)
    return datap
#####
#load data by databroker
def get_flatfield( uid, reverse=False ):
    '''Read the Eiger flatfield array out of the master HDF5 file of a scan.

    Parameters:
        uid: unique scan id (used to locate the master file via the databroker)
        reverse: if True, flip the flatfield upside down to match the
                 reversed-image convention used elsewhere in this module
    Returns:
        2D numpy array with the detector flatfield
    '''
    import h5py
    detector = get_detector( db[uid ] )
    sud = get_sid_filenames(db[uid])
    # the data filename prefix plus '_master.h5' is the Eiger master file
    master_path = '%s_master.h5'%(sud[2][0])
    print( master_path)
    f= h5py.File(master_path, 'r')
    k= 'entry/instrument/detector/detectorSpecific/' #data_collection_date'
    d= np.array( f[ k]['flatfield'] )
    f.close()
    if reverse:
        d = reverse_updown( d )
    return d
def get_detector( header ):
    '''Return the name of the first data key flagged as 'external'
    (i.e. the area-detector field) in a bluesky header's first descriptor.'''
    data_keys = header.descriptors[0]['data_keys']
    external_keys = [name for name, info in data_keys.items() if 'external' in info]
    return external_keys[0]
def get_sid_filenames(header, fill=True):
    """get a bluesky scan_id, unique_id, filename by giveing uid and detector

    Parameters
    ----------
    header: a header of a bluesky scan, e.g. db[-1]
    fill: passed to get_events; whether to fill external data references

    Returns
    -------
    scan_id: integer
    unique_id: string, a full string of a uid
    filename: sring

    Usuage:
    sid,uid, filenames   = get_sid_filenames(db[uid])
    """
    # the detector field is the one whose data_keys entry has 'external'
    keys = [k for k, v in header.descriptors[0]['data_keys'].items() if 'external' in v]
    # RawHandler yields the raw filestore record instead of the image data,
    # so the file path/seq_id can be read out of each event
    events = get_events( header, keys, handler_overrides={key: RawHandler for key in keys}, fill=fill)
    key, = keys
    try:
        # raw record layout: (filepath, ..., {'seq_id': ...})
        filenames = [ str( ev['data'][key][0]) + '_'+ str(ev['data'][key][2]['seq_id']) for ev in events]
    except:
        # any failure to decode the raw records falls back to a placeholder
        filenames='unknown'
    sid = header['start']['scan_id']
    uid= header['start']['uid']
    return sid,uid, filenames
def load_data( uid , detector = 'eiger4m_single_image', fill=True, reverse=False):
    """load bluesky scan data by giveing uid and detector

    Parameters
    ----------
    uid: unique ID of a bluesky scan
    detector: the used area detector
    fill: True to fill data
    reverse: if True, reverse the image upside down to match the "real" image geometry (should always be True in the future)

    Returns
    -------
    image data: a pims frames series
    if not success read the uid, will return image data as 0

    Usuage:
    imgs = load_data( uid, detector  )
    md = imgs.md
    """
    hdr = db[uid]
    # flag doubles as "attempt counter" and "failure marker":
    # 0 -> get_events succeeded; nonzero after the loop -> it failed once
    # and the get_images fallback below is used (the loop allows a single try)
    flag =1
    while flag<2 and flag !=0:
        try:
            ev, = get_events(hdr, [detector], fill=fill)
            flag = 0
        except:
            flag += 1
            print ('Trying again ...!')
    if flag:
        # fallback path, e.g. for dscan data where get_events fails
        try:
            imgs = get_images( hdr, detector)
            #print( 'here')
            print('This should be dscan data.')
            print('You also can use get_images( hdr, detector) function to retrive data.')
            if len(imgs[0])>=1:
                # unwrap the per-point image stacks lazily, keeping metadata
                md = imgs[0].md
                imgs = pims.pipeline(lambda img: img[0])(imgs)
                imgs.md = md
        except:
            print ("Can't Load Data!")
            uid = '00000'  #in case of failling load data
            imgs = 0
    else:
        imgs = ev['data'][detector]
    #print (imgs)
    if reverse:
        # flip every frame upside down (lazily), preserving the metadata
        md=imgs.md
        imgs = reverse_updown( imgs )
        imgs.md = md
    return imgs
def mask_badpixels( mask, detector ):
    '''
    Mask known bad pixels of the given detector in the provided mask
    (masked pixels are set to 0; the mask is modified in place and returned).
    Only the Eiger4M currently has a hard-coded bad-pixel list; the
    Eiger1M/500K entries are placeholders and all other detectors are
    returned unchanged.
    '''
    if detector == 'eiger4m_single_image' or detector == 'image':
        # inter-module gap rows
        for row_lo, row_hi in ((513, 552), (1064, 1103), (1615, 1654)):
            mask[row_lo:row_hi, :] = 0
        # inter-module gap columns
        mask[:, 1029:1041] = 0
        # detector border columns and rows
        mask[:, 0] = 0
        mask[0:, 2069] = 0
        mask[0] = 0
        mask[2166] = 0
    return mask
def load_data2( uid , detector = 'eiger4m_single_image' ):
    """load bluesky scan data by giveing uid and detector

    Parameters
    ----------
    uid: unique ID of a bluesky scan
    detector: the used area detector

    Returns
    -------
    image data: a pims frames series
    if not success read the uid, will return image data as 0

    Usuage:
    imgs = load_data( uid, detector  )
    md = imgs.md
    """
    hdr = db[uid]
    # flag doubles as attempt counter and failure marker: the loop retries
    # get_events up to 3 times; flag==0 means success, nonzero means all
    # attempts failed
    flag =1
    while flag<4 and flag !=0:
        try:
            ev, = get_events(hdr, [detector])
            flag =0
        except:
            flag += 1
            print ('Trying again ...!')
    if flag:
        print ("Can't Load Data!")
        uid = '00000'  #in case of failling load data
        imgs = 0
    else:
        imgs = ev['data'][detector]
    #print (imgs)
    return imgs
def psave_obj(obj, filename ):
    '''save an object with filename by pickle.dump method
    This function automatically add '.pkl' as filename extension
    Input:
        obj: the object to be saved
        filename: filename (with full path) to be saved
    Return:
        None
    '''
    with open('%s.pkl' % filename, 'wb') as fh:
        pickle.dump(obj, fh, pickle.HIGHEST_PROTOCOL)
def pload_obj(filename ):
    '''load a pickled filename
    This function automatically add '.pkl' to filename extension
    Input:
        filename: filename (with full path) to be saved
    Return:
        load the object by pickle.load method
    '''
    with open('%s.pkl' % filename, 'rb') as fh:
        return pickle.load(fh)
def load_mask( path, mask_name, plot_ = False, reverse=False, *argv,**kwargs):
    """load a mask file
    the mask is a numpy binary file (.npy)

    Parameters
    ----------
    path: the path of the mask file
    mask_name: the name of the mask file
    plot_: a boolen type; if True, display the mask via show_img
    reverse: if True, reverse the image upside down to match the "real" image geometry (should always be True in the future)

    Returns
    -------
    mask: int32 numpy array

    Usuage:
    mask = load_mask( path, mask_name, plot_ =  True )
    """
    mask = np.load(path + mask_name).astype(np.int32)
    if reverse:
        mask = mask[::-1, :]
    if plot_:
        show_img(mask, *argv, **kwargs)
    return mask
def create_hot_pixel_mask(img, threshold, center=None, center_radius=300 ):
    '''create a hot pixel mask by giving threshold
       Input:
           img: the image to create hot pixel mask
           threshold: the threshold above which will be considered as hot pixels
           center: optional, default=None
                   else, as a two-element list (beam center), i.e., [center_x, center_y]
                   if center is not None, the hot pixel will not include a circle region
                   which is defined by center and center_radius ( in unit of pixel)
       Output:
           a bool types numpy array (mask), 1 is good and 0 is excluded
    '''
    # region to consider for hot-pixel detection (everything, minus an
    # optional protected circle around the beam center)
    consider = np.ones_like(img, dtype=bool)
    if center is not None:
        from skimage.draw import circle
        cy, cx = center
        rr, cc = circle(cy, cx, center_radius)
        consider[rr, cc] = 0
    hmask = np.ones_like(img)
    hmask[np.where(img * consider > threshold)] = 0
    return hmask
def apply_mask( imgs, mask):
    '''apply mask to imgs to produce a generator (frames are masked lazily)

    Usuages:
    imgsa = apply_mask( imgs, mask )
    good_series = apply_mask( imgs[good_start:], mask )
    '''
    def _masked(frame):
        # multiply each frame by the integer-cast mask on access
        return np.int_(mask) * frame
    return pims.pipeline(_masked)(imgs)
def reverse_updown( imgs):
    '''reverse imgs upside down to produce a generator (frames flipped lazily)

    Usuages:
    imgsr = reverse_updown( imgs)
    '''
    def _flipped(frame):
        # flip rows; columns untouched
        return frame[::-1, :]
    return pims.pipeline(_flipped)(imgs)
def RemoveHot( img,threshold= 1E7, plot_=True ):
    '''Build a mask that zeros out hot pixels (values >= threshold) in img.

    Returns an array of ones shaped like img with 0 at hot-pixel positions;
    with plot_=True the mask is displayed via show_img.
    '''
    arr = np.array(img)
    hot_mask = np.ones_like(arr)
    hot_positions = np.where(arr >= threshold)
    if len(hot_positions[0]) != 0:
        hot_mask[hot_positions] = 0
    if plot_:
        show_img(hot_mask)
    return hot_mask
############
###plot data
def show_img( image, ax=None,label_array=None, alpha=0.5, interpolation='nearest',
             xlim=None, ylim=None, save=False,image_name=None,path=None,
             aspect=None, logs=False,vmin=None,vmax=None,return_fig=False,cmap='viridis',
             show_time= False, file_name =None, ylabel=None, xlabel=None, extent=None,
             show_colorbar=True, tight=True, show_ticks=True, save_format = 'png', dpi= None,
             center=None,origin='lower', lab_fontsize = 16, tick_size = 12, colorbar_fontsize = 8,
             title_size =12,
             *argv,**kwargs ):
    """YG. Sep26, 2017 Add label_array/alpha option to show a mask on top of image
    a simple function to show image by using matplotlib.plt imshow
    pass *argv,**kwargs to imshow

    Parameters
    ----------
    image : array
        Image to show
    ax : None or [fig, axes]
        If None a new figure/axes pair is created; otherwise the given pair is used.
    label_array : optional labeled array overlaid semi-transparently (alpha) on the image
    logs : if True, use a logarithmic color scale (LogNorm(vmin, vmax))
    save : if True, save the figure as path + image_name (or path + file_name
        plus a timestamp when show_time is True) with the given save_format/dpi
    center : optional (row, col) position marked with a blue circle
    Returns
    -------
    None, or the AxesImage when return_fig is True
    """
    if ax is None:
        if RUN_GUI:
            # GUI/agg mode: build a bare Figure without touching pyplot state
            fig = Figure()
            ax = fig.add_subplot(111)
        else:
            fig, ax = plt.subplots()
    else:
        # caller supplied a [fig, axes] pair
        fig, ax = ax
    if center is not None:
        # mark the given center; note center is indexed (row, col) -> plotted (x=col? )
        # NOTE(review): plot1D(center[1], center[0]) plots y=center[1] vs x=center[0] — confirm convention
        plot1D(center[1], center[0], ax=ax, c='b', m='o', legend='')
    if not logs:
        im = imshow(ax, image, origin=origin, cmap=cmap, interpolation=interpolation, vmin=vmin, vmax=vmax,
                    extent=extent)  # vmin=0,vmax=1,
    else:
        # logarithmic color scale
        im = imshow(ax, image, origin=origin, cmap=cmap,
                    interpolation=interpolation, norm=LogNorm(vmin, vmax), extent=extent)
    if label_array is not None:
        # overlay the labeled regions (ROIs) semi-transparently
        im2 = show_label_array(ax, label_array, alpha=alpha, cmap=cmap, interpolation=interpolation)
    ax.set_title(image_name, fontsize=title_size)
    if xlim is not None:
        ax.set_xlim(xlim)
    if ylim is not None:
        ax.set_ylim(ylim)
    if not show_ticks:
        # hide both axes' tick marks entirely
        ax.set_yticks([])
        ax.set_xticks([])
    else:
        ax.tick_params(axis='both', which='major', labelsize=tick_size)
        ax.tick_params(axis='both', which='minor', labelsize=tick_size)
        # mpl.rcParams['xtick.labelsize'] = tick_size
        # mpl.rcParams['ytick.labelsize'] = tick_size
        # print(tick_size)
    if ylabel is not None:
        # ax.set_ylabel(ylabel)#, fontsize = 9)
        ax.set_ylabel(ylabel, fontsize=lab_fontsize)
    if xlabel is not None:
        ax.set_xlabel(xlabel, fontsize=lab_fontsize)
    if aspect is not None:
        # aspect = image.shape[1]/float( image.shape[0] )
        ax.set_aspect(aspect)
    else:
        ax.set_aspect(aspect='auto')
    if show_colorbar:
        cbar = fig.colorbar(im, extend='neither', spacing='proportional',
                            orientation='vertical')
        cbar.ax.tick_params(labelsize=colorbar_fontsize)
    fig.set_tight_layout(tight)
    if save:
        if show_time:
            # timestamped file name built from file_name
            dt = datetime.now()
            CurTime = '_%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
            fp = path + '%s' % (file_name) + CurTime + '.' + save_format
        else:
            fp = path + '%s' % (image_name) + '.' + save_format
        if dpi is None:
            dpi = fig.dpi
        plt.savefig(fp, dpi=dpi)
    # fig.set_tight_layout(tight)
    if return_fig:
        return im  # fig
def plot1D( y,x=None, yerr=None, ax=None,return_fig=False, ls='-', figsize=None,legend=None,
           legend_size=None, lw=None, markersize=None, tick_size=8, *argv,**kwargs):
    """a simple function to plot two-column data by using matplotlib.plot
    pass *argv,**kwargs to plot

    Parameters
    ----------
    y: column-y
    x: column-x, by default x=None, the plot will use index of y as x-axis
    yerr: optional error bars; if given, ax.errorbar is used instead of ax.plot
    ax: an existing Axes to draw into; a new figure is created when None
    Recognized **kwargs (all optional): logx/logy/logxy (log scales),
    marker (or shorthand 'm'), color (or shorthand 'c'), xlim, ylim,
    xlabel, ylabel, title, save (with 'path').
    Returns
    -------
    None, or the figure when return_fig is True
    """
    if ax is None:
        if RUN_GUI:
            # GUI/agg mode: bare Figure without pyplot state
            fig = Figure()
            ax = fig.add_subplot(111)
        else:
            if figsize is not None:
                fig, ax = plt.subplots(figsize=figsize)
            else:
                fig, ax = plt.subplots()
    if legend is None:
        legend = ' '
    # optional log-scale flags, defaulting to False when absent
    try:
        logx = kwargs['logx']
    except:
        logx = False
    try:
        logy = kwargs['logy']
    except:
        logy = False
    try:
        logxy = kwargs['logxy']
    except:
        logxy = False
    if logx == True and logy == True:
        logxy = True
    # marker may be passed as 'marker' or shorthand 'm'; otherwise take the
    # next one from the module-level markers_ cycle
    try:
        marker = kwargs['marker']
    except:
        try:
            marker = kwargs['m']
        except:
            marker = next(markers_)
    # color may be passed as 'color' or shorthand 'c'; otherwise cycle colors_
    try:
        color = kwargs['color']
    except:
        try:
            color = kwargs['c']
        except:
            color = next(colors_)
    if x is None:
        x = range(len(y))
    if yerr is None:
        ax.plot(x, y, marker=marker, color=color, ls=ls, label=legend, lw=lw,
                markersize=markersize, )  # ,*argv,**kwargs)
    else:
        ax.errorbar(x, y, yerr, marker=marker, color=color, ls=ls, label=legend,
                    lw=lw, markersize=markersize, )  # ,*argv,**kwargs)
    if logx:
        ax.set_xscale('log')
    if logy:
        ax.set_yscale('log')
    if logxy:
        ax.set_xscale('log')
        ax.set_yscale('log')
    ax.tick_params(axis='both', which='major', labelsize=tick_size)
    ax.tick_params(axis='both', which='minor', labelsize=tick_size)
    if 'xlim' in kwargs.keys():
        ax.set_xlim(kwargs['xlim'])
    if 'ylim' in kwargs.keys():
        ax.set_ylim(kwargs['ylim'])
    if 'xlabel' in kwargs.keys():
        ax.set_xlabel(kwargs['xlabel'])
    if 'ylabel' in kwargs.keys():
        ax.set_ylabel(kwargs['ylabel'])
    if 'title' in kwargs.keys():
        title = kwargs['title']
    else:
        title = 'plot'
    ax.set_title(title)
    # ax.set_xlabel("$Log(q)$"r'($\AA^{-1}$)')
    if (legend != '') and (legend != None):
        ax.legend(loc='best', fontsize=legend_size)
    if 'save' in kwargs.keys():
        if kwargs['save']:
            # dt =datetime.now()
            # CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            # fp = kwargs['path'] + '%s'%( title ) + CurTime + '.png'
            fp = kwargs['path'] + '%s' % (title) + '.png'
            plt.savefig(fp, dpi=fig.dpi)
    if return_fig:
        return fig
###
def check_shutter_open( data_series, min_inten=0, time_edge = [0,10], plot_ = False, *argv,**kwargs):
    '''Check the first frame with shutter open

    Parameters
    ----------
    data_series: a image series
    min_inten: the total intensity lower than min_inten is defined as shtter close
    time_edge: the searching frame number range
    plot_: if True, plot the per-frame total intensity ('uid' may be passed
        in kwargs for the plot title)

    return:
        shutter_open_frame: a integer, the first frame number with open shutter

    Usuage:
    good_start = check_shutter_open( imgsa,  min_inten=5, time_edge = [0,20], plot_ = False )
    '''
    imgsum = np.array([np.sum(img) for img in data_series[time_edge[0]:time_edge[1]:1]])
    if plot_:
        # BUG FIX: the title previously referenced an undefined global `uid`
        # (NameError when plotting outside a notebook that defined it); read
        # it from kwargs instead, consistent with get_each_frame_intensity.
        uid = 'uid'
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        fig, ax = plt.subplots()
        ax.plot(imgsum, 'bo')
        ax.set_title('uid=%s--imgsum' % uid)
        ax.set_xlabel('Frame')
        ax.set_ylabel('Total_Intensity')
        # plt.show()
    # first sampled frame whose total intensity exceeds min_inten
    shutter_open_frame = np.where(np.array(imgsum) > min_inten)[0][0]
    print('The first frame with open shutter is : %s' % shutter_open_frame)
    return shutter_open_frame
def get_each_frame_intensity( data_series, sampling = 50,
                             bad_pixel_threshold=1e10,
                             plot_ = False, save= False, *argv,**kwargs):
    '''Get the total intensity of each frame by sampling every N frames
    Also get bad_frame_list by check whether above bad_pixel_threshold

    Parameters
    ----------
    data_series: a sliceable image series
    sampling: only every `sampling`-th frame is summed (to save time)
    bad_pixel_threshold: sampled frames whose total intensity exceeds this
        value are reported in bad_frame_list
    plot_/save: optionally plot (and save) the intensity-vs-frame curve;
        kwargs may supply 'uid' (plot label) and 'path' (save directory)

    Returns
    -------
    imgsum: array of total intensities of the sampled frames
    bad_frame_list: indices (into the sampled array) above the threshold

    Usuage:
    imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = 1000,
                             bad_pixel_threshold=1e10,  plot_ = True)
    '''
    # print ( argv, kwargs )
    # tqdm shows a progress bar while the sampled frames are summed
    imgsum = np.array([np.sum(img) for img in tqdm(data_series[::sampling], leave=True)])
    if plot_:
        uid = 'uid'
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        fig, ax = plt.subplots()
        ax.plot(imgsum, 'bo')
        ax.set_title('uid= %s--imgsum' % uid)
        ax.set_xlabel('Frame_bin_%s' % sampling)
        ax.set_ylabel('Total_Intensity')
        if save:
            # dt =datetime.now()
            # CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            path = kwargs['path']
            if 'uid' in kwargs:
                uid = kwargs['uid']
            else:
                uid = 'uid'
            # fp = path + "Uid= %s--Waterfall-"%uid + CurTime + '.png'
            fp = path + "uid=%s--imgsum-" % uid + '.png'
            fig.savefig(fp, dpi=fig.dpi)
        # plt.show()
    # report sampled-frame indices whose total intensity exceeds the threshold
    bad_frame_list = np.where(np.array(imgsum) > bad_pixel_threshold)[0]
    if len(bad_frame_list):
        print('Bad frame list are: %s' % bad_frame_list)
    else:
        print('No bad frames are involved.')
    return imgsum, bad_frame_list
def create_time_slice( N, slice_num, slice_width, edges=None ):
    '''Create ROI time regions: a list of [start, stop] frame windows.

    If `edges` is given it is returned (as an array) unchanged.  Otherwise
    `slice_num` windows of `slice_width` frames are placed over [0, N]:
    one at the start, one at the end, and the rest centered on the interior
    segment midpoints.
    '''
    if edges is not None:
        return np.array(edges)
    if slice_num == 1:
        return np.array([[0, N]])
    tstep = N // slice_num
    boundaries = np.arange(slice_num + 1) * tstep
    # interior segment centers (drop first/last, covered by the edge windows)
    centers = np.int_((boundaries[:-1] + boundaries[1:]) / 2)[1:-1]
    if slice_width % 2:
        half = slice_width // 2 + 1
        middle = [[c - half + 1, c + half] for c in centers]
    else:
        half = slice_width // 2
        middle = [[c - half, c + half] for c in centers]
    windows = [[0, slice_width]] + middle + [[N - slice_width, N]]
    return np.array(windows)
def show_label_array(ax, label_array, cmap=None, aspect=None,interpolation='nearest',**kwargs):
    """
    YG. Sep 26, 2017
    Modified show_label_array(ax, label_array, cmap=None, **kwargs)
    from https://github.com/Nikea/xray-vision/blob/master/xray_vision/mpl_plotting/roi.py
    Display a labeled array nicely
    Additional kwargs are passed through to `ax.imshow`.
    If `vmin` is in kwargs, it is clipped to minimum of 0.5.

    Parameters
    ----------
    ax : Axes
        The `Axes` object to add the artist too
    label_array: ndarray
        Expected to be an unsigned integer array.  0 is background,
        positive integers label region of interest
    cmap : str or colormap, optional
        Color map to use, defaults to 'Paired'

    Returns
    -------
    img : AxesImage
        The artist added to the axes
    """
    if cmap is None:
        cmap = 'viridis'
    # print(cmap)
    # work on a copy so the globally-registered colormap is not mutated;
    # values below vmin (the 0 background) render fully transparent
    _cmap = copy.copy((mcm.get_cmap(cmap)))
    _cmap.set_under('w', 0)
    vmin = max(.5, kwargs.pop('vmin', .5))
    # BUG FIX: the bare `cmap` name was previously passed to imshow, so the
    # set_under customisation on _cmap was silently ignored; pass _cmap.
    im = ax.imshow(label_array, cmap=_cmap,
                   interpolation=interpolation,
                   vmin=vmin,
                   **kwargs)
    if aspect is None:
        ax.set_aspect(aspect='auto')
        # ax.set_aspect('equal')
    return im
def show_label_array_on_image(ax, image, label_array, cmap=None,norm=None, log_img=True,alpha=0.3,  vmin=0.1, vmax=5,
                              imshow_cmap='gray', **kwargs):  # norm=LogNorm(),
    """
    This will plot the required ROI's(labeled array) on the image

    Additional kwargs are passed through to `ax.imshow`.
    If `vmin` is in kwargs, it is clipped to minimum of 0.5.

    Parameters
    ----------
    ax : Axes
        The `Axes` object to add the artist too
    image : array
        The image array
    label_array : array
        Expected to be an unsigned integer array.  0 is background,
        positive integers label region of interest
    cmap : str or colormap, optional
        Color map to use for plotting the label_array, defaults to 'None'
    imshow_cmap : str or colormap, optional
        Color map to use for plotting the image, defaults to 'gray'
    norm : str, optional
        Normalize scale data, defaults to 'Lognorm()'
    log_img : bool
        If True the underlying image uses a log color scale (LogNorm)

    Returns
    -------
    im : AxesImage
        The artist added to the axes
    im_label : AxesImage
        The artist added to the axes
    """
    ax.set_aspect('equal')
    # print (vmin, vmax )
    if log_img:
        # logarithmic color scale for the underlying image
        im = ax.imshow(image, cmap=imshow_cmap, interpolation='none', norm=LogNorm(vmin, vmax), **kwargs)  # norm=norm,
    else:
        im = ax.imshow(image, cmap=imshow_cmap, interpolation='none', vmin=vmin, vmax=vmax, **kwargs)  # norm=norm,
    # overlay the ROI labels semi-transparently (xray_vision mpl_plot helper)
    im_label = mpl_plot.show_label_array(ax, label_array, cmap=cmap, vmin=vmin, vmax=vmax, alpha=alpha,
                                         **kwargs)  # norm=norm,
    return im, im_label
def show_ROI_on_image( image, ROI, center=None, rwidth=400,alpha=0.3,  label_on = True,
                      save=False, return_fig = False, rect_reqion=None, log_img = True, vmin=0.01, vmax=5,
                      show_ang_cor = False,
                      uid='uid', path='',  aspect = 1, show_colorbar=True, show_roi_edge=False, *argv,**kwargs):
    '''show ROI on an image
    image: the data frame
    ROI: the interested region
    center: the plot center
    rwidth: the plot range around the center
    label_on: if True, annotate each ROI with its integer label at its mean position
    rect_reqion: [x1, x2, y1, y2] zoom window (used instead of center/rwidth)
    show_roi_edge: if True, draw only the ROI edges on top of the image
    show_ang_cor: if True, annotate the angular convention at the plot edges
    Returns (fig, axes, im) when return_fig is True.
    '''
    if RUN_GUI:
        fig = Figure(figsize=(8, 8))
        axes = fig.add_subplot(111)
    else:
        fig, axes = plt.subplots()  # plt.subplots(figsize=(8,8))
    # print( vmin, vmax)
    # norm=LogNorm(vmin, vmax)
    axes.set_title("%s_ROI_on_Image" % uid)
    if log_img:
        # guard against LogNorm with a zero lower bound
        if vmin == 0:
            vmin += 1e-10
        vmax = max(1, vmax)
    if not show_roi_edge:
        # paint the filled ROI labels over the image
        im, im_label = show_label_array_on_image(axes, image, ROI, imshow_cmap='viridis',
                                                 cmap='Paired', alpha=alpha, log_img=log_img,
                                                 vmin=vmin, vmax=vmax, origin="lower")
    else:
        # draw only the ROI edges (get_image_edge / get_image_with_roi /
        # cmap_albula are module-level helpers defined elsewhere in this file)
        edg = get_image_edge(ROI)
        image_ = get_image_with_roi(image, ROI, scale_factor=2)
        # fig, axes = plt.subplots( )
        show_img(image_, ax=[fig, axes], vmin=vmin, vmax=vmax,
                 logs=log_img, image_name="%s_ROI_on_Image" % uid,
                 cmap=cmap_albula)
    if rect_reqion is None:
        if center is not None:
            # zoom to a window of +/- rwidth pixels around the center
            x1, x2 = [center[1] - rwidth, center[1] + rwidth]
            y1, y2 = [center[0] - rwidth, center[0] + rwidth]
            axes.set_xlim([x1, x2])
            axes.set_ylim([y1, y2])
    else:
        x1, x2, y1, y2 = rect_reqion
        axes.set_xlim([x1, x2])
        axes.set_ylim([y1, y2])
    if label_on:
        # annotate each labeled region with its number at its mean position
        num_qzr = len(np.unique(ROI)) - 1
        for i in range(1, num_qzr + 1):
            ind = np.where(ROI == i)[1]
            indz = np.where(ROI == i)[0]
            c = '%i' % i
            y_val = int(indz.mean())
            x_val = int(ind.mean())
            # print (xval, y)
            axes.text(x_val, y_val, c, color='b', va='center', ha='center')
    if show_ang_cor:
        # annotate the angular convention (degrees) at the four plot edges
        axes.text(-0.0, 0.5, '-/+180' + r'$^0$', color='r', va='center', ha='center', transform=axes.transAxes)
        axes.text(1.0, 0.5, '0' + r'$^0$', color='r', va='center', ha='center', transform=axes.transAxes)
        axes.text(0.5, -0.0, '-90' + r'$^0$', color='r', va='center', ha='center', transform=axes.transAxes)
        axes.text(0.5, 1.0, '90' + r'$^0$', color='r', va='center', ha='center', transform=axes.transAxes)
    axes.set_aspect(aspect)
    # fig.colorbar(im_label)
    if show_colorbar:
        if not show_roi_edge:
            fig.colorbar(im)
    if save:
        fp = path + "%s_ROI_on_Image" % uid + '.png'
        plt.savefig(fp, dpi=fig.dpi)
    # plt.show()
    if return_fig:
        return fig, axes, im
def crop_image( image, crop_mask ):
    ''' Crop the non_zeros pixels of an image to a new image

    Returns crop_mask * image restricted to the smallest rectangle that
    contains all non-zero mask pixels.
    '''
    pxlst = np.where(crop_mask.ravel())[0]
    dims = crop_mask.shape
    imgwidthy = dims[1]  # dimension in y, but in plot being x
    imgwidthx = dims[0]  # dimension in x, but in plot being y
    # x and y are flipped???
    # matrix notation!!!
    # row/column indices of the non-zero mask pixels
    pixely = pxlst % imgwidthy
    pixelx = pxlst // imgwidthy
    minpixelx = np.min(pixelx)
    minpixely = np.min(pixely)
    maxpixelx = np.max(pixelx)
    maxpixely = np.max(pixely)
    crops = crop_mask * image
    # plain numpy slicing replaces the former skimage.util.crop call — it
    # produces the identical bounding-box crop without the extra dependency
    img_crop = crops[minpixelx:maxpixelx + 1, minpixely:maxpixely + 1]
    return img_crop
def get_avg_img( data_series, img_samp_index=None, sampling = 100, plot_ = False , save=False, *argv,**kwargs):
    '''Average the frames of a data series (sampled to save time).

    If img_samp_index is None, every `sampling`-th frame is averaged;
    otherwise only the frames at the listed indices are used.  With
    plot_=True the averaged image is displayed (kwargs may supply 'uid');
    with save=True it is also written to kwargs['path'].
    Returns the averaged 2D image.
    '''
    if img_samp_index is None:
        avg_img = np.average(data_series[::sampling], axis=0)
    else:
        accum = np.zeros_like(data_series[0])
        count = 0
        for idx in img_samp_index:
            accum += data_series[idx]
            count += 1
        avg_img = np.array(accum) / count
    if plot_:
        fig, ax = plt.subplots()
        uid = 'uid'
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        im = ax.imshow(avg_img, cmap='viridis', origin='lower',
                       norm=LogNorm(vmin=0.001, vmax=1e2))
        # ax.set_title("Masked Averaged Image")
        ax.set_title('uid= %s--Masked Averaged Image' % uid)
        fig.colorbar(im)
        if save:
            path = kwargs['path']
            if 'uid' in kwargs:
                uid = kwargs['uid']
            else:
                uid = 'uid'
            fp = path + "uid=%s--avg-img-" % uid + '.png'
            fig.savefig(fp, dpi=fig.dpi)
        # plt.show()
    return avg_img
def check_ROI_intensity( avg_img, ring_mask, ring_number=3 , save=False, plot=True, *argv,**kwargs):
    """plot intensity versus pixel of a ring

    Parameters
    ----------
    avg_img: 2D-array, the image
    ring_mask: 2D-array, labeled ROI mask (integer labels, 0 = background)
    ring_number: which ring to plot
    save: if True, save the figure and also dump the pixel/intensity lists
        as CSV via save_lists (kwargs must then contain 'path'; 'uid' is
        optional and used in titles/filenames)

    Returns
    -------
    The 1D array of pixel intensities of the selected ring
    """
    # print('here')
    uid = 'uid'
    if 'uid' in kwargs.keys():
        uid = kwargs['uid']
    # skbeam roi helper: pixel values of the requested labeled region
    pixel = roi.roi_pixel_values(avg_img, ring_mask, [ring_number])
    if plot:
        fig, ax = plt.subplots()
        ax.set_title('%s--check-RIO-%s-intensity' % (uid, ring_number))
        ax.plot(pixel[0][0], 'bo', ls='-')
        ax.set_ylabel('Intensity')
        ax.set_xlabel('pixel')
        if save:
            path = kwargs['path']
            fp = path + "%s_Mean_intensity_of_one_ROI" % uid + '.png'
            fig.savefig(fp, dpi=fig.dpi)
    if save:
        # also save the pixel index / intensity columns as CSV
        path = kwargs['path']
        save_lists([range(len(pixel[0][0])), pixel[0][0]], label=['pixel_list', 'roi_intensity'],
                   filename="%s_Mean_intensity_of_one_ROI" % uid, path=path)
    # plt.show()
    return pixel[0][0]
#from tqdm import tqdm
def cal_g2( image_series, ring_mask, bad_image_process,
           bad_frame_list=None,good_start=0, num_buf = 8, num_lev = None ):
    '''calculation g2 by using a multi-tau algorithm

    Parameters
    ----------
    image_series: the (masked) frame series to correlate
    ring_mask: labeled ROI mask handed to skbeam's multi_tau_auto_corr
    bad_image_process: if True, frames listed in bad_frame_list (shifted by
        good_start) are replaced by NaN frames before correlating
    num_buf: number of buffers of the multi-tau scheme
    num_lev: number of levels; derived from the frame count when None

    Returns
    -------
    g2: the one-time correlation function
    lag_steps: the corresponding lag times in frame units
    '''
    noframes = len(image_series)  # number of frames, not "no frames"
    # num_buf = 8  # number of buffers
    if bad_image_process:
        import skbeam.core.mask as mask_image
        bad_img_list = np.array(bad_frame_list) - good_start
        # lazily replace bad frames with NaN so the correlator skips them
        new_imgs = mask_image.bad_to_nan_gen(image_series, bad_img_list)
        if num_lev is None:
            num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1
        print('In this g2 calculation, the buf and lev number are: %s--%s--' % (num_buf, num_lev))
        print('%s frames will be processed...' % (noframes))
        print('Bad Frames involved!')
        g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(new_imgs))
        print('G2 calculation DONE!')
    else:
        if num_lev is None:
            num_lev = int(np.log(noframes / (num_buf - 1)) / np.log(2) + 1) + 1
        print('In this g2 calculation, the buf and lev number are: %s--%s--' % (num_buf, num_lev))
        print('%s frames will be processed...' % (noframes))
        g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm(image_series))
        print('G2 calculation DONE!')
    return g2, lag_steps
def run_time(t0):
    '''Print the wall-clock time elapsed since ``t0``.

    Parameters
    ----------
    t0: time_string, t0=time.time()
        The start time

    Returns
    -------
    None (the elapsed time is printed, in seconds or minutes)

    One usage
    ---------
    t0=time.time()
    .....(the running code)
    run_time(t0)
    '''
    seconds = time.time() - t0
    if seconds >= 60:
        print('Total time: %.3f min' % (seconds / 60.))
    else:
        print('Total time: %.3f sec' % (seconds))
def trans_data_to_pd(data, label=None,dtype='array'):
    '''
    convert data into pandas.DataFrame
    Input:
        data: list or np.array
        label: the coloum label of the data
        dtype: 'list' (columns given row-wise, will be transposed) or 'array'
    Output:
        a pandas.DataFrame
    Raises
        ValueError for an unsupported dtype (previously this only printed a
        message and then crashed with a NameError).
    '''
    # lists a [ list1, list2...] all the list have the same length
    from numpy import arange, array
    import pandas as pd
    if dtype == 'list':
        data = array(data).T
    elif dtype == 'array':
        data = array(data)
    else:
        raise ValueError("Wrong data type! Now only support 'list' and 'array' tpye")
    N, M = data.shape
    index = arange(N)
    if label is None:
        label = ['data%s' % i for i in range(M)]
    # print label
    df = pd.DataFrame(data, index=index, columns=label)
    return df
def save_lists( data, label=None, filename=None, path=None, return_res = False):
    '''
    save_lists( data, label=None, filename=None, path=None)

    save lists to a CSV file with filename in path
    Parameters
    ----------
    data: list of equal-length columns
    label: the column name, the length should be equal to the column number of list
    filename: the filename to be saved (defaults to 'data')
    path: the filepath to be saved
    return_res: if True, return the resulting DataFrame

    Example:
    save_arrays(  [q,iq], label= ['q_A-1', 'Iq'], filename='uid=%s-q-Iq'%uid, path= data_dir  )
    '''
    n_rows = len(data[0])
    n_cols = len(data)
    stacked = np.zeros([n_cols, n_rows])
    for col in range(n_cols):
        stacked[col] = data[col]
    df = trans_data_to_pd(stacked.T, label, 'array')
    if filename is None:
        filename = 'data'
    # file is written without an extension, as before
    df.to_csv(os.path.join(path, filename))
    if return_res:
        return df
def get_pos_val_overlap( p1, v1, p2,v2, Nl):
    '''Get the values of v1 and v2 at the overlapping positions of p1 and p2.

    p1: the index of array1 in array with total length as Nl
    v1: the corresponding value of p1
    p2: the index of array2 in array with total length as Nl
    v2: the corresponding value of p2
    Return:
        The values in v1 with the position in overlap of p1 and p2
        The values in v2 with the position in overlap of p1 and p2

    An example:
        Nl =10
        p1= np.array( [1,3,4,6,8] )
        v1 = np.array( [10,20,30,40,50] )
        p2= np.array( [ 0,2,3,5,7,8] )
        v2= np.array( [10,20,30,40,50,60] )
        get_pos_val_overlap( p1, v1, p2,v2, Nl)
    '''
    # scatter 1-based positions of p1 into a length-Nl lookup table
    lookup = np.zeros(Nl, dtype=np.int32)
    lookup[p1] = np.arange(len(p1)) + 1
    # indices into p2/v2 whose positions also occur in p1
    w2 = np.where(lookup[p2])[0]
    # corresponding indices into p1/v1 (undo the +1 offset)
    w1 = lookup[p2[w2]] - 1
    return v1[w1], v2[w2]
def save_arrays( data, label=None, dtype='array', filename=None, path=None, return_res = False):
    '''
    July 10, 2016, Y.G.@CHX
    save_arrays( data, label=None, dtype='array', filename=None, path=None):

    save data to a CSV file with filename in path
    Parameters
    ----------
    data: arrays
    label: the column name, the length should be equal to the column number of data
    dtype: 'array' or 'list'
    filename: the filename to be saved (defaults to 'data')
    path: the filepath to be saved
    return_res: if True, return the resulting DataFrame

    Example:
    save_arrays( qiq, label= ['q_A-1', 'Iq'], dtype='array', filename='uid=%s-q-Iq'%uid, path= data_dir  )
    '''
    df = trans_data_to_pd(data, label, dtype)
    if filename is None:
        filename = 'data'
    # file is written without an extension, as before
    full_path = os.path.join(path, filename)  # +'.csv')
    df.to_csv(full_path)
    print('The file: %s is saved in %s' % (filename, path))
    # print( 'The g2 of uid= %s is saved in %s with filename as g2-%s-%s.csv'%(uid, path, uid, CurTime))
    if return_res:
        return df
def cal_particle_g2( radius, viscosity, qr, taus, beta=0.2, T=298):
    '''YG Dev Nov 20, 2017@CHX
    Calculate the particle g2 function from particle radius, Q list and
    solution viscosity using a simple exponential model.

    Input:
        radius: m
        qr, list, in A-1
        viscosity: N*s/m^2 (water at 25K = 8.9*10^(-4) )
        T: temperture, in K
        taus: time
        beta: contrast
    e.g., for a 250 nm sphere in glycerol/water (90:10) at RT (298K):
        cal_particle_g2( radius=125 *10**(-9), qr=[0.01,0.015], viscosity= 8.9*1e-4)
    Returns an object array with one g2 curve per q value.
    '''
    D0 = get_diffusion_coefficient(viscosity, radius, T=T)
    curves = np.zeros(len(qr), dtype=object)
    for idx, q in enumerate(qr):
        # relaxation rate of Brownian diffusion at this q
        rate = D0 * q ** 2
        curves[idx] = simple_exponential(taus, beta=beta, relaxation_rate=rate, baseline=1)
    return curves
def get_diffusion_coefficient( visocity, radius, T=298):
    '''July 10, 2016, Y.G.@CHX
    Stokes-Einstein diffusion coefficient of a Brownian particle.

    visocity: N*s/m^2 (water at 25K = 8.9*10^(-4) )
    radius: m
    T: K
    k: 1.38064852(79)x10-23 J/T, Boltzmann constant
    return diffusion_coefficient in unit of A^2/s
    e.g., for a 250 nm sphere in glycerol/water (90:10) at RT (298K) gives:
        1.38064852*10**(-23)*298 / ( 6*np.pi* 0.20871 * 250 *10**(-9)) * 10**20 /1e5 = 4.18*10^5 A2/s

    get_diffusion_coefficient( 0.20871, 250 *10**(-9), T=298)
    '''
    boltzmann = 1.38064852 * 10 ** (-23)
    # D = kT / (6 pi eta r), converted from m^2/s to A^2/s
    return boltzmann * T / (6 * np.pi * visocity * radius) * 10 ** 20
def ring_edges(inner_radius, width, spacing=0, num_rings=None):
    """
    Aug 02, 2016, Y.G.@CHX
    ring_edges(inner_radius, width, spacing=0, num_rings=None)

    Calculate the inner and outer radius of a set of rings.

    The number of rings, their widths, and any spacing between rings can be
    specified. They can be uniform or varied.

    Parameters
    ----------
    inner_radius : float
        inner radius of the inner-most ring
    width : float or list of floats
        ring thickness
        If a float, all rings will have the same thickness.
    spacing : float or list of floats, optional
        margin between rings, 0 by default
        If a float, all rings will have the same spacing. If a list,
        the length of the list must be one less than the number of
        rings.
    num_rings : int, optional
        number of rings
        Required if width and spacing are not lists and number
        cannot thereby be inferred. If it is given and can also be
        inferred, input is checked for consistency.

    Returns
    -------
    edges : array
        inner and outer radius for each ring

    Example
    -------
    # Make two rings starting at r=1px, each 5px wide
    >>> ring_edges(inner_radius=1, width=5, num_rings=2)
    [(1, 6), (6, 11)]
    # Make three rings of different widths and spacings.
    # Since the width and spacings are given individually, the number of
    # rings here is simply inferred.
    >>> ring_edges(inner_radius=1, width=(5, 4, 3), spacing=(1, 2))
    [(1, 6), (7, 11), (13, 16)]
    """
    # All of this input validation merely checks that width, spacing, and
    # num_rings are self-consistent and complete.
    # BUG FIX: np.iterable replaces isinstance(..., collections.Iterable),
    # which raises AttributeError on Python >= 3.10 (alias moved to
    # collections.abc in 3.3 and removed in 3.10).
    width_is_list = np.iterable(width)
    spacing_is_list = np.iterable(spacing)
    if (width_is_list and spacing_is_list):
        if len(width) != len(spacing) + 1:
            raise ValueError("List of spacings must be one less than list "
                             "of widths.")
    if num_rings is None:
        try:
            num_rings = len(width)
        except TypeError:
            try:
                num_rings = len(spacing) + 1
            except TypeError:
                raise ValueError("Since width and spacing are constant, "
                                 "num_rings cannot be inferred and must be "
                                 "specified.")
    else:
        if width_is_list:
            if num_rings != len(width):
                raise ValueError("num_rings does not match width list")
        if spacing_is_list:
            if num_rings - 1 != len(spacing):
                raise ValueError("num_rings does not match spacing list")
    # Now regularlize the input.
    if not width_is_list:
        width = np.ones(num_rings) * width
    if spacing is None:
        spacing = []
    else:
        if not spacing_is_list:
            spacing = np.ones(num_rings - 1) * spacing
    # The inner radius is the first "spacing."
    all_spacings = np.insert(spacing, 0, inner_radius)
    # interleave (spacing, width) pairs and accumulate into ring boundaries
    steps = np.array([all_spacings, width]).T.ravel()
    edges = np.cumsum(steps).reshape(-1, 2)
    return edges
def get_non_uniform_edges(  centers, width = 4, number_rings=1, spacing=0,  ):
    '''
    YG CHX Spe 6
    get_non_uniform_edges(  centers, width = 4, number_rings=3 )

    Calculate the inner and outer radius of a set of non uniform distributed
    rings by giving ring centers
    For each center, there are number_rings with each of width

    Parameters
    ----------
    centers : float
        the center of the rings
    width : float or list of floats
        ring thickness
        If a float, all rings will have the same thickness.
    number_rings : int, optional
        number of rings per center (defaults to 1 when None)
    spacing : margin between the rings of one center group

    Returns
    -------
    edges : array
        inner and outer radius for each ring
    '''
    if number_rings is None:
        number_rings = 1
    edges = np.zeros([len(centers) * number_rings, 2])
    # print( width )
    # BUG FIX: np.iterable replaces isinstance(width, collections.Iterable),
    # which raises AttributeError on Python >= 3.10 (use collections.abc).
    if not np.iterable(width):
        width = np.ones_like(centers) * width
    for i, c in enumerate(centers):
        # place number_rings rings of width[i] symmetrically around center c
        edges[i * number_rings:(i + 1) * number_rings, :] = ring_edges(
            inner_radius=c - width[i] * number_rings / 2,
            width=width[i], spacing=spacing, num_rings=number_rings)
    return edges
def trans_tf_to_td(tf, dtype = 'dframe'):
    '''July 02, 2015, Y.G.@CHX
    Translate epoch time (float seconds) to datetime objects.

    tf: for dtype='dframe', a pandas Series/DataFrame-like iterated via its
        .index; otherwise any sequence of float timestamps
    Returns a numpy array of datetime.datetime (local time).
    '''
    import numpy as np
    import datetime
    # BUG FIX: '==' replaces the former `dtype is 'dframe'` identity test,
    # which relied on string interning (SyntaxWarning on modern Python).
    if dtype == 'dframe':
        ind = tf.index
    else:
        ind = range(len(tf))
    td = np.array([datetime.datetime.fromtimestamp(tf[i]) for i in ind])
    return td
def trans_td_to_tf(td, dtype = 'dframe'):
    '''July 02, 2015, Y.G.@CHX
    Translate datetime objects to epoch time (float seconds).

    td: for dtype='dframe', a pandas Series/DataFrame-like iterated via its
        .index; otherwise any sequence of datetime objects
    Returns a numpy array of float timestamps (second resolution, via
    time.mktime of each value's timetuple).
    '''
    import time
    import numpy as np
    # BUG FIX: '==' replaces the former `dtype is 'dframe'` identity test,
    # which relied on string interning (SyntaxWarning on modern Python).
    if dtype == 'dframe':
        ind = td.index
    else:
        ind = range(len(td))
    # tf = np.array([ time.mktime(td[i].timetuple()) for i in range(len(td)) ])
    tf = np.array([time.mktime(td[i].timetuple()) for i in ind])
    return tf
def get_averaged_data_from_multi_res( multi_res, keystr='g2', different_length= True ):
    '''Y.G. Dec 22, 2016
    get average data from multi-run analysis result

    Parameters:
        multi_res: dict, generated by function run_xpcs_xsvs_single
            each key is a uid, inside each uid are also dict with key as 'g2','g4' et.al.
        keystr: string, get the averaged keystr
        different_length: if True, do careful average for different length results:
            each run contributes up to its own length, and every row segment is
            divided by the number of runs that actually reached that length
    return:
        array, averaged results
    '''
    maxM = 0
    mkeys = multi_res.keys()
    if not different_length:
        # simple mean: all runs are assumed to have identical shapes
        n = 0
        for i, key in enumerate(list(mkeys)):
            keystri = multi_res[key][keystr]
            if i == 0:
                keystr_average = keystri
            else:
                keystr_average += keystri
            n += 1
        keystr_average /= n
    else:
        # count how many runs produced each first-dimension length M
        length_dict = {}
        D = 1
        for i, key in enumerate(list(mkeys)):
            shapes = multi_res[key][keystr].shape
            M = shapes[0]
            if i == 0:
                # dimensionality (D) and trailing size are taken from the first run
                if len(shapes) == 2:
                    D = 2
                    maxN = shapes[1]
                elif len(shapes) == 3:
                    D = 3
                    maxN = shapes[2]  # in case of two-time correlation
            if (M) not in length_dict:
                length_dict[(M)] = 1
            else:
                length_dict[(M)] += 1
            maxM = max(maxM, M)
        # print( length_dict )
        # avg_count[k] = number of runs whose length is >= k
        avg_count = {}
        sk = np.array(sorted(length_dict))
        for i, k in enumerate(sk):
            avg_count[k] = np.sum(np.array([length_dict[k] for k in sk[i:]]))
        # print(length_dict, avg_count)
        if D == 2:
            keystr_average = np.zeros([maxM, maxN])
        elif D == 3:
            keystr_average = np.zeros([maxM, maxM, maxN])
        else:
            keystr_average = np.zeros([maxM])
        # accumulate each run over its own length
        for i, key in enumerate(list(mkeys)):
            keystri = multi_res[key][keystr]
            Mi = keystri.shape[0]
            if D != 3:
                keystr_average[:Mi] += keystri
            else:
                keystr_average[:Mi, :Mi, :] += keystri
        # normalize segment by segment with the number of contributing runs
        if D != 3:
            keystr_average[:sk[0]] /= avg_count[sk[0]]
        else:
            keystr_average[:sk[0], :sk[0], :] /= avg_count[sk[0]]
        for i in range(0, len(sk) - 1):
            if D != 3:
                keystr_average[sk[i]:sk[i + 1]] /= avg_count[sk[i + 1]]
            else:
                keystr_average[sk[i]:sk[i + 1], sk[i]:sk[i + 1], :] /= avg_count[sk[i + 1]]
    return keystr_average
def save_g2_general( g2, taus, qr=None, qz=None, uid='uid', path=None, return_res= False ):
    '''Y.G. Dec 29, 2016
    Save a one-time correlation function to CSV.

    g2: one-time correlation function, shape [n_tau, n_q]
    taus, lags of g2
    qr: the qr center, same length as g2 columns (defaults to column index)
    qz: the qz or angle center, same length as g2 columns
    path: output directory
    uid: used as the output file name
    return_res: if True, return the resulting DataFrame
    '''
    # first column is tau, remaining columns are the g2 values per q
    table = DataFrame(np.hstack([np.reshape(taus, (len(g2), 1)), g2]))
    n_tau, n_q = g2.shape
    if qr is None:
        qr = range(n_q)
    if qz is None:
        table.columns = ['tau'] + [str(q) for q in qr]
    else:
        table.columns = ['tau'] + ['%s_%s' % (q, z) for (q, z) in zip(qr, qz)]
    out_name = uid
    out_path = os.path.join(path, out_name)
    table.to_csv(out_path)
    print( 'The correlation function is saved in %s with filename as %s'%( path, out_name))
    if return_res:
        return table
###########
#*for g2 fit and plot
def stretched_auto_corr_scat_factor(x, beta, relaxation_rate, alpha=1.0, baseline=1):
    """Stretched/compressed exponential g2 model:
    beta * exp(-2 * (relaxation_rate * x)**alpha) + baseline
    """
    decay = np.exp(-2 * (relaxation_rate * x) ** alpha)
    return baseline + beta * decay
def simple_exponential(x, beta, relaxation_rate, baseline=1):
    """Single-exponential g2 model: beta * exp(-2 * relaxation_rate * x) + baseline."""
    decay = np.exp(-2 * relaxation_rate * x)
    return baseline + beta * decay
def simple_exponential_with_vibration(x, beta, relaxation_rate, freq, amp, baseline=1):
    """Single-exponential g2 model modulated by a sinusoidal vibration term."""
    oscillation = 1 + amp * np.cos(2 * np.pi * freq * x)
    decay = np.exp(-2 * relaxation_rate * x)
    return baseline + beta * oscillation * decay
def stretched_auto_corr_scat_factor_with_vibration(x, beta, relaxation_rate, alpha, freq, amp, baseline=1):
    """Stretched-exponential g2 model modulated by a sinusoidal vibration term."""
    oscillation = 1 + amp * np.cos(2 * np.pi * freq * x)
    decay = np.exp(-2 * (relaxation_rate * x) ** alpha)
    return baseline + beta * oscillation * decay
def flow_para_function_with_vibration( x, beta, relaxation_rate, flow_velocity, freq, amp, baseline=1):
vibration_part = (1 + amp*np.cos( 2*np.pi*freq* x) )
Diff_part= np.exp(-2 * relaxation_rate * x)
Flow_part = np.pi**2/(16*x*flow_velocity) * abs( erf( np.sqrt( 4/np.pi * 1j* x * flow_velocity ) ) )**2
return beta* vibration_part* Diff_part * Flow_part + baseline
def flow_para_function( x, beta, relaxation_rate, flow_velocity, baseline=1):
'''flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )'''
Diff_part= np.exp(-2 * relaxation_rate * x)
Flow_part = np.pi**2/(16*x*flow_velocity) * abs( erf( np.sqrt( 4/np.pi * 1j* x * flow_velocity ) ) )**2
return beta*Diff_part * Flow_part + baseline
def flow_para_function_explicitq( x, beta, relaxation_rate, flow_velocity, baseline=1, qr=1, q_ang=0 ):
'''Nov 9, 2017 Basically, make q vector to (qr, angle), relaxation_rate is actually a diffusion rate
flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )'''
Diff_part= np.exp(-2 * relaxation_rate* qr**2 * x)
Flow_part = np.pi**2/(16*x*flow_velocity*qr* np.cos(q_ang) ) * abs( erf( np.sqrt( 4/np.pi * 1j* x * flow_velocity * qr* np.cos(q_ang) ) ) )**2
return beta*Diff_part * Flow_part + baseline
def get_flow_velocity( average_velocity, shape_factor):
    """Convert an average velocity to a flow velocity via a shape factor:
    v = <v> * (1 - s) / (1 + s).
    """
    ratio = (1 - shape_factor) / (1 + shape_factor)
    return average_velocity * ratio
def stretched_flow_para_function( x, beta, relaxation_rate, alpha, flow_velocity, baseline=1):
'''
flow_velocity: q.v (q vector dot v vector = q*v*cos(angle) )
'''
Diff_part= np.exp(-2 * (relaxation_rate * x)**alpha )
Flow_part = np.pi**2/(16*x*flow_velocity) * abs( erf( np.sqrt( 4/np.pi * 1j* x * flow_velocity ) ) )**2
return beta*Diff_part * Flow_part + baseline
def get_g2_fit_general_two_steps( g2, taus, function='simple_exponential',
                    second_fit_range=[0,20],
                    sequential_fit=False, *argv,**kwargs):
    '''
    Two-pass fit of g2:
      i)  fit the whole curve with `function` to pin down baseline and beta (contrast);
      ii) re-fit only `second_fit_range` with a simple exponential, keeping the
          baseline/beta/alpha obtained in step i) fixed.
    Returns (fit results, fitted taus, fitted g2) from the second pass.
    '''
    # first pass: free fit over the full tau range
    first_res, first_taus, first_fit = get_g2_fit_general(
        g2, taus, function, sequential_fit, *argv, **kwargs)
    # collect the per-q best values of every parameter as initial guesses
    guess_values = {
        name: np.array([first_res[q].params[name].value for q in range(g2.shape[1])])
        for name in list(first_res[0].params.keys())
    }
    guess_limits = kwargs.get(
        'guess_limits',
        dict(baseline=[1, 1.8], alpha=[0, 2],
             beta=[0., 1], relaxation_rate=[0.001, 10000]))
    # second pass: only the relaxation rate is allowed to vary
    return get_g2_fit_general(
        g2, taus, function='simple_exponential',
        sequential_fit=sequential_fit, fit_range=second_fit_range,
        fit_variables={'baseline': False, 'beta': False, 'alpha': False, 'relaxation_rate': True},
        guess_values=guess_values, guess_limits=guess_limits)
def get_g2_fit_general( g2, taus, function='simple_exponential',
                       sequential_fit=False, qval_dict = None,
                       ang_init = 137.3, *argv,**kwargs):
    '''
    Nov 9, 2017, give qval_dict for using function of flow_para_function_explicitq
    qval_dict: a dict with qr and ang (in unit of degrees).")
    Dec 29,2016, Y.G.@CHX
    Fit one-time correlation function
    The support functions include simple exponential and stretched/compressed exponential
    Parameters
    ----------
    g2: one-time correlation function for fit, with shape as [taus, qs]
    taus: the time delay
    sequential_fit: if True, will use the low-q fit result as initial value to fit the higher Qs
    function:
        supported function include:
            'simple_exponential' (or 'simple'): fit by a simple exponential function, defined as
                        beta * np.exp(-2 * relaxation_rate * lags) + baseline
            'streched_exponential'(or 'streched'): fit by a streched exponential function, defined as
                        beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline
            'stretched_vibration': fit by a streched exponential function with vibration, defined as
                        beta * (1 + amp*np.cos( 2*np.pi*60* x) )* np.exp(-2 * relaxation_rate * x)**alpha + baseline
            'flow_para_function' (or flow): fit by a flow function
    kwargs:
        could contains:
            'fit_variables': a dict, for vary or not,
                                keys are fitting para, including
                                    beta, relaxation_rate , alpha ,baseline
                                values: a False or True, False for not vary
            'guess_values': a dict, for initial value of the fitting para,
                            the defalut values are
                                dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0)
            'guess_limits': a dict, for the limits of the fittting para, for example:
                                dict( beta=[0, 10],, alpha=[0,100] )
                            the default is:
                                dict( baseline =[0.5, 2.5], alpha=[0, inf] ,beta = [0, 1], relaxation_rate= [0.0,1000] )
    Returns
    -------
    fit resutls: a instance in limfit
    tau_fit
    fit_data by the model, it has the q number of g2
    an example:
        fit_g2_func = 'stretched'
        g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( g2,  taus,
                        function = fit_g2_func,  vlim=[0.95, 1.05], fit_range= None,
                        fit_variables={'baseline':True, 'beta':True, 'alpha':True,'relaxation_rate':True},
                        guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
        g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result,  filename= uid_  +'_g2_fit_paras.csv', path=data_dir )
    '''
    # optional [start, end] index range restricting which tau points are fitted
    if 'fit_range' in kwargs.keys():
        fit_range = kwargs['fit_range']
    else:
        fit_range=None
    num_rings = g2.shape[1]
    # _vars collects parameter names that the caller wants FIXED (vary=False)
    if 'fit_variables' in kwargs:
        additional_var = kwargs['fit_variables']
        _vars =[ k for k in list( additional_var.keys()) if additional_var[k] is False]
    else:
        _vars = []
    # choose the lmfit Model matching the requested functional form;
    # 'simple' is the stretched form with alpha pinned to a fixed value
    if function=='simple_exponential' or function=='simple':
        _vars = np.unique ( _vars + ['alpha'])
        mod = Model(stretched_auto_corr_scat_factor)#,  independent_vars= list( _vars)   )
    elif function=='stretched_exponential' or function=='stretched':
        mod = Model(stretched_auto_corr_scat_factor)#,  independent_vars=  _vars)
    elif function=='stretched_vibration':
        mod = Model(stretched_auto_corr_scat_factor_with_vibration)#,  independent_vars=  _vars)
    elif function=='flow_para_function' or  function=='flow_para':
        mod = Model(flow_para_function)#,  independent_vars=  _vars)
    elif function=='flow_para_function_explicitq' or  function=='flow_para_qang':
        mod = Model(flow_para_function_explicitq)#,  independent_vars=  _vars)
    elif function=='flow_para_function_with_vibration' or  function=='flow_vibration':
        mod = Model( flow_para_function_with_vibration )
    else:
        print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function)
    # default parameter bounds; may be overridden below by 'guess_limits'
    mod.set_param_hint( 'baseline',   min=0.5, max= 2.5 )
    mod.set_param_hint( 'beta',   min=0.0,  max=1.0 )
    mod.set_param_hint( 'alpha',   min=0.0 )
    mod.set_param_hint( 'relaxation_rate',   min=0.0,  max= 1000  )
    if 'guess_limits' in kwargs:
        guess_limits  = kwargs['guess_limits']
        for k in list(  guess_limits.keys() ):
            mod.set_param_hint( k,   min=   guess_limits[k][0], max= guess_limits[k][1] )
    # flow/vibration variants add extra non-negative parameters
    if function=='flow_para_function' or  function=='flow_para' or function=='flow_vibration':
        mod.set_param_hint( 'flow_velocity', min=0)
    if function=='flow_para_function_explicitq' or  function=='flow_para_qang':
        mod.set_param_hint( 'flow_velocity', min=0)
    if function=='stretched_vibration' or  function=='flow_vibration':
        mod.set_param_hint( 'freq', min=0)
        mod.set_param_hint( 'amp', min=0)
    _guess_val = dict( beta=.1, alpha=1.0, relaxation_rate =0.005, baseline=1.0)
    if 'guess_values' in kwargs:
        guess_values  = kwargs['guess_values']
        _guess_val.update( guess_values )
    _beta=_guess_val['beta']
    _alpha=_guess_val['alpha']
    _relaxation_rate = _guess_val['relaxation_rate']
    _baseline= _guess_val['baseline']
    # build the initial Parameters object; re-built for function variants that
    # require additional parameters (flow velocity, vibration freq/amp, qr/q_ang)
    pars  = mod.make_params( beta=_beta, alpha=_alpha, relaxation_rate =_relaxation_rate, baseline= _baseline)
    if function=='flow_para_function' or  function=='flow_para':
        _flow_velocity =_guess_val['flow_velocity']
        pars  = mod.make_params( beta=_beta, alpha=_alpha, flow_velocity=_flow_velocity,
                                relaxation_rate =_relaxation_rate, baseline= _baseline)
    if function=='flow_para_function_explicitq' or  function=='flow_para_qang':
        _flow_velocity =_guess_val['flow_velocity']
        _guess_val['qr'] = 1
        _guess_val['q_ang'] = 0
        pars  = mod.make_params( beta=_beta, alpha=_alpha, flow_velocity=_flow_velocity,
                                relaxation_rate =_relaxation_rate, baseline= _baseline,
                               qr=1, q_ang=0
                               )
    if function=='stretched_vibration':
        _freq =_guess_val['freq']
        _amp = _guess_val['amp']
        pars  = mod.make_params( beta=_beta, alpha=_alpha, freq=_freq, amp = _amp,
                                relaxation_rate =_relaxation_rate, baseline= _baseline)
    if function=='flow_vibration':
        _flow_velocity =_guess_val['flow_velocity']
        _freq =_guess_val['freq']
        _amp = _guess_val['amp']
        pars  = mod.make_params( beta=_beta, freq=_freq, amp = _amp,flow_velocity=_flow_velocity,
                                relaxation_rate =_relaxation_rate, baseline= _baseline)
    # freeze the caller-fixed parameters
    for v in _vars:
        pars['%s'%v].vary = False
    #print( pars )
    fit_res = []
    model_data = []
    # fit each q-ring independently; the first tau point is always dropped
    for i in range(num_rings):
        if fit_range is not None:
            y=g2[1:, i][fit_range[0]:fit_range[1]]
            lags=taus[1:][fit_range[0]:fit_range[1]]
        else:
            y=g2[1:, i]
            lags=taus[1:]
        #print( _relaxation_rate )
        # per-q initial guesses: a guess value may be an array (one entry per q)
        for k in list(pars.keys()):
            #print(k, _guess_val[k]  )
            if isinstance( _guess_val[k], (np.ndarray, list) ):
                pars[k].value = _guess_val[k][i]
        if False:
            if isinstance( _beta, (np.ndarray, list) ):
                 pars['beta'].value = _guess_val['beta'][i]
            if isinstance( _baseline, (np.ndarray, list) ):
                 pars['baseline'].value = _guess_val['baseline'][i]
            if isinstance( _relaxation_rate, (np.ndarray, list) ):
                 pars['relaxation_rate'].value = _guess_val['relaxation_rate'][i]
            if isinstance( _alpha, (np.ndarray, list) ):
                 pars['alpha'].value = _guess_val['alpha'][i]
            #for k in list(pars.keys()):
                #print(k, _guess_val[k]  )
            #    pars[k].value = _guess_val[k][i]
        # explicit-q flow model needs the actual (qr, angle) for this ring;
        # both are held fixed during the fit
        if function=='flow_para_function_explicitq' or  function=='flow_para_qang':
            if qval_dict is None:
                print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).")
            else:
                pars  = mod.make_params(
                    beta=_beta, alpha=_alpha, flow_velocity=_flow_velocity,
                                relaxation_rate =_relaxation_rate, baseline= _baseline,
                               qr = qval_dict[i][0], q_ang = abs(np.radians( qval_dict[i][1] - ang_init) )  )
                pars['qr'].vary = False
                pars['q_ang'].vary = False
                #if i==20:
                #    print(pars)
        #print( pars )
        result1 = mod.fit(y, pars, x =lags )
        #print(qval_dict[i][0], qval_dict[i][1],  y)
        # sequential fit: seed the next (higher) q with this q's best values
        if sequential_fit:
            for k in list(pars.keys()):
                #print( pars )
                if k in list(result1.best_values.keys()):
                    pars[k].value = result1.best_values[k]
        fit_res.append( result1)
        model_data.append(  result1.best_fit )
    return fit_res, lags, np.array( model_data ).T
def get_short_long_labels_from_qval_dict(qval_dict, geometry='saxs'):
    '''Y.G. 2016, Dec 26
    Split the q labels of a qval_dict into a "short" axis (few unique values,
    one subplot panel each) and a "long" axis (many values, curves within a panel).
    Parameters
    ----------
    qval_dict, dict, with key as roi number,
                    format as {1: [qr1, qz1], 2: [qr2,qz2] ...} for gi-saxs
                    format as {1: [qr1], 2: [qr2] ...} for saxs
                    format as {1: [qr1, qa1], 2: [qr2,qa2], ...] for ang-saxs
    geometry:
        'saxs':  a saxs with Qr partition
        'ang_saxs': a saxs with Qr and angular partition
        'gi_saxs': gisaxs with Qz, Qr
    Returns the tuple (qr_label, qz_label, num_qz, num_qr, num_short, num_long,
    short_label, long_label, short_ulabel, long_ulabel, ind_long, master_plot, mastp).
    '''
    n_rois = len(qval_dict.keys())
    qvals = np.array(list(qval_dict.values()))
    n_components = len(list(qval_dict.values())[0])
    qr_label = qvals[:, 0]
    # second q component (qz or angle) only exists for gi_saxs / ang_saxs
    if geometry == 'gi_saxs' or geometry == 'ang_saxs':
        if n_components < 2:
            print("please give qz or qang for the q-label")
        else:
            qz_label = qvals[:, 1]
    else:
        qz_label = np.array([0])
    uqz_label = np.unique(qz_label)
    uqr_label = np.unique(qr_label)
    num_qz = len(uqz_label)
    num_qr = len(uqr_label)
    # the axis with fewer unique values becomes the "master" (panel) axis
    master_plot = 'qz' if num_qr >= num_qz else 'qr'
    mastp = 'ang' if geometry == 'ang_saxs' else master_plot
    num_short = min(num_qz, num_qr)
    num_long = max(num_qz, num_qr)
    if num_qz != num_qr:
        candidates = [qz_label, qr_label]
        ucandidates = [uqz_label, uqr_label]
        i_short = np.argmin([num_qz, num_qr])
        i_long = np.argmax([num_qz, num_qr])
        short_label = candidates[i_short]
        long_label = candidates[i_long]
        short_ulabel = ucandidates[i_short]
        long_ulabel = ucandidates[i_long]
    else:
        short_label = qz_label
        long_label = qr_label
        short_ulabel = uqz_label
        long_ulabel = uqr_label
    # indices of the long-axis entries belonging to each short-axis value
    if geometry == 'saxs' or geometry == 'gi_waxs':
        ind_long = [range(num_long)]
    else:
        ind_long = [np.where(short_label == v)[0] for v in short_ulabel]
    # single-ROI special case: expose the raw q values directly
    if n_rois == 1:
        long_ulabel = list(qval_dict.values())[0]
        long_label = list(qval_dict.values())[0]
    return (qr_label, qz_label, num_qz, num_qr, num_short, num_long,
            short_label, long_label, short_ulabel, long_ulabel,
            ind_long, master_plot, mastp)
############################################
##a good func to plot g2 for all types of geogmetries
############################################
def plot_g2_general( g2_dict, taus_dict, qval_dict, fit_res=None, geometry='saxs',filename='g2',
                    path=None, function='simple_exponential', g2_labels=None,
                    fig_ysize= 12, qth_interest = None,
                    ylabel='g2', return_fig=False, append_name='', outsize=(2000, 2400),
                    max_plotnum_fig=16, figsize=(10, 12), show_average_ang_saxs=True,
                    qphi_analysis = False,
                    *argv,**kwargs):
    '''
    Oct31, 2017 add qth_interest option
    Dec 26,2016, Y.G.@CHX
    Plot one/four-time correlation function (with fit) for different geometry
    The support functions include simple exponential and stretched/compressed exponential
    Parameters
    ----------
    g2_dict: dict, format as {1: g2_1, 2: g2_2, 3: g2_3...} one-time correlation function, g1,g2, g3,...must have the same shape
    taus_dict, dict, format {1: tau_1, 2: tau_2, 3: tau_3...}, tau1,tau2, tau3,...must have the same shape
    qval_dict, dict, with key as roi number,
                    format as {1: [qr1, qz1], 2: [qr2,qz2] ...} for gi-saxs
                    format as {1: [qr1], 2: [qr2] ...} for saxs
                    format as {1: [qr1, qa1], 2: [qr2,qa2], ...] for ang-saxs
    fit_res: give all the fitting parameters for showing in the plot
    qth_interest: if not None: should be a list, and will only plot the qth_interest qs
    filename: for the title of plot
    append_name: if not None, will save as filename + append_name as filename
    path: the path to save data
    outsize: for gi/ang_saxs, will combine all the different qz images together with outsize
    function:
        'simple_exponential': fit by a simple exponential function, defined as
                    beta * np.exp(-2 * relaxation_rate * lags) + baseline
        'streched_exponential': fit by a streched exponential function, defined as
                    beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline
    geometry:
        'saxs':  a saxs with Qr partition
        'ang_saxs': a saxs with Qr and angular partition
        'gi_saxs': gisaxs with Qz, Qr
    one_plot: if True, plot all images in one pannel
    kwargs:
    Returns
    -------
    None
    ToDoList: plot an average g2 for ang_saxs for each q
    '''
    if ylabel=='g2':
        ylabel='g_2'
    if ylabel=='g4':
        ylabel='g_4'
    if geometry =='saxs':
        if qphi_analysis:
            geometry = 'ang_saxs'
    # optionally restrict the plot to a user-selected subset of q indices
    # (1-based on input, converted to 0-based here)
    if qth_interest is not None:
        if not isinstance(qth_interest, list):
            print('Please give a list for qth_interest')
        else:
            #g2_dict0, taus_dict0, qval_dict0, fit_res0= g2_dict, taus_dict, qval_dict, fit_res
            qth_interest = np.array( qth_interest ) -1
            g2_dict_ = {}
            #taus_dict_ = {}
            for k in list(g2_dict.keys()):
                g2_dict_[k] = g2_dict[k][:,[i for i in qth_interest]]
            #for k in list(taus_dict.keys()):
            #    taus_dict_[k] = taus_dict[k][:,[i for i in qth_interest]]
            taus_dict_ = taus_dict
            qval_dict_ = {k:qval_dict[k] for k in qth_interest}
            if fit_res is not None:
                fit_res_ = [ fit_res[k] for k in qth_interest  ]
            else:
                fit_res_ = None
    else:
        g2_dict_, taus_dict_, qval_dict_, fit_res_ = g2_dict, taus_dict, qval_dict, fit_res
    (qr_label, qz_label, num_qz, num_qr, num_short,
     num_long, short_label, long_label,short_ulabel,
     long_ulabel,ind_long, master_plot,
     mastp) = get_short_long_labels_from_qval_dict(qval_dict_, geometry=geometry)
    fps = []
    #$print( num_short, num_long )
    # one figure (or set of figures) per short-axis value (e.g. per qz)
    for s_ind in range( num_short  ):
        ind_long_i = ind_long[ s_ind ]
        num_long_i = len( ind_long_i )
        #if show_average_ang_saxs:
        #    if geometry=='ang_saxs':
        #        num_long_i += 1
        if RUN_GUI:
            fig = Figure(figsize=(10, 12))
        else:
            #fig = plt.figure( )
            # figure size depends on the number of subplots; more than
            # max_plotnum_fig panels are split over several figures
            if num_long_i <=4:
                if master_plot != 'qz':
                    fig = plt.figure(figsize=(8, 6))
                else:
                    if num_short>1:
                        fig = plt.figure(figsize=(8, 4))
                    else:
                        fig = plt.figure(figsize=(10, 6))
                    #print('Here')
            elif num_long_i > max_plotnum_fig:
                num_fig = int(np.ceil(num_long_i/max_plotnum_fig)) #num_long_i //16
                fig = [ plt.figure(figsize=figsize)  for i in range(num_fig) ]
                #print( figsize )
            else:
                #print('Here')
                if master_plot != 'qz':
                    fig = plt.figure(figsize=figsize)
                else:
                    fig = plt.figure(figsize=(10, 10))
        # figure title describing the current short-axis value
        if master_plot == 'qz':
            if geometry=='ang_saxs':
                title_short = 'Angle= %.2f'%( short_ulabel[s_ind] )  +  r'$^\circ$'
            elif geometry=='gi_saxs':
                title_short = r'$Q_z= $' + '%.4f'%( short_ulabel[s_ind] ) + r'$\AA^{-1}$'
            else:
                title_short = ''
        else: #qr
            if geometry=='ang_saxs' or geometry=='gi_saxs':
                title_short =   r'$Q_r= $' + '%.5f '%( short_ulabel[s_ind] ) + r'$\AA^{-1}$'
            else:
                title_short=''
        #filename =''
        til = '%s:--->%s'%(filename,  title_short )
        if num_long_i <=4:
            plt.title( til,fontsize= 14, y =1.15)
            #plt.title( til,fontsize=20, y =1.06)
        else:
            plt.title( til,fontsize=20, y =1.06)
        #print( num_long )
        if num_long!=1:
            #print( 'here')
            plt.axis('off')
            #sy =   min(num_long_i,4)
            sy =   min(num_long_i, int( np.ceil( min(max_plotnum_fig,num_long_i)/4)) )
            #fig.set_size_inches(10, 12)
            #fig.set_size_inches(10, fig_ysize )
        else:
            sy =1
            #fig.set_size_inches(8,6)
            #plt.axis('off')
        # subplot grid: sx rows x sy columns (swapped below)
        sx = min(4, int( np.ceil( min(max_plotnum_fig,num_long_i)/float(sy) ) ))
        temp = sy
        sy = sx
        sx = temp
        #print( num_long_i, sx, sy )
        #print( master_plot )
        #print(ind_long_i, len(ind_long_i) )
        # one subplot per long-axis value (e.g. per qr ring)
        for i, l_ind in enumerate( ind_long_i ):
            if num_long_i <= max_plotnum_fig:
                #if  s_ind ==2:
                #    print('Here')
                #    print(i, l_ind, short_label[s_ind],  long_label[l_ind], sx, sy, i+1 )
                ax = fig.add_subplot(sx,sy, i + 1 )
                if sx==1:
                    if sy==1:
                        plt.axis('on')
            else:
                #fig_subnum = l_ind//max_plotnum_fig
                #ax = fig[fig_subnum].add_subplot(sx,sy, i + 1 - fig_subnum*max_plotnum_fig)
                fig_subnum = i//max_plotnum_fig
                #print( i, sx,sy, fig_subnum, max_plotnum_fig, i + 1 - fig_subnum*max_plotnum_fig )
                ax = fig[fig_subnum].add_subplot(sx,sy, i + 1 - fig_subnum*max_plotnum_fig)
            ax.set_ylabel( r"$%s$"%ylabel + '(' + r'$\tau$' + ')' )
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            # subplot title describing the current long-axis value
            if master_plot == 'qz' or master_plot == 'angle':
                title_long =  r'$Q_r= $'+'%.5f  '%( long_label[l_ind] ) + r'$\AA^{-1}$'
                #print( title_long,long_label,l_ind   )
            else:
                if geometry=='ang_saxs':
                    #title_long = 'Ang= ' + '%.2f'%( long_label[l_ind] ) + r'$^\circ$' + '( %d )'%(l_ind)
                    title_long = 'Ang= ' + '%.2f'%( long_label[l_ind] ) #+ r'$^\circ$' + '( %d )'%(l_ind)
                elif geometry=='gi_saxs':
                    title_long =   r'$Q_z= $'+ '%.5f  '%( long_label[l_ind] ) + r'$\AA^{-1}$'
                else:
                    title_long = ''
            if master_plot != 'qz':
                ax.set_title(title_long + ' (%s  )'%(1+l_ind), y =1.1, fontsize=12)
            else:
                ax.set_title(title_long + ' (%s  )'%(1+l_ind), y =1.05, fontsize=12)
            if qth_interest is not None:#it might have a bug here, todolist!!!
                lab = sorted(list(qval_dict_.keys()))
                #print( lab, l_ind)
                ax.set_title(title_long + ' (%s  )'%( lab[l_ind] +1), y =1.05, fontsize=12)
            # overlay each dataset in g2_dict_ with its own color/marker
            for ki, k in enumerate( list(g2_dict_.keys()) ):
                if ki==0:
                    c='b'
                    if fit_res is None:
                        m='-o'
                    else:
                        m='o'
                elif ki==1:
                    c='r'
                    if fit_res is None:
                        m='s'
                    else:
                        m='-'
                elif ki==2:
                    c='g'
                    m='-D'
                else:
                    c = colors[ki+2]
                    m= '-%s'%markers[ki+2]
                # a dict value may be a single array or a list of arrays
                try:
                    dumy = g2_dict_[k].shape
                    #print( 'here is the shape' )
                    islist = False
                except:
                    islist_n = len( g2_dict_[k] )
                    islist = True
                    #print( 'here is the list' )
                if islist:
                    for nlst in range( islist_n ):
                        m = '-%s'%markers[ nlst ]
                        #print(m)
                        y=g2_dict_[k][nlst][:, l_ind ]
                        x = taus_dict_[k][nlst]
                        if ki==0:
                            ymin,ymax = min(y), max(y[1:])
                        if g2_labels is None:
                            ax.semilogx(x, y, m, color=c,  markersize=6)
                        else:
                            #print('here ki ={} nlst = {}'.format( ki, nlst ))
                            if nlst==0:
                                ax.semilogx(x, y, m,  color=c,markersize=6, label=g2_labels[ki])
                            else:
                                ax.semilogx(x, y, m,  color=c,markersize=6)
                        if nlst==0:
                            if l_ind==0:
                                ax.legend(loc='best', fontsize = 8, fancybox=True, framealpha=0.5)
                else:
                    y=g2_dict_[k][:, l_ind ]
                    x = taus_dict_[k]
                    if ki==0:
                        ymin,ymax = min(y), max(y[1:])
                    if g2_labels is None:
                        ax.semilogx(x, y, m, color=c,  markersize=6)
                    else:
                        ax.semilogx(x, y, m, color=c,markersize=6, label=g2_labels[ki])
                        if l_ind==0:
                            ax.legend(loc='best', fontsize = 8, fancybox=True, framealpha=0.5)
            # annotate the panel with the fitted parameter values, if provided
            if fit_res_ is not None:
                result1 = fit_res_[l_ind]
                #print (result1.best_values)
                rate = result1.best_values['relaxation_rate']
                beta = result1.best_values['beta']
                baseline =  result1.best_values['baseline']
                if function=='simple_exponential' or function=='simple':
                    alpha =1.0
                elif function=='stretched_exponential' or function=='stretched':
                    alpha = result1.best_values['alpha']
                elif function=='stretched_vibration':
                    alpha = result1.best_values['alpha']
                    freq = result1.best_values['freq']
                elif function=='flow_vibration':
                    freq = result1.best_values['freq']
                if function=='flow_para_function' or  function=='flow_para' or function=='flow_vibration':
                    flow = result1.best_values['flow_velocity']
                if function=='flow_para_function_explicitq' or  function=='flow_para_qang':
                    flow = result1.best_values['flow_velocity']
                    if qval_dict_ is None:
                        print("Please provide qval_dict, a dict with qr and ang (in unit of degrees).")
                    else:
                        pass
                if rate!=0:
                    txts = r'$\tau_0$' + r'$ = %.3f$'%(1/rate) +  r'$ s$'
                else:
                    txts = r'$\tau_0$' + r'$ = inf$' +  r'$ s$'
                x=0.25
                y0=0.9
                fontsize = 12
                ax.text(x =x, y= y0, s=txts, fontsize=fontsize, transform=ax.transAxes)
                #print(function)
                dt=0
                if function!='flow_para_function' and  function!='flow_para' and function!='flow_vibration' and  function!='flow_para_qang':
                    txts = r'$\alpha$' + r'$ = %.3f$'%(alpha)
                    dt +=0.1
                    #txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) +  r'$ s^{-1}$'
                    ax.text(x =x, y= y0-dt, s=txts, fontsize=fontsize, transform=ax.transAxes)
                txts = r'$baseline$' + r'$ = %.3f$'%( baseline)
                dt +=0.1
                ax.text(x =x, y= y0- dt, s=txts, fontsize=fontsize, transform=ax.transAxes)
                if function=='flow_para_function' or  function=='flow_para' or function=='flow_vibration' or  function=='flow_para_qang':
                    txts = r'$flow_v$' + r'$ = %.3f$'%( flow)
                    dt += 0.1
                    ax.text(x =x, y= y0- dt, s=txts, fontsize=fontsize, transform=ax.transAxes)
                if function=='stretched_vibration' or function=='flow_vibration':
                    txts = r'$vibration$' + r'$ = %.1f Hz$'%( freq)
                    dt += 0.1
                    ax.text(x =x, y= y0-dt, s=txts, fontsize=fontsize, transform=ax.transAxes)
                txts = r'$\beta$' + r'$ = %.3f$'%( beta )
                dt +=0.1
                ax.text(x =x, y= y0- dt, s=txts, fontsize=fontsize, transform=ax.transAxes)
            # y limits: explicit 'ylim' wins over relative 'vlim' scaling
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([ymin*vmin, ymax*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
        # save this short-axis figure (or set of figures) to png
        if num_short == 1:
            fp = path + filename
        else:
            fp = path + filename + '_%s_%s'%(mastp, s_ind)
        if append_name is not '':
            fp = fp + append_name
        fps.append( fp  + '.png' )
        #if num_long_i <= 16:
        if num_long_i <= max_plotnum_fig:
            fig.set_tight_layout(True)
            #fig.tight_layout()
            #print(fig)
            try:
                plt.savefig( fp + '.png', dpi=fig.dpi)
            except:
                print('Can not save figure here.')
        else:
            fps=[]
            for fn, f in enumerate(fig):
                f.set_tight_layout(True)
                fp = path + filename + '_q_%s_%s'%(fn*16, (fn+1)*16)
                if append_name is not '':
                    fp = fp + append_name
                fps.append( fp  + '.png' )
                f.savefig( fp + '.png', dpi=f.dpi)
            #plt.savefig( fp + '.png', dpi=fig.dpi)
    #combine each saved images together
    if (num_short !=1) or (num_long_i > 16):
        outputfile =  path + filename + '.png'
        if append_name is not '':
            outputfile =  path + filename  + append_name + '__joint.png'
        else:
            outputfile =  path + filename   + '__joint.png'
        combine_images( fps, outputfile, outsize= outsize )
    if return_fig:
        return fig
def power_func(x, D0, power=2):
    """Simple power-law model used for q ~ rate fitting: D0 * x**power."""
    scaled = x ** power
    return D0 * scaled
def get_q_rate_fit_general( qval_dict, rate, geometry ='saxs', weights=None, *argv,**kwargs):
    '''
    Dec 26,2016, Y.G.@CHX
    Fit q~rate by a power law function and fit curve pass (0,0)
    Parameters
    ----------
    qval_dict, dict, with key as roi number,
                    format as {1: [qr1, qz1], 2: [qr2,qz2] ...} for gi-saxs
                    format as {1: [qr1], 2: [qr2] ...} for saxs
                    format as {1: [qr1, qa1], 2: [qr2,qa2], ...] for ang-saxs
    rate: relaxation_rate, one value per roi
    weights: optional, forwarded to lmfit Model.fit as point weights
    kwargs may contain:
        fit_range: [start, end] index range restricting which q points are fitted
    Option:
        if power_variable = False, power =2 to fit q^2~rate,
        Otherwise, power is variable.
    Return:
    D0: array of fitted diffusion coefficients (one per short-axis label, e.g. per qz)
    qrate_fit_res: list of the corresponding lmfit fit results
    '''
    power_variable=False
    if 'fit_range' in kwargs.keys():
        fit_range = kwargs['fit_range']
    else:
        fit_range= None
    mod = Model( power_func )
    #mod.set_param_hint( 'power',   min=0.5, max= 10 )
    #mod.set_param_hint( 'D0',   min=0 )
    # BUG FIX: the initial guess was previously written as `1*10^(-5)`; in
    # Python `^` is bitwise XOR (so it evaluated to -15), not exponentiation.
    # The intended starting value for the diffusion coefficient is 1e-5.
    pars = mod.make_params( power = 2, D0 = 1e-5 )
    if power_variable:
        pars['power'].vary = True
    else:
        pars['power'].vary = False
    (qr_label, qz_label, num_qz, num_qr, num_short,
     num_long, short_label, long_label,short_ulabel,
     long_ulabel,ind_long, master_plot,
     mastp) = get_short_long_labels_from_qval_dict(qval_dict, geometry=geometry)
    Nqr = num_long
    Nqz = num_short
    D0= np.zeros( Nqz )
    power= 2 # fixed exponent: fit rate = D0 * q**2
    qrate_fit_res=[]
    #print(Nqz)
    # fit each short-axis group (e.g. each qz) independently
    for i in range(Nqz):
        ind_long_i = ind_long[ i ]
        y = np.array( rate )[ind_long_i]
        x = long_label[ind_long_i]
        #print(y,x)
        if fit_range is not None:
            # restrict the fit to a sub-range of q points
            y=y[fit_range[0]:fit_range[1]]
            x=x[fit_range[0]:fit_range[1]]
        #print (i, y,x)
        _result = mod.fit(y, pars, x = x ,weights=weights )
        qrate_fit_res.append(  _result )
        D0[i]  = _result.best_values['D0']
        #power[i] = _result.best_values['power']
        print ('The fitted diffusion coefficient D0 is:  %.3e   A^2S-1'%D0[i])
    return D0, qrate_fit_res
def plot_q_rate_fit_general( qval_dict, rate, qrate_fit_res, geometry ='saxs', ylim = None,
                            plot_all_range=True, plot_index_range = None, show_text=True,return_fig=False,
                            show_fit=True,
                            *argv,**kwargs):
    '''
    Dec 26,2016, Y.G.@CHX
    plot q~rate fitted by a power law function and fit curve pass (0,0)
    Parameters
    ----------
    qval_dict, dict, with key as roi number,
                    format as {1: [qr1, qz1], 2: [qr2,qz2] ...} for gi-saxs
                    format as {1: [qr1], 2: [qr2] ...} for saxs
                    format as {1: [qr1, qa1], 2: [qr2,qa2], ...] for ang-saxs
    rate: relaxation_rate
    qrate_fit_res: list of lmfit results as returned by get_q_rate_fit_general
    plot_index_range: optional [d1, d2] index range restricting the plotted x/y window
    Option:
        if power_variable = False, power =2 to fit q^2~rate,
        Otherwise, power is variable.
    show_fit:, bool, if False, not show the fit
    kwargs may contain 'uid' (used in the title/filename) and 'path' (save directory)
    Returns (fig, ax) if return_fig is True; the plot is always saved as a png.
    '''
    if 'uid' in kwargs.keys():
        uid = kwargs['uid']
    else:
        uid = 'uid'
    if 'path' in kwargs.keys():
        path = kwargs['path']
    else:
        path = ''
    (qr_label, qz_label, num_qz, num_qr, num_short,
     num_long, short_label, long_label,short_ulabel,
     long_ulabel,ind_long, master_plot,
     mastp) = get_short_long_labels_from_qval_dict(qval_dict, geometry=geometry)
    # fixed exponent used on the x axis (rate plotted against q**power)
    power = 2
    fig,ax = plt.subplots()
    plt.title(r'$Q^%s$'%(power) + '-Rate-%s_Fit'%(uid),fontsize=20, y =1.06)
    Nqz = num_short
    if Nqz!=1:
        ls = '--'
    else:
        ls=''
    # one data/fit curve per short-axis value (e.g. per qz)
    for i in range(Nqz):
        ind_long_i = ind_long[ i ]
        y = np.array( rate )[ind_long_i]
        x = long_label[ind_long_i]
        D0  = qrate_fit_res[i].best_values['D0']
        #print(i, x, y, D0 )
        if Nqz!=1:
            label=r'$q_z=%.5f$'%short_ulabel[i]
        else:
            label=''
        ax.plot(x**power,  y, marker = 'o', ls =ls, label=label)
        yfit = qrate_fit_res[i].best_fit
        if show_fit:
            # either extrapolate D0*q^power over the full range or
            # only overlay the fitted sub-range
            if plot_all_range:
                ax.plot(x**power, x**power*D0,  '-r')
            else:
                ax.plot( (x**power)[:len(yfit) ], yfit,  '-r')
            if show_text:
                txts = r'$D0: %.3e$'%D0 + r' $A^2$' + r'$s^{-1}$'
                dy=0.1
                ax.text(x =0.15, y=.65 -dy *i, s=txts, fontsize=14, transform=ax.transAxes)
    if Nqz!=1:legend = ax.legend(loc='best')
    if plot_index_range is not None:
        d1,d2 = plot_index_range
        d2 = min( len(x)-1, d2 )
        ax.set_xlim(  (x**power)[d1], (x**power)[d2]  )
        ax.set_ylim( y[d1],y[d2])
    if ylim is not None:
        ax.set_ylim( ylim )
    ax.set_ylabel('Relaxation rate 'r'$\gamma$'"($s^{-1}$)")
    ax.set_xlabel("$q^%s$"r'($\AA^{-2}$)'%power)
    fp = path + '%s_Q_Rate'%(uid) + '_fit.png'
    fig.savefig( fp, dpi=fig.dpi)
    fig.tight_layout()
    if return_fig:
        return fig,ax
def save_g2_fit_para_tocsv( fit_res, filename, path):
    '''Y.G. Dec 29, 2016,
    Save the best-fit g2 parameters (one row per q-ring) to a csv file.
    fit_res: list of lmfit results; filename/path give the output location.
    Returns the assembled pandas DataFrame.
    '''
    columns = list(fit_res[0].best_values.keys())
    # one row of best-fit values per fit result
    rows = [list(res.best_values.values()) for res in fit_res]
    df = DataFrame(np.array(rows, dtype=float))
    df.columns = columns
    out_file = os.path.join(path, filename)  # + '.csv')
    df.to_csv(out_file)
    print( "The g2 fitting parameters are saved in %s"%out_file)
    return df
def xsvsc(FD_sets, label_array, number_of_img, only_first_level= True, timebin_num=2, time_bin=None,
         max_cts=None, bad_images = None, threshold=None, imgsum=None, norm=None):
    """
    This function will provide the probability density of detecting photons
    for different integration times.
    The experimental probability density P(K) of detecting photons K is
    obtained by histogramming the speckle counts over an ensemble of
    equivalent pixels and over a number of speckle patterns recorded
    with the same integration time T under the same condition.
    Parameters
    ----------
    image_iterable : FD, a compressed eiger file by Multifile class
    label_array : array
        labeled array; 0 is background.
        Each ROI is represented by a distinct label (i.e., integer).
    number_of_img : int
        number of images (how far to go with integration times when finding
        the time_bin, using skxray.utils.geometric function)
    timebin_num : int, optional
        integration time; default is 2
    max_cts : int, optional
        the brightest pixel in any ROI in any image in the image set.
        defaults to using skxray.core.roi.roi_max_counts to determine
        the brightest pixel in any of the ROIs
    bad_images: array, optional
        the bad images number list, the XSVS will not analyze the binning image groups which involve any bad images
    threshold: float, optional
        If one image involves a pixel with intensity above threshold, such image will be considered as a bad image.
    Returns
    -------
    prob_k_all : array
        probability density of detecting photons
    prob_k_std_dev : array
        standard deviation of probability density of detecting photons
    Notes
    -----
    These implementation is based on following references
    References: text [1]_, text [2]_
    .. [1] <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
       <NAME> and <NAME> , "Photon statistics and speckle visibility
       spectroscopy with partially coherent x-rays" J. Synchrotron Rad.,
       vol 21, p 1288-1295, 2014.
    .. [2] <NAME>, <NAME>, <NAME>, <NAME> and
       <NAME> "Speckle-visibilty Spectroscopy: A tool to study
       time-varying dynamics" Rev. Sci. Instrum. vol 76, p 093110, 2005.
    There is an example in https://github.com/scikit-xray/scikit-xray-examples
    It will demonstrate the use of these functions in this module for
    experimental data.
    """
    if max_cts is None:
        max_cts = roi.roi_max_counts(FD_sets, label_array)
    # find the label's and pixel indices for ROI's
    labels, indices = roi.extract_label_indices(label_array)
    nopixels = len(indices)
    # number of ROI's
    u_labels = list(np.unique(labels))
    num_roi = len(u_labels)
    # create integration times
    if time_bin is None:time_bin = geometric_series(timebin_num, number_of_img)
    if only_first_level:
        time_bin = [1]
    # number of times in the time bin
    num_times = len(time_bin)
    # number of pixels per ROI
    num_pixels = np.bincount(labels, minlength=(num_roi+1))[1:]
    # probability density of detecting photons
    prob_k_all = np.zeros([num_times, num_roi], dtype=np.object)
    # square of probability density of detecting photons
    prob_k_pow_all = np.zeros_like(prob_k_all)
    # standard deviation of probability density of detecting photons
    prob_k_std_dev = np.zeros_like(prob_k_all)
    # get the bin edges for each time bin for each ROI
    bin_edges = np.zeros(prob_k_all.shape[0], dtype=prob_k_all.dtype)
    for i in range(num_times):
        bin_edges[i] = np.arange(max_cts*2**i)
    #start_time = time.time()  # used to log the computation time (optionally)
    bad_frame_list = bad_images
    if bad_frame_list is None:
        bad_frame_list=[]
    pixelist = indices
    fra_pix = np.zeros_like( pixelist, dtype=np.float64)
    # process each compressed file set independently, then average the
    # resulting histograms incrementally into prob_k_all
    for n, FD in enumerate(FD_sets):
        # timg maps a flattened detector index to (ROI-pixel index + 1); 0 = not in any ROI
        timg = np.zeros(    FD.md['ncols'] * FD.md['nrows']   , dtype=np.int32   )
        timg[pixelist] =   np.arange( 1, len(pixelist) + 1  )
        #print( n, FD, num_times, timebin_num, nopixels )
        # ring buffer of frames, one row per binning level
        buf =  np.zeros([num_times, timebin_num,  nopixels])
        # to track processing each time level
        track_level = np.zeros( num_times )
        track_bad_level = np.zeros( num_times )
        # to increment buffer
        cur = np.int_( np.full(num_times, timebin_num) )
        #cur =  np.full(num_times, timebin_num)
        # to track how many images processed in each level
        img_per_level = np.zeros(num_times, dtype=np.int64)
        prob_k = np.zeros_like(prob_k_all)
        prob_k_pow = np.zeros_like(prob_k_all)
        noframes= FD.end - FD.beg +1
        for  i in tqdm(range( FD.beg , FD.end )):
            # Ring buffer, a buffer with periodic boundary conditions.
            # Images must be keep for up to maximum delay in buf.
            #buf = np.zeros([num_times, timebin_num], dtype=np.object)  # matrix of buffers
            if i in bad_frame_list:
                # mark the whole frame as invalid; NaNs propagate into the binning
                fra_pix[:]= np.nan
                #print( 'here is a bad frmae--%i'%i )
            else:
                fra_pix[:]=0
                (p,v) = FD.rdrawframe(i)
                w = np.where( timg[p] )[0]
                pxlist = timg[ p[w] ] -1
                # optional per-frame (imgsum) and per-pixel (norm) normalization
                if imgsum is None:
                    if norm is None:
                        fra_pix[ pxlist] = v[w]
                    else:
                        fra_pix[ pxlist] = v[w]/ norm[pxlist]   #-1.0
                else:
                    if norm is None:
                        fra_pix[ pxlist] = v[w] / imgsum[i]
                    else:
                        fra_pix[ pxlist] = v[w]/ imgsum[i]/  norm[pxlist]
            #level =0
            cur[0] = 1 + cur[0]% timebin_num
            # read each frame
            # Put the image into the ring buffer.
            img_ = fra_pix
            # Put the ROI pixels into the ring buffer.
            #fra_pix[:]=0
            if threshold is not None:
                # discard frames containing any pixel above threshold
                if img_.max() >= threshold:
                    print ('bad image: %s here!'%n )
                    img_[:]=  np.nan
            buf[0, cur[0] - 1] = img_
            _process(num_roi, 0, cur[0] - 1, buf, img_per_level, labels,
                             max_cts, bin_edges[0], prob_k, prob_k_pow,track_bad_level)
            # check whether the number of levels is one, otherwise
            # continue processing the next level
            level = 1
            if number_of_img>1:
                processing=1
            else:
                processing=0
            if only_first_level:
                processing = 0
            # cascade the frame up through the time-binning levels: each
            # level sums two consecutive frames from the level below
            while processing:
                #print( 'here')
                if track_level[level]:
                    prev = 1 + (cur[level - 1] - 2) % timebin_num
                    cur[level] = 1 + cur[level] % timebin_num
                    bufa = buf[level-1,prev-1]
                    bufb=  buf[level-1,cur[level-1]-1]
                    buf[level, cur[level]-1] =  bufa + bufb
                    #print( buf[level, cur[level]-1]  )
                    track_level[level] = 0
                    _process(num_roi, level, cur[level]-1, buf, img_per_level,
                                 labels, max_cts, bin_edges[level], prob_k,
                                 prob_k_pow,track_bad_level)
                    level += 1
                    if level < num_times:
                        processing = 1
                    else:
                        processing = 0
                else:
                    track_level[level] = 1
                    processing = 0
        # incremental (running-mean) update over the file sets
        prob_k_all += (prob_k - prob_k_all)/(n + 1)
        prob_k_pow_all += (prob_k_pow - prob_k_pow_all)/(n + 1)
    prob_k_std_dev = np.power((prob_k_pow_all -
                               np.power(prob_k_all, 2)), .5)
    # replace any un-filled (still scalar) histogram cells with zero arrays
    for i in range(num_times):
        if  isinstance(prob_k_all[i,0], float ):
            for j in range( len(u_labels)):
                prob_k_all[i,j] = np.array(  [0] *    (len(bin_edges[i]) -1 )  )
                prob_k_std_dev[i,j] = np.array(  [0] *    (len(bin_edges[i]) -1 )  )
    return prob_k_all, prob_k_std_dev
<file_sep>import pickle as cpk
from skimage.draw import line_aa, line, polygon
from skbeam.core.utils import radial_grid, angle_grid
from chxanalys.chx_libs import (np, roi, time, datetime, os, get_events,
getpass, db, get_images,LogNorm, plt,tqdm, utils, Model,
multi_tau_lags, random, warnings)
from chxanalys.chx_libs import cmap_vge, cmap_albula, Javascript
from chxanalys.chx_generic_functions import (get_detector, get_fields, get_sid_filenames,
load_data, load_mask,get_fields, reverse_updown, ring_edges,get_avg_img,check_shutter_open,
apply_mask, show_img,check_ROI_intensity,run_time, plot1D, get_each_frame_intensity,
create_hot_pixel_mask,show_ROI_on_image,create_time_slice,save_lists,
save_arrays, psave_obj,pload_obj, get_non_uniform_edges,
get_meta_data, print_dict, save_dict_csv, read_dict_csv,
get_bad_frame_list, find_bad_pixels, mask_exclude_badpixel, trans_data_to_pd,
get_max_countc,find_uids , check_bad_uids, get_averaged_data_from_multi_res,
get_qval_dict, save_g2_general, get_g2_fit_general, plot_g2_general,
get_q_rate_fit_general, plot_q_rate_fit_general, save_g2_fit_para_tocsv,
update_qval_dict, update_roi_mask, combine_images, create_rectangle_mask, create_cross_mask, create_polygon_mask, check_lost_metadata,
get_fra_num_by_dose, get_multi_tau_lag_steps, get_series_g2_taus, create_user_folder,
get_current_pipeline_filename, get_current_pipeline_fullpath,save_current_pipeline,
filter_roi_mask, mask_badpixels,
validate_uid,
move_beamstop, get_today_date, get_print_uids, get_last_uids,get_base_all_filenames,create_ring_mask,
get_image_edge, get_image_with_roi,
extract_data_from_file, sgolay2d,
get_roi_nr, get_mass_center_one_roi,
get_echos, pad_length, save_array_to_tiff, load_pilatus, ls_dir,
get_fit_by_two_linear,get_cross_point,get_curve_turning_points, plot_fit_two_linear_fit,linear_fit,
find_index,
)
from chxanalys.XPCS_SAXS import (get_circular_average,save_lists,get_ring_mask, get_each_ring_mean_intensity,
plot_qIq_with_ROI, cal_g2, create_hot_pixel_mask,get_circular_average,get_t_iq,
get_t_iqc,multi_uids_saxs_xpcs_analysis,
plot_t_iqc, plot_circular_average, get_seg_from_ring_mask,
recover_img_from_iq,get_cirucular_average_std,
get_angular_mask, combine_two_roi_mask
)
from chxanalys.Two_Time_Correlation_Function import (show_C12, get_one_time_from_two_time,
get_four_time_from_two_time,rotate_g12q_to_rectangle)
from chxanalys.chx_compress import (combine_binary_files,
segment_compress_eigerdata, create_compress_header,
para_segment_compress_eigerdata,para_compress_eigerdata)
from chxanalys.chx_compress_analysis import ( compress_eigerdata, read_compressed_eigerdata,
Multifile,get_avg_imgc, get_each_frame_intensityc,
get_each_ring_mean_intensityc, mean_intensityc,cal_waterfallc,plot_waterfallc,
cal_each_ring_mean_intensityc,plot_each_ring_mean_intensityc, get_time_edge_avg_img
)
from chxanalys.SAXS import fit_form_factor,show_saxs_qmap
from chxanalys.chx_correlationc import ( cal_g2c,Get_Pixel_Arrayc,auto_two_Arrayc,get_pixelist_interp_iq,)
from chxanalys.chx_correlationp import (cal_g2p, auto_two_Arrayp)
from chxanalys.Create_Report import (create_pdf_report,
create_multi_pdf_reports_for_uids,create_one_pdf_reports_for_uids,
make_pdf_report, export_xpcs_results_to_h5, extract_xpcs_results_from_h5 )
from chxanalys.chx_olog import (LogEntry,Attachment, update_olog_uid, update_olog_id,
update_olog_uid_with_file)
from chxanalys.XPCS_GiSAXS import (get_qedge,get_qmap_label,get_qr_tick_label, get_reflected_angles,
convert_gisaxs_pixel_to_q, show_qzr_map, get_1d_qr, get_qzrmap, show_qzr_roi,get_each_box_mean_intensity,
plot_gisaxs_two_g2,plot_qr_1d_with_ROI,fit_qr_qz_rate,
multi_uids_gisaxs_xpcs_analysis,plot_gisaxs_g4,get_t_qrc, plot_t_qrc,
get_qzr_map, plot_qzr_map, get_gisaxs_roi, cal_1d_qr,
get_t_qrc, plot_qrt_pds )
from chxanalys.chx_specklecp import ( xsvsc, xsvsp, get_xsvs_fit,plot_xsvs_fit, save_KM,plot_g2_contrast,
get_binned_his_std, get_contrast, save_bin_his_std, get_his_std_from_pds )
from chxanalys.DataGonio import (qphiavg)
<file_sep>#python XPCS_XSVS_SAXS_Multi_2017_V4.py
from chxanalys.chx_packages import *
from chxanalys.chx_xpcs_xsvs_jupyter import run_xpcs_xsvs_single
def XPCS_XSVS_SAXS_Multi( start_time, stop_time, run_pargs, suf_ids = None,
                          uid_average= 'Au50_7p5PEGX1_vs_slow_120116',
                          ):
    '''Average previously-analyzed XPCS/XSVS results over multiple uids and
    re-plot / re-export / re-report the averaged quantities.

    Parameters
    ----------
    start_time, stop_time: strings, a time window passed to find_uids() to
        collect the runs to be averaged (ignored when suf_ids is given)
    run_pargs: dict, run configuration (scat_geometry, run_* switches,
        mask_path/mask_name, CYCLE, fit_g2_func, ...)
    suf_ids: optional (sids, uids, fuids) tuple; if None, looked up by time
    uid_average: string, label used for the output folder / report of the
        averaged result

    Side effects: creates a results directory, saves plots/csv/h5, builds a
    PDF report and optionally attaches it to the olog.

    NOTE(review): several names used below are not defined in this scope and
    presumably come from an interactive namespace or an earlier version of the
    script (e.g. `guids`, `setup_pargs`, `qval_dict`, `qr`, `center`,
    `pixel_mask`, `roi_mask`, `qth_interest`, `timeperframe`, `imgsa`,
    `uidstr`, `inc_x0`, `ticks`, `q_ring_center`, `data_dir_average`,
    `pdf_version`, `qr_1d_pds_label`, `bad_frame_list`) — confirm before
    running standalone. Also `data_dir` is used at the show_img call before it
    is assigned further down — looks like a latent NameError; verify.
    '''
    # Unpack the run configuration switches.
    scat_geometry = run_pargs['scat_geometry']
    force_compress = run_pargs['force_compress']
    para_compress = run_pargs['para_compress']
    run_fit_form = run_pargs['run_fit_form']
    run_waterfall = run_pargs['run_waterfall']
    run_t_ROI_Inten = run_pargs['run_t_ROI_Inten']
    #run_fit_g2 = run_pargs['run_fit_g2'],
    fit_g2_func = run_pargs['fit_g2_func']
    run_one_time = run_pargs['run_one_time']
    run_two_time = run_pargs['run_two_time']
    run_four_time = run_pargs['run_four_time']
    run_xsvs=run_pargs['run_xsvs']
    ###############################################################
    # XSVS is only implemented for transmission SAXS geometry.
    if scat_geometry !='saxs': #to be done for other types
        run_xsvs = False
    ###############################################################
    att_pdf_report = run_pargs['att_pdf_report']
    show_plot = run_pargs['show_plot']
    CYCLE = run_pargs['CYCLE']
    mask_path = run_pargs['mask_path']
    mask_name = run_pargs['mask_name']
    good_start = run_pargs['good_start']
    use_imgsum_norm = run_pargs['use_imgsum_norm']
    # Load the detector mask (reversed up-down to match image orientation).
    mask = load_mask(mask_path, mask_name, plot_ = False, image_name = '%s_mask'%mask_name, reverse=True )
    #mask *= pixel_mask
    # Kill a known bad detector column.
    mask[:,2069] =0 # False #Concluded from the previous results
    #np.save( data_dir + 'mask', mask)
    # NOTE(review): `data_dir` is only assigned below — this call likely
    # raises NameError if executed as written; confirm intended order.
    show_img(mask,image_name = '%s_mask'%uid_average, save=True, path=data_dir)
    mask_load=mask.copy()
    # Build the per-user results directory tree.
    username = getpass.getuser()
    data_dir0 = os.path.join('/XF11ID/analysis/', run_pargs['CYCLE'], username, 'Results/')
    os.makedirs(data_dir0, exist_ok=True)
    print('Results from this analysis will be stashed in the directory %s' % data_dir0)
    data_dir = os.path.join( data_dir0, uid_average +'/')
    os.makedirs(data_dir, exist_ok=True)
    uid_average = 'uid=' + uid_average
    # Collect the list of runs to average.
    if suf_ids is None:
        sids, uids, fuids = find_uids(start_time, stop_time)
    else:
        sids, uids, fuids = suf_ids
    print( uids )
    uid = uids[0]
    data_dir_ = data_dir
    uid_=uid_average
    ### For Load results
    multi_res = {}
    # NOTE(review): `guids` is undefined here — presumably should be `uids`;
    # confirm against the original script.
    for uid, fuid in zip(guids,fuids):
        multi_res[uid] = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%fuid, import_dir = data_dir0 + uid +'/' )
    # Get and Plot Averaged Data
    mkeys = list(multi_res.keys())
    uid = uid_average
    setup_pargs['uid'] = uid
    # Average the basic quantities across all loaded runs.
    avg_img = get_averaged_data_from_multi_res( multi_res, keystr='avg_img' )
    imgsum = get_averaged_data_from_multi_res( multi_res, keystr='imgsum' )
    if scat_geometry == 'saxs':
        q_saxs = get_averaged_data_from_multi_res( multi_res, keystr='q_saxs')
        iq_saxs = get_averaged_data_from_multi_res( multi_res, keystr='iq_saxs')
        qt = get_averaged_data_from_multi_res( multi_res, keystr='qt')
        iqst = get_averaged_data_from_multi_res( multi_res, keystr='iqst')
    elif scat_geometry == 'gi_saxs':
        # NOTE(review): '<KEY>' looks like a redacted/placeholder key string;
        # restore the real h5 key before use.
        qr_1d_pds = get_averaged_data_from_multi_res( multi_res, keystr='<KEY>')
        qr_1d_pds = trans_data_to_pd( qr_1d_pds, label= qr_1d_pds_label)
    if run_waterfall:
        wat = get_averaged_data_from_multi_res( multi_res, keystr='wat')
    if run_t_ROI_Inten:
        times_roi = get_averaged_data_from_multi_res( multi_res, keystr='times_roi')
        mean_int_sets = get_averaged_data_from_multi_res( multi_res, keystr='mean_int_sets')
    if run_one_time:
        # Average g2 across runs, save to csv and refit.
        g2 = get_averaged_data_from_multi_res( multi_res, keystr='g2' )
        taus = get_averaged_data_from_multi_res( multi_res, keystr='taus' )
        g2_pds = save_g2_general( g2, taus=taus,qr=np.array( list( qval_dict.values() ) )[:,0],
                                  uid= uid +'_g2.csv', path= data_dir, return_res=True )
        g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( g2, taus,
                    function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
                    fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                    guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
        g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename= uid +'_g2_fit_paras.csv', path=data_dir )
    if run_two_time:
        # Average the two-time matrix and the one-time curve derived from it.
        g12b = get_averaged_data_from_multi_res( multi_res, keystr='g12b',different_length= True )
        g2b = get_averaged_data_from_multi_res( multi_res, keystr='g2b' )
        tausb = get_averaged_data_from_multi_res( multi_res, keystr='tausb' )
        g2b_pds = save_g2_general( g2b, taus=tausb, qr= np.array( list( qval_dict.values() ) )[:,0],
                                   qz=None, uid=uid +'_g2b.csv', path= data_dir, return_res=True )
        g2_fit_resultb, taus_fitb, g2_fitb = get_g2_fit_general( g2b, tausb,
                    function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
                    fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                    guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
        g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb,
                    filename= uid + '_g2b_fit_paras.csv', path=data_dir )
    if run_four_time:
        g4 = get_averaged_data_from_multi_res( multi_res, keystr='g4' )
        taus4 = get_averaged_data_from_multi_res( multi_res, keystr='taus4' )
        g4_pds = save_g2_general( g4, taus=taus4, qr=np.array( list( qval_dict.values() ) )[:,0],
                                  qz=None, uid=uid +'_g4.csv', path= data_dir, return_res=True )
    if run_xsvs:
        contrast_factorL = get_averaged_data_from_multi_res( multi_res, keystr='contrast_factorL',different_length=False )
        times_xsvs = get_averaged_data_from_multi_res( multi_res, keystr='times_xsvs',different_length=False )
        cont_pds = save_arrays( contrast_factorL, label= times_xsvs, filename = '%s_contrast_factorL.csv'%uid,
                                path=data_dir,return_res=True )
    # Disabled alternative: re-derive the contrast from averaged histograms.
    if False:
        spec_kmean = get_averaged_data_from_multi_res( multi_res, keystr='spec_kmean' )
        spec_pds = get_averaged_data_from_multi_res( multi_res, keystr='spec_pds', different_length=False )
        times_xsvs = get_averaged_data_from_multi_res( multi_res, keystr='times_xsvs',different_length=False )
        spec_his, spec_std = get_his_std_from_pds( spec_pds, his_shapes=None)
        ML_val, KL_val,K_ = get_xsvs_fit( spec_his, spec_kmean, spec_std, max_bins=2,varyK= False, )
        contrast_factorL = get_contrast( ML_val)
        spec_km_pds = save_KM( spec_kmean, KL_val, ML_val, qs=q_ring_center,level_time=times_xsvs, uid=uid_average , path = data_dir_average )
        plot_xsvs_fit( spec_his, ML_val, KL_val, K_mean = spec_kmean, spec_std = spec_std,xlim = [0,15], vlim =[.9, 1.1],
                       uid=uid_average, qth= None, logy= True, times= times_xsvs, q_ring_center=q_ring_center, path=data_dir )
    # ---- Plot averaged results ----
    if scat_geometry =='saxs':
        show_saxs_qmap( avg_img, setup_pargs, width=600,vmin=.1, vmax=np.max(avg_img*.1), logs=True,
                        image_name= '%s_img_avg'%uid, save=True)
        plot_circular_average( q_saxs, iq_saxs, q_saxs, pargs=setup_pargs,
                               xlim=[q_saxs.min(), q_saxs.max()], ylim = [iq_saxs.min(), iq_saxs.max()] )
        plot_qIq_with_ROI( q_saxs, iq_saxs, qr, logs=True, uid=uid, xlim=[q_saxs.min(), q_saxs.max()],
                           ylim = [iq_saxs.min(), iq_saxs.max()], save=True, path=data_dir)
        plot1D( y = imgsum, title ='%s_img_sum_t'%uid, xlabel='Frame', colors='b',
                ylabel='Total_Intensity', legend='imgsum', save=True, path=data_dir)
        plot_t_iqc( qt, iqst, frame_edge=None, pargs=setup_pargs, xlim=[qt.min(), qt.max()],
                    ylim = [iqst.min(), iqst.max()], save=True )
        show_ROI_on_image( avg_img, roi_mask, center, label_on = False, rwidth =700, alpha=.9,
                           save=True, path=data_dir, uid=uid, vmin= np.min(avg_img), vmax= np.max(avg_img) )
    elif scat_geometry =='gi_saxs':
        show_img( avg_img, vmin=.1, vmax=np.max(avg_img*.1), logs=True,image_name= uidstr + '_img_avg', save=True, path=data_dir)
        plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique( np.array(list( qval_dict.values() ) )[:,0] ),
                             loglog=False, save=True, uid=uidstr, path = data_dir)
        show_qzr_roi( avg_img, roi_mask, inc_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uidstr )
    if run_waterfall:
        plot_waterfallc( wat, qth_interest, aspect=None,vmax= np.max(wat), uid=uid, save =True,
                         path=data_dir, beg= good_start)
    if run_t_ROI_Inten:
        plot_each_ring_mean_intensityc( times_roi, mean_int_sets, uid = uid, save=True, path=data_dir )
    if run_one_time:
        plot_g2_general( g2_dict={1:g2, 2:g2_fit}, taus_dict={1:taus, 2:taus_fit},vlim=[0.95, 1.05],
                         qval_dict = qval_dict, fit_res= g2_fit_result, geometry=scat_geometry,filename= uid +'_g2',
                         path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_fit')
        D0, qrate_fit_res = get_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], geometry=scat_geometry)
        plot_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], qrate_fit_res,
                                 geometry= scat_geometry,uid=uid, path= data_dir )
    if run_two_time:
        show_C12(g12b, q_ind= qth_interest, N1= 0, N2=min( len(imgsa) ,1000), vmin=1.01, vmax=1.25,
                 timeperframe=timeperframe,save=True,
                 path= data_dir, uid = uid )
        plot_g2_general( g2_dict={1:g2b, 2:g2_fitb}, taus_dict={1:tausb, 2:taus_fitb},vlim=[0.95, 1.05],
                         qval_dict=qval_dict, fit_res= g2_fit_resultb, geometry=scat_geometry,filename=uid+'_g2',
                         path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_b_fit')
    if run_two_time and run_one_time:
        plot_g2_general( g2_dict={1:g2, 2:g2b}, taus_dict={1:taus, 2:tausb},vlim=[0.95, 1.05],
                         qval_dict=qval_dict, g2_labels=['from_one_time', 'from_two_time'],
                         geometry=scat_geometry,filename=uid+'_g2_two_g2', path= data_dir, ylabel='g2', )
    if run_four_time:
        plot_g2_general( g2_dict={1:g4}, taus_dict={1:taus4},vlim=[0.95, 1.05], qval_dict=qval_dict, fit_res= None,
                         geometry=scat_geometry,filename=uid+'_g4',path= data_dir, ylabel='g4')
    if run_xsvs:
        plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
                          vlim=[0.8,2.0], qth = qth_interest, uid=uid,path = data_dir, legend_size=14)
        plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
                          vlim=[0.8,1.2], qth = None, uid=uid,path = data_dir, legend_size=4)
    # ---- Build metadata for the averaged pseudo-run and save it ----
    md = multi_res[mkeys[0]]['md']
    md['uid'] = uid
    md['suid'] = uid
    md['Measurement'] = uid
    md['beg'] = None
    md['end'] = None
    md['bad_frame_list'] = 'unknown'
    md['metadata_file'] = data_dir + 'md.csv-&-md.pkl'
    psave_obj( md, data_dir + '%s_md'%uid ) #save the setup parameters
    save_dict_csv( md, data_dir + '%s_md.csv'%uid, 'w')
    # ---- Collect everything into one export dict and write the h5 ----
    Exdt = {}
    if scat_geometry == 'gi_saxs':
        for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'qr_1d_pds'],
                        [md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, qr_1d_pds] ):
            Exdt[ k ] = v
    elif scat_geometry == 'saxs':
        # NOTE(review): the key list has 12 entries but the value list only 11
        # ('bad_frame_list' has no matching value) — zip silently drops it.
        for k,v in zip( ['md', 'q_saxs', 'iq_saxs','iqst','qt','roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
                        [md, q_saxs, iq_saxs, iqst, qt,roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, ] ):
            Exdt[ k ] = v
    if run_waterfall:Exdt['wat'] = wat
    if run_t_ROI_Inten:Exdt['times_roi'] = times_roi;Exdt['mean_int_sets']=mean_int_sets
    if run_one_time:
        for k,v in zip( ['taus','g2','g2_fit_paras'], [taus,g2,g2_fit_paras] ):Exdt[ k ] = v
    if run_two_time:
        for k,v in zip( ['tausb','g2b','g2b_fit_paras', 'g12b'], [tausb,g2b,g2b_fit_paras,g12b] ):Exdt[ k ] = v
    if run_four_time:
        for k,v in zip( ['taus4','g4'], [taus4,g4] ):Exdt[ k ] = v
    if run_xsvs:
        # NOTE(review): spec_kmean/spec_pds/spec_km_pds are only defined in the
        # disabled `if False:` branch above — confirm before enabling run_xsvs.
        for k,v in zip( ['spec_kmean','spec_pds','times_xsvs','spec_km_pds','contrast_factorL'],
                        [ spec_kmean,spec_pds,times_xsvs,spec_km_pds,contrast_factorL] ):Exdt[ k ] = v
        contr_pds = save_arrays( Exdt['contrast_factorL'], label= Exdt['times_xsvs'],
                                 filename = '%s_contr.csv'%uid, path=data_dir,return_res=True )
    export_xpcs_results_to_h5( uid + '_Res.h5', data_dir, export_dict = Exdt )
    #extract_dict = extract_xpcs_results_from_h5( filename = uid + '_Res.h5', import_dir = data_dir )
    ## Create PDF report for each uid
    pdf_out_dir = data_dir
    pdf_filename = "XPCS_Analysis_Report_for_%s%s.pdf"%(uid_average,pdf_version)
    if run_xsvs:
        pdf_filename = "XPCS_XSVS_Analysis_Report_for_%s%s.pdf"%(uid_average,pdf_version)
    #pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf"%(uid_average,'_2')
    make_pdf_report( data_dir, uid_average, pdf_out_dir, pdf_filename, username,
                     run_fit_form, run_one_time, run_two_time, run_four_time, run_xsvs, report_type = scat_geometry
                     )
    ### Attach each g2 result to the corresponding olog entry
    if att_pdf_report:
        # Proxy settings required to reach the olog service from the beamline.
        os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
        os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
        pname = pdf_out_dir + pdf_filename
        atch=[ Attachment(open(pname, 'rb')) ]
        try:
            update_olog_uid( uid= fuids[-1], text='Add XPCS Averaged Analysis PDF Report', attachments= atch )
        except:
            print("I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file."%pname)
            print( fuids[-1] )
    # The End!
# Disabled demo/driver: example configuration and invocation of
# XPCS_XSVS_SAXS_Multi, kept for reference. Never executes as written.
if False:
    start_time, stop_time = '2016-12-1 16:30:00', '2016-12-1 16:31:50' #for 10 nm, 20, for test purpose
    suf_ids = find_uids(start_time, stop_time)
    sp='test'
    uid_averages= [ sp+'_vs_test1_120116', sp+'_vs_test2_120116', sp+'_vs_test3_120116']
    # Full run-parameter dictionary; saxs entries active, gi_saxs entries
    # partially commented out.
    run_pargs= dict(
        scat_geometry = 'saxs',
        #scat_geometry = 'gi_saxs',
        force_compress = False, #True, #False, #True,#False,
        para_compress = True,
        run_fit_form = False,
        run_waterfall = True,#False,
        run_t_ROI_Inten = True,
        #run_fit_g2 = True,
        fit_g2_func = 'stretched',
        run_one_time = True,#False,
        run_two_time = True,#False,
        run_four_time = False, #True, #False,
        run_xsvs=True,
        att_pdf_report = True,
        show_plot = False,
        CYCLE = '2016_3',
        #if scat_geometry == 'gi_saxs':
        #mask_path = '/XF11ID/analysis/2016_3/masks/',
        #mask_name = 'Nov16_4M-GiSAXS_mask.npy',
        #elif scat_geometry == 'saxs':
        mask_path = '/XF11ID/analysis/2016_3/masks/',
        mask_name = 'Nov28_4M_SAXS_mask.npy',
        good_start = 5,
        #####################################for saxs
        uniformq = True,
        inner_radius= 0.005, #0.005 for 50 nmAu/SiO2, 0.006, #for 10nm/coralpor
        outer_radius = 0.04, #0.04 for 50 nmAu/SiO2, 0.05, #for 10nm/coralpor
        num_rings = 12,
        gap_ring_number = 6,
        number_rings= 1,
        ############################for gi_saxs
        #inc_x0 = 1473,
        #inc_y0 = 372,
        #refl_x0 = 1473,
        #refl_y0 = 730,
        qz_start = 0.025,
        qz_end = 0.04,
        qz_num = 3,
        gap_qz_num = 1,
        #qz_width = ( qz_end - qz_start)/(qz_num +1),
        qr_start = 0.0025,
        qr_end = 0.07,
        qr_num = 14,
        gap_qr_num = 5,
        definde_second_roi = True,
        qz_start2 = 0.04,
        qz_end2 = 0.050,
        qz_num2= 1,
        gap_qz_num2 = 1,
        qr_start2 = 0.002,
        qr_end2 = 0.064,
        qr_num2 = 10,
        gap_qr_num2 = 5,
        #qcenters = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1
        #width = 0.0002
        qth_interest = 1, #the intested single qth
        use_sqnorm = False,
        use_imgsum_norm = True,
        pdf_version = '_1' #for pdf report name
        )
    # Process the collected uids in groups of `step`, one averaged result per
    # entry of uid_averages.
    step =1
    Nt = len( uid_averages )
    for i in range( Nt ):
        t0=time.time()
        suf_idsi = suf_ids[0][i*step:(i+1)*step],suf_ids[1][i*step:(i+1)*step],suf_ids[2][i*step:(i+1)*step]
        XPCS_XSVS_SAXS_Multi( 0,0, run_pargs = run_pargs,
                              suf_ids=suf_idsi, uid_average= uid_averages[i])
        run_time(t0)
<file_sep>"""
Dec 10, 2015 Developed by Y.G.@CHX
<EMAIL>
This module is for the GiSAXS XPCS analysis
"""
from chxanalys.chx_generic_functions import *
from chxanalys.chx_compress import ( compress_eigerdata, read_compressed_eigerdata,init_compress_eigerdata, get_avg_imgc,Multifile)
from chxanalys.chx_correlationc import ( cal_g2c )
from chxanalys.chx_libs import ( colors, markers, colors_, markers_)
def get_gisaxs_roi( Qr, Qz, qr_map, qz_map, mask=None, qval_dict=None ):
    '''Y.G. 2016 Dec 31
    Get xpcs roi of gisaxs
    Parameters:
        Qr: list, = [qr_start , qr_end, qr_width, qr_num], corresponding to qr start, qr end, qr width, qr number
        Qz: list, = [qz_start , qz_end, qz_width, qz_num], corresponding to qz start, qz end, qz width, qz number
        qr_map: two-d array, the same shape as gisaxs frame, a qr map
        qz_map: two-d array, the same shape as gisaxs frame, a qz map
        mask: array, the scattering mask
        qval_dict: a dict, each key (a integer) with value as qr or (qr,qz) or (q//, q|-)
                   if not None, the new returned qval_dict will include the old one
    Return:
        roi_mask: array, the same shape as gisaxs frame, the label array of roi
        qval_dict, a dict, each key (a integer) with value as qr or (qr,qz) or (q//, q|-)
    '''
    # Bin edges/centers along qr and qz from the [start, end, width, num] specs.
    qr_edge, qr_center = get_qedge( *Qr )
    qz_edge, qz_center = get_qedge( *Qz )
    # Label each pixel by its qz bin and qr bin, then combine into one 2-D label map.
    label_array_qz = get_qmap_label(qz_map, qz_edge)
    label_array_qr = get_qmap_label(qr_map, qr_edge)
    label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr,qz_center, qr_center)
    # Fix: three roi.extract_label_indices(...) calls were made here whose
    # results were never used — removed as dead computation.
    if mask is None:
        mask=1
    roi_mask = label_array_qzr * mask
    # Map each label to its (qr, qz) center values (merged with any given dict).
    qval_dict = get_qval_dict( np.round(qr_center, 5) , np.round(qz_center,5), qval_dict = qval_dict )
    return roi_mask, qval_dict
############
##developed at Octo 11, 2016
def get_qr( data, Qr, Qz, qr, qz, mask = None ):
    '''Octo 12, 2016, Y.G.@CHX
    plot one-d of I(q) as a function of qr for different qz
    data: a image/Eiger frame
    Qr: info for qr, = qr_start , qr_end, qr_width, qr_num
    Qz: info for qz, = qz_start, qz_end, qz_width , qz_num
    qr: qr-map
    qz: qz-map
    mask: a mask for qr-1d integration, default is None
    Return: qr_1d, a dataframe, with columns as qr1, qz1 (float value), qr2, qz2,....
    Examples:
        #to make two-qz, from 0.018 to 0.046, width as 0.008,
        qz_width = 0.008
        qz_start = 0.018 + qz_width/2
        qz_end = 0.046 - qz_width/2
        qz_num= 2
        #to make one-qr, from 0.02 to 0.1, and the width is 0.1-0.012
        qr_width = 0.1-0.02
        qr_start = 0.02 + qr_width /2
        qr_end = 0.01 - qr_width /2
        qr_num = 1
        Qr = [qr_start , qr_end, qr_width, qr_num]
        Qz= [qz_start, qz_end, qz_width , qz_num ]
        new_mask[ :, 1020:1045] =0
        ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 )
        qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd )
        qr_1d = get_qr( avg_imgr, Qr, Qz, qr, qz, new_mask)
    '''
    # Unpack the qr/qz bin definitions and build edges + centers.
    qr_start , qr_end, qr_width, qr_num =Qr
    qz_start, qz_end, qz_width , qz_num =Qz
    qr_edge, qr_center = get_qedge(qr_start , qr_end, qr_width, qr_num )
    qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num )
    # Label each pixel by its qr bin (shared across all qz slices).
    label_array_qr = get_qmap_label( qr, qr_edge)
    #qr_1d ={}
    #columns=[]
    # One pass per qz slice: build the combined qz/qr label map, column-average,
    # then append a (qr, I) column pair to the output array.
    for i,qzc_ in enumerate(qz_center):
        #print (i,qzc_)
        label_array_qz = get_qmap_label( qz, qz_edge[i*2:2*i+2])
        #print (qzc_, qz_edge[i*2:2*i+2])
        label_array_qzr,qzc,qrc = get_qzrmap(label_array_qz, label_array_qr,qz_center, qr_center )
        #print (np.unique(label_array_qzr ))
        if mask is not None:label_array_qzr *= mask
        # Per detector column: number of contributing pixels in this qz slice.
        roi_pixel_num = np.sum( label_array_qzr, axis=0)
        qr_ = qr *label_array_qzr
        data_ = data*label_array_qzr
        # Column averages; columns with zero pixels produce NaN (0/0) and are
        # dropped below. NOTE(review): this is the divide-by-zero pattern that
        # cal_1d_qr later fixed with an explicit np.where — confirm intended.
        qr_ave = np.sum( qr_, axis=0)/roi_pixel_num
        data_ave = np.sum( data_, axis=0)/roi_pixel_num
        # Drop NaNs and sort both series by qr simultaneously.
        qr_ave,data_ave = zip(* sorted( zip( * [ qr_ave[~np.isnan(qr_ave)] , data_ave[~np.isnan( data_ave)] ]) ) )
        if i==0:
            # Number of interpolation points fixed by the first qz slice.
            N_interp = len( qr_ave )
        # Regular qr grid for this slice; intensity interpolated onto it.
        qr_ave_intp = np.linspace( np.min( qr_ave ), np.max( qr_ave ), N_interp)
        data_ave = np.interp( qr_ave_intp, qr_ave, data_ave)
        #columns.append( ['qr%s'%i, str(round(qzc_,4))] )
        if i==0:
            df = np.hstack( [ (qr_ave_intp).reshape( N_interp,1) ,
                              data_ave.reshape( N_interp,1) ] )
        else:
            df = np.hstack( [ df, (qr_ave_intp).reshape( N_interp,1) ,
                              data_ave.reshape( N_interp,1) ] )
    #df = DataFrame( df )
    #df.columns = np.concatenate( columns )
    # Despite the docstring, the return value is a plain 2-D numpy array of
    # alternating (qr, I) columns, not a DataFrame (see commented lines above).
    return df
########################
# get one-d of I(q) as a function of qr for different qz
#####################
def cal_1d_qr( data, Qr,Qz, qr, qz, inc_x0=None, mask=None, path=None, uid=None, setup_pargs=None, save = True,
               print_save_message=True):
    ''' Revised at July 18, 2017 by YG, to correct a divide by zero bug
    Dec 16, 2016, Y.G.@CHX
    calculate one-d of I(q) as a function of qr for different qz
    data: a dataframe
    Qr: info for qr, = qr_start , qr_end, qr_width, qr_num, the purpose of Qr is only for the defination of qr range (qr number does not matter)
    Qz: info for qz, = qz_start, qz_end, qz_width , qz_num
    qr: qr-map
    qz: qz-map
    inc_x0: x-center of incident beam
    mask: a mask for qr-1d integration
    setup_pargs: gives path, filename...
    Return: qr_1d, a dataframe, with columns as qr1, qz1 (float value), qz2,....
    Plot 1D cureve as a function of Qr for each Qz
    Examples:
        #to make two-qz, from 0.018 to 0.046, width as 0.008,
        qz_width = 0.008
        qz_start = 0.018 + qz_width/2
        qz_end = 0.046 - qz_width/2
        qz_num= 2
        #to make one-qr, from 0.02 to 0.1, and the width is 0.1-0.012
        qr_width = 0.1-0.02
        qr_start = 0.02 + qr_width /2
        qr_end = 0.01 - qr_width /2
        qr_num = 1
        Qr = [qr_start , qr_end, qr_width, qr_num]
        Qz= [qz_start, qz_end, qz_width , qz_num ]
        new_mask[ :, 1020:1045] =0
        qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd )
        qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask)
    A plot example:
        plot1D( x= qr_1d['qr1'], y = qr_1d['0.0367'], logxy=True )
    '''
    # Unpack bin definitions; build qr/qz edges and centers (quietly).
    qr_start , qr_end, qr_width, qr_num =Qr
    qz_start, qz_end, qz_width , qz_num =Qz
    qr_edge, qr_center = get_qedge(qr_start , qr_end, qr_width, qr_num,verbose=False )
    qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num,verbose=False )
    #print ('The qr_edge is: %s\nThe qr_center is: %s'%(qr_edge, qr_center))
    #print ('The qz_edge is: %s\nThe qz_center is: %s'%(qz_edge, qz_center))
    label_array_qr = get_qmap_label( qr, qr_edge)
    #qr_1d ={}
    columns=[]
    # One pass per qz slice: average the image over detector columns within
    # the slice, then interpolate onto the common qr grid from the first slice.
    for i,qzc_ in enumerate(qz_center):
        #print (i,qzc_)
        label_array_qz = get_qmap_label( qz, qz_edge[i*2:2*i+2])
        #print (qzc_, qz_edge[i*2:2*i+2])
        label_array_qzr,qzc,qrc = get_qzrmap(label_array_qz, label_array_qr,qz_center, qr_center )
        #print (np.unique(label_array_qzr ))
        if mask is not None:
            label_array_qzr *= mask
        # Per detector column: number of contributing pixels in this qz slice.
        roi_pixel_num = np.sum( label_array_qzr, axis=0)
        #print( label_array_qzr )
        qr_ = qr *label_array_qzr
        data_ = data*label_array_qzr
        # Only divide where pixels contribute — this is the July 2017
        # divide-by-zero fix mentioned in the docstring.
        w = np.where(roi_pixel_num)
        # NOTE(review): the next two zero-initializations are immediately
        # overwritten below — dead code kept for fidelity.
        qr_ave = np.zeros_like( roi_pixel_num, dtype= float )[w]
        data_ave = np.zeros_like( roi_pixel_num, dtype= float )[w]
        qr_ave = (np.sum( qr_, axis=0))[w]/roi_pixel_num[w]
        data_ave = (np.sum( data_, axis=0))[w]/roi_pixel_num[w]
        # Drop any remaining NaNs and sort both series by qr simultaneously.
        qr_ave, data_ave = zip(* sorted( zip( * [ qr_ave[~np.isnan(qr_ave)] , data_ave[~np.isnan( data_ave)] ]) ) )
        if i==0:
            # Common qr grid defined once from the first qz slice; all slices
            # share the single leading 'qr' column.
            N_interp = len( qr_ave )
            columns.append( ['qr'] )
            #qr_1d[i]= qr_ave_intp
            qr_ave_intp = np.linspace( np.min( qr_ave ), np.max( qr_ave ), N_interp)
        data_ave = np.interp( qr_ave_intp, qr_ave, data_ave)
        #qr_1d[i]= [qr_ave_intp, data_ave]
        columns.append( ['qz%s=%s'%( i, str(round(qzc_,4)) )] )
        if i==0:
            df = np.hstack( [ (qr_ave_intp).reshape( N_interp,1) ,
                              data_ave.reshape( N_interp,1) ] )
        else:
            df = np.hstack( [ df,
                              data_ave.reshape( N_interp,1) ] )
    # Wrap as a DataFrame: one 'qr' column plus one intensity column per qz.
    df = DataFrame( df )
    df.columns = np.concatenate( columns )
    if save:
        if path is None:
            path = setup_pargs['path']
        if uid is None:
            uid = setup_pargs['uid']
        filename = os.path.join(path, '%s_qr_1d.csv'% (uid) )
        df.to_csv(filename)
        if print_save_message:
            print( 'The qr_1d is saved in %s with filename as %s_qr_1d.csv'%(path, uid))
    return df
def get_t_qrc( FD, frame_edge, Qr, Qz, qr, qz, mask=None, path=None, uid=None, save=True, *argv,**kwargs):
    '''Get t-dependent qr
    Parameters
    ----------
    FD: a compressed imgs series handler
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    Qr, Qz: [start, end, width, num] bin definitions (see cal_1d_qr)
    qr, qz: qr-map and qz-map arrays
    mask: a image mask
    path, uid: output location/name of the csv; a dict with a 'path' key may
        alternatively be passed as setup_pargs=... via kwargs
    save: if True, write '%s_qrt_pds.csv' % uid under `path`
    Returns
    ---------
    qrt_pds: dataframe, with columns as [qr, qz0_fra_from_beg1_to_end1, qz0_fra_from_beg2_to_end2, ...
                                            qz1_fra_from_beg1_to_end1, qz1_fra_from_beg2_to_end2, ...
                                          ]
    '''
    Nt = len( frame_edge )
    qz_start, qz_end, qz_width , qz_num = Qz
    # qz_edge/qz_center are not used below; the call is kept because it also
    # validates the Qz specification.
    qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num, verbose=False )
    if uid is None:
        uid = 'uid'
    for i in range(Nt):
        t1, t2 = frame_edge[i]
        # Average the image over this frame window, then reduce to I(qr) per qz.
        avg_imgx = get_avg_imgc( FD, beg=t1, end=t2, sampling = 1, plot_ = False )
        qrti = cal_1d_qr( avg_imgx, Qr, Qz, qr, qz, mask = mask, save=False )
        if i == 0:
            # One 'qr' column plus one column per (frame window, qz slice).
            qrt_pds = np.zeros( [len(qrti), 1 + Nt * qz_num ] )
            columns = np.zeros( 1 + Nt * qz_num, dtype=object )
            columns[0] = 'qr'
            qrt_pds[:,0] = qrti['qr']
        for j in range(qz_num):
            coli = qrti.columns[1+j]
            qrt_pds[:, 1 + i + Nt*j] = qrti[ coli ]
            columns[ 1 + i + Nt*j ] = coli + '_fra_%s_to_%s'%( t1, t2 )
    qrt_pds = DataFrame( qrt_pds )
    qrt_pds.columns = columns
    if save:
        if path is None:
            # Bug fix: this branch previously read an undefined name
            # `setup_pargs` (NameError). Accept it via kwargs instead.
            setup_pargs = kwargs.get('setup_pargs', None)
            if setup_pargs is None:
                raise ValueError(
                    "get_t_qrc: give `path` (or pass setup_pargs={'path': ...}) when save=True.")
            path = setup_pargs['path']
        filename = os.path.join(path, '%s_qrt_pds.csv'% (uid) )
        qrt_pds.to_csv(filename)
        print( 'The qr~time is saved in %s with filename as %s_qrt_pds.csv'%(path, uid))
    return qrt_pds
def plot_qrt_pds( qrt_pds, frame_edge, qz_index = 0, uid = 'uid', path = '',fontsize=8, *argv,**kwargs):
    '''Y.G. Jan 04, 2017
    plot t-dependent qr
    Parameters
    ----------
    qrt_pds: dataframe, with columns as [qr, qz0_fra_from_beg1_to_end1, qz0_fra_from_beg2_to_end2, ...
                                            qz1_fra_from_beg1_to_end1, qz1_fra_from_beg2_to_end2, ...
                                          ]
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    qz_index, if = integer, e.g. =0, only plot the qr~t for qz0
              if None, plot all qzs
    uid, path: used for the plot title and the saved png filename
    fontsize: legend font size
    Optional kwargs: 'xlim'/'ylim' to clip the axes.
    Returns: None (saves '%s_Iq_t.png' % uid under `path`)
    '''
    fig,ax = plt.subplots(figsize=(8, 6))
    cols = np.array( qrt_pds.columns )
    Nt = len( frame_edge )
    #num_qz = int( (len( cols ) -1 ) /Nt )
    qr = qrt_pds['qr']
    # Columns are grouped per qz: each qz slice owns Nt consecutive columns.
    if qz_index is None:
        r = range( 1, len(cols ) )
    else:
        r = range( 1 + qz_index*Nt, 1 + (1+qz_index) * Nt )
    for i in r:
        y = qrt_pds[ cols[i] ]
        # markers/colors come from chxanalys.chx_libs module-level lists.
        ax.semilogy(qr, y, label= cols[i], marker = markers[i], color=colors[i], ls='-')
    #ax.set_xlabel("q in pixel")
    ax.set_xlabel(r'$Q_r$' + r'($\AA^{-1}$)')
    ax.set_ylabel("I(q)")
    if 'xlim' in kwargs.keys():
        ax.set_xlim( kwargs['xlim'] )
    if 'ylim' in kwargs.keys():
        ax.set_ylim( kwargs['ylim'] )
    ax.legend(loc = 'best', fontsize=fontsize)
    title = ax.set_title('%s_Iq_t'%uid)
    title.set_y(1.01)
    fp = path + '%s_Iq_t'%uid + '.png'
    fig.savefig( fp, dpi=fig.dpi)
def plot_t_qrc( qr_1d, frame_edge, save=False, pargs=None,fontsize=8, *argv,**kwargs):
    '''plot t-dependent qr
    Parameters
    ----------
    qr_1d: array, with shape as time length, frame_edge; first column is qr,
        the remaining columns are I(qr) for each frame region
    frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ]
    save: save the plot
        if save, all the following paramters are given in argv
        {
         'path':
         'uid': }
    Optional kwargs: 'xlim'/'ylim' to clip the axes.
    Returns: None
    '''
    fig,ax = plt.subplots(figsize=(8, 6))
    Nt = qr_1d.shape[1]
    q = qr_1d[:,0]
    for i in range( Nt-1 ):
        t1,t2 = frame_edge[i]
        ax.semilogy(q, qr_1d[:,i+1], 'o-', label="frame: %s--%s"%( t1,t2) )
    #ax.set_xlabel("q in pixel")
    ax.set_xlabel(r'$Q_r$' + r'($\AA^{-1}$)')
    ax.set_ylabel("I(q)")
    if 'xlim' in kwargs.keys():
        ax.set_xlim( kwargs['xlim'] )
    if 'ylim' in kwargs.keys():
        ax.set_ylim( kwargs['ylim'] )
    ax.legend(loc = 'best', fontsize=fontsize)
    # Bug fix: pargs defaults to None but was dereferenced unconditionally
    # (TypeError); only read it when given.
    uid = pargs['uid'] if pargs is not None else 'uid'
    title = ax.set_title('uid= %s--t~I(q)'%uid)
    title.set_y(1.01)
    if save:
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        path = pargs['path']
        uid = pargs['uid']
        #fp = path + 'uid= %s--Iq~t-'%uid + CurTime + '.png'
        fp = path + 'uid=%s--Iq-t-'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        # Bug fix: this previously referenced an undefined name `iqs`
        # (NameError when save=True); save the qr_1d data itself instead.
        save_arrays( qr_1d,
                     label= ['q_A-1']+ ['Fram-%s-%s'%(t[0],t[1]) for t in frame_edge[:Nt-1]],
                     filename='uid=%s-q-Iqt'%uid, path= path )
##########################################
###Functions for GiSAXS
##########################################
def make_gisaxs_grid( qr_w= 10, qz_w = 12, dim_r =100,dim_z=120):
    ''' Dec 16, 2015, Y.G.@CHX
    Build a rectangular integer label grid of shape (dim_z, dim_r): pixels are
    partitioned into qz_w x qr_w tiles, each tile labeled 1, 2, 3, ...
    (label order: qr tiles outer, qz tiles inner).

    Parameters
    ----------
    qr_w: tile width along the qr direction (columns, axis 1)
    qz_w: tile width along the qz direction (rows, axis 0)
    dim_r, dim_z: grid dimensions along qr and qz

    Returns
    -------
    y: int array of shape (dim_z, dim_r) with tile labels.
    '''
    y, x = np.indices( [dim_z,dim_r] )
    # Bug fix: was `int(dim_r/qp_w)` — `qp_w` is undefined (NameError).
    Nr = int(dim_r/qr_w)
    Nz = int(dim_z/qz_w)
    ind = 1
    for i in range(0,Nr):
        for j in range(0,Nz):
            # Bug fix: axis order — y has shape (dim_z, dim_r), so rows
            # (axis 0) must be sliced with the qz width and columns (axis 1)
            # with the qr width; the original slices were swapped and clipped.
            y[ qz_w*j:qz_w*(j+1), qr_w*i: qr_w*(i+1)]= ind
            ind += 1
    return y
###########################################
#for Q-map, convert pixel to Q
###########################################
def get_incident_angles( inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75,75], Lsd=5.0):
    '''
    Dec 16, 2015, Y.G.@CHX
    Compute the incident angle (alphai) and the tilt angle (phi) from the
    direct-beam and reflected-beam positions on the detector.

    Parameters
    ----------
    inc_x0, inc_y0 : incident (direct) beam center, pixels
    refl_x0, refl_y0 : reflected beam position on the detector, pixels
    pixelsize : [x, y] pixel size in microns (75 um for Eiger4M)
    Lsd : sample-to-detector distance in meters (values >= 1000 are
          treated as millimeters and converted)

    Returns
    -------
    alphai, phi : angles in radians
    '''
    # Values >= 1000 are assumed to be millimeters; convert to meters.
    if Lsd >= 1000:
        Lsd = Lsd / 1000.
    pixel_x, pixel_y = pixelsize
    # Beam offsets on the detector, converted from pixels to meters
    # (pixel size is specified in microns).
    dx = (inc_x0 - refl_x0) * pixel_x * 1e-6
    dy = (refl_y0 - inc_y0) * pixel_y * 1e-6
    phi = np.arctan2(dx, dy)
    # Specular condition: the reflected beam sits at twice the incident angle.
    alphai = np.arctan2(dy, Lsd) / 2.
    return alphai, phi
def get_reflected_angles(inc_x0, inc_y0, refl_x0, refl_y0, thetai=0.0,
                         pixelsize=[75,75], Lsd=5.0,dimx = 2070.,dimy=2167.):
    ''' Dec 16, 2015, Y.G.@CHX
    Compute per-pixel exit angles for a GiSAXS detector image.

    Parameters
    ----------
    inc_x0, inc_y0 : incident beam center, pixels
    refl_x0, refl_y0 : reflected beam position on the detector, pixels
    thetai : in-plane incident angle (radians)
    pixelsize : [x, y] pixel size in microns (75 um for Eiger4M)
    Lsd : sample-to-detector distance in meters (>= 1000 treated as mm)
    dimx, dimy : detector image size (2070 x 2167 for Eiger4M)

    Returns
    -------
    alphaf : out-of-plane exit angle per pixel
    thetaf : in-plane exit angle per pixel
    alphai, phi : incident and tilt angles (scalars)
    '''
    # Treat distances given in millimeters (>= 1000) as mm and convert.
    if Lsd >= 1000:
        Lsd = Lsd / 1000.
    alphai, phi = get_incident_angles( inc_x0, inc_y0, refl_x0, refl_y0, pixelsize, Lsd)
    print ('The incident_angle (alphai) is: %s'%(alphai* 180/np.pi))
    pix_x, pix_y = pixelsize
    row_idx, col_idx = np.indices( [int(dimy), int(dimx)] )
    # Out-of-plane exit angle, measured from the incident-beam pixel row.
    alphaf = np.arctan2( (row_idx - inc_y0) * pix_y * 1e-6, Lsd ) - alphai
    # In-plane exit angle, measured from the incident-beam pixel column.
    thetaf = np.arctan2( (col_idx - inc_x0) * pix_x * 1e-6, Lsd ) / 2 - thetai
    return alphaf, thetaf, alphai, phi
def convert_gisaxs_pixel_to_q( inc_x0, inc_y0, refl_x0, refl_y0,
                               pixelsize=[75,75], Lsd=5.0,dimx = 2070.,dimy=2167.,
                               thetai=0.0, lamda=1.0 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Convert every detector pixel of a GiSAXS image into reciprocal-space
    coordinates.

    Parameters
    ----------
    inc_x0, inc_y0 : incident beam center, pixels
    refl_x0, refl_y0 : reflected beam position on the detector, pixels
    pixelsize : [x, y] pixel size in microns (75 um for Eiger4M)
    Lsd : sample-to-detector distance in meters
    dimx, dimy : detector image size (2070 x 2167 for Eiger4M)
    thetai : in-plane incident angle (radians)
    lamda : wavelength in angstrom

    Returns
    -------
    qx, qy, qr, qz : 2-D arrays in inverse angstrom, where
    qr = sqrt(qx**2 + qy**2) is the parallel component and qz the
    out-of-plane component.
    '''
    alphaf, thetaf, alphai, phi = get_reflected_angles( inc_x0, inc_y0, refl_x0, refl_y0, thetai, pixelsize, Lsd, dimx, dimy)
    wave_factor = 2 * np.pi / lamda
    # Momentum-transfer components before correcting for the tilt phi.
    qx = np.cos(alphaf) * np.cos(2 * thetaf) - np.cos(alphai) * np.cos(2 * thetai)
    qy_lab = np.cos(alphaf) * np.sin(2 * thetaf) - np.cos(alphai) * np.sin(2 * thetai)
    qz_lab = np.sin(alphaf) + np.sin(alphai)
    # Rotate (qy, qz) by the tilt angle phi.
    qy = qz_lab * np.sin(phi) + qy_lab * np.cos(phi)
    qz = qz_lab * np.cos(phi) - qy_lab * np.sin(phi)
    qr = np.sqrt(qx ** 2 + qy ** 2)
    return qx * wave_factor, qy * wave_factor, qr * wave_factor, qz * wave_factor
def get_qedge( qstart,qend,qwidth,noqs,verbose=True ):
    ''' July 18, 2017 Revised by Y.G.@CHX,
    Add print info for noqs=1
    Dec 16, 2015, Y.G.@CHX
    DOCUMENT get_qedge( )
    Build q-ring edges and centers for ROI definition.
    give qstart,qend,qwidth,noqs
    return a qedge by giving the noqs, qstart,qend,qwidth.
    a qcenter, which is center of each qedge
    qedges is a flat array [lo0, hi0, lo1, hi1, ...]; qcenter holds the
    midpoint of each (lo, hi) pair.
    KEYWORD: None '''
    import numpy as np
    if noqs!=1:
        # Spread noqs rings of width qwidth evenly over [qstart, qend];
        # the leftover span divided by (noqs-1) is the gap between rings.
        spacing = (qend - qstart - noqs* qwidth )/(noqs-1) # spacing between rings
        # roi.ring_edges — presumably scikit-beam's roi module; verify import at file top.
        qedges = (roi.ring_edges(qstart,qwidth,spacing, noqs)).ravel()
        qcenter = ( qedges[::2] + qedges[1::2] )/2
    else:
        # Single ring: qend is ignored, the ring is [qstart, qstart+qwidth].
        spacing = 0
        qedges = (roi.ring_edges(qstart,qwidth,spacing, noqs)).ravel()
        #qedges = np.array( [qstart, qend] )
        qcenter = [( qedges[1] + qedges[0] )/2]
        if verbose:
            print("Since noqs=1, the qend is actually defined by qstart + qwidth.")
    return qedges, qcenter
def get_qedge2( qstart,qend,qwidth,noqs, ):
    ''' DOCUMENT make_qlist( )
    Build q-bin edges centred on noqs equally spaced q values.
    give qstart,qend,qwidth,noqs
    return a qedge by giving the noqs, qstart,qend,qwidth.
    a qcenter, which is center of each qedge
    The edge array interleaves lower and upper edges:
    [c0-w/2, c0+w/2, c1-w/2, c1+w/2, ...]
    KEYWORD: None '''
    import numpy as np
    centers = np.linspace(qstart, qend, noqs)
    edges = np.zeros(2 * noqs)
    # Even slots hold the lower edges, odd slots the upper edges.
    edges[0::2] = centers - qwidth / 2
    edges[1::2] = centers + qwidth / 2
    return edges, centers
###########################################
#for plot Q-map
###########################################
def get_qmap_label( qmap, qedge ):
    '''
    April 20, 2016, Y.G.@CHX
    Bin a q map into an integer label array using pairs of q edges.
    Pixels whose q falls inside the k-th (lower, upper) edge pair get
    label k+1; pixels in the gaps between pairs get label 0 (background).
    '''
    import numpy as np
    flat_edges = np.atleast_2d(np.asarray(qedge)).ravel()
    bins = np.digitize(qmap.ravel(), flat_edges, right=False)
    bins = np.int_(bins)
    # Odd digitize bins lie inside an edge pair; even bins are gaps.
    labels = (np.where(bins % 2 != 0, bins, 0) + 1) // 2
    return labels.reshape(qmap.shape)
def get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center ):
    '''April 20, 2016, Y.G.@CHX
    Combine a qz label array and a qr label array into a single label
    array with one consecutive label (1, 2, 3, ...) per (qz, qr) bin.

    Returns
    -------
    combined : int array, same shape as the inputs (0 = background)
    qz_list  : array, the qz_center value attached to each combined label
    qr_list  : array, the qr_center value attached to each combined label
    '''
    shifted_qr = np.zeros(label_array_qr.shape)
    nonzero = np.where(label_array_qr != 0)
    # Offset qr labels by 1e4 so each qz*qr product is unique per bin pair.
    shifted_qr[nonzero] = label_array_qr[nonzero] + 1E4
    combined = label_array_qz * shifted_qr
    # [1:] drops the background (0) entry of each unique list.
    unique_combined = np.unique(combined)[1:]
    unique_qz = np.unique(label_array_qz)[1:]
    unique_qr = np.unique(label_array_qr)[1:]
    relabeled = np.zeros_like(combined)
    new_labels = np.arange(1, len(unique_combined) + 1)
    qz_list = list(qz_center) * len(unique_qr)
    qr_list = [[qr_center[i]] * len(unique_qz) for i in range(len(unique_qr))]
    # Map every raw product value onto its consecutive label.
    for k, raw in enumerate(unique_combined):
        relabeled.ravel()[np.where(combined.ravel() == raw)[0]] = new_labels[k]
    return np.int_(relabeled), np.array(qz_list), np.concatenate(np.array(qr_list))
def show_label_array_on_image(ax, image, label_array, cmap=None,norm=None, log_img=True,alpha=0.3,
                              imshow_cmap='gray', **kwargs): #norm=LogNorm(),
    """
    This will plot the required ROI's(labeled array) on the image
    Additional kwargs are passed through to `ax.imshow`.
    If `vmin` is in kwargs, it is clipped to minimum of 0.5.
    Parameters
    ----------
    ax : Axes
    The `Axes` object to add the artist too
    image : array
    The image array
    label_array : array
    Expected to be an unsigned integer array. 0 is background,
    positive integers label region of interest
    cmap : str or colormap, optional
    Color map to use for plotting the label_array, defaults to 'None'
    imshow_cmap : str or colormap, optional
    Color map to use for plotting the image, defaults to 'gray'
    norm : str, optional
    Normalize scale data, defaults to 'Lognorm()'
    Returns
    -------
    im : AxesImage
    The artist added to the axes
    im_label : AxesImage
    The artist added to the axes
    """
    ax.set_aspect('equal')
    if log_img:
        # NOTE(review): LogNorm(norm) passes `norm` as LogNorm's first
        # positional argument (vmin) — confirm this is intended rather
        # than LogNorm() with `norm` forwarded separately.
        im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=LogNorm(norm),**kwargs) #norm=norm,
    else:
        im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=norm,**kwargs) #norm=norm,
    # Overlay the ROI labels semi-transparently on top of the image.
    im_label = mpl_plot.show_label_array(ax, label_array, cmap=cmap, norm=norm, alpha=alpha,
                                         **kwargs) # norm=norm,
    return im, im_label
def show_qz(qz):
    '''Dec 16, 2015, Y.G.@CHX
    Display a qz map as an image with a colorbar.
    '''
    fig, ax = plt.subplots()
    img = ax.imshow(qz, origin='lower', cmap='viridis', vmin=qz.min(), vmax=qz.max())
    fig.colorbar(img)
    ax.set_title('Q-z')
def show_qr(qr):
    '''Dec 16, 2015, Y.G.@CHX
    Display a qr map as an image with a colorbar.
    '''
    fig, ax = plt.subplots()
    img = ax.imshow(qr, origin='lower', cmap='viridis', vmin=qr.min(), vmax=qr.max())
    fig.colorbar(img)
    ax.set_title('Q-r')
def show_alphaf(alphaf,):
    '''Dec 16, 2015, Y.G.@CHX
    Display the exit-angle (alphaf) map, converted to degrees,
    clipped to the color range [-1, 1.5] degrees.
    '''
    degrees = alphaf * 180 / np.pi
    fig, ax = plt.subplots()
    img = ax.imshow(degrees, origin='lower', cmap='viridis', vmin=-1, vmax=1.5)
    fig.colorbar(img)
    ax.set_title('alphaf')
def get_1d_qr( data, Qr,Qz, qr, qz, inc_x0, mask=None, show_roi=True,
               ticks=None, alpha=0.3, loglog=False, save=True, setup_pargs=None ):
    '''Dec 16, 2015, Y.G.@CHX
    plot one-d of I(q) as a function of qr for different qz
    data: a dataframe
    Qr: info for qr, = qr_start , qr_end, qr_width, qr_num
    Qz: info for qz, = qz_start, qz_end, qz_width , qz_num
    qr: qr-map
    qz: qz-map
    inc_x0: x-center of incident beam
    mask: a mask for qr-1d integration
    show_roi: boolean, if ture, show the interest ROI
    ticks: ticks for the plot, = zticks, zticks_label, rticks, rticks_label
    alpha: transparency of ROI
    loglog: if True, plot in log-log scale
    setup_pargs: gives path, filename...
    Return: qr_1d, a dataframe, with columns as qr1, qz1 (float value), qr2, qz2,....
    Plot 1D cureve as a function of Qr for each Qz
    Examples:
    #to make two-qz, from 0.018 to 0.046, width as 0.008,
    qz_width = 0.008
    qz_start = 0.018 + qz_width/2
    qz_end = 0.046 - qz_width/2
    qz_num= 2
    #to make one-qr, from 0.02 to 0.1, and the width is 0.1-0.012
    qr_width = 0.1-0.02
    qr_start = 0.02 + qr_width /2
    qr_end = 0.01 - qr_width /2
    qr_num = 1
    Qr = [qr_start , qr_end, qr_width, qr_num]
    Qz= [qz_start, qz_end, qz_width , qz_num ]
    new_mask[ :, 1020:1045] =0
    ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 )
    qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd )
    qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8)
    A plot example:
    plot1D( x= qr_1d['qr1'], y = qr_1d['0.0367'], logxy=True )
    '''
    qr_start , qr_end, qr_width, qr_num =Qr
    qz_start, qz_end, qz_width , qz_num =Qz
    qr_edge, qr_center = get_qedge(qr_start , qr_end, qr_width, qr_num )
    qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num )
    print ('The qr_edge is: %s\nThe qr_center is: %s'%(qr_edge, qr_center))
    print ('The qz_edge is: %s\nThe qz_center is: %s'%(qz_edge, qz_center))
    label_array_qr = get_qmap_label( qr, qr_edge)
    if show_roi:
        # Build the combined (qz, qr) ROI once, just for visualization.
        label_array_qz0 = get_qmap_label( qz , qz_edge)
        label_array_qzr0,qzc0,qrc0 = get_qzrmap(label_array_qz0, label_array_qr,qz_center, qr_center )
        if mask is not None:label_array_qzr0 *= mask
        #data_ = data*label_array_qzr0
        show_qzr_roi( data,label_array_qzr0, inc_x0, ticks, alpha)
    fig, ax = plt.subplots()
    qr_1d ={}
    columns=[]
    # One pass per qz slice: average I and qr column-wise inside the ROI.
    for i,qzc_ in enumerate(qz_center):
        #print (i,qzc_)
        label_array_qz = get_qmap_label( qz, qz_edge[i*2:2*i+2])
        #print (qzc_, qz_edge[i*2:2*i+2])
        label_array_qzr,qzc,qrc = get_qzrmap(label_array_qz, label_array_qr,qz_center, qr_center )
        #print (np.unique(label_array_qzr ))
        if mask is not None:label_array_qzr *= mask
        roi_pixel_num = np.sum( label_array_qzr, axis=0)
        qr_ = qr *label_array_qzr
        data_ = data*label_array_qzr
        qr_ave = np.sum( qr_, axis=0)/roi_pixel_num
        data_ave = np.sum( data_, axis=0)/roi_pixel_num
        # Drop NaN columns (outside ROI) and sort by qr.
        qr_ave,data_ave = zip(* sorted( zip( * [ qr_ave[~np.isnan(qr_ave)] , data_ave[~np.isnan( data_ave)] ]) ) )
        if i==0:
            N_interp = len( qr_ave )
        # Resample every qz slice onto a common qr grid of N_interp points.
        qr_ave_intp = np.linspace( np.min( qr_ave ), np.max( qr_ave ), N_interp)
        data_ave = np.interp( qr_ave_intp, qr_ave, data_ave)
        qr_1d[i]= [qr_ave_intp, data_ave]
        columns.append( ['qr%s'%i, str(round(qzc_,4))] )
        if loglog:
            ax.loglog(qr_ave_intp, data_ave, '--o', label= 'qz= %f'%qzc_, markersize=1)
        else:
            ax.plot( qr_ave_intp, data_ave, '--o', label= 'qz= %f'%qzc_)
        if i==0:
            df = np.hstack( [ (qr_ave_intp).reshape( N_interp,1) ,
                              data_ave.reshape( N_interp,1) ] )
        else:
            df = np.hstack( [ df, (qr_ave_intp).reshape( N_interp,1) ,
                              data_ave.reshape( N_interp,1) ] )
    #ax.set_xlabel( r'$q_r$', fontsize=15)
    ax.set_xlabel(r'$q_r$'r'($\AA^{-1}$)', fontsize=18)
    ax.set_ylabel('$Intensity (a.u.)$', fontsize=18)
    ax.set_yscale('log')
    #ax.set_xscale('log')
    ax.set_xlim( qr.max(),qr.min() )
    ax.legend(loc='best')
    df = DataFrame( df )
    df.columns = np.concatenate( columns )
    if save:
        #dt =datetime.now()
        #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
        path = setup_pargs['path']
        uid = setup_pargs['uid']
        #filename = os.path.join(path, 'qr_1d-%s-%s.csv' % (uid,CurTime))
        filename = os.path.join(path, 'uid=%s--qr_1d.csv'% (uid) )
        df.to_csv(filename)
        print( 'The qr_1d is saved in %s with filename as uid=%s--qr_1d.csv'%(path, uid))
        #fp = path + 'Uid= %s--Circular Average'%uid + CurTime + '.png'
        fp = path + 'uid=%s--qr_1d-'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    #plt.show()
    return df
def plot_qr_1d_with_ROI( qr_1d, qr_center, loglog=False, save=True, uid='uid', path='' ):
    '''Dec 16, 2015, Y.G.@CHX
    plot one-d of I(q) as a function of qr with ROI
    qr_1d: a dataframe for qr_1d; column 0 is qr, the remaining columns
           are I(qr) for each qz (column name = qz value)
    qr_center: the center of qr; a vertical line is drawn at each value
    loglog: if True, plot in log-log scale
    save: if True, write '<uid>_Qr_ROI.png' into `path`
    Return:
    Plot 1D cureve with ROI
    A plot example:
    plot_1d_qr_with_ROI( df, qr_center, loglog=False, save=True )
    '''
    fig, ax = plt.subplots()
    Ncol = len( qr_1d.columns )
    Nqr = Ncol%2  # NOTE(review): unused; possibly intended Ncol//2 — verify
    # NOTE(review): [1::1] takes every data column; the trailing comment
    # suggests [1::2] (every other column) was once used — confirm intent.
    qz_center = qr_1d.columns[1::1]#qr_1d.columns[1::2]
    Nqz = len(qz_center)
    for i,qzc_ in enumerate(qz_center):
        x= qr_1d[ qr_1d.columns[0] ]
        y= qr_1d[qzc_]
        if loglog:
            ax.loglog(x,y, '--o', label= 'qz= %s'%qzc_, markersize=1)
        else:
            ax.plot( x,y, '--o', label= 'qz= %s'%qzc_)
    # Mark each qr ROI center with a vertical line.
    for qrc in qr_center:
        ax.axvline( qrc )#, linewidth = 5 )
    #ax.set_xlabel( r'$q_r$', fontsize=15)
    ax.set_xlabel(r'$q_r$'r'($\AA^{-1}$)', fontsize=18)
    ax.set_ylabel('$Intensity (a.u.)$', fontsize=18)
    ax.set_yscale('log')
    #ax.set_xscale('log')
    ax.set_xlim( x.max(), x.min() )
    ax.legend(loc='best')
    ax.set_title( '%s_Qr_ROI'%uid)
    if save:
        fp = path + '%s_Qr_ROI'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    #plt.show()
def interp_zeros( data ):
    '''Fill interior zeros of a 2-D array by linear interpolation along
    the flattened (row-major) axis; zeros before the first and after the
    last nonzero element are left untouched.
    NOTE: works on a ravel() view, so `data` itself is modified in place
    when it is contiguous (same behaviour as the original).
    '''
    from scipy.interpolate import interp1d
    flat = data.ravel()
    nonzero_idx, = flat.nonzero()
    first, last = nonzero_idx[0], nonzero_idx[-1] + 1
    nrow, ncol = data.shape
    positions = np.arange(nrow * ncol)
    # Interpolate between the nonzero samples only.
    interpolator = interp1d(positions[nonzero_idx], flat[nonzero_idx])
    flat[first:last] = interpolator(positions[first:last])
    return flat.reshape([nrow, ncol])
def get_qr_tick_label( qr, label_array_qr, inc_x0, interp=True):
    '''
    Dec 16, 2015, Y.G.@CHX
    get zticks,zticks_label
    Parameters:
    qr: 2-D array, qr of a gisaxs image (data)
    label_array_qr: a labelled array of qr map, get by:
    label_array_qr = get_qmap_label( qr, qz_edge)
    Options:
    interp: if True, make qz label round by np.round(data, 2)
    inc_x0: x-center of incident beam
    Return:
    rticks: list, r-tick positions in unit of pixel
    rticks_label: list, r-tick positions in unit of real space
    Examples:
    rticks,rticks_label = get_qr_tick_label( qr, label_array_qr)
    '''
    rticks =[]
    rticks_label = []
    num = len( np.unique( label_array_qr ) )
    for i in range( 1, num ):
        ind = np.sort( np.where( label_array_qr==i )[1] )
        #tick = round( qr[label_array_qr==i].mean(),2)
        tick = qr[label_array_qr==i].mean()
        if ind[0] < inc_x0 and ind[-1]>inc_x0: #
            # The qr band straddles the incident beam: place one tick on
            # each side of it, both carrying the same qr value.
            #mean1 = int( (ind[np.where(ind < inc_x0)[0]]).mean() )
            #mean2 = int( (ind[np.where(ind > inc_x0)[0]]).mean() )
            mean1 = int( (ind[np.where(ind < inc_x0)[0]])[0] )
            mean2 = int( (ind[np.where(ind > inc_x0)[0]])[0] )
            rticks.append( mean1)
            rticks.append(mean2)
            rticks_label.append( tick )
            rticks_label.append( tick )
        else:
            #print('here')
            #mean = int( ind.mean() )
            mean = int( ind[0] )
            #mean = int( (ind[0] +ind[-1])/2 )
            rticks.append(mean)
            rticks_label.append( tick )
        #print (rticks)
        #print (mean, tick)
    n= len(rticks)
    for i, rt in enumerate( rticks):
        if rt==0:
            # NOTE(review): a tick at pixel 0 is remapped to n-i; the
            # intent of this remapping is not evident here — verify.
            rticks[i] = n- i
    if interp:
        # Snap tick positions so they line up with the 3-decimal rounded
        # labels, handled separately on each side of the beam center.
        rticks = np.array(rticks)
        rticks_label = np.array( rticks_label)
        try:
            w= np.where( rticks <= inc_x0)[0]
            rticks1 = np.int_(np.interp( np.round( rticks_label[w], 3), rticks_label[w], rticks[w] ))
            rticks_label1 = np.round( rticks_label[w], 3)
        except:
            rticks_label1 = []
        try:
            w= np.where( rticks > inc_x0)[0]
            rticks2 = np.int_(np.interp( np.round( rticks_label[w], 3), rticks_label[w], rticks[w] ))
            rticks = np.append( rticks1, rticks2)
            rticks_label2 = np.round( rticks_label[w], 3)
        except:
            rticks_label2 = []
        rticks_label = np.append( rticks_label1, rticks_label2)
    return rticks, rticks_label
def get_qz_tick_label( qz, label_array_qz,interp=True):
    '''
    Dec 16, 2015, Y.G.@CHX
    Compute qz tick positions (pixel rows) and tick labels (reciprocal
    space) from a labelled qz map.
    Parameters:
    qz: 2-D array, qz of a gisaxs image (data)
    label_array_qz: a labelled array of qz map, get by:
    label_array_qz = get_qmap_label( qz, qz_edge)
    interp: if True, make qz label round by np.round(data, 3)
    Return:
    zticks: list, z-tick positions in unit of pixel
    zticks_label: list, z-tick positions in unit of real space
    Examples:
    zticks,zticks_label = get_qz_tick_label( qz, label_array_qz)
    '''
    n_labels = len(np.unique(label_array_qz))
    pixel_rows = []
    qz_values = []
    for lab in range(1, n_labels):
        hit = label_array_qz == lab
        # First pixel row of each labelled band gives the tick position,
        # and the first qz value of the band gives its label.
        pixel_rows.append(int(np.where(hit)[0][0]))
        qz_values.append(qz[hit][0])
    zticks = np.array(pixel_rows)
    zticks_label = np.array(qz_values)
    if interp:
        # Snap tick positions to line up with the rounded labels.
        zticks = np.int_(np.interp(np.round(zticks_label, 3), zticks_label, zticks))
        zticks_label = np.round(zticks_label, 3)
    return zticks, zticks_label
def get_qzr_map( qr, qz, inc_x0, Nzline=10,Nrline=10, interp = True,
                 return_qrz_label= True, *argv,**kwargs):
    '''
    Dec 31, 2016, Y.G.@CHX
    Calculate a qzr map of a gisaxs image (data) without plot
    Parameters:
    qr: 2-D array, qr of a gisaxs image (data)
    qz: 2-D array, qz of a gisaxs image (data)
    inc_x0: the incident beam center x
    Options:
    Nzline: int, z-line number
    Nrline: int, r-line number
    Return:
    if return_qrz_label
    zticks: list, z-tick positions in unit of pixel
    zticks_label: list, z-tick positions in unit of real space
    rticks: list, r-tick positions in unit of pixel
    rticks_label: list, r-tick positions in unit of real space
    label_array_qr: qr label array with the same shape as gisaxs image
    label_array_qz: qz label array with the same shape as gisaxs image
    else: return only the four tick lists
    Examples:
    ticks = get_qzr_map( qr, qz, inc_x0 )
    '''
    # Thin bands: width = span/(N+100) so the grid lines stay narrow.
    qr_start, qr_end, qr_num = qr.min(),qr.max(), Nrline
    qz_start, qz_end, qz_num = qz.min(),qz.max(), Nzline
    qr_edge, qr_center = get_qedge(qr_start , qr_end, ( qr_end- qr_start)/(qr_num+100), qr_num )
    qz_edge, qz_center = get_qedge( qz_start, qz_end, (qz_end - qz_start)/(qz_num+100 ) , qz_num )
    label_array_qz = get_qmap_label( qz, qz_edge)
    label_array_qr = get_qmap_label( qr, qr_edge)
    labels_qz, indices_qz = roi.extract_label_indices( label_array_qz )
    labels_qr, indices_qr = roi.extract_label_indices( label_array_qr )
    num_qz = len(np.unique( labels_qz ))
    num_qr = len(np.unique( labels_qr ))
    zticks,zticks_label = get_qz_tick_label(qz,label_array_qz)
    #rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0)
    # np.sort on a zip object fails on py3; fall back to sorted().
    try:
        rticks,rticks_label = zip(*np.sort( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0,interp=interp) )) )
    except:
        rticks,rticks_label = zip(* sorted( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0,interp=interp) )) )
    #stride = int(len(zticks)/10)
    ticks=[ zticks,zticks_label,rticks,rticks_label ]
    if return_qrz_label:
        return zticks,zticks_label,rticks,rticks_label, label_array_qr, label_array_qz
    else:
        return zticks,zticks_label,rticks,rticks_label
def plot_qzr_map( qr, qz, inc_x0, ticks = None, data=None,
                  uid='uid', path ='', *argv,**kwargs):
    '''
    Dec 31, 2016, Y.G.@CHX
    plot a qzr map of a gisaxs image (data)
    Parameters:
    qr: 2-D array, qr of a gisaxs image (data)
    qz: 2-D array, qz of a gisaxs image (data)
    inc_x0: the incident beam center x
    ticks = [ zticks,zticks_label,rticks,rticks_label, label_array_qr,
    label_array_qz ], as returned by get_qzr_map(..., return_qrz_label=True);
    computed on the fly when None.
    zticks: list, z-tick positions in unit of pixel
    zticks_label: list, z-tick positions in unit of real space
    rticks: list, r-tick positions in unit of pixel
    rticks_label: list, r-tick positions in unit of real space
    label_array_qr: qr label array with the same shape as gisaxs image
    label_array_qz: qz label array with the same shape as gisaxs image
    Options:
    data: 2-D array, a gisaxs image, if None, =qr+qz
    Return:
    None (saves '<uid>_Qr_Qz_Map.png' into `path`)
    Examples:
    ticks = plot_qzr_map( qr, qz, inc_x0, data = avg_imgmr )
    '''
    import matplotlib.pyplot as plt
    import copy
    import matplotlib.cm as mcm
    if ticks is None:
        zticks,zticks_label,rticks,rticks_label, label_array_qr, label_array_qz = get_qzr_map(
            qr, qz, inc_x0, return_qrz_label=True )
    else:
        zticks,zticks_label,rticks,rticks_label, label_array_qr, label_array_qz = ticks
    cmap='viridis'
    _cmap = copy.copy((mcm.get_cmap(cmap)))
    _cmap.set_under('w', 0)
    fig, ax = plt.subplots( )
    if data is None:
        data=qr+qz
        im = ax.imshow(data, cmap='viridis',origin='lower')
    else:
        im = ax.imshow(data, cmap='viridis',origin='lower', norm= LogNorm(vmin=0.001, vmax=1e1))
    # Overlay the thin qr/qz label bands as grid lines.
    imr=ax.imshow(label_array_qr, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',)
    imz=ax.imshow(label_array_qz, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    plt.colorbar(im, cax=cax)
    ax.set_xlabel(r'$q_r$', fontsize=18)
    ax.set_ylabel(r'$q_z$',fontsize=18)
    stride = 1
    ax.set_yticks( zticks[::stride] )
    yticks = zticks_label[::stride]
    ax.set_yticklabels(yticks, fontsize=7)
    #stride = int(len(rticks)/10)
    stride = 1
    ax.set_xticks( rticks[::stride] )
    xticks = rticks_label[::stride]
    ax.set_xticklabels(xticks, fontsize=7)
    ax.set_title( '%s_Qr_Qz_Map'%uid, y=1.03,fontsize=18)
    fp = path + '%s_Qr_Qz_Map'%(uid) + '.png'
    fig.savefig( fp, dpi=fig.dpi)
def show_qzr_map( qr, qz, inc_x0, data=None, Nzline=10,Nrline=10 ,
                  interp=True, *argv,**kwargs):
    '''
    Dec 16, 2015, Y.G.@CHX
    plot a qzr map of a gisaxs image (data)
    Parameters:
    qr: 2-D array, qr of a gisaxs image (data)
    qz: 2-D array, qz of a gisaxs image (data)
    inc_x0: the incident beam center x
    Options:
    data: 2-D array, a gisaxs image, if None, =qr+qz
    Nzline: int, z-line number
    Nrline: int, r-line number
    kwargs: 'uid' (plot title), 'save' (bool) + 'path' (output dir)
    Return:
    zticks: list, z-tick positions in unit of pixel
    zticks_label: list, z-tick positions in unit of real space
    rticks: list, r-tick positions in unit of pixel
    rticks_label: list, r-tick positions in unit of real space
    Examples:
    ticks = show_qzr_map( qr, qz, inc_x0, data = None, Nzline=10, Nrline= 10 )
    ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 )
    '''
    import matplotlib.pyplot as plt
    import copy
    import matplotlib.cm as mcm
    cmap='viridis'
    _cmap = copy.copy((mcm.get_cmap(cmap)))
    _cmap.set_under('w', 0)
    # Thin bands: width = span/(N+100) so the grid lines stay narrow.
    qr_start, qr_end, qr_num = qr.min(),qr.max(), Nrline
    qz_start, qz_end, qz_num = qz.min(),qz.max(), Nzline
    qr_edge, qr_center = get_qedge(qr_start , qr_end, ( qr_end- qr_start)/(qr_num+100), qr_num )
    qz_edge, qz_center = get_qedge( qz_start, qz_end, (qz_end - qz_start)/(qz_num+100 ) , qz_num )
    label_array_qz = get_qmap_label( qz, qz_edge)
    label_array_qr = get_qmap_label( qr, qr_edge)
    labels_qz, indices_qz = roi.extract_label_indices( label_array_qz )
    labels_qr, indices_qr = roi.extract_label_indices( label_array_qr )
    num_qz = len(np.unique( labels_qz ))
    num_qr = len(np.unique( labels_qr ))
    fig, ax = plt.subplots( figsize=(8,14) )
    if data is None:
        data=qr+qz
        im = ax.imshow(data, cmap='viridis',origin='lower')
    else:
        im = ax.imshow(data, cmap='viridis',origin='lower', norm= LogNorm(vmin=0.001, vmax=1e1))
    # Overlay the thin qr/qz label bands as grid lines.
    imr=ax.imshow(label_array_qr, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',)
    imz=ax.imshow(label_array_qz, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',)
    #caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, heigth
    #cba = fig.colorbar(im, cax=caxr )
    #cba = fig.colorbar(im, fraction=0.046, pad=0.04)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    plt.colorbar(im, cax=cax)
    #fig.colorbar(im, shrink =.82)
    #cba = fig.colorbar(im)
    ax.set_xlabel(r'$q_r$', fontsize=18)
    ax.set_ylabel(r'$q_z$',fontsize=18)
    zticks,zticks_label = get_qz_tick_label(qz,label_array_qz)
    #rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0)
    # np.sort on a zip object fails on py3; fall back to sorted().
    try:
        rticks,rticks_label = zip(*np.sort( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0,interp=interp) )) )
    except:
        rticks,rticks_label = zip(* sorted( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0,interp=interp) )) )
    #stride = int(len(zticks)/10)
    stride = 1
    ax.set_yticks( zticks[::stride] )
    yticks = zticks_label[::stride]
    ax.set_yticklabels(yticks, fontsize=7)
    #stride = int(len(rticks)/10)
    stride = 1
    ax.set_xticks( rticks[::stride] )
    xticks = rticks_label[::stride]
    ax.set_xticklabels(xticks, fontsize=7)
    if 'uid' in kwargs:
        uid=kwargs['uid']
    else:
        uid='uid'
    ax.set_title( '%s_Qr_Qz_Map'%uid, y=1.03,fontsize=18)
    save=False
    if 'save' in kwargs:
        save=kwargs['save']
    if save:
        path=kwargs['path']
        fp = path + '%s_Qr_Qz_Map'%(uid) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
    #plt.show()
    return zticks,zticks_label,rticks,rticks_label
def show_qzr_roi( data, rois, inc_x0, ticks, alpha=0.3, uid='uid', path = '', save=False, return_fig=False, *argv,**kwargs):
    '''
    Dec 16, 2015, Y.G.@CHX
    plot a qzr map of a gisaxs image with rois( a label array)
    Parameters:
    data: 2-D array, a gisaxs image
    rois: 2-D array, a label array
    inc_x0: the incident beam center x
    ticks: zticks, zticks_label, rticks, rticks_label = ticks
    zticks: list, z-tick positions in unit of pixel
    zticks_label: list, z-tick positions in unit of real space
    rticks: list, r-tick positions in unit of pixel
    rticks_label: list, r-tick positions in unit of real space
    Options:
    alpha: transparency of the label array on top of data
    save: if True, write '<uid>_ROI_on_Image.png' into `path`
    return_fig: if True, return (fig, ax)
    Return:
    a plot of a qzr map of a gisaxs image with rois( a label array)
    Examples:
    show_qzr_roi( avg_imgr, box_maskr, inc_x0, ticks)
    '''
    zticks, zticks_label, rticks, rticks_label = ticks
    avg_imgr, box_maskr = data, rois
    num_qzr = len(np.unique( box_maskr)) -1
    #fig, ax = plt.subplots(figsize=(8,12))
    fig, ax = plt.subplots(figsize=(8,8))
    ax.set_title("%s_ROI--Labeled Array on Data"%uid)
    im,im_label = show_label_array_on_image(ax, avg_imgr, box_maskr, imshow_cmap='viridis',
                                            cmap='Paired', alpha=alpha,
                                            vmin=0.01, vmax=30. , origin="lower")
    # Annotate each ROI with its label number, duplicated on both sides
    # of the incident beam when the ROI straddles it.
    for i in range( 1, num_qzr+1 ):
        ind = np.where( box_maskr == i)[1]
        indz = np.where( box_maskr == i)[0]
        c = '%i'%i
        y_val = int( indz.mean() )
        #print (ind[0], ind[-1], inc_x0 )
        M,m = max( ind ), min( ind )
        #if ind[0] < inc_x0 and ind[-1]>inc_x0:
        if m < inc_x0 and M > inc_x0:
            x_val1 = int( (ind[np.where(ind < inc_x0)[0]]).mean() )
            x_val2 = int( (ind[np.where(ind > inc_x0)[0]]).mean() )
            ax.text(x_val1, y_val, c, va='center', ha='center')
            ax.text(x_val2, y_val, c, va='center', ha='center')
        else:
            x_val = int( ind.mean() )
            #print (xval, y)
            ax.text(x_val, y_val, c, va='center', ha='center')
        #print (x_val1,x_val2)
    #stride = int(len(zticks)/3)
    stride = 1
    ax.set_yticks( zticks[::stride] )
    yticks = zticks_label[::stride]
    ax.set_yticklabels(yticks, fontsize=9)
    #stride = int(len(rticks)/3)
    stride = 1
    ax.set_xticks( rticks[::stride] )
    xticks = rticks_label[::stride]
    ax.set_xticklabels(xticks, fontsize=9)
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad=0.05)
    plt.colorbar(im, cax=cax)
    ax.set_xlabel(r'$q_r$', fontsize=22)
    ax.set_ylabel(r'$q_z$',fontsize=22)
    fp = path + '%s_ROI_on_Image'%(uid) + '.png'
    if save:
        fig.savefig( fp, dpi=fig.dpi)
    if return_fig:
        return fig, ax
#plot g2 results
def plot_gisaxs_g2( g2, taus, res_pargs=None, one_plot = False, *argv,**kwargs):
    '''Dec 16, 2015, Y.G.@CHX
    plot g2 results,
    g2: one-time correlation function, shape (num_tau, num_qz*num_qr);
        column ordering is sn + qz_ind*num_qr
    taus: the time delays
    res_pargs, a dict, can contains
    uid/path/qr_center/qz_center/
    one_plot: if True, show all qz in one plot (one subplot per qr,
    one curve per qz); otherwise one figure per qz
    kwargs: can contains
    vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)]
    ylim/xlim: the limit of y and x
    e.g.
    plot_gisaxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] )
    '''
    # Resolve uid/path/q centers either from res_pargs or from kwargs.
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        qz_center = res_pargs[ 'qz_center']
        num_qz = len( qz_center)
        qr_center = res_pargs[ 'qr_center']
        num_qr = len( qr_center)
    else:
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
        if 'qz_center' in kwargs.keys():
            qz_center = kwargs[ 'qz_center']
            num_qz = len( qz_center)
        else:
            print( 'Please give qz_center')
        if 'qr_center' in kwargs.keys():
            qr_center = kwargs[ 'qr_center']
            num_qr = len( qr_center)
        else:
            print( 'Please give qr_center')
    if not one_plot:
        # One figure per qz value, with one subplot per qr value.
        for qz_ind in range(num_qz):
            fig = plt.figure(figsize=(10, 12))
            #fig = plt.figure()
            title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
            plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1)
            #print (qz_ind,title_qz)
            if num_qz!=1:
                if num_qr!=1:
                    plt.axis('off')
            # Subplot grid: roughly square layout for num_qr panels.
            sx = int(round(np.sqrt(num_qr)) )
            if num_qr%sx == 0:
                sy = int(num_qr/sx)
            else:
                sy=int(num_qr/sx+1)
            for sn in range(num_qr):
                ax = fig.add_subplot(sx,sy,sn+1 )
                ax.set_ylabel("g2")
                ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
                title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
                if num_qz==1:
                    title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr
                else:
                    title = title_qr
                ax.set_title( title )
                y=g2[:, sn + qz_ind * num_qr]
                ax.semilogx(taus, y, '-o', markersize=6)
                if 'ylim' in kwargs:
                    ax.set_ylim( kwargs['ylim'])
                elif 'vlim' in kwargs:
                    vmin, vmax =kwargs['vlim']
                    ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
                else:
                    pass
                if 'xlim' in kwargs:
                    ax.set_xlim( kwargs['xlim'])
            fp = path + 'uid=%s--g2-qz=%s'%(uid,qz_center[qz_ind]) + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            fig.tight_layout()
            #plt.show()
    else:
        # Single figure: one subplot per qr, one curve per qz.
        if num_qz==1:
            if num_qr==1:
                fig = plt.figure(figsize=(8,8))
            else:
                fig = plt.figure(figsize=(10, 12))
        else:
            fig = plt.figure(figsize=(10, 12))
        plt.title('uid= %s'%uid,fontsize=20, y =1.05)
        if num_qz!=1:
            if num_qr!=1:
                plt.axis('off')
        if num_qz==1:
            if num_qr!=1:
                plt.axis('off')
        sx = int(round(np.sqrt(num_qr)) )
        if num_qr%sx == 0:
            sy = int(num_qr/sx)
        else:
            sy=int(num_qr/sx+1)
        for sn in range(num_qr):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel("g2")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            #title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
            title_qr = " Qr= " + '%.5s '%( qr_center[sn]) + r'$\AA^{-1}$'
            title = title_qr
            ax.set_title( title )
            for qz_ind in range(num_qz):
                y=g2[:, sn + qz_ind * num_qr]
                if sn ==0:
                    title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
                    ax.semilogx(taus, y, '-o', markersize=6, label = title_qz )
                else:
                    ax.semilogx(taus, y, '-o', markersize=6, label='' )
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
            if sn ==0:
                ax.legend(loc='best', fontsize = 6)
        fp = path + 'uid=%s--g2'%(uid) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
        #plt.show()
#plot g2 results
def plot_gisaxs_two_g2( g2, taus, g2b, tausb,res_pargs=None,one_plot=False, *argv,**kwargs):
    '''Dec 16, 2015, Y.G.@CHX
    plot g2 results,
    g2: one-time correlation function from a multi-tau method
    g2b: another g2 from a two-time method
    taus: the time delays
    tausb: the time delays of g2b
    one_plot: if True, all qz in one figure (one subplot per qr);
    otherwise one figure per qz
    kwargs: can contains
    vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)]
    ylim/xlim: the limit of y and x
    e.g.
    plot_saxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] )
    '''
    # Resolve uid/path/q centers either from res_pargs or from kwargs.
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        qz_center = res_pargs[ 'qz_center']
        num_qz = len( qz_center)
        qr_center = res_pargs[ 'qr_center']
        num_qr = len( qr_center)
    else:
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
        if 'qz_center' in kwargs.keys():
            qz_center = kwargs[ 'qz_center']
            num_qz = len( qz_center)
        else:
            print( 'Please give qz_center')
        if 'qr_center' in kwargs.keys():
            qr_center = kwargs[ 'qr_center']
            num_qr = len( qr_center)
        else:
            print( 'Please give qr_center')
    if not one_plot:
        # One figure per qz, with both g2 estimates overlaid per qr panel.
        for qz_ind in range(num_qz):
            fig = plt.figure(figsize=(12, 10))
            #fig = plt.figure()
            title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
            plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1)
            #print (qz_ind,title_qz)
            if num_qz!=1:plt.axis('off')
            sx = int(round(np.sqrt(num_qr)) )
            if num_qr%sx == 0:
                sy = int(num_qr/sx)
            else:
                sy=int(num_qr/sx+1)
            for sn in range(num_qr):
                ax = fig.add_subplot(sx,sy,sn+1 )
                ax.set_ylabel("g2")
                ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
                title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
                if num_qz==1:
                    title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr
                else:
                    title = title_qr
                ax.set_title( title )
                y=g2b[:, sn + qz_ind * num_qr]
                ax.semilogx( tausb, y, '--r', markersize=6,label= 'by-two-time')
                #y2=g2[:, sn]
                y2=g2[:, sn + qz_ind * num_qr]
                ax.semilogx(taus, y2, 'o', markersize=6, label= 'by-multi-tau')
                if sn + qz_ind * num_qr==0:
                    ax.legend(loc='best')
                if 'ylim' in kwargs:
                    ax.set_ylim( kwargs['ylim'])
                elif 'vlim' in kwargs:
                    vmin, vmax =kwargs['vlim']
                    ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
                else:
                    pass
                if 'xlim' in kwargs:
                    ax.set_xlim( kwargs['xlim'])
            fp = path + 'uid=%s--two-g2-qz=%s'%(uid,qz_center[qz_ind]) + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            fig.tight_layout()
            #plt.show()
    else:
        # Single figure: one subplot per qr, both estimates for every qz.
        fig = plt.figure(figsize=(12, 10))
        plt.title('uid= %s'%uid,fontsize=20, y =1.05)
        if num_qz!=1:
            if num_qr!=1:
                plt.axis('off')
        if num_qz==1:
            if num_qr!=1:
                plt.axis('off')
        sx = int(round(np.sqrt(num_qr)) )
        if num_qr%sx == 0:
            sy = int(num_qr/sx)
        else:
            sy=int(num_qr/sx+1)
        for sn in range(num_qr):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel("g2")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            title_qr = " Qr= " + '%.5s '%( qr_center[sn]) + r'$\AA^{-1}$'
            #title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
            title = title_qr
            ax.set_title( title )
            for qz_ind in range(num_qz):
                y=g2b[:, sn + qz_ind * num_qr]
                y2=g2[:, sn + qz_ind * num_qr]
                title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
                # Legend labels only on the first two panels to avoid clutter.
                label1 = ''
                label2 =''
                if sn ==0:
                    label2 = title_qz
                elif sn==1:
                    if qz_ind ==0:
                        label1= 'by-two-time'
                        label2= 'by-multi-tau'
                ax.semilogx(tausb, y, '-r', markersize=6, linewidth=4, label=label1)
                ax.semilogx(taus, y2, 'o', markersize=6, label=label2)
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
            if (sn ==0) or (sn==1):
                ax.legend(loc='best', fontsize = 6)
        fp = path + 'uid=%s--g2--two-g2-'%uid + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
        #plt.show()
def save_gisaxs_g2( g2, res_pargs, time_label= False, taus=None, filename=None, *argv,**kwargs):
    '''
    Aug 8, 2016, Y.G.@CHX
    Save a g2 correlation function as a csv file: one 'tau' column followed by
    one column per ROI.

    g2: one-time correlation function, shape (num_taus, num_rois)
    res_pargs: dict with keys
        taus (used when the `taus` argument is None), path, uid, and either
        qz_center/qr_center (GiSAXS rings, columns labeled [qz, qr]) or
        roi_label (generic per-ROI labels).
    time_label: when True (and filename is None), embed a timestamp in the name.
    filename: optional explicit file name; always placed under res_pargs['path'].
    '''
    if taus is None:
        taus = res_pargs[ 'taus']
    try:
        qz_center = res_pargs['qz_center']
        qr_center = res_pargs['qr_center']
    except:
        roi_label= res_pargs['roi_label']
    path = res_pargs['path']
    uid = res_pargs['uid']
    # first column is tau, remaining columns are the per-ROI g2 curves
    df = DataFrame( np.hstack( [ (taus).reshape( len(g2),1) , g2] ) )
    columns=[]
    columns.append('tau')
    try:
        for qz in qz_center:
            for qr in qr_center:
                columns.append( [str(qz),str(qr)] )
    except:
        # BUG FIX: extend with one label per ROI.  The original appended the
        # whole label list as a single element, so len(columns) no longer
        # matched the DataFrame width and `df.columns = columns` raised.
        columns += [ v for (k,v) in roi_label.items()]
    df.columns = columns
    if filename is None:
        if time_label:
            dt =datetime.now()
            CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute)
            filename = os.path.join(path, 'g2-%s-%s.csv' %(uid,CurTime))
        else:
            filename = os.path.join(path, 'uid=%s--g2.csv' % (uid))
    else:
        filename = os.path.join(path, filename)
    df.to_csv(filename)
    print( 'The correlation function of uid= %s is saved with filename as %s'%(uid, filename))
def stretched_auto_corr_scat_factor(x, beta, relaxation_rate, alpha=1.0, baseline=1):
    """Stretched/compressed exponential g2 model:
    baseline + beta * (exp(-2 * relaxation_rate * x)) ** alpha
    With alpha=1 this reduces to the simple exponential model.
    """
    decay = np.exp(-2 * relaxation_rate * x)
    return baseline + beta * decay ** alpha
def simple_exponential(x, beta, relaxation_rate, baseline=1):
    """Single-exponential g2 model:
    baseline + beta * exp(-2 * relaxation_rate * x)
    """
    return baseline + beta * np.exp(-2.0 * relaxation_rate * x)
def fit_gisaxs_g2( g2, res_pargs, function='simple_exponential', one_plot=False, *argv,**kwargs):
    '''
    July 20,2016, Y.G.@CHX
    Fit one-time correlation function
    The support functions include simple exponential and stretched/compressed exponential
    Parameters
    ----------
    g2: one-time correlation function for fit, with shape as [taus, qs]
    res_pargs: a dict, contains keys
        taus: the time delay, with the same length as g2
        qz_center/qr_center: ring centers, for the title of each sub-plot
        uid: unique id, for the title of plot
        path: where the fit figure png(s) are saved
    kwargs:
        fit_variables: if exist, should be a dict, like
                { 'lags': True,  #always True
                  'beta': True,  # usually True
                  'relaxation_rate': True,
                  'alpha': False, #False for simple exponential, True for stretched/compressed
                  'baseline': True #sometimes be False, keep as 1
                }
            keys mapped to False are held fixed during the fit.
        guess_values: optional dict of initial values for
            beta/alpha/relaxation_rate/baseline.
    function:
        'simple_exponential': fit by a simple exponential function, defined as
                    beta * np.exp(-2 * relaxation_rate * lags) + baseline
        'stretched_exponential': fit by a stretched exponential function, defined as
                    beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline
    Returns
    -------
    a dict with keys 'beta', 'rate', 'alpha', 'baseline', each an array of
    length g2.shape[1] (ordered as sn + qz_ind * num_qr).
    an example:
        result = fit_gisaxs_g2( g2, res_pargs, function = 'simple')
        result = fit_gisaxs_g2( g2, res_pargs, function = 'stretched')
    '''
    taus = res_pargs[ 'taus']
    qz_center = res_pargs[ 'qz_center']
    num_qz = len( qz_center)
    qr_center = res_pargs[ 'qr_center']
    num_qr = len( qr_center)
    uid=res_pargs['uid']
    path=res_pargs['path']
    num_rings = g2.shape[1]
    beta = np.zeros( num_rings ) # contrast factor
    rate = np.zeros( num_rings ) # relaxation rate
    alpha = np.zeros( num_rings ) # alpha
    baseline = np.zeros( num_rings ) # baseline
    # BUG FIX: parse the fixed-parameter list from kwargs *before* it is used;
    # the original referenced _vars prior to assignment, raising NameError
    # whenever function='simple_exponential'.
    if 'fit_variables' in kwargs:
        additional_var = kwargs['fit_variables']
        _vars = [ k for k in list( additional_var.keys()) if additional_var[k] is False]
    else:
        _vars = []
    if function=='simple_exponential' or function=='simple':
        # the simple model is the stretched model with alpha pinned to its guess (1.0)
        _vars = np.unique( list(_vars) + ['alpha'] )
        mod = Model(stretched_auto_corr_scat_factor)
    elif function=='stretched_exponential' or function=='stretched':
        mod = Model(stretched_auto_corr_scat_factor)
    else:
        print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function)
    # physical bounds for the fit parameters
    mod.set_param_hint( 'baseline', min=0.5, max= 2.5 )
    mod.set_param_hint( 'beta', min=0.0 )
    mod.set_param_hint( 'alpha', min=0.0 )
    mod.set_param_hint( 'relaxation_rate', min=0.0 )
    if 'guess_values' in kwargs:
        if 'beta' in list(kwargs['guess_values'].keys()):
            beta_ = kwargs['guess_values']['beta']
        else:
            beta_=0.05
        if 'alpha' in list(kwargs['guess_values'].keys()):
            alpha_= kwargs['guess_values']['alpha']
        else:
            alpha_=1.0
        if 'relaxation_rate' in list(kwargs['guess_values'].keys()):
            relaxation_rate_= kwargs['guess_values']['relaxation_rate']
        else:
            relaxation_rate_=0.005
        if 'baseline' in list(kwargs['guess_values'].keys()):
            baseline_= kwargs['guess_values']['baseline']
        else:
            baseline_=1.0
        pars = mod.make_params( beta=beta_, alpha=alpha_, relaxation_rate = relaxation_rate_, baseline=baseline_)
    else:
        pars = mod.make_params( beta=.05, alpha=1.0, relaxation_rate =0.005, baseline=1.0)
    # freeze the parameters the caller (or the 'simple' model) asked to fix
    for v in _vars:
        pars['%s'%v].vary = False
    result = {}
    if not one_plot:
        # one figure per qz; each qr gets its own subplot
        for qz_ind in range(num_qz):
            fig = plt.figure(figsize=(12, 10))
            title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
            plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1)
            if num_qz!=1:plt.axis('off')
            sx = int(round(np.sqrt(num_qr)) )
            if num_qr%sx == 0:
                sy = int(num_qr/sx)
            else:
                sy=int(num_qr/sx+1)
            for sn in range(num_qr):
                ax = fig.add_subplot(sx,sy,sn+1 )
                ax.set_ylabel("g2")
                ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
                title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
                if num_qz==1:
                    title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr
                else:
                    title = title_qr
                ax.set_title( title )
                i = sn + qz_ind * num_qr
                # skip tau=0 point: it is not described by the model
                y=g2[1:, i]
                result1 = mod.fit(y, pars, x = taus[1:] )
                rate[i] = result1.best_values['relaxation_rate']
                beta[i] = result1.best_values['beta']
                baseline[i] = result1.best_values['baseline']
                if function=='simple_exponential' or function=='simple':
                    alpha[i] =1.0
                elif function=='stretched_exponential' or function=='stretched':
                    alpha[i] = result1.best_values['alpha']
                ax.semilogx(taus[1:], y, 'bo')
                ax.semilogx(taus[1:], result1.best_fit, '-r')
                if 'ylim' in kwargs:
                    ax.set_ylim( kwargs['ylim'])
                elif 'vlim' in kwargs:
                    vmin, vmax =kwargs['vlim']
                    ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
                else:
                    pass
                if 'xlim' in kwargs:
                    ax.set_xlim( kwargs['xlim'])
                # annotate the fitted relaxation time, alpha and baseline
                txts = r'$\tau$' + r'$ = %.3f$'%(1/rate[i]) + r'$ s$'
                ax.text(x =0.02, y=.55 +.3, s=txts, fontsize=14, transform=ax.transAxes)
                txts = r'$\alpha$' + r'$ = %.3f$'%(alpha[i])
                ax.text(x =0.02, y=.45+.3, s=txts, fontsize=14, transform=ax.transAxes)
                txts = r'$baseline$' + r'$ = %.3f$'%( baseline[i])
                ax.text(x =0.02, y=.35 + .3, s=txts, fontsize=14, transform=ax.transAxes)
            result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline )
            fp = path + 'uid=%s--g2-qz=%s--fit'%(uid,qz_center[qz_ind]) + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            fig.tight_layout()
    else:
        # single figure: one subplot per qr, one fitted curve per qz
        if num_qz==1:
            if num_qr==1:
                fig = plt.figure(figsize=(8,8))
            else:
                fig = plt.figure(figsize=(10, 12))
        else:
            fig = plt.figure(figsize=(10, 12))
        plt.title('uid= %s'%uid,fontsize=20, y =1.05)
        if num_qz!=1:
            if num_qr!=1:
                plt.axis('off')
        sx = int(round(np.sqrt(num_qr)) )
        if num_qr%sx == 0:
            sy = int(num_qr/sx)
        else:
            sy=int(num_qr/sx+1)
        for sn in range(num_qr):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel("g2")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            title_qr = " Qr= " + '%.5s '%( qr_center[sn]) + r'$\AA^{-1}$'
            title = title_qr
            ax.set_title( title )
            for qz_ind in range(num_qz):
                i = sn + qz_ind * num_qr
                y=g2[1:, i]
                result1 = mod.fit(y, pars, x = taus[1:] )
                rate[i] = result1.best_values['relaxation_rate']
                beta[i] = result1.best_values['beta']
                baseline[i] = result1.best_values['baseline']
                if function=='simple_exponential' or function=='simple':
                    alpha[i] =1.0
                elif function=='stretched_exponential' or function=='stretched':
                    alpha[i] = result1.best_values['alpha']
                if sn ==0:
                    title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
                    ax.semilogx(taus[1:], y, 'o', markersize=6, label = title_qz )
                else:
                    ax.semilogx(taus[1:], y, 'o', markersize=6, label='' )
                ax.semilogx(taus[1:], result1.best_fit, '-r')
                txts = r'$q_z$' + r'$_%s$'%qz_ind + r'$\tau$' + r'$ = %.3f$'%(1/rate[i]) + r'$ s$'
                ax.text(x =0.02, y=.55 +.3 - 0.1*qz_ind, s=txts, fontsize=14, transform=ax.transAxes)
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
            if sn ==0:
                ax.legend(loc='best', fontsize = 6)
        result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline )
        fp = path + 'uid=%s--g2--fit-'%(uid) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
    return result
#GiSAXS End
###############################
def get_each_box_mean_intensity( data_series, box_mask, sampling, timeperframe, plot_ = True , *argv,**kwargs):
    '''Dec 16, 2015, Y.G.@CHX
    Compute the mean intensity inside each box ROI as a function of time.

    data_series: image stack; only every `sampling`-th frame is averaged.
    box_mask: labeled ROI array (same shape as one image).
    timeperframe: seconds per frame, used to build the time axis.
    plot_: when True, plot every ROI trace and save the figure as a png
        (kwargs may supply 'uid' and 'path' for the file name).
    Returns (times, mean_int_sets): the full-length time axis and the
        sampled per-ROI mean intensities.
    '''
    mean_int_sets, index_list = roi.mean_intensity(np.array( data_series[::sampling]), box_mask)
    # len() is unavailable on some lazy image sequences, which expose .length
    try:
        N = len(data_series)
    except:
        N = data_series.length
    times = np.arange( N )*timeperframe  # time stamp of every frame
    num_rings = len( np.unique( box_mask)[1:] )
    if plot_:
        fig, ax = plt.subplots(figsize=(8, 8))
        uid = kwargs.get('uid', 'uid')
        ax.set_title("uid= %s--Mean intensity of each box"%uid)
        for roi_ind in range(num_rings):
            ax.plot( times[::sampling], mean_int_sets[:,roi_ind], label="Box "+str(roi_ind+1),marker = 'o', ls='-')
        ax.set_xlabel("Time")
        ax.set_ylabel("Mean Intensity")
        ax.legend()
        path = kwargs.get('path', '')
        fp = path + 'uid=%s--Mean-intensity-of-each-ROI-'%(uid) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        #plt.show()
    return times, mean_int_sets
def power_func(x, D0, power=2):
    """Power-law model D0 * x**power; the default power=2 gives q^2 scaling."""
    return D0 * np.power(x, power)
def fit_qr_qz_rate( qr, qz, rate, plot_=True, *argv,**kwargs):
    '''
    Fit relaxation rate vs qr with a power law (rate = D0 * q^power) for each
    qz value, and optionally plot rate against q^power with the fits.

    qr, qz: the qr and qz ring centers; `rate` is ordered qr-fastest, i.e.
        rate[i*len(qr):(i+1)*len(qr)] belongs to qz[i].  `rate` may also be a
        dict with a 'rate' key (e.g. the result of fit_gisaxs_g2).
    Option:
        if power_variable = False, power =2 to fit q^2~rate,
        Otherwise, power is variable.
        fit_range: optional [start, stop] index range of qr used for fitting.
    Returns: array of fitted D0, one per qz value.
    '''
    # BUG FIX: the kwarg was silently ignored (hard-coded False) although
    # callers pass power_variable=...
    power_variable = bool( kwargs.get('power_variable', False) )
    x = qr
    if 'fit_range' in kwargs.keys():
        fit_range = kwargs['fit_range']
    else:
        fit_range = None
    if 'uid' in kwargs.keys():
        uid = kwargs['uid']
    else:
        uid = 'uid'
    if 'path' in kwargs.keys():
        path = kwargs['path']
    else:
        path = ''
    if fit_range is not None:
        # BUG FIX: the original sliced an undefined name `q` here (NameError)
        x = qr[fit_range[0]:fit_range[1]]
    mod = Model( power_func )
    # BUG FIX: the original wrote 1*10^(-5); ^ is bitwise XOR in Python, not
    # exponentiation, so the D0 starting value was a nonsense negative integer.
    pars = mod.make_params( power = 2, D0 = 1e-5 )
    pars['power'].vary = power_variable
    Nqr = len( qr)
    Nqz = len( qz)
    D0 = np.zeros( Nqz )
    power = 2 #np.zeros( Nqz )
    res = []
    ys = [] # per-qz rate slices, kept for plotting
    for i, qz_ in enumerate(qz):
        try:
            y = np.array( rate['rate'][ i*Nqr : (i+1)*Nqr ] )
        except:
            y = np.array( rate[ i*Nqr : (i+1)*Nqr ] )
        if fit_range is not None:
            # keep y consistent with the restricted x range
            y = y[fit_range[0]:fit_range[1]]
        _result = mod.fit(y, pars, x = x )
        res.append( _result )
        ys.append( y )
        D0[i] = _result.best_values['D0']
        print ('The fitted diffusion coefficient D0 is: %.3e   A^2S-1'%D0[i])
    if plot_:
        fig,ax = plt.subplots()
        plt.title('Q%s-Rate--uid= %s_Fit'%(power,uid),fontsize=20, y =1.06)
        for i, qz_ in enumerate(qz):
            # BUG FIX: plot each qz's own data; the original re-plotted the
            # last-fitted y for every qz.
            ax.plot(x**power, ys[i], marker = 'o',
                    label=r'$q_z=%.5f$'%qz_)
            ax.plot(x**power, res[i].best_fit, '-r')
            txts = r'$D0: %.3e$'%D0[i] + r' $A^2$' + r'$s^{-1}$'
            dy=0.1
            ax.text(x =0.15, y=.65 -dy *i, s=txts, fontsize=14, transform=ax.transAxes)
        legend = ax.legend(loc='best')
        ax.set_ylabel('Relaxation rate 'r'$\gamma$'"($s^{-1}$)")
        ax.set_xlabel("$q^%s$"r'($\AA^{-2}$)'%power)
        fp = path + 'uid=%s--Q-Rate'%(uid) + '--fit-.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
    return D0
#plot g4 results
def plot_gisaxs_g4( g4, taus, res_pargs=None, one_plot=False, *argv,**kwargs):
    '''Dec 16, 2015, Y.G.@CHX
    plot g4 results,
    g4: four-time correlation function
    taus: the time delays
    res_pargs, a dict, can contains
        uid/path/qr_center/qz_center/
    kwargs: can contains
        vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)]
        ylim/xlim: the limit of y and x
    e.g.
    plot_gisaxs_g4( g4, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] )

    g4 columns are ordered as sn + qz_ind * num_qr (qr varies fastest).  With
    one_plot=False a figure is saved per qz; with one_plot=True all qz curves
    for a given qr share one subplot.  Figures are saved as pngs under `path`.
    '''
    # Resolve uid/path and ring centers from res_pargs, falling back to kwargs.
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        qz_center = res_pargs[ 'qz_center']
        num_qz = len( qz_center)
        qr_center = res_pargs[ 'qr_center']
        num_qr = len( qr_center)
    else:
        if 'uid' in kwargs.keys():
            uid = kwargs['uid']
        else:
            uid = 'uid'
        if 'path' in kwargs.keys():
            path = kwargs['path']
        else:
            path = ''
        if 'qz_center' in kwargs.keys():
            qz_center = kwargs[ 'qz_center']
            num_qz = len( qz_center)
        else:
            print( 'Please give qz_center')
        if 'qr_center' in kwargs.keys():
            qr_center = kwargs[ 'qr_center']
            num_qr = len( qr_center)
        else:
            print( 'Please give qr_center')
    if not one_plot:
        # one figure per qz value; qr sub-plots in a near-square grid
        for qz_ind in range(num_qz):
            fig = plt.figure(figsize=(12, 10))
            #fig = plt.figure()
            title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
            plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1)
            #print (qz_ind,title_qz)
            if num_qz!=1:
                if num_qr!=1:
                    plt.axis('off')
            sx = int(round(np.sqrt(num_qr)) )
            if num_qr%sx == 0:
                sy = int(num_qr/sx)
            else:
                sy=int(num_qr/sx+1)
            for sn in range(num_qr):
                ax = fig.add_subplot(sx,sy,sn+1 )
                ax.set_ylabel("g4")
                ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
                title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
                if num_qz==1:
                    title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr
                else:
                    title = title_qr
                ax.set_title( title )
                y=g4[:, sn + qz_ind * num_qr]
                ax.semilogx(taus, y, '-o', markersize=6)
                if 'ylim' in kwargs:
                    ax.set_ylim( kwargs['ylim'])
                elif 'vlim' in kwargs:
                    # relative y-limits; first point skipped for the max
                    vmin, vmax =kwargs['vlim']
                    ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
                else:
                    pass
                if 'xlim' in kwargs:
                    ax.set_xlim( kwargs['xlim'])
            fp = path + 'uid=%s--g4-qz=%s'%(uid,qz_center[qz_ind]) + '.png'
            fig.savefig( fp, dpi=fig.dpi)
            fig.tight_layout()
            #plt.show()
    else:
        # single figure: one subplot per qr, one curve per qz
        fig = plt.figure(figsize=(12, 10))
        plt.title('uid= %s'%uid,fontsize=20, y =1.05)
        if num_qz!=1:
            if num_qr!=1:
                plt.axis('off')
        sx = int(round(np.sqrt(num_qr)) )
        if num_qr%sx == 0:
            sy = int(num_qr/sx)
        else:
            sy=int(num_qr/sx+1)
        for sn in range(num_qr):
            ax = fig.add_subplot(sx,sy,sn+1 )
            ax.set_ylabel("g4")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$'
            title = title_qr
            ax.set_title( title )
            for qz_ind in range(num_qz):
                y=g4[:, sn + qz_ind * num_qr]
                # only the first subplot carries qz labels for the legend
                if sn ==0:
                    title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$'
                    ax.semilogx(taus, y, '-o', markersize=6, label = title_qz )
                else:
                    ax.semilogx(taus, y, '-o', markersize=6, label='' )
            if 'ylim' in kwargs:
                ax.set_ylim( kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax =kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ])
            else:
                pass
            if 'xlim' in kwargs:
                ax.set_xlim( kwargs['xlim'])
            if sn ==0:
                ax.legend(loc='best', fontsize = 6)
        fp = path + 'uid=%s--g4-'%(uid) + '.png'
        fig.savefig( fp, dpi=fig.dpi)
        fig.tight_layout()
        #plt.show()
def multi_uids_gisaxs_xpcs_analysis( uids, md, run_num=1, sub_num=None,good_start=10, good_end= None,
                                     force_compress=False,
                                     fit = True, compress=True, para_run=False ):
    ''''Sep 16, 2016, YG@CHX-NSLS2
    Do SAXS-XPCS analysis for multi uid data
    uids: a list of uids to be analyzed
    md: metadata, should at least include
        mask: array, mask data
        data_dir: the path to save data, the result will be saved in data_dir/uid/...
        ring_mask, qz_center, qr_center
    run_num: the run number
    sub_num: the number in each sub-run (defaults to len(uids)//run_num)
    fit: if fit, do fit for g2 and show/save all fit plots
    compress: apply a compress algorithm
    para_run: use the parallel g2 calculator (cal_g2p) instead of cal_g2c
    Save g2/metadata/g2-fit plot/g2 q-rate plot/ of each uid in data_dir/uid/...
    return:
        g2s: a dictionary, {run_num: sub_num: g2_of_each_uid}
        taus,
        useful_uids: the valid uids
    '''
    g2s = {} # g2s[run_number][sub_seq] = g2 of each uid
    lag_steps = [0]
    useful_uids = {}
    if sub_num is None:
        sub_num = len( uids )//run_num
    mask = md['mask']
    maskr = mask[::-1,:] # GiSAXS images are flipped up-down before analysis
    data_dir = md['data_dir']
    box_maskr = md['ring_mask']
    qz_center= md['qz_center']
    qr_center= md['qr_center']
    for run_seq in range(run_num):
        g2s[ run_seq + 1] = {}
        useful_uids[ run_seq + 1] = {}
        i=0
        for sub_seq in range( 0, sub_num ):
            uid = uids[ sub_seq + run_seq * sub_num ]
            print( 'The %i--th uid to be analyzed is : %s'%(i, uid) )
            try:
                detector = get_detector( db[uid ] )
                imgs = load_data( uid, detector )
            except:
                print( 'The %i--th uid: %s can not load data'%(i, uid) )
                imgs=0
            data_dir_ = os.path.join( data_dir, '%s/'%uid)
            os.makedirs(data_dir_, exist_ok=True)
            i +=1
            if imgs !=0:
                Nimg = len(imgs)
                md_ = imgs.md
                useful_uids[ run_seq + 1][i] = uid
                imgsr = reverse_updown( imgs )
                imgsra = apply_mask( imgsr, maskr )
                if compress:
                    filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%uid
                    maskr, avg_imgr, imgsum, bad_frame_list = compress_eigerdata(imgsr, maskr, md_, filename,
                                    force_compress= force_compress, bad_pixel_threshold= 5e9,nobytes=4,
                                    para_compress=True, num_sub= 100)
                    try:
                        md['Measurement']= db[uid]['start']['Measurement']
                    except:
                        md['Measurement']= 'Measurement'
                        md['sample']='sample'
                    dpix = md['x_pixel_size'] * 1000. #in mm, eiger 4m is 0.075 mm
                    lambda_ =md['incident_wavelength'] # wavelegth of the X-rays in Angstroms
                    Ldet = md['detector_distance']
                    # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs
                    exposuretime= md['count_time']
                    acquisition_period = md['frame_time']
                    timeperframe = acquisition_period #for g2
                    setup_pargs=dict(uid=uid, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
                            timeperframe=timeperframe, path= data_dir)
                    md['avg_img'] = avg_imgr
                    min_inten = 0
                    if good_end is None:
                        good_end_ = len(imgs)
                    else:
                        good_end_= good_end
                    FD = Multifile(filename, good_start, good_end_ )
                    # refine good_start: skip leading frames with ~zero intensity
                    good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
                    print ('With compression, the good_start frame number is: %s '%good_start)
                    print ('The good_end frame number is: %s '%good_end_)
                    if not para_run:
                        g2, lag_steps_ =cal_g2c( FD, box_maskr, bad_frame_list,good_start, num_buf = 8,
                                imgsum= None, norm= None )
                    else:
                        g2, lag_steps_ =cal_g2p( FD, box_maskr, bad_frame_list,good_start, num_buf = 8,
                                imgsum= None, norm= None )
                    if len( lag_steps) < len(lag_steps_):
                        lag_steps = lag_steps_
                else:
                    # NOTE(review): this branch never defines timeperframe, so
                    # the taus computation below will fail for compress=False —
                    # confirm whether the uncompressed path is still supported.
                    sampling = 1000 #sampling should be one
                    good_start = 0
                    # BUG FIX: the masked stack is named imgsra; the original
                    # referenced the undefined name `imgsar` (NameError).
                    good_series = apply_mask( imgsra[good_start: ], maskr )
                    imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = sampling,
                                bad_pixel_threshold=1.2e8, plot_ = False, uid=uid)
                    bad_image_process = False
                    if len(bad_frame_list):
                        bad_image_process = True
                    print( bad_image_process )
                    g2, lag_steps_ =cal_g2( good_series, box_maskr, bad_image_process,
                                       bad_frame_list, good_start, num_buf = 8 )
                    if len( lag_steps) < len(lag_steps_):
                        # BUG FIX: the original assigned the undefined name
                        # `lag_step_` here (NameError).
                        lag_steps = lag_steps_
                taus_ = lag_steps_ * timeperframe
                taus = lag_steps * timeperframe
                res_pargs = dict(taus=taus_, qz_center=qz_center, qr_center=qr_center, path=data_dir_, uid=uid )
                save_gisaxs_g2( g2, res_pargs )
                if fit:
                    fit_result = fit_gisaxs_g2( g2, res_pargs, function = 'stretched', vlim=[0.95, 1.1],
                        fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                        guess_values={'baseline':1.229,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01},
                        one_plot= True)
                    fit_qr_qz_rate( qr_center, qz_center, fit_result, power_variable= False,
                           uid=uid, path= data_dir_ )
                psave_obj( md, data_dir_ + 'uid=%s-md'%uid ) #save the setup parameters
                g2s[run_seq + 1][i] = g2
                print ('*'*40)
                print()
    return g2s, taus, useful_uids
<file_sep>"""
Dec 10, 2015 Developed by Y.G.@CHX
<EMAIL>
This module is for the necessary packages for the XPCS analysis
"""
from IPython.core.magics.display import Javascript
from skbeam.core.utils import multi_tau_lags
from skimage.draw import line_aa, line, polygon, ellipse, circle
from modest_image import ModestImage, imshow
from databroker import DataBroker as db, get_images, get_table, get_events, get_fields
from filestore.api import register_handler, deregister_handler
#from filestore.retrieve import _h_registry, _HANDLER_CACHE, HandlerBase
from eiger_io.pims_reader import EigerImages
from chxtools import handlers
from filestore.path_only_handlers import RawHandler
## Import all the required packages for Data Analysis
#* scikit-beam - data analysis tools for X-ray science
# - https://github.com/scikit-beam/scikit-beam
#* xray-vision - plotting helper functions for X-ray science
# - https://github.com/Nikea/xray-vision
import xray_vision
import matplotlib.cm as mcm
import copy
import xray_vision.mpl_plotting as mpl_plot
from xray_vision.mpl_plotting import speckle
from xray_vision.mask.manual_mask import ManualMask
import skbeam.core.roi as roi
import skbeam.core.correlation as corr
import skbeam.core.utils as utils
import numpy as np
from datetime import datetime
import h5py
import pims
from pandas import DataFrame
import os, sys, time
import getpass
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import pickle
from lmfit import Model
from lmfit import minimize, Parameters, Parameter, report_fit
from matplotlib.figure import Figure
from matplotlib import gridspec
from mpl_toolkits.axes_grid1 import make_axes_locatable
from tqdm import tqdm
import collections
import itertools
import random
from PIL import Image
import warnings
from eiger_io.fs_handler2 import EigerHandler2
from eiger_io.fs_handler import LazyEigerHandler
# Swap the default Eiger filestore handlers for the lazy readers imported
# above, so image retrieval through databroker uses them.
fs = db.event_sources[0].fs
fs.deregister_handler('AD_EIGER')
fs.register_handler('AD_EIGER', LazyEigerHandler)
fs.deregister_handler('AD_EIGER2')
fs.register_handler('AD_EIGER2', EigerHandler2)
# Shared plotting-style constants: endless cyclers plus long repeated arrays
# for indexed access.  Note `markers` and `colors` are deliberately re-assigned
# several times below; only the final np.array versions survive.
mcolors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k','darkgoldenrod','oldlace', 'brown','dodgerblue' ])
markers = itertools.cycle(list(plt.Line2D.filled_markers))
lstyles = itertools.cycle(['-', '--', '-.','.',':'])
colors = itertools.cycle(["blue", "darkolivegreen", "brown", "m", "orange", "hotpink", "darkcyan", "red",
                          "gray", "green", "black", "cyan", "purple" , "navy"])
colors_copy = itertools.cycle(["blue", "darkolivegreen", "brown", "m", "orange", "hotpink", "darkcyan", "red",
                               "gray", "green", "black", "cyan", "purple" , "navy"])
markers = itertools.cycle( ["o", "2", "p", "1", "s", "*", "4", "+", "8", "v","3", "D", "H", "^",])
markers_copy = itertools.cycle( ["o", "2", "p", "1", "s", "*", "4", "+", "8", "v","3", "D", "H", "^",])
RUN_GUI = False #if True for gui setup; else for notebook; the main code difference is the Figure() or plt.figure(figsize=(8, 6))
markers = ['o', 'D', 'v', '^', '<', '>', 'p', 's', 'H',
           'h', '*', 'd',
           '$I$','$L$', '$O$','$V$','$E$',
           '$c$', '$h$','$x$','$b$','$e$','$a$','$m$','$l$','$i$','$n$', '$e$',
           '8', '1', '3', '2', '4', '+', 'x', '_', '|', ',', '1',]
markers = np.array( markers *100 )
# final marker list (without the letter glyphs), tiled so any index is valid
markers = ['o', 'D', 'v', '^', '<', '>', 'p', 's', 'H',
           'h', '*', 'd',
           '8', '1', '3', '2', '4', '+', 'x', '_', '|', ',', '1',]
markers = np.array( markers *100 )
# final color list, tiled x100 and reversed so the strongest colors come first
colors = np.array( ['darkorange', 'mediumturquoise', 'seashell', 'mediumaquamarine', 'darkblue',
                    'yellowgreen', 'mintcream', 'royalblue', 'springgreen', 'slategray',
                    'yellow', 'slateblue', 'darkslateblue', 'papayawhip', 'bisque', 'firebrick',
                    'burlywood', 'dodgerblue', 'dimgrey', 'chartreuse', 'deepskyblue', 'honeydew',
                    'orchid', 'teal', 'steelblue', 'limegreen', 'antiquewhite',
                    'linen', 'saddlebrown', 'grey', 'khaki', 'hotpink', 'darkslategray',
                    'forestgreen', 'lightsalmon', 'turquoise', 'navajowhite',
                    'darkgrey', 'darkkhaki', 'slategrey', 'indigo',
                    'darkolivegreen', 'aquamarine', 'moccasin', 'beige', 'ivory', 'olivedrab',
                    'whitesmoke', 'paleturquoise', 'blueviolet', 'tomato', 'aqua', 'palegoldenrod',
                    'cornsilk', 'navy', 'mediumvioletred', 'palevioletred', 'aliceblue', 'azure',
                    'orangered', 'lightgrey', 'lightpink', 'orange', 'wheat',
                    'darkorchid', 'mediumslateblue', 'lightslategray', 'green', 'lawngreen',
                    'mediumseagreen', 'darksalmon', 'pink', 'oldlace', 'sienna', 'dimgray', 'fuchsia',
                    'lemonchiffon', 'maroon', 'salmon', 'gainsboro', 'indianred', 'crimson',
                    'mistyrose', 'lightblue', 'darkgreen', 'lightgreen', 'deeppink',
                    'palegreen', 'thistle', 'lightcoral', 'lightgray', 'lightskyblue', 'mediumspringgreen',
                    'mediumblue', 'peru', 'lightgoldenrodyellow', 'darkseagreen', 'mediumorchid',
                    'coral', 'lightyellow', 'chocolate', 'lavenderblush', 'darkred', 'lightseagreen',
                    'darkviolet', 'lightcyan', 'cadetblue', 'blanchedalmond', 'midnightblue',
                    'lightsteelblue', 'darkcyan', 'floralwhite', 'darkgray',
                    'lavender', 'sandybrown', 'cornflowerblue', 'gray',
                    'mediumpurple', 'lightslategrey', 'seagreen',
                    'silver', 'darkmagenta', 'darkslategrey', 'darkgoldenrod', 'rosybrown',
                    'goldenrod', 'darkturquoise', 'plum',
                    'purple', 'olive', 'gold','powderblue', 'peachpuff','violet', 'lime', 'greenyellow', 'tan', 'skyblue',
                    'magenta', 'black', 'brown', 'green', 'cyan', 'red','blue'] *100 )
colors = colors[::-1]
colors_ = itertools.cycle( colors )
#colors_ = itertools.cycle(sorted_colors_ )
markers_ = itertools.cycle( markers )
import matplotlib as mpl
# Custom colormaps
################################################################################
# Each colormap below is built from an RGB control-point list (values in 0..1)
# via LinearSegmentedColormap.from_list.
# ROYGBVR but with Cyan-Blue instead of Blue
color_list_cyclic_spectrum = [
    [ 1.0, 0.0, 0.0 ],
    [ 1.0, 165.0/255.0, 0.0 ],
    [ 1.0, 1.0, 0.0 ],
    [ 0.0, 1.0, 0.0 ],
    [ 0.0, 0.2, 1.0 ],
    [ 148.0/255.0, 0.0, 211.0/255.0 ],
    [ 1.0, 0.0, 0.0 ]
    ]
cmap_cyclic_spectrum = mpl.colors.LinearSegmentedColormap.from_list('cmap_cyclic_spectrum', color_list_cyclic_spectrum)
# classic jet, slightly tweaked
# (bears some similarity to mpl.cm.nipy_spectral)
color_list_jet_extended = [
    [0, 0, 0],
    [0.18, 0, 0.18],
    [0, 0, 0.5],
    [0, 0, 1],
    [ 0. , 0.38888889, 1. ],
    [ 0. , 0.83333333, 1. ],
    [ 0.3046595 , 1. , 0.66308244],
    [ 0.66308244, 1. , 0.3046595 ],
    [ 1. , 0.90123457, 0. ],
    [ 1. , 0.48971193, 0. ],
    [ 1. , 0.0781893 , 0. ],
    [1, 0, 0],
    [ 0.5 , 0. , 0. ],
    ]
cmap_jet_extended = mpl.colors.LinearSegmentedColormap.from_list('cmap_jet_extended', color_list_jet_extended)
# Tweaked version of "view.gtk" default color scale
color_list_vge = [
    [ 0.0/255.0, 0.0/255.0, 0.0/255.0],
    [ 0.0/255.0, 0.0/255.0, 254.0/255.0],
    [ 188.0/255.0, 2.0/255.0, 107.0/255.0],
    [ 254.0/255.0, 55.0/255.0, 0.0/255.0],
    [ 254.0/255.0, 254.0/255.0, 0.0/255.0],
    [ 254.0/255.0, 254.0/255.0, 254.0/255.0]
    ]
cmap_vge = mpl.colors.LinearSegmentedColormap.from_list('cmap_vge', color_list_vge)
# High-dynamic-range (HDR) version of VGE (starts at white so zero stands out)
color_list_vge_hdr = [
    [ 255.0/255.0, 255.0/255.0, 255.0/255.0],
    [ 0.0/255.0, 0.0/255.0, 0.0/255.0],
    [ 0.0/255.0, 0.0/255.0, 255.0/255.0],
    [ 188.0/255.0, 0.0/255.0, 107.0/255.0],
    [ 254.0/255.0, 55.0/255.0, 0.0/255.0],
    [ 254.0/255.0, 254.0/255.0, 0.0/255.0],
    [ 254.0/255.0, 254.0/255.0, 254.0/255.0]
    ]
cmap_vge_hdr = mpl.colors.LinearSegmentedColormap.from_list('cmap_vge_hdr', color_list_vge_hdr)
# Simliar to Dectris ALBULA default color-scale
color_list_hdr_albula = [
    [ 255.0/255.0, 255.0/255.0, 255.0/255.0],
    [ 0.0/255.0, 0.0/255.0, 0.0/255.0],
    [ 255.0/255.0, 0.0/255.0, 0.0/255.0],
    [ 255.0/255.0, 255.0/255.0, 0.0/255.0],
    #[ 255.0/255.0, 255.0/255.0, 255.0/255.0],
    ]
cmap_hdr_albula = mpl.colors.LinearSegmentedColormap.from_list('cmap_hdr_albula', color_list_hdr_albula)
cmap_albula = cmap_hdr_albula
# Ugly color-scale, but good for highlighting many features in HDR data
color_list_cur_hdr_goldish = [
    [ 255.0/255.0, 255.0/255.0, 255.0/255.0], # white
    [ 0.0/255.0, 0.0/255.0, 0.0/255.0], # black
    [ 100.0/255.0, 127.0/255.0, 255.0/255.0], # light blue
    [ 0.0/255.0, 0.0/255.0, 127.0/255.0], # dark blue
    #[ 0.0/255.0, 127.0/255.0, 0.0/255.0], # dark green
    [ 127.0/255.0, 60.0/255.0, 0.0/255.0], # orange
    [ 255.0/255.0, 255.0/255.0, 0.0/255.0], # yellow
    [ 200.0/255.0, 0.0/255.0, 0.0/255.0], # red
    [ 255.0/255.0, 255.0/255.0, 255.0/255.0], # white
    ]
cmap_hdr_goldish = mpl.colors.LinearSegmentedColormap.from_list('cmap_hdr_goldish', color_list_cur_hdr_goldish)
<file_sep>
######################################################################################
########Dec 16, 2015, <NAME>, <EMAIL>, CHX, NSLS-II, BNL################
########Time correlation function, include one-time, two-time, four-time##############
########Muli-tau method, array-operation method#######################################
######################################################################################
import numpy as np
import sys
import time
import skbeam.core.roi as roi
from matplotlib import gridspec
import itertools
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
# Endless style cyclers (color / marker / line style) used by the
# correlation-plotting helpers in this module.
mcolors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k','darkgoldenrod','oldlace', 'brown','dodgerblue' ])
markers = itertools.cycle(list(plt.Line2D.filled_markers))
lstyles = itertools.cycle(['-', '--', '-.','.',':'])
####################################
##Multi-Tau--One time correlation
#####################################
def autocor_one_time( num_buf, rois, imgs, num_lev=None, start_img=None, end_img=None,
                    bad_images = None, threshold=None):
    '''
    Dec 16, 2015, Y.G.@CHX
    a multi-tau code for one-time correlation function,
    add new funciton to deal with bad images, which masked intensities are still
    large than threshold
    Parameters:
        num_buf: int, number of buffer (must be even)
        rois: 2-D array, the interested roi, has the same shape as image, can be rings for saxs, boxes for gisaxs
        imgs: pims sequences, image stack
    Options:
        num_lev: int, number of level, if None: = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1
        start_img: int, None, =0
        end_img: int, None, = len(imgs)
        bad_images: list, None,bad_images list
        threshold: float, None, intensity max threshold, above which is considered as bad images
    Return:
        g2, 2D-array, shape as (tau, q)
        tau, 1D-array
    One example:
        g2, tau = autocor_one_time( num_buf, ring_mask, imgsr, num_lev=None,
                                    bad_images=None, threshold= 65500 )
    '''
    start_time = time.time()
    if start_img is None:
        start_img=0
    if end_img is None:
        try:
            end_img= len(imgs)
        except:
            end_img= imgs.length
    noframes = end_img - start_img #+ 1
    ring_mask = rois
    if num_lev is None:num_lev = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1
    nolev = num_lev
    nobuf =num_buf
    print ( 'The lev number is %s'%num_lev)
    dly, dict_dly = delays( num_lev, num_buf, time=1 )
    # number of delay slots per level, used by process_one_time to index Num
    lev_leng = np.array( [ len( dict_dly[i] ) for i in list(dict_dly.keys()) ])
    qind, pixelist = roi.extract_label_indices( ring_mask )
    noqs = np.max(qind)
    nopr = np.bincount(qind, minlength=(noqs+1))[1:]
    nopixels = nopr.sum()
    start_time = time.time()
    # masked buffers: a fully-masked/zero buffer marks a bad or missing frame
    buf = np.ma.zeros([num_lev,num_buf,nopixels])
    buf.mask = True
    cts=np.zeros(num_lev)
    # fix: keep the cyclic buffer pointers integral -- the original used a
    # float array (np.ones * num_buf) whose values were then used as array
    # indices, which raises IndexError on modern numpy
    cur=np.ones(num_lev, dtype='int') * num_buf
    countl = np.array( np.zeros( num_lev ),dtype='int')
    # fix: removed dead `g2 = np.zeros([noframes, noframes, noqs])` which
    # allocated O(noframes^2 * noqs) memory that was never read (g2 is
    # recomputed from G/IAP/IAF at the end)
    G=np.zeros( [(nolev+1)*int(nobuf/2),noqs])
    IAP=np.zeros( [(nolev+1)*int(nobuf/2),noqs])
    IAF=np.zeros( [(nolev+1)*int(nobuf/2),noqs])
    num= np.array( np.zeros( num_lev ),dtype='int')
    # per-delay count of bad-frame pairs (used to renormalize running averages)
    Num= { key: [0]* len( dict_dly[key] ) for key in list(dict_dly.keys()) }
    print ('Doing g2 caculation of %s frames---'%(noframes ))
    for n in range( start_img, end_img ): ##do the work here
        img = imgs[n]
        img_ = (np.ravel(img))[pixelist]
        # bad-frame handling: replace the frame by a fully-masked buffer
        if threshold is not None:
            if img_.max() >= threshold:
                print ('bad image: %s here!'%n)
                img_ =  np.ma.zeros( len(img_) )
                img_.mask = True
        if bad_images is not None:
            if n in bad_images:
                print ('bad image: %s here!'%n)
                img_ =  np.ma.zeros( len(img_) )
                img_.mask = True
        cur[0]=1+cur[0]%num_buf # increment buffer
        buf[0, cur[0]-1 ]= img_
        img=[] #//save space
        img_=[]
        countl[0] = 1+ countl[0]
        process_one_time(lev=0, bufno=cur[0]-1,
            G=G,IAP=IAP,IAF=IAF, buf=buf, num=num, num_buf=num_buf, noqs=noqs, qind=qind, nopr=nopr, dly=dly, Num=Num, lev_leng=lev_leng )
        processing=1
        lev=1
        while processing:
            if cts[lev]:
                prev= 1+ (cur[lev-1]-1-1+num_buf)%num_buf
                cur[lev]= 1+ cur[lev]%num_buf
                countl[lev] = 1+ countl[lev]
                bufa = buf[lev-1,prev-1]
                bufb= buf[lev-1,cur[lev-1]-1]
                # propagate bad (all-zero) buffers instead of averaging them in
                if (bufa.data==0).all():
                    buf[lev,cur[lev]-1] =  bufa
                elif (bufb.data==0).all():
                    buf[lev,cur[lev]-1] = bufb
                else:
                    buf[lev,cur[lev]-1] = ( bufa + bufb ) /2.
                cts[lev]=0
                process_one_time(lev=lev, bufno=cur[lev]-1,
                    G=G,IAP=IAP,IAF=IAF, buf=buf, num=num, num_buf=num_buf, noqs=noqs, qind=qind, nopr=nopr, dly=dly,Num =Num, lev_leng=lev_leng )
                lev+=1
                #//Since this level finished, test if there is a next level for processing
                if lev<num_lev:processing = 1
                else:processing = 0
            else:
                cts[lev]=1 #// set flag to process next time
                processing=0 #// can stop until more images are accumulated
        # fix: guard with max(1, ...) -- int(noframes/10) == 0 for
        # noframes < 10 and the original modulo then raised ZeroDivisionError
        if n % max(1, int(noframes/10)) ==0:
            sys.stdout.write("#")
            sys.stdout.flush()
    # truncate at the first never-filled delay slot (if any)
    if (len(np.where(IAP==0)[0])!=0) and ( 0 not in nopr):
        gmax = np.where(IAP==0)[0][0]
    else:
        gmax=IAP.shape[0]
    g2=(G[:gmax]/(IAP[:gmax]*IAF[:gmax]))
    elapsed_time = time.time() - start_time
    print ('Total time: %.2f min' %(elapsed_time/60.))
    return g2,dly[:gmax] #, elapsed_time/60.
def process_one_time(lev, bufno,
                     G,IAP,IAF, buf, num, num_buf,noqs,qind,nopr, dly,Num,lev_leng ):
    '''
    Accumulate one multi-tau update for autocor_one_time.

    lev     : current multi-tau level (0-based)
    bufno   : index of the newest buffer at this level; callers pass
              `cur[lev]-1`, which may be a numpy float
    G, IAP, IAF : running accumulators (correlation numerator, "past" and
              "future" mean intensities), shape (n_delays, noqs), updated in place
    buf     : masked buffer array, shape (num_lev, num_buf, npixels)
    num     : per-level update counters, updated in place
    Num     : per-level/per-delay counts of bad (all-zero) frame pairs, used to
              correct the running-average divisor
    lev_leng: number of delay slots per level, used to index Num
    dly     : unused here; kept for signature compatibility with callers
    '''
    num[lev]+=1
    # fix: cast once -- float buffer indices raise IndexError on modern numpy
    bufno = int(bufno)
    if lev==0:imin=0
    else:imin= int(num_buf/2 )
    for i in range(imin, min(num[lev],num_buf) ):
        ptr=lev*int(num_buf/2)+i
        delayno=int( (bufno-i)%num_buf) #//cyclic buffers
        IP = buf[lev,delayno]
        IF = buf[lev,bufno]
        ind = ptr - lev_leng[:lev].sum()
        IP_ = IP.copy()
        IF_ = IF.copy()
        # an all-zero buffer marks a bad frame: zero the pair out and count it
        if (IP.data ==0).all():
            IF_=np.zeros( IP.shape )
            IP_= np.zeros( IP.shape )
            Num[lev+1][ind] += 1
        if (IF.data ==0).all():
            IF_ = np.zeros( IF.shape )
            IP_= np.zeros( IF.shape )
            if (IP.data ==0).all():
                pass  # already counted above; avoid double counting the pair
            else:
                Num[lev+1][ind] += 1
        # effective number of good samples contributing to this delay slot
        norm_num = num[lev]-i - Num[lev+1][ind]
        if not (IP_ ==0).all():
            # running (incremental) per-q averages via bincount over roi labels
            G[ptr]+= ( np.bincount(qind, weights= IF_*IP_ )[1:]/nopr- G[ptr] )/ norm_num
            IAP[ptr]+= ( np.bincount(qind, weights= IP_)[1:]/nopr-IAP[ptr] )/ norm_num
            IAF[ptr]+= ( np.bincount(qind, weights= IF_)[1:]/nopr-IAF[ptr] )/ norm_num
####################################
##Multi-Tau--Two time correlation
#####################################
def autocor_two_time( num_buf, rois, imgs, num_lev=None, start_img=None, end_img=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    a multi-tau code for two-time correlation function
    Parameters:
        num_buf: int, number of buffer (must be even)
        rois: 2-D array, the interested roi, has the same shape as image, can be rings for saxs, boxes for gisaxs
        imgs: pims sequences, image stack
    Options:
        num_lev: int, number of level, if None: = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1
        start_img: int, None, =0
        end_img: int, None, = len(imgs)
    #to be done to deal with bad frames
    Return:
        g12, 3D-array, shape as ( len(imgs), len(imgs), q)
        elapsed time in minutes
    One example:
        g12, t = autocor_two_time( num_buf, ring_mask, imgsr, num_lev=None )
    '''
    if start_img is None:start_img=0
    if end_img is None:
        try:
            end_img= len(imgs)
        except:
            end_img= imgs.length
    noframes = end_img - start_img #+ 1
    ring_mask = rois
    if num_lev is None:num_lev = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1
    print ( 'The lev number is %s'%num_lev)
    dly, dict_dly = delays( num_lev, num_buf, time=1 )
    qind, pixelist = roi.extract_label_indices( ring_mask )
    noqs = np.max(qind)
    nopr = np.bincount(qind, minlength=(noqs+1))[1:]
    nopixels = nopr.sum()
    start_time = time.time()
    buf=np.zeros([num_lev,num_buf,nopixels]) #// matrix of buffers, for store img
    cts=np.zeros(num_lev)
    # fix: keep the cyclic buffer pointers integral -- the original float
    # array produced float indices, an error on modern numpy
    cur=np.ones(num_lev, dtype='int') * num_buf
    countl = np.array( np.zeros( num_lev ),dtype='int')
    g12 = np.zeros( [ noframes, noframes, noqs] )
    num= np.array( np.zeros( num_lev ),dtype='int')
    # per-level effective frame times (higher levels average two lower frames)
    time_ind ={key: [] for key in range(num_lev)}
    for n in range( start_img, end_img ): ##do the work here
        cur[0]=1+cur[0]%num_buf # increment buffer
        img = imgs[n]
        buf[0, cur[0]-1 ]= (np.ravel(img))[pixelist]
        img=[] #//save space
        countl[0] = 1+ countl[0]
        current_img_time = n - start_img +1
        process_two_time(lev=0, bufno=cur[0]-1,n=current_img_time,
                        g12=g12, buf=buf, num=num, num_buf=num_buf, noqs=noqs, qind=qind, nopr=nopr, dly=dly)
        time_ind[0].append(  current_img_time   )
        processing=1
        lev=1
        while processing:
            if cts[lev]:
                prev= 1+ (cur[lev-1]-1-1+num_buf)%num_buf
                cur[lev]= 1+ cur[lev]%num_buf
                countl[lev] = 1+ countl[lev]
                buf[lev,cur[lev]-1] = ( buf[lev-1,prev-1] + buf[lev-1,cur[lev-1]-1] ) /2.
                cts[lev]=0
                t1_idx=   (countl[lev]-1) *2
                current_img_time = ((time_ind[lev-1])[t1_idx ] +  (time_ind[lev-1])[t1_idx +1 ] )/2.
                time_ind[lev].append(  current_img_time      )
                process_two_time(lev=lev, bufno=cur[lev]-1,n=current_img_time,
                        g12=g12, buf=buf, num=num, num_buf=num_buf, noqs=noqs, qind=qind, nopr=nopr, dly=dly)
                lev+=1
                #//Since this level finished, test if there is a next level for processing
                if lev<num_lev:processing = 1
                else:processing = 0
            else:
                cts[lev]=1 #// set flag to process next time
                processing=0 #// can stop until more images are accumulated
        # fix: the original used `n % (noframes/10)` (float modulo); use the
        # same int(...) form as autocor_one_time, guarded for noframes < 10
        if n % max(1, int(noframes/10)) ==0:
            sys.stdout.write("#")
            sys.stdout.flush()
    # only the lower triangle is filled above; mirror it to get the full
    # symmetric two-time matrix
    for q in range(noqs):
        x0 =  g12[:,:,q]
        g12[:,:,q] = np.tril(x0) +  np.tril(x0).T - np.diag( np.diag(x0) )
    elapsed_time = time.time() - start_time
    print ('Total time: %.2f min' %(elapsed_time/60.))
    return g12, elapsed_time/60.
def process_two_time(lev, bufno,n ,
                        g12, buf, num, num_buf,noqs,qind,nopr, dly ):
    '''
    Write one anti-diagonal entry (or block, for levels > 0) of the two-time
    correlation matrix for autocor_two_time.

    lev   : current multi-tau level (0-based)
    bufno : index of the newest buffer at this level; may arrive as numpy float
    n     : effective frame time; an int at level 0, a half-integer float at
            higher levels (the average of two lower-level times)
    g12   : two-time matrix (noframes, noframes, noqs), updated in place
    '''
    num[lev]+=1
    # fix: float indices raise IndexError on modern numpy; cast once
    bufno = int(bufno)
    if lev==0:imin=0
    else:imin= int(num_buf/2 )
    for i in range(imin, min(num[lev],num_buf) ):
        ptr=lev*int(num_buf/2)+i
        delayno=int( (bufno-i)%num_buf ) #//cyclic buffers
        IP=buf[lev,delayno]
        IF=buf[lev,bufno]
        # per-q sums via histogram over roi labels
        I_t12 =  (np.histogram(qind, bins=noqs, weights= IF*IP))[0]
        I_t1  =  (np.histogram(qind, bins=noqs, weights= IP))[0]
        I_t2  =  (np.histogram(qind, bins=noqs, weights= IF))[0]
        tind1 = (n-1)
        tind2=(n -dly[ptr] -1)
        if not isinstance( n, int ):
            # higher level: `n` is an averaged (fractional) time, so smear the
            # value over the 2**(lev-1) underlying frame pairs
            nshift = 2**(lev-1)
            for i in range( -nshift+1, nshift +1 ):
                g12[ int(tind1 + i), int(tind2 + i) ] =I_t12/( I_t1 * I_t2) * nopr
        else:
            # fix: tind2 is a float (dly is a float array); int-cast the
            # indices as the other branch already did
            g12[ int(tind1), int(tind2) ] = I_t12/( I_t1 * I_t2) * nopr
####################################
##Array_Operation--Two time correlation
#####################################
def auto_two_Array( data, rois, data_pixel=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Compute the two-time correlation function with plain numpy matrix
    operations: one dot product per q-ring, normalized by the per-frame mean
    intensity and the ring pixel count.
    Parameters:
        data: images sequence, shape as [img[0], img[1], imgs_length]
        rois: 2-D labeled array, same shape as one image (rings for saxs, boxes for gisaxs)
    Options:
        data_pixel: optional pre-extracted 2-D array (len(images), len(qind)),
                    from Get_Pixel_Array( ).get_data( )
    Return:
        g12: a 3-D array, shape as ( imgs_length, imgs_length, q)
    One example:
        g12 = auto_two_Array( imgsr, ring_mask, data_pixel = data_pixel )
    '''
    t0 = time.time()
    qind, pixelist = roi.extract_label_indices( rois )
    noqs = len( np.unique(qind) )
    nopr = np.bincount(qind, minlength=(noqs + 1))[1:]
    if data_pixel is None:
        data_pixel = Get_Pixel_Array( data, pixelist).get_data()
    try:
        noframes = len(data)
    except:
        noframes = data.length
    g12b = np.zeros( [noframes, noframes, noqs] )
    Unitq = (noqs/10)
    proi = 0
    for qi in range(1, noqs + 1):
        members = np.where( qind == qi )[0]
        pixels_qi = data_pixel[:, members]
        # per-frame mean intensity of this ring, as a (1, noframes) row
        mean_t = (np.average( pixels_qi, axis=1 )).reshape( 1, noframes )
        g12b[:, :, qi - 1] = np.dot( pixels_qi, pixels_qi.T ) / mean_t / mean_t.T / nopr[qi - 1]
        # crude progress bar: one '#' per ~10% of q rings
        if int( qi // (Unitq) ) == proi:
            sys.stdout.write("#")
            sys.stdout.flush()
            proi += 1
    elapsed = time.time() - t0
    print ('Total time: %.2f min' %(elapsed/60.))
    return g12b
def auto_two_Array_g1_norm( data, rois, data_pixel=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Two-time correlation via numpy matrix operations, additionally returning
    the un-normalized matrix and the normalization factors, so that a
    one-time g2 identical to the multi-tau result can be recovered later.
    Parameters:
        data: images sequence, shape as [img[0], img[1], imgs_length]
        rois: 2-D labeled array, same shape as one image
    Options:
        data_pixel: optional pre-extracted 2-D array (len(images), len(qind)),
                    from Get_Pixel_Array( ).get_data( )
    Return:
        g12b_norm: 3-D array ( imgs_length, imgs_length, q), the conventional
                   two-time correlation (same as auto_two_Array)
        g12b:      3-D array, the un-normalized two-time correlation
        norms:     2-D array ( imgs_length, q), per-frame mean intensities for
                   re-normalizing later
    One example:
        g12b_norm, g12b_not_norm, norms = auto_two_Array_g1_norm( imgsr, ring_mask, data_pixel = data_pixel )
    '''
    t0 = time.time()
    qind, pixelist = roi.extract_label_indices( rois )
    noqs = len( np.unique(qind) )
    nopr = np.bincount(qind, minlength=(noqs + 1))[1:]
    if data_pixel is None:
        data_pixel = Get_Pixel_Array( data, pixelist).get_data()
    try:
        noframes = len(data)
    except:
        noframes = data.length
    g12b_norm = np.zeros( [noframes, noframes, noqs] )
    g12b = np.zeros( [noframes, noframes, noqs] )
    norms = np.zeros( [noframes, noqs] )
    Unitq = (noqs/10)
    proi = 0
    for qi in range(1, noqs + 1):
        members = np.where( qind == qi )[0]
        pixels_qi = data_pixel[:, members]
        # per-frame mean intensity of this ring, as a (1, noframes) row
        mean_t = (np.average( pixels_qi, axis=1 )).reshape( 1, noframes )
        norms[:, qi - 1] = mean_t
        g12b[:, :, qi - 1] = np.dot( pixels_qi, pixels_qi.T )
        g12b_norm[:, :, qi - 1] = g12b[:, :, qi - 1] / mean_t / mean_t.T / nopr[qi - 1]
        # crude progress bar: one '#' per ~10% of q rings
        if int( qi // (Unitq) ) == proi:
            sys.stdout.write("#")
            sys.stdout.flush()
            proi += 1
    elapsed = time.time() - t0
    print ('Total time: %.2f min' %(elapsed/60.))
    return g12b_norm, g12b, norms
####################################
##Derivation of Two time correlation
#####################################
#####################################
#get one-time
#####################################
def get_one_time_from_two_time( g12, norms=None, nopr = None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Collapse a two-time correlation function to one-time: average each
    diagonal (constant-tau line) of g12.
    Parameters:
        g12: 3-D array ( imgs_length, imgs_length, q)
    Options:
        norms: 2-D array ( imgs_length, q) of per-frame mean intensities
               (from auto_two_Array_g1_norm); when given, each diagonal mean
               is divided by the product of the two windowed intensity
               averages and nopr[q]
        nopr: 1-D array [q], the number of pixels per q (required when norms
              is given)
    Return:
        g2f12: 2-D array ( imgs_length, q), the one-time correlation
    One example:
        g2b_norm = get_one_time_from_two_time(g12b_norm, norms=None, nopr=None )
        g2b_not_norm = get_one_time_from_two_time(g12b_not_norm, norms=norms, nopr=nopr)
    '''
    m, n, noqs = g12.shape
    columns = []
    for q in range(noqs):
        y = g12[:, :, q]
        vals = []
        for tau in range(m):
            diag_mean = np.diag(y, k=int(tau)).mean()
            if norms is None:
                vals.append(diag_mean)
            else:
                yn = norms[:, q]
                left = np.average(yn[tau:])
                right = np.average(yn[: m - tau])
                vals.append(diag_mean / (left * right * nopr[q]))
        columns.append(np.array(vals).reshape(len(vals), 1))
    if not columns:
        return []
    return np.hstack(columns)
#####################################
#get one-time @different age
#####################################
def get_qedge( qstart,qend,qwidth,noqs, ):
    '''Build q-bin edges and centers.

    Given qstart, qend, qwidth and the number of bins noqs, return
    (qedge, qcenter): qcenter is noqs evenly spaced centers between qstart
    and qend, and qedge interleaves (center - qwidth/2, center + qwidth/2)
    pairs, so qedge has length 2*noqs.
    '''
    import numpy as np
    centers = np.linspace(qstart, qend, noqs)
    half = qwidth / 2
    edges = np.zeros(2 * noqs)
    edges[0::2] = centers - half   # lower edge of each bin
    edges[1::2] = centers + half   # upper edge of each bin
    return edges, centers
def rotate_g12q_to_rectangle( g12q ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Rotate a single-q two-time matrix 45 degrees anti-clockwise about its
    diagonal into a masked rectangle: each diagonal (constant tau) of the
    (N, N) input becomes a column of the (2N-1, N) output, placed every
    other row; unfilled entries stay masked.
        x-axis: taus, 0 .. N-1
        y-axis: ages, 0 (bottom) .. 2N-2 (top), with age N-1 in the middle
    One example:
        g12qr = rotate_g12q_to_rectangle(g12bm[:,:,0] )
    '''
    _, N = g12q.shape
    out = np.ma.empty((2 * N - 1, N))
    out.mask = True
    for offset in range(N):
        out[offset:(2 * N - 1 - offset):2, offset] = g12q.diagonal(offset)
    return out
def get_aged_g2_from_g12q( g12q, slice_num = 6, slice_width=5, slice_start=0, slice_end= 1 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Extract one-time correlation curves at several ages from a single-q
    two-time matrix: rotate g12q so ages run along rows, slice `slice_num`
    age bands of width `slice_width` between `slice_start` and `slice_end`
    (in pixels, 0 .. 2*imgs_length-1), and average each band over age.
    Return:
        g2_aged: dict keyed by age (pixel units); each value is a 1-D array
                 of the non-zero entries of the averaged tau line
    One example:
        g2_aged = get_aged_g2_from_g12q( g12q, slice_num =3, slice_width= 500,
                slice_start=4000, slice_end= 20000-4000 )
    '''
    rotated = rotate_g12q_to_rectangle( g12q )
    edges, centers = get_qedge( qstart=slice_start, qend=slice_end,
                                qwidth=slice_width, noqs=slice_num )
    edges, centers = np.int_(edges), np.int_(centers)
    g2_aged = {}
    for i, age in enumerate(centers):
        lo, hi = edges[2 * i: 2 * i + 2]
        band_mean = np.array( rotated[lo:hi].mean(axis=0) )
        # masked/unfilled entries average to 0; keep only real data points
        g2_aged[age] = band_mean[np.nonzero(band_mean)[0]]
    return g2_aged
def show_g12q_aged_g2( g12q, g2_aged,slice_width=10, timeperframe=1,vmin= 1, vmax= 1.25 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Plot one-time correlation function of different age with two correlation function
    Parameters:
        g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length )
        g2_aged: a dict, one time correlation function at different age
                obtained by: for example,
                g2_aged = get_aged_g2_from_g12q( g12q, slice_num =3, slice_width= 500,
                slice_start=4000, slice_end= 20000-4000 )
                the keys of dict is ages in unit of pixel
                dict[key]:
                    a 1-D array, shape as ( imgs_length ),
                    a one-q one-time correlation function
    Options:
        slice_width: int, each slice width in unit of pixel, for line width of a plot
        timeperframe: float, time per frame for axis unit
        vmin, float, matplot vmin
        vmax, float, matplot vmax
    Return:
        two plots, one for the two-time correlation, g12q,
    One example:
        show_g12q_aged_g2( g12q, g2_aged,timeperframe=1,vmin= 1, vmax= 1.22 )
    '''
    age_center = list( g2_aged.keys() )
    print ('the cut age centers are: ' +str(age_center) )
    M,N = g12q.shape
    #fig, ax = plt.subplots( figsize = (8,8) )
    figw =10
    figh = 10
    fig = plt.figure(figsize=(figw,figh))
    # left panel: two-time map; right panel: aged g2 curves
    gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8],height_ratios=[8,8]   )
    ax = plt.subplot(gs[0])
    ax1 = plt.subplot(gs[1])
    im=ax.imshow( g12q, origin='lower' , cmap='viridis',
                 norm= LogNorm( vmin, vmax ) , extent=[0, N, 0, N ] )
    # build start/end points of the constant-age cut lines (anti-diagonals)
    linS = []
    linE=[]
    linS.append( zip( [0]*len(age_center), np.int_(age_center) ))
    linE.append( zip( np.int_(age_center), [0]*len(age_center) ))
    for i, [ps,pe] in enumerate(zip(linS[0],linE[0])):
        # clip cut lines whose age exceeds N so they stay inside the axes
        if ps[1]>=N:s0=ps[1] - N;s1=N
        else:s0=0;s1=ps[1]
        if pe[0]>=N:e0=N;e1=pe[0] - N
        else:e0=pe[0];e1=0
        lined= slice_width/2. #in data width
        # convert the slice width from data units to points (72 pt per inch)
        linewidth=    (lined * (figh*72./N)) * 0.8
        ax.plot( [s0,e0],[s1,e1], linewidth=linewidth ,alpha=0.3 ) #, color=  )
    ax.set_title( '%s_frames'%(N) )
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    ax1.set_title("Aged_G2")
    for i in sorted(g2_aged.keys()):
        gx= np.arange(len(g2_aged[i])) * timeperframe
        marker = next(markers)  # advances the module-level marker cycle
        ax1.plot( gx,g2_aged[i], '-%s'%marker, label=r"$age= %.1f s$"%(i*timeperframe))
    ax1.set_ylim( vmin, vmax )
    ax1.set_xlabel(r"$\tau $ $(s)$", fontsize=18)
    ax1.set_ylabel("g2")
    ax1.set_xscale('log')
    ax1.legend(fontsize='small', loc='best' )
    #plt.show()
#####################################
#get fout-time
def get_tau_from_g12q( g12q, slice_num = 6, slice_width=1, slice_start=None, slice_end=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Extract constant-tau lines (as functions of age) from a single-q
    two-time matrix: rotate g12q so taus run along columns, slice
    `slice_num` tau bands of width `slice_width` between `slice_start`
    and `slice_end` (in pixels), and average each band over tau.
    Return:
        tau: dict keyed by tau slice center (pixel units); each value is a
             1-D array of the non-zero entries of the averaged age line
    One example:
        taus = get_tau_from_g12q( g12q, slice_num =3, slice_width= 500,
                slice_start=4000, slice_end= 20000-4000 )
    '''
    rotated = rotate_g12q_to_rectangle( g12q )
    edges, centers = get_qedge( qstart=slice_start, qend=slice_end,
                                qwidth=slice_width, noqs=slice_num )
    edges, centers = np.int_(edges), np.int_(centers)
    tau = {}
    for i, center in enumerate(centers):
        lo, hi = edges[2 * i: 2 * i + 2]
        band_mean = np.array( rotated[:, lo:hi].mean(axis=1) )
        # masked/unfilled entries average to 0; keep only real data points
        tau[center] = band_mean[np.nonzero(band_mean)[0]]
    return tau
def show_g12q_taus( g12q, taus,  slice_width=10, timeperframe=1,vmin= 1, vmax= 1.25 ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Plot tau-lines as a function of age with two correlation function
    Parameters:
        g12q: a 2-D array, one-q two correlation function, shape as ( imgs_length, imgs_length )
        tau, a dict, tau lines
            the keys of dict is tau(slice center) in unit of pixel
            dict[key]:
                a 1-D array, shape as ( tau_line-length ),
            obtained by: for example,
                taus = get_tau_from_g12q( g12b_norm[:,:,0], slice_num = 5, slice_width=1,
                    slice_start=3, slice_end= 5000-1   ))
    Options:
        slice_width: int, each slice width in unit of pixel, for line width of a plot
        timeperframe: float, time per frame for axis unit
        vmin, float, matplot vmin
        vmax, float, matplot vmax
    Return:
        two plots, one for tau lines~ages, g12q,
    One example:
        show_g12q_taus( g12b_norm[:,:,0], taus,  slice_width=50,
                timeperframe=1,vmin=1.01,vmax=1.55 )
    '''
    age_center = list( taus.keys() )
    print ('the cut tau centers are: ' +str(age_center) )
    M,N = g12q.shape
    #fig, ax = plt.subplots( figsize = (8,8) )
    figw =10
    figh = 10
    fig = plt.figure(figsize=(figw,figh))
    # left panel: two-time map with the cut lines; right panel: tau cuts
    gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8],height_ratios=[8,8]   )
    ax = plt.subplot(gs[0])
    ax1 = plt.subplot(gs[1])
    im=ax.imshow( g12q, origin='lower' , cmap='viridis',
                 norm= LogNorm( vmin= vmin, vmax= vmax ) , extent=[0, N, 0, N ] )
    # constant-tau cuts are lines parallel to the main diagonal
    linS = []
    linE=[]
    linS.append( zip( np.int_(age_center) -1, [0]*len(age_center)  ))
    linE.append( zip( [N -1]*len(age_center), N -  np.int_(age_center)  ))
    for i, [ps,pe] in enumerate(zip(linS[0],linE[0])):
        lined= slice_width  #/2. *draw_scale_tau  #in data width
        # convert the slice width from data units to points (72 pt per inch)
        linewidth=    (lined * (figh*72./N)) * 0.8
        #print (ps,pe)
        ax.plot( [ps[0],pe[0]],[ps[1],pe[1]], linewidth=linewidth ) #, color=  )
    ax.set_title( '%s_frames'%(N) )
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    ax1.set_title("Tau_Cuts_in_G12")
    for i in sorted(taus.keys()):
        gx= np.arange(len(taus[i])) * timeperframe
        marker = next(markers)  # advances the module-level marker cycle
        ax1.plot( gx,taus[i], '-%s'%marker, label=r"$tau= %.1f s$"%(i*timeperframe))
    ax1.set_ylim( vmin,vmax )
    ax1.set_xlabel(r'$t (s)$',fontsize=5)
    ax1.set_ylabel("g2")
    ax1.set_xscale('log')
    ax1.legend(fontsize='small', loc='best' )
    #plt.show()
def histogram_taus(taus, hisbin=20, plot=True,timeperframe=1):
    '''
    Dec 16, 2015, Y.G.@CHX
    Histogram each tau line (and optionally plot the histograms).
    Parameters:
        taus: dict of tau lines keyed by tau slice center (pixel units),
              e.g. from get_tau_from_g12q(...)
    Options:
        hisbin: int, number of histogram bins
        plot: if True, plot the histograms
        timeperframe: float, time per frame, used only for plot labels
    Return:
        his: dict keyed like `taus`; each value is the (counts, bin_edges)
             pair returned by np.histogram
    One example:
        his = histogram_taus(taus, hisbin=30, plot=True, timeperframe=timeperframe)
    '''
    his = {}
    for key in taus:
        his[key] = np.histogram( taus[key], bins=hisbin)
    if plot:
        fig, ax1 = plt.subplots(figsize=(8, 8))
        ax1.set_title("Tau_histgram")
        for key in sorted(his):
            # bin centers from consecutive edge midpoints
            centers = 0.5 * ( his[key][1][:-1] + his[key][1][1:])
            marker = next(markers)
            ax1.plot( centers, his[key][0], '-%s'%marker,
                      label=r"$tau= %.1f s$"%(key*timeperframe) )
        ax1.set_xlim( 1.05,1.35 )
        ax1.set_xlabel(r'$g_2$',fontsize=19)
        ax1.set_ylabel(r"histgram of g2 @ tau",fontsize=15)
        ax1.legend(fontsize='large', loc='best' )
    return his
def get_four_time_from_two_time( g12,g2=None, rois=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get four-time correlation function from two correlation function:
    the (population) variance of each diagonal of g12, normalized by
    (g2(0) - 1)**2 of the corresponding q.
    TOBEDONE: deal with bad frames
    Parameters:
        g12: a 3-D array, two correlation function, shape as ( imgs_length, imgs_length, q)
    Options:
        g2: if not None, a 2-D array, shape as ( imgs_length, q), or (tau, q)
            one-time correlation fucntion, for normalization of the four-time;
            if None, no normalization is applied (norm = 1)
        rois: if not None, a list, [x-slice-start, x-slice-end, y-slice-start, y-slice-end]
    Return:
        g4f12: a 2-D array, shape as ( imgs_length, q),
               a four-time correlation function
    One example:
        s1,s2 = 0,2000
        g4 = get_four_time_from_two_time( g12bm, g2b, roi=[s1,s2,s1,s2] )
    '''
    m,n,noqs = g12.shape
    g4f12 = []
    for q in range(noqs):
        temp=[]
        if rois is None:
            y=g12[:,:,q]
        else:
            x1,x2,y1,y2 = rois
            y=g12[x1:x2,y1:y2, q]
            m,n = y.shape
        # fix: the documented default g2=None crashed at `g2[:,q][0]`;
        # without a one-time curve, skip the normalization
        if g2 is None:
            norm = 1.0
        else:
            norm = ( g2[:,q][0] -1)**2
        for tau in range(m):
            d_ = np.diag(y,k=int(tau))
            # drop entries exactly equal to 1 (the trivial diagonal value)
            d = d_[ np.where( d_ !=1) ]
            g4 = ( d.std() )**2 /norm
            temp.append( g4 )
        temp = np.array( temp).reshape( len(temp),1)
        if q==0:
            g4f12 = temp
        else:
            g4f12=np.hstack( [g4f12, temp] )
    return g4f12
######
def make_g12_mask( badframes_list, g12_shape):
    '''
    Dec 16, 2015, Y.G.@CHX
    Build a masked-ones matrix that masks the whole row and column of every
    bad frame, for multiplying into a one-q two-time matrix.
    Parameters:
        badframes_list: list of bad frame numbers, e.g. [100, 155, 10000]
        g12_shape: shape of the one-q two-time matrix ( imgs_length, imgs_length )
    Return:
        g12_mask: masked 2-D ones array, shape ( imgs_length, imgs_length )
    One example:
        g12_mask = make_g12_mask(bad_frames, g12b[:,:,0].shape)
    '''
    rows, cols = g12_shape
    masked_ones = np.ma.ones(( rows, cols ))
    masked_ones.mask = False
    for frame in badframes_list:
        masked_ones.mask[:, frame] = True
        masked_ones.mask[frame, :] = True
    return masked_ones
def masked_g12( g12, badframes_list):
    '''
    Dec 16, 2015, Y.G.@CHX
    Mask a two-time correlation function along the rows/columns of the
    listed bad frames.
    Parameters:
        g12: 3-D array ( imgs_length, imgs_length, q)
        badframes_list: list of bad frame numbers, e.g. [100, 155, 10000]
    Return:
        g12m: masked 3-D array with the same shape as g12
    One example:
        g12m = masked_g12( g12b, bad_frames)
    '''
    frames, _, nq = g12.shape
    g12m = np.ma.empty_like( g12 )
    frame_mask = make_g12_mask( badframes_list, g12[:, :, 0].shape)
    for qi in range(nq):
        # multiplying by the masked ones propagates the bad-frame mask
        g12m[:, :, qi] = g12[:, :, qi] * frame_mask
    return g12m
def make_norms_mask(norms, badlines):
    '''Build a 1-D masked-ones vector (length = norms.shape[0]) with the
    entries listed in `badlines` masked, for masking the per-frame
    normalization array.'''
    frames, _ = norms.shape
    line_mask = np.ma.ones(frames)
    line_mask.mask = False
    for line in badlines:
        line_mask.mask[line] = True
    return line_mask
def masked_norms( norms, norms_mask):
    '''Apply a 1-D frame mask (from make_norms_mask) to every q-column of
    the ( imgs_length, q) normalization array; returns a masked array.'''
    frames, nq = norms.shape
    out = np.ma.empty_like( norms )
    for qi in range(nq):
        out[:, qi] = norms[:, qi] * norms_mask
    return out
def delays( num_lev=3, num_buf=4, time=1 ):
    '''Build the multi-tau delay (lag) table.

    Parameters:
        num_lev: int, number of multi-tau levels
        num_buf: int, buffers per level; must be even
        time: float, scale factor for the flat delay array
              (should be time between frames)
    Return:
        dly: 1-D float array of length (num_lev+1)*num_buf/2 + 1,
             scaled by `time`
        dict_dly: dict of per-level delay arrays keyed by level (1-based).
             NOTE: these are copies taken one slot earlier and before the
             `time` scaling, matching the historical behaviour.
    Raises:
        ValueError: if num_buf is odd (the original code only printed a
             warning and then silently produced an inconsistent table).
    '''
    if num_buf % 2 != 0:
        raise ValueError("num_buf must be even!!!")
    dly = np.zeros((num_lev + 1) * int(num_buf / 2) + 1)
    dict_dly = {}
    for i in range(1, num_lev + 1):
        # level 1 starts at lag 1; higher levels start at num_buf/2 + 1
        if i == 1:
            imin = 1
        else:
            imin = int(num_buf / 2) + 1
        ptr = (i - 1) * int(num_buf / 2) + np.arange(imin, num_buf + 1)
        # lags at level i are spaced by 2**(i-1) frames
        dly[ptr] = np.arange(imin, num_buf + 1) * 2 ** (i - 1)
        dict_dly[i] = dly[ptr - 1]
    dly *= time
    return dly, dict_dly
class Get_Pixel_Array(object):
    '''
    Load the ROI pixels of a whole image sequence into memory.
    get_data() returns a 2-D array shaped (len(images), len(pixelist)).
    One example:
        data_pixel = Get_Pixel_Array( imgsr, pixelist).get_data()
    '''
    def __init__(self, indexable, pixelist):
        '''
        indexable: an image sequence (list-like or pims-like)
        pixelist: 1-D array of flat pixel indices to extract
        '''
        self.indexable = indexable
        self.pixelist = pixelist
        # pims-like sequences may expose .length instead of __len__
        try:
            self.length = len(indexable)
        except:
            self.length = indexable.length

    def get_data(self):
        '''Return the extracted pixels, shape (n_images, n_pixels).'''
        out = np.zeros([self.length, len(self.pixelist)])
        for frame in range(self.length):
            out[frame] = np.ravel(self.indexable[frame])[self.pixelist]
        return out
class Reverse_Coordinate(object):
    '''Lazy wrapper over an image sequence that applies an optional mask and
    flips each frame vertically (reverses the row axis) on access.'''
    def __init__(self, indexable, mask):
        self.indexable = indexable
        self.mask = mask
        # plain lists have no .shape; fall back to [n, rows, cols]
        try:
            self.shape = indexable.shape
        except:
            self.shape = [len(indexable), indexable[0].shape[0], indexable[0].shape[1]]
        self.length = len(indexable)

    def __getitem__(self, key):
        frame = self.indexable[key]
        if self.mask is not None:
            frame = frame * self.mask
        if len(frame.shape) == 3:
            flipped = frame[:, ::-1, :]
        if len(frame.shape) == 2:
            flipped = frame[::-1, :]
        return flipped
def get_mean_intensity( data_pixel, qind):
    '''
    Dec 16, 2015, Y.G.@CHX
    Mean intensity per q-ring as a function of time (image number).
    Parameters:
        data_pixel: 2-D array (len(images), len(qind)),
                    from Get_Pixel_Array( ).get_data( )
        qind: 1-D int array of roi labels (values 1 .. n) per pixel
    Return:
        mean_inten: dict keyed by the unique qind values; each value is a
                    1-D array of length len(images)
    One example:
        mean_inten = get_mean_intensity( data_pixel, qind)
    '''
    noqs = len( np.unique(qind) )
    mean_inten = {}
    for qi in range(1, noqs + 1):
        members = np.where( qind == qi )[0]
        mean_inten[qi] = data_pixel[:, members].mean(axis=1)
    return mean_inten
def show_C12(C12, qz_ind=0, qr_ind=0, N1=None,N2=None, vmin=None, vmax=None, title=False):
    # NOTE(review): this definition is shadowed by the second `show_C12`
    # defined immediately below and is therefore dead code. It also ignores
    # its `C12` argument and reads the module-level globals `g12b`, `num_qr`,
    # `Nming`, `timeperframe`, `qz_center`, `qr_center`, none of which are
    # defined in this module -- confirm before reviving or delete.
    g12_num = qz_ind * num_qr + qr_ind
    if N1 is None:
        N1=0
    if N2 is None:
        N2=Nming
    if vmin is None:
        vmin = 1
    if vmax is None:
        vmax = 1.02
    data = g12b[N1:N2,N1:N2,g12_num]
    fig, ax = plt.subplots()
    im=ax.imshow( data, origin='lower' , cmap='viridis',
                norm= LogNorm( vmin, vmax ),
        extent=[0, data.shape[0]*timeperframe, 0, data.shape[0]*timeperframe ] )
    #ax.set_title('%s-%s frames--Qth= %s'%(N1,N2,g12_num))
    if title:
        ax.set_title('%s-%s frames--Qz= %s--Qr= %s'%(N1,N2, qz_center[qz_ind], qr_center[qr_ind] ))
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
    #plt.show()
def show_C12(C12, q_ind=0, *argv,**kwargs):
    '''
    Plot one q of a two-time correlation function.
    C12: two-time correlation function, shape [ time, time, qs]
    q_ind: int -> the nth q (SAXS); list [qz_ind, qr_ind] -> GiSAXS
           (the GiSAXS branch reads the module-level `num_qr`)
    kwargs:
        timeperframe: time interval per frame (default 1)
        N1, N2: start/end frame of the window (default 0 / full length)
        vmin, vmax: color limits (default 1 / 1.05)
        title: if True (default), show the title
    e.g.,
    show_C12(g12b, q_ind=1, N1=0, N2=500, vmin=1.05, vmax=1.07, )
    '''
    shape = C12.shape
    if isinstance(q_ind, int):
        C12_num = q_ind
    else:
        qz_ind, qr_ind = q_ind
        C12_num = qz_ind * num_qr + qr_ind
    timeperframe = kwargs.get('timeperframe', 1)
    vmin = kwargs.get('vmin', 1)
    vmax = kwargs.get('vmax', 1.05)
    N1 = kwargs.get('N1', 0)
    N2 = kwargs.get('N2', shape[0])
    title = kwargs.get('title', True)
    data = C12[N1:N2, N1:N2, C12_num]
    fig, ax = plt.subplots()
    span = data.shape[0] * timeperframe
    im = ax.imshow(data, origin='lower', cmap='viridis',
                   norm=LogNorm(vmin, vmax),
                   extent=[0, span, 0, span])
    if title:
        if isinstance(q_ind, int):
            ax.set_title('%s-%s frames--Qth= %s'%(N1,N2,C12_num))
        else:
            ax.set_title('%s-%s frames--Qzth= %s--Qrth= %s'%(N1,N2, qz_ind, qr_ind ))
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)
def plot_saxs_g2( g2, taus, *argv,**kwargs):
    '''Plot g2 results.

    g2: one-time correlation function, shape (ntau, num_rings)
    taus: the time delays
    kwargs supported:
        uid: label for the figure title (default 'uid')
        q_ring_center: q value of each ring (default 0..num_rings-1)
        vlim: [vmin, vmax] -> y-limit becomes [vmin*min(y), vmax*max(y[1:])]
        ylim / xlim: explicit axis limits (ylim wins over vlim)
    e.g.
        plot_saxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] )
    '''
    uid = kwargs.get('uid', 'uid')
    q_ring_center = kwargs.get('q_ring_center', np.arange(g2.shape[1]))
    num_rings = g2.shape[1]
    # near-square subplot grid
    sx = int(round(np.sqrt(num_rings)))
    sy = num_rings // sx if num_rings % sx == 0 else num_rings // sx + 1
    fig = plt.figure(figsize=(14, 10))
    plt.title('uid= %s'%uid,fontsize=20, y =1.06)
    plt.axis('off')
    plt.xticks([])
    plt.yticks([])
    for ring in range(num_rings):
        ax = fig.add_subplot(sx, sy, ring + 1)
        ax.set_ylabel("g2")
        ax.set_title(" Q= " + '%.5f '%(q_ring_center[ring]) + r'$\AA^{-1}$')
        y = g2[:, ring]
        ax.semilogx(taus, y, '-o', markersize=6)
        if 'ylim' in kwargs:
            ax.set_ylim(kwargs['ylim'])
        elif 'vlim' in kwargs:
            lo, hi = kwargs['vlim']
            ax.set_ylim([min(y) * lo, max(y[1:]) * hi])
        if 'xlim' in kwargs:
            ax.set_xlim(kwargs['xlim'])
    fig.tight_layout()
<file_sep>#mpiexec -n 4 python mpi_run_saxs_V0.py
from chxanalys.chx_packages import *
from chxanalys.chx_xpcs_xsvs_jupyter import run_xpcs_xsvs_single
from mpi4py import MPI
comm = MPI.COMM_WORLD
print( comm.Get_size(), comm.Get_rank() )
run_pargs= dict(
scat_geometry = 'saxs',
#scat_geometry = 'gi_saxs',
force_compress = False, #True, #False, #True,#False,
para_compress = True,
run_fit_form = False,
run_waterfall = True,#False,
run_t_ROI_Inten = True,
#run_fit_g2 = True,
fit_g2_func = 'stretched',
run_one_time = True,#False,
run_two_time = True,#False,
run_four_time = False, #True, #False,
run_xsvs=True,
att_pdf_report = True,
show_plot = False,
CYCLE = '2016_3',
#if scat_geometry == 'gi_saxs':
#mask_path = '/XF11ID/analysis/2016_3/masks/',
#mask_name = 'Nov16_4M-GiSAXS_mask.npy',
#elif scat_geometry == 'saxs':
mask_path = '/XF11ID/analysis/2016_3/masks/',
mask_name = 'Nov28_4M_SAXS_mask.npy',
good_start = 5,
#####################################for saxs
uniformq = True,
inner_radius= 0.005, #0.005 for 50 nmAu/SiO2, 0.006, #for 10nm/coralpor
outer_radius = 0.04, #0.04 for 50 nmAu/SiO2, 0.05, #for 10nm/coralpor
num_rings = 12,
gap_ring_number = 6,
number_rings= 1,
############################for gi_saxs
#inc_x0 = 1473,
#inc_y0 = 372,
#refl_x0 = 1473,
#refl_y0 = 730,
qz_start = 0.025,
qz_end = 0.04,
qz_num = 3,
gap_qz_num = 1,
#qz_width = ( qz_end - qz_start)/(qz_num +1),
qr_start = 0.0025,
qr_end = 0.07,
qr_num = 14,
gap_qr_num = 5,
definde_second_roi = True,
qz_start2 = 0.04,
qz_end2 = 0.050,
qz_num2= 1,
gap_qz_num2 = 1,
qr_start2 = 0.002,
qr_end2 = 0.064,
qr_num2 = 10,
gap_qr_num2 = 5,
#qcenters = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1
#width = 0.0002
qth_interest = 1, #the intested single qth
use_sqnorm = False,
use_imgsum_norm = True,
pdf_version = '_1' #for pdf report name
)
print( run_pargs )
give_uids= True #True
#give_uids= True
if not give_uids:
start_time, stop_time = '2016-11-29 8:41:00', '2016-11-29 13:15:00' #4uids
start_time, stop_time = '2016-11-30 17:41:00', '2016-11-30 17:46:00' #5uids
start_time, stop_time = '2016-12-1 14:27:10', '2016-12-1 14:32:10'
start_time, stop_time = '2016-12-1 14:36:20', '2016-12-1 14:41:10'
start_time, stop_time = '2016-12-1 14:50:20', '2016-12-1 14:54:10' #unknown
start_time, stop_time = '2016-12-1 15:12:20', '2016-12-1 15:16:50' #count : 1 ['c3f523'] (scan num: 10487) (Measurement: sample X1 50nm Au in 7.5% PEG_fast_series_#0 )
start_time, stop_time = '2016-12-1 15:17:00', '2016-12-1 15:20:30' #count : 1 ['54c823'] (scan num: 10497) (Measurement: sample X1 50nm Au in 7.5% PEG_slow_series_#0 )
start_time, stop_time = '2016-12-1 15:21:00', '2016-12-1 15:24:30' #count : 1 ['07041f'] (scan num: 10507) (Measurement: sample X1 50nm Au in 7.5% PEG_visibility_series_#0 )
#start_time, stop_time = '2016-12-1 15:33:00', '2016-12-1 15:36:30'
#start_time, stop_time = '2016-12-1 15:37:00', '2016-12-1 15:40:30'
#start_time, stop_time = '2016-12-1 15:41:00', '2016-12-1 15:44:30'
start_time, stop_time = '2016-12-1 15:33:00', '2016-12-1 15:44:30' #X3, 10 nm, fast/slow/vs200us
#start_time, stop_time = '2016-12-1 16:05:00', '2016-12-1 16:08:50'
#start_time, stop_time = '2016-12-1 16:09:00', '2016-12-1 16:12:30'
#start_time, stop_time = '2016-12-1 16:13:00', '2016-12-1 16:16:50'
start_time, stop_time = '2016-12-1 16:05:00', '2016-12-1 16:16:50' #X3, 10 nm, fast/slow/vs200us
#start_time, stop_time = '2016-12-1 16:20:00', '2016-12-1 16:25:50'
#start_time, stop_time = '2016-12-1 16:26:00', '2016-12-1 16:29:50'
#start_time, stop_time = '2016-12-1 16:30:00', '2016-12-1 16:35:50'
start_time, stop_time = '2016-12-1 16:20:00', '2016-12-1 16:35:50'
#X4 2.5%PEG, 10nmAu
start_time, stop_time = '2016-12-1 17:06:00', '2016-12-1 17:11:50'
start_time, stop_time = '2016-12-1 17:12:00', '2016-12-1 17:16:50'
start_time, stop_time = '2016-12-1 17:17:00', '2016-12-1 17:22:50'
start_time, stop_time = '2016-12-1 17:29:00', '2016-12-1 17:33:50'
start_time, stop_time = '2016-12-1 17:33:55', '2016-12-1 17:36:56'
start_time, stop_time = '2016-12-1 17:37:00', '2016-12-1 17:42:50'
start_time, stop_time = '2016-12-1 17:06:00', '2016-12-1 17:42:50'
start_time, stop_time = '2016-12-1 18:03:00', '2016-12-1 19:12:50'
start_time, stop_time = '2016-12-1 19:56:00', '2016-12-1 20:11:00' #for X6
start_time, stop_time = '2016-12-1 20:20', '2016-12-1 20:35:00' #for X5
start_time, stop_time = '2016-12-1 20:39', '2016-12-1 20:53:00' #for X4, 10 nm
start_time, stop_time = '2016-12-1 21:17', '2016-12-1 21:31:00' #for X3, 10 nm
start_time, stop_time = '2016-12-1 21:32', '2016-12-1 21:46:00' #for X2, 50 nm
start_time, stop_time = '2016-12-1 21:46', '2016-12-1 22:01:00' #for X1, 50 nm
start_time, stop_time = '2016-12-2 11:05', '2016-12-2 11:14:00' #for coralpor, 10,20,40,80,160,320,640
start_time, stop_time = '2016-12-2 12:03', '2016-12-2 13:28:00' #for coralpor,
start_time, stop_time = '2016-12-1 16:30:00', '2016-12-1 16:31:50' #for 10 nm, 20, for test purpose
start_time, stop_time = '2016-12-1 16:30:00', '2016-12-1 16:31:50' #for 10 nm, 20, for test purpose
#start_time, stop_time = '2016-12-01 14:31:31', '2016-12-01 14:31:40' #for test purpose
sids, uids, fuids = find_uids(start_time, stop_time)
if give_uids:
uids = [ 'b31e61', 'f05d46', '67cfa1', '52cc2d' ]
uids = ['25f88334']
uids = [ '89297ae8' ] #for saxs test
#uid = [ '96c5dd' ] #for gisaxs test
#uids =['d83fdb'] #100 frames, for test code
#uids =['1d893a' ]
# Banner-print which uid is being processed (i is the zero-based index).
def printf( i, s ):
    print('%'*40)
    print ("Processing %i-th-->uid=%s"%(i+1,s) )
    print('%'*40)
# Each MPI rank processes exactly one uid: rank r handles uids[r].
# NOTE(review): this indexes uids[comm.rank] directly -- launching with more
# ranks than uids raises IndexError.
t0=time.time()
for i in range( comm.rank, comm.rank+1 ):
    print (i)
    uid = uids[i]
    printf( i, uid )
    run_xpcs_xsvs_single( uid, run_pargs )
# run_time (from chx_packages) reports the elapsed wall time since t0.
run_time(t0)
<file_sep>from numpy import pi,sin,arctan,sqrt,mgrid,where,shape,exp,linspace,std,arange
from numpy import power,log,log10,array,zeros,ones,reshape,mean,histogram,round,int_
from numpy import indices,hypot,digitize,ma,histogramdd,apply_over_axes,sum
from numpy import around,intersect1d, ravel, unique,hstack,vstack,zeros_like
from numpy import save, load, dot
from numpy.linalg import lstsq
from numpy import polyfit,poly1d;
import sys,os
import pickle as pkl
import matplotlib.pyplot as plt
#from Init_for_Timepix import * # the setup file
import time
import struct
import numpy as np
from tqdm import tqdm
import pandas as pds
from chxanalys.chx_libs import multi_tau_lags
from chxanalys.chx_compress import Multifile, go_through_FD, pass_FD
def get_FD_end_num(FD, maxend=1e10):
    '''Probe a compressed-file reader for its last readable frame.

    Seeks frame-by-frame until FD.seekimg fails; the first failing index is
    the frame count. Leaves FD positioned back at frame 0.

    Parameters
    ----------
    FD : Multifile-like object exposing seekimg(i)
    maxend : upper bound on the probe (default 1e10)

    Returns
    -------
    The number of readable frames; if every probe up to maxend succeeds,
    the float maxend itself is returned (original behavior, kept).
    '''
    N = maxend
    for i in range(0, int(maxend)):
        try:
            FD.seekimg(i)
        # BUG FIX: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; only real errors should end the probe.
        except Exception:
            N = i
            break
    FD.seekimg(0)
    return N
def compress_timepix_data( pos, t, tbins, filename=None, md=None, force_compress=False, nobytes=2,
                          with_pickle=True ):
    ''' YG.Dev@CHX Nov 20, 2017
    Compress timepixel event data (x, y, t form).

    pos: flattened detector coordinate of each event (256 * y + x)
    t: arrival time in seconds
    tbins: time-bin width used to form frames
    filename: output path; defaults to the CHX compressed-data folder using
        md['uid']
    md: dict describing the data
    force_compress: if True, (re)compress even when a compressed file already
        exists; if False, reuse an existing compressed file.
    TODOLIST: mask is not working now

    Returns the (avg_img, imgsum, frame-count) triple.
    '''
    if filename is None:
        filename= '/XF11ID/analysis/Compressed_Data' +'/timpix_uid_%s.cmp'%md['uid']
    # Compress when forced, or when no compressed file exists yet;
    # otherwise reuse the pickled summary written alongside it.
    if force_compress or not os.path.exists(filename):
        print ("Create a new compress file with filename as :%s."%filename)
        return init_compress_timepix_data( pos, t, tbins, filename=filename, md=md, nobytes= nobytes,
                                          with_pickle=with_pickle )
    print ("Using already created compressed file with filename as :%s."%filename)
    return pkl.load( open(filename + '.pkl', 'rb' ) )
def create_timepix_compress_header( md, filename, nobytes=2, bins=1 ):
    '''
    Write the fixed 1024-byte header of a compressed timepix file
    (used by the parallel compression path).

    md : dict with beam_center_x/y, count_time, detector_distance,
         frame_time, incident_wavelength, x/y_pixel_size, sx, sy
    filename : output path (file is created/truncated)
    nobytes : bytes per stored count; forced to 8 when bins != 1
    bins : frame re-binning factor
    '''
    if bins != 1:
        # re-binned counts can exceed the int16 range
        nobytes = 8
    # '@16s8d7I916x' == 16-byte magic + 8 doubles + 7 uints + padding = 1024 bytes
    header = struct.pack('@16s8d7I916x', b'Version-COMPtpx1',
            md['beam_center_x'], md['beam_center_y'], md['count_time'], md['detector_distance'],
            md['frame_time'], md['incident_wavelength'], md['x_pixel_size'], md['y_pixel_size'],
            nobytes, md['sy'], md['sx'],
            0, 256,
            0, 256
            )
    # BUG FIX: open the file only after the header is built, and close it
    # deterministically -- the original left the handle open (and an empty
    # file behind) if struct.pack raised.
    with open(filename, 'wb') as fp:
        fp.write(header)
def init_compress_timepix_data( pos, t, tbins, filename, mask=None,
                               md = None, nobytes=2, with_pickle=True ):
    ''' YG.Dev@CHX Nov 19, 2017
    Compress timepixel event data into the 1024-byte-header sparse format.

    Events (pos, t) are binned into frames of width tbins; each frame is
    stored as (count, pixel-indices, pixel-values).

    Parameters
    ----------
    pos : flattened detector coordinate of each event
    t : arrival time of each event, in seconds
    tbins : frame width, same units as t
    filename : output path; a pickle summary is also written to
        filename + '.pkl' when with_pickle is True
    mask : TODO -- accepted but currently unused
    md : dict describing the data; an all-default 256x256 one is built
        when None
    nobytes : 2 -> counts stored as int16 ('h'), otherwise int32 ('i')

    Returns
    -------
    (avg_img, imgsum, nframes) where nframes = number of time bins - 1
    '''
    fp = open( filename,'wb' )
    if md is None:
        md={}
        md['beam_center_x'] = 0
        md['beam_center_y'] = 0
        md['count_time'] = 0
        md['detector_distance'] = 0
        md['frame_time'] = 0
        md['incident_wavelength'] =0
        md['x_pixel_size'] = 45
        md['y_pixel_size'] = 45
        md['sx'] = 256
        md['sy'] = 256
    #TODList: for different detector using different md structure, March 2, 2017,
    #8d include,
    #'bytes', 'nrows', 'ncols', (detsize)
    #'rows_begin', 'rows_end', 'cols_begin', 'cols_end' (roi)
    Header = struct.pack('@16s8d7I916x',b'Version-COMPtpx1',
            md['beam_center_x'],md['beam_center_y'], md['count_time'], md['detector_distance'],
            md['frame_time'],md['incident_wavelength'], md['x_pixel_size'],md['y_pixel_size'],
            nobytes, md['sy'], md['sx'],
            0,256,
            0,256
            )
    fp.write( Header)
    # frame (time-bin) edges over the full event time span
    tx = np.arange( t.min(), t.max(), tbins )
    N = len(tx)
    imgsum = np.zeros( N-1 )
    print('There are %s frames to be compressed...'%N)
    good_count = 0
    # BUG FIX: np.float was removed in NumPy 1.24; use the builtin float
    # (same dtype, float64).
    avg_img = np.zeros( [ md['sy'], md['sx'] ], dtype= float )
    for i in tqdm( range(N-1) ):
        # events whose times are closest to the two bin edges
        ind1 = np.argmin( np.abs( tx[i] - t) )
        ind2 = np.argmin( np.abs( tx[i+1] - t ) )
        p_i = pos[ind1: ind2]
        ps,vs = np.unique( p_i, return_counts= True )
        np.ravel(avg_img )[ps] += vs
        good_count +=1
        dlen = len(ps)
        imgsum[i] = vs.sum()
        # per-frame record: count, pixel indices, counts per pixel
        fp.write( struct.pack( '@I', dlen ))
        fp.write( struct.pack( '@{}i'.format( dlen), *ps))
        fp.write( struct.pack( '@{}{}'.format( dlen,'ih'[nobytes==2]), *vs))
    fp.close()
    avg_img /= good_count
    if with_pickle:
        pkl.dump( [ avg_img, imgsum, N-1 ], open(filename + '.pkl', 'wb' ) )
    return avg_img, imgsum, N-1
def compress_timepix_data_old( data_pixel, filename, rois=None,
                              md = None, nobytes=2 ):
    '''
    Legacy writer: dump already-binned timepixel data after the standard
    1024-byte header.

    data_pixel : raw bytes to write after the header
    filename : output path (created/truncated)
    rois : optional [y1, y2, x1, x2] region of interest
    md : dict describing the data; an all-default 256x256 one is built
        when None
    '''
    fp = open( filename,'wb' )
    try:
        if md is None:
            md={}
            md['beam_center_x'] = 0
            md['beam_center_y'] = 0
            md['count_time'] = 0
            md['detector_distance'] = 0
            md['frame_time'] = 0
            md['incident_wavelength'] =0
            md['x_pixel_size'] =25
            md['y_pixel_size'] =25
            md['sx'] = 256
            md['sy'] = 256
            md['roi_rb']= 0
            md['roi_re']= md['sy']
            md['roi_cb']= 0
            md['roi_ce']= md['sx']
        if rois is not None:
            # NOTE(review): this rois -> roi_* mapping looks swapped relative
            # to the documented [y1, y2, x1, x2] order, but the derived
            # sy/sx come out positive; kept as-is for fidelity -- confirm
            # against the reader before changing.
            md['roi_rb']= rois[2]
            md['roi_re']= rois[3]
            md['roi_cb']= rois[1]
            md['roi_ce']= rois[0]
            md['sy'] = md['roi_cb'] - md['roi_ce']
            md['sx'] = md['roi_re'] - md['roi_rb']
        #TODList: for different detector using different md structure, March 2, 2017,
        #8d include,
        #'bytes', 'nrows', 'ncols', (detsize)
        #'rows_begin', 'rows_end', 'cols_begin', 'cols_end' (roi)
        Header = struct.pack('@16s8d7I916x',b'Version-COMPtpx1',
                md['beam_center_x'],md['beam_center_y'], md['count_time'], md['detector_distance'],
                md['frame_time'],md['incident_wavelength'], md['x_pixel_size'],md['y_pixel_size'],
                nobytes, md['sy'], md['sx'],
                md['roi_rb'], md['roi_re'],md['roi_cb'],md['roi_ce']
                )
        fp.write( Header)
        fp.write( data_pixel )
    finally:
        # BUG FIX: the original never closed fp, leaking the handle and
        # risking unflushed data.
        fp.close()
class Get_TimePixel_Arrayc(object):
    '''
    Load the ROI of a timepix event stream into memory as a 2-D array.

    Events (pos, hitime) are re-binned into frames of width tbins;
    get_data() returns an array of shape (number_of_frames, len(pixelist)).

    Example:
        data_pixel = Get_TimePixel_Arrayc( pos, hitime, tbins, pixelist ).get_data()
    '''
    def __init__(self, pos, hitime, tbins, pixelist, beg=None, end=None, norm=None,flat_correction=None,
                 detx = 256, dety = 256):
        '''
        pos : flattened detector coordinate of each event
        hitime : arrival time of each event
        tbins : time-bin width used to form frames
        pixelist : 1-D array of interest pixels
        norm : per-frame normalization (e.g. incident beam intensity)
        flat_correction : per-pixel flatfield normalization
        '''
        self.hitime = hitime
        self.tbins = tbins
        # frame (time-bin) edges over the full event time span
        self.tx = np.arange( self.hitime.min(), self.hitime.max(), self.tbins )
        N = len(self.tx)
        if beg is None:
            beg = 0
        if end is None:
            end = N
        self.beg = beg
        self.end = end
        # NOTE(review): get_data() always iterates len(self.tx)-1 frames, so
        # non-default beg/end can disagree with the array size -- confirm
        # before using custom beg/end.
        self.length = self.end - self.beg
        self.pos = pos
        self.pixelist = pixelist
        self.norm = norm
        self.flat_correction = flat_correction
        self.detx = detx
        self.dety = dety

    def get_data(self ):
        '''
        Bin events into frames, restricted to the pixels of interest.
        Return: 2-D array, shape (number_of_frames, len(pixelist))
        '''
        norm = self.norm
        data_array = np.zeros([ self.length-1,len(self.pixelist)])
        print( data_array.shape)
        # map detector pixel -> 1-based slot in pixelist (0 = not of interest)
        timg = np.zeros( self.detx * self.dety, dtype=np.int32 )
        timg[self.pixelist] = np.arange( 1, len(self.pixelist) + 1 )
        n=0
        tx = self.tx
        N = len(self.tx)
        print( 'The Produced Array Length is %d.'%(N-1) )
        flat_correction = self.flat_correction
        for i in tqdm( range(N-1) ):
            # events whose times are closest to the two bin edges
            ind1 = np.argmin( np.abs( tx[i] - self.hitime ) )
            ind2 = np.argmin( np.abs( tx[i+1] - self.hitime ) )
            p_i = self.pos[ind1: ind2]
            pos,val = np.unique( p_i, return_counts= True )
            w = np.where( timg[pos] )[0]
            pxlist = timg[ pos[w] ] -1
            # BUG FIX: the two branches were swapped in the original -- it
            # divided by flat_correction only when flat_correction was None
            # (a TypeError) and skipped the correction when one was supplied.
            if flat_correction is not None:
                # normalized by flatfield
                data_array[n][ pxlist] = val[w] / flat_correction[pxlist]
            else:
                data_array[n][ pxlist] = val[w]
            if norm is not None:
                # normalized by total intensity, like an incident beam intensity
                data_array[n][ pxlist] /= norm[i]
            n += 1
        return data_array
def apply_timepix_mask( x,y,t, roi ):
    '''Keep only the events inside a rectangular ROI.

    roi = (y1, y2, x1, x2); an event survives when x1 <= x < x2 and
    y1 <= y < y2. Returns the filtered (x, y, t) triple.
    '''
    y_lo, y_hi, x_lo, x_hi = roi
    keep = (x >= x_lo) & (x < x_hi) & (y >= y_lo) & (y < y_hi)
    return x[keep], y[keep], t[keep]
def get_timepixel_data( data_dir, filename, time_unit=6.1e-12):
    '''Load a timepixel CSV and return x, y, t.

    The CSV must have columns '#Col', ' #Row', ' #ToA' (note the leading
    spaces on the last two).
    x: pos_x in pixel; y: pos_y in pixel; t: arrival time.
    t is multiplied by time_unit (default 6.1e-12, i.e. ToA ticks ->
    seconds) and shifted to start from zero.
    '''
    frame = pds.read_csv(data_dir + filename)
    col = np.array(frame['#Col'])
    row = np.array(frame[' #Row'])
    toa = np.array(frame[' #ToA']) * time_unit
    return col, row, toa - toa.min()
def get_timepixel_data_from_series( data_dir, filename_prefix,
                                   total_filenum = 72, colms = int(1e5) ):
    '''Concatenate (x, y, t) from a numbered series of timepixel CSVs,
    named <filename_prefix>_0.csv .. <filename_prefix>_<total_filenum-1>.csv.

    NOTE(review): assumes every file except the last holds exactly `colms`
    rows -- TODO confirm; a shorter intermediate file would leave zero gaps
    in the output arrays.
    '''
    x = np.zeros( total_filenum * colms )
    y = np.zeros( total_filenum * colms )
    # `zeros` here is the bare name from the module's `from numpy import ...`
    t = zeros( total_filenum * colms )
    for n in range( total_filenum):
        filename = filename_prefix + '_%s.csv'%n
        data = get_timepixel_data( data_dir, filename )
        if n!=total_filenum-1:
            ( x[n*colms: (n+1)*colms ], y[n*colms: (n+1)*colms ], t[n*colms: (n+1)*colms ] )= (
                data[0], data[1], data[2])
        else:
            #print( filename_prefix + '_%s.csv'%n )
            # last file may be partial; ln is its actual row count
            ln = len(data[0])
            #print( ln )
            ( x[n*colms: n*colms + ln ], y[n*colms: n*colms + ln ], t[n*colms: n*colms + ln ] )= (
                data[0], data[1], data[2])
    # relies on the loop variables n and ln leaking out of the for-loop
    # (n == total_filenum - 1 here); fragile but intentional.
    return x[:n*colms + ln] ,y[:n*colms + ln],t[:n*colms + ln]
def get_timepixel_avg_image( x,y,t, det_shape = [256, 256], delta_time = None ):
    '''YG.Dev@CHX, 2016
    give x,y, t data to get image in a period of delta_time (in second)
    x, pos_x in pixel
    y, pos_y in pixel
    t, arrival time
    '''
    # NOTE(review): det_shape is a mutable default argument -- safe only
    # because it is never mutated here.
    t0 = t.min()   # NOTE(review): unused; tm below is what is compared
    tm = t.max()
    if delta_time is not None:
        # NOTE(review): the docstring says delta_time is in seconds, but this
        # *1e12 scaling implies t is in picoseconds here, while
        # get_timepixel_data returns seconds -- confirm the intended unit.
        delta_time *=1e12
        if delta_time > tm:
            delta_time = tm
    else:
        delta_time = t.max()
    #print( delta_time)
    # keep only events before the cutoff; assumes t is sorted ascending so
    # that x/y can be truncated by position -- TODO confirm.
    t_ = t[t<delta_time]
    x_ = x[:len(t_)]
    y_ = y[:len(t_)]
    img = np.zeros( det_shape, dtype= np.int32 )
    # flattened pixel index, then one histogram bin per pixel
    pixlist = x_*det_shape[0] + y_
    his = np.histogram( pixlist, bins= np.arange( det_shape[0]*det_shape[1] +1) )[0]
    np.ravel( img )[:] = his
    print( 'The max photon count is %d.'%img.max())
    return img
def get_his_taus( t, bin_step ):
    '''Get taus and a histogram of photon counts.

    Parameters:
        t: time stamps of photons hitting the detector
        bin_step: bin time step, in unit of ms
    Return:
        taus (in ms, skipping the zero bin edge) and the per-bin photon
        counts.
    '''
    edges = np.arange(t.min(), t.max(), bin_step)
    counts = np.histogram(t, bins=edges)[0]
    delays = edges - edges[0]
    return delays[1:], counts
def get_multi_tau_lags( oned_count, num_bufs=8 ):
    '''Multi-tau lag steps (frame units) that fit inside the trace length.

    Uses chxanalys.chx_libs.multi_tau_lags; the number of levels is chosen
    so the deepest level still has lags shorter than the series.
    '''
    n = len( oned_count )
    # levels needed so that the largest lag covers (roughly) the trace length
    num_levels = int(np.log( n/(num_bufs-1))/np.log(2) +1) +1
    tot_channels, lag_steps, dict_lag = multi_tau_lags(num_levels, num_bufs)
    # discard lags longer than the series itself
    return lag_steps[ lag_steps < n ]
def get_timepixel_multi_tau_g2( oned_count, num_bufs=8 ):
    '''One-time correlation g2 of a 1-D count trace at multi-tau lag steps.

    g2(tau) = <I(t) I(t+tau)> / (<I_head> <I_tail>), computed per lag over
    the overlapping portion of the trace.
    '''
    npts = len(oned_count)
    lag_steps = get_multi_tau_lags(oned_count, num_bufs)
    g2 = np.zeros(len(lag_steps))
    for k, lag in enumerate(lag_steps):
        head = oned_count[: npts - lag]
        tail = oned_count[lag:]
        g2[k] = np.dot(head, tail) / (head.mean() * tail.mean() * float(npts - lag))
    return g2
def get_timepixel_c12( oned_count ):
    '''Two-time correlation matrix of a 1-D count trace.

    NOTE(review): as written, the elementwise divisions cancel the outer
    product -- C12[i,j] = (I_i*I_j) / I_i / I_j / noframes, i.e. a constant
    1/noframes wherever counts are nonzero. A two-time correlation is
    normally normalized by time-averaged intensities instead; confirm the
    intended formula before using this.
    '''
    noframes = len( oned_count)
    oned_count = oned_count.reshape( noframes, 1 )
    return np.dot( oned_count, oned_count.T) / oned_count / oned_count.T / noframes
def get_timepixel_g2( oned_count ):
    '''Full (per-frame-lag) one-time correlation of a 1-D count trace.

    g2[tau] = sum_t I(t) I(t+tau) / ((n-tau) * <I_head(tau)> * <I_tail(tau)>),
    computed for every lag 0..n-1 via np.correlate.
    '''
    npts = len(oned_count)
    # mean of the trailing / leading segment for each lag
    tail_means = np.array([np.average(oned_count[i:]) for i in range(npts)])
    head_means = np.array([np.average(oned_count[:npts - i]) for i in range(npts)])
    norm = np.arange(npts, 0, -1) * tail_means * head_means
    return np.correlate(oned_count, oned_count, mode='full')[-npts:] / norm
#########################################
# Shorthand boolean aliases used as flags by the legacy script code below.
T = True
F = False
def read_xyt_frame( n=1 ):
    ''' Load the xyt txt files:
        x,y is the detector (x,y) coordinates
        t is the time-encoder (when hitting the detector at that (x,y))
        DATA_DIR is the data filefold path (module-level global)
        DataPref is the data prefix (module-level global)
        n is file number
        the data name will be like: DATA_DIR/DataPref_0001.txt
        return the histogram of the hitting event
    '''
    import numpy as np
    ni = '%04d'%n
    fp = DATA_DIR + DataPref + '%s.txt'%ni
    # BUG FIX: genfromtxt's `skiprows` keyword was removed in NumPy >= 1.10;
    # `skip_header` is the supported equivalent.
    data = np.genfromtxt( fp, skip_header=0)[:,2] #take the time encoder
    td = np.histogram( data, bins= np.arange(11810) )[0] #do histogram
    return td
def readframe_series(n=1 ):
    ''' Universal loader name used by the correlator code; delegates to
        read_xyt_frame (returns the time-encoder histogram of file n). '''
    return read_xyt_frame( n )
class xpcs( object):
    """Legacy XPCS (X-ray Photon Correlation Spectroscopy) helper class.

    NOTE(review): most methods read module-level globals expected from an
    external set-up file (nolev, nobuf, tmax, noqs, qstart, qend, qwidth,
    qlist_, dimx, dimy, cenx, ceny, RES_DIR, FOUT, and the instance `xp`).
    Confirm the set-up file is imported before calling anything beyond
    delays() with explicit arguments.
    """

    def __init__(self):
        """ DOCUMENT __init__( )
        the initilization of the XPCS class
        """
        self.version='version_0'
        self.create_time='July_14_2015'
        self.author='Yugang_Zhang<PASSWORD>'

    def delays(self,time=1,
               nolevs=None,nobufs=None, tmaxs=None):
        ''' DOCUMENT delays(time=)
        Using the lev,buf concept, to generate array of time delays
        return array of delays.
        KEYWORD:  time: scale delays by time ( should be time between frames)
                  nolevs: lev (a integer number)
                  nobufs: buf (a integer number)
                  tmax: the max time in the calculation, usually, the noframes
        '''
        if nolevs is None:nolevs=nolev   #defined by the set-up file
        if nobufs is None:nobufs=nobuf   #defined by the set-up file
        if tmaxs is None:tmaxs=tmax      #defined by the set-up file
        if nobufs%2!=0:print ("nobuf must be even!!!" )
        # BUG FIX: use // (integer division) -- under Python 3 the old `/`
        # produced floats, making zeros() and the fancy indexing below fail.
        dly=zeros( (nolevs+1)*nobufs//2 +1 )
        dict_dly ={}
        for i in range( 1,nolevs+1):
            # first level is linear; deeper levels start at nobufs//2+1
            imin = 1 if i==1 else nobufs//2+1
            ptr=(i-1)*nobufs//2+ arange(imin,nobufs+1)
            dly[ptr]= arange( imin, nobufs+1) *2**(i-1)
            dict_dly[i] = dly[ptr-1]
        dly*=time
        dly = dly[:-1]
        # keep only delays below the requested maximum
        dly_ = dly[: where( dly < tmaxs)[0][-1] +1 ]
        self.dly=dly
        self.dly_=dly_
        self.dict_dly = dict_dly
        return dly

    def make_qlist(self):
        ''' DOCUMENT make_qlist( )
        Giving the noqs, qstart,qend,qwidth, defined by the set-up file
        return qradi: a list of q values, [qstart, ...,qend] with length as noqs
               qlist: a list of q centered at qradi with qwidth.
        KEYWORD:  noqs, qstart,qend,qwidth::defined by the set-up file
        '''
        qradi = linspace(qstart,qend,noqs)
        qlist=zeros(2*noqs)
        # (the original also pre-filled qlist with round(); those values
        # were immediately overwritten by the int_() assignments, so the
        # dead assignments have been dropped)
        qlist[::2]= int_(qradi-qwidth/2)      #render even value: ring lower bound
        qlist[1::2]= int_(qradi+(1+qwidth)/2) #render odd value:  ring upper bound
        # qlist_ is an optional override from the set-up file (NameError
        # here if the set-up file was not loaded).
        if qlist_ is not None:qlist=qlist_
        return qlist,qradi

    def calqlist(self, qmask=None ,  shape='circle' ):
        ''' DOCUMENT calqlist( qmask=,shape=, )
        calculate the equvilent pixel with a shape,
        return
            qind: the index of q
            pixellist: the list of pixle
            nopr: pixel number in each q
            nopixels: total pixel number
        KEYWORD:
            qmask, a mask file (True for masked-out pixels);
            qlist,qradi is calculated by make_qlist()
            shape='circle', give a circle shaped qlist
            shape='column', give a column shaped qlist
            shape='row', give a row shaped qlist
        '''
        qlist,qradi = self.make_qlist()
        y, x = indices( [dimy,dimx] )
        if shape=='circle':
            y_= y- ceny +1
            x_= x- cenx +1
            r= int_( hypot(x_, y_)    + 0.5  )
        elif shape=='column':
            r= x
        elif shape=='row':
            r=y
        else:pass
        r= r.flatten()
        # digitize puts pixels inside a ring into odd-numbered bins
        qind = digitize(r, qlist)
        if qmask is None:
            w = where( (qind)%2 )[0]
        else:
            a = where( (qind)%2 )[0]
            # BUG FIX: the original filtered with the global `mask` instead
            # of the `qmask` argument.
            b = where( qmask.flatten()==False )[0]
            w = intersect1d(a,b)
        nopixels=len(w)
        # BUG FIX: // keeps integer ring labels under Python 3 (qind/2 gave
        # floats).
        qind=qind[w]//2
        pixellist= ( y*dimx +x ).flatten() [w]
        nopr,bins=histogram( qind, bins= range( len(qradi) +1 ))
        return qind, pixellist,nopr,nopixels

    ###########################################################################
    ########for one_time correlation function for xyt frames
    ##################################################################
    def autocor_xytframe(self, n):
        '''Do correlation for one xyt frame--with data name as n

        NOTE(review): reads the delay list from the module-level instance
        `xp`, not from self -- confirm intended.
        '''
        dly_ = xp.dly_
        gg2=zeros( len( dly_) )
        data = read_xyt_frame( n )  #load data
        datm = len(data)
        for tau_ind, tau in enumerate(dly_):
            # BUG FIX: dly_ holds floats (after the `dly *= time` scaling);
            # Python 3 rejects float slice indices. Delays are frame counts
            # here, so cast to int.
            tau = int(tau)
            IP= data[: datm - tau]
            IF= data[tau: datm ]
            gg2[tau_ind]= dot( IP, IF )/ ( IP.mean() * IF.mean() * float( datm - tau) )
        return gg2

    def autocor(self, noframes=10):
        '''Do correlation for xyt file,
           noframes is the frame number to be correlated
        '''
        start_time = time.time()
        for n in range(1,noframes +1 ):   # the main loop for correlator
            gg2 = self.autocor_xytframe( n )
            if n==1:g2=zeros_like( gg2 )
            g2 += (  gg2 - g2 )/ float( n  )   #running average of g2
            if noframes>10:   #print progress...
                if  n %( noframes / 10) ==0:
                    sys.stdout.write("#")
                    sys.stdout.flush()
        elapsed_time = time.time() - start_time
        print ( 'Total time: %.2f min' %(elapsed_time/60.) )
        return g2

    def plot(self, y,x=None):
        '''a simple plot'''
        if x is None:x=arange( len(y))
        plt.plot(x,y,'ro', ls='-')
        plt.show()

    def g2_to_pds(self, dly, g2, tscale = None):
        '''convert g2 to a pandas frame with columns ['t', 'g2']'''
        if len(g2.shape)==1:g2=g2.reshape( [len(g2),1] )
        tn, qn = g2.shape
        # BUG FIX: xrange -> range (Python 3)
        tindex = range( tn )
        qcolumns = ['t'] + [ 'g2' ]
        if tscale is None:tscale = 1.0
        g2t = hstack( [dly[:tn].reshape(tn,1) * tscale, g2 ])
        # BUG FIX: the module imports pandas as `pds`; the original
        # referenced an undefined name `pd`.
        g2p = pds.DataFrame(data=g2t, index=tindex,columns=qcolumns)
        return g2p

    def show(self,g2p,title):
        '''Plot a g2 DataFrame (from g2_to_pds) and save it under RES_DIR.'''
        t = g2p.t
        N = len( g2p )
        ylim = [g2p.g2.min(),g2p[1:N].g2.max()]
        g2p.plot(x=t,y='g2',marker='o',ls='--',logx=T,ylim=ylim)
        plt.xlabel('time delay, ns',fontsize=12)
        plt.title(title)
        plt.savefig( RES_DIR + title +'.png' )
        plt.show()
def histogram_xyt( x, y, t, binstep=100, detx=256, dety=256 ):
    '''Bin (x, y, t) photon events into a 3-D (detx, dety, time-bin) array.

    x: coordinate-x (integer pixels)
    y: coordinate-y (integer pixels)
    t: photon hit time (integer, same units as binstep)
    binstep: time-bin width in units of t
    Returns a float array of per-voxel counts, shape (detx, dety, L).
    '''
    # number of time bins spanned by the data
    L= np.max( (t-t[0])//binstep ) + 1
    arr = np.ravel_multi_index( [x, y, (t-t[0])//binstep ], [detx, dety,L ] )
    M,N = arr.max(),arr.min()
    da = np.zeros( [detx, dety, L ]  )
    # BUG FIX: bincount(arr - N) has M - N + 1 entries, so the target index
    # range must include M; the original arange(N, M) was one short and
    # raised a shape-mismatch ValueError.
    da.flat[np.arange(N, M + 1)] = np.bincount( arr- N )
    return da
######################################################
# Disabled demo driver for the xpcs class; kept for reference only.
# (Relies on the set-up-file globals RES_DIR / FOUT and the T alias above.)
if False:
    xp=xpcs(); #use the xpcs class
    dly = xp.delays()
    if T:
        fnum = 100
        g2=xp.autocor( fnum )
        filename='g2_-%s-'%(fnum)
        save( RES_DIR + FOUT + filename, g2)
        ##g2= load(RES_DIR + FOUT + filename +'.npy')
        g2p = xp.g2_to_pds(dly,g2, tscale = 20)
xp.show(g2p,'g2_run_%s'%fnum)<file_sep>cython
databroker
numpy
matplotlib
scipy
lmfit
pillow
tifffile
pyyaml
pandas
scikit-beam
sip
#pyqt4
git+https://github.com/yugangzhang/eiger-io.git#eiger-io
git+https://github.com/NSLS-II-CHX/chxtools.git#egg=chxtools
git+https://github.com/Nikea/xray-vision.git#Xray-vision
git+https://github.com/Distrotech/reportlab.git#report lab
| eb99767ea2925b8ef2f39aea90d7fc34cb0b4b7c | [
"Markdown",
"Python",
"Text"
] | 27 | Python | yugangzhang/chxanalys | c73fea41e983540eb22fea5969df6db12602db64 | 5e4effe7655d778449fdc2f77540d97990657200 |
refs/heads/master | <repo_name>rivasyafri/PIDController<file_sep>/src/model/SerialConnection.java
/*
* Copyright (C) 2015 <NAME>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package model;
import jssc.SerialPort;
import jssc.SerialPortException;
import jssc.SerialPortTimeoutException;
/**
 * Wraps a single jssc {@link SerialPort} used by this program: opening,
 * closing, and exchanging raw string data with the device.
 *
 * @author <NAME>
 */
public class SerialConnection {

    private final SerialPort serialPort;
    private final int baudRate;

    /**
     * Create a new instance of serial connection to port
     * @param COM selected port names
     * @param baudRate line speed in bits per second
     */
    public SerialConnection(String COM, int baudRate) {
        this.serialPort = new SerialPort(COM);
        this.baudRate = baudRate;
    }

    /**
     * Open the serial connection with 8 data bits, 1 stop bit, no parity.
     * @throws SerialPortException if the selected device is busy or not
     * connected
     */
    public void open() throws SerialPortException {
        serialPort.openPort();
        serialPort.setParams(baudRate, 8, 1, 0);
    }

    /**
     * Close the serial connection.
     * @throws SerialPortException if the selected device is busy or not
     * connected
     */
    public void close() throws SerialPortException {
        serialPort.closePort();
    }

    /**
     * Read a fixed 5-byte chunk from the device (1000 ms timeout) and strip
     * surrounding whitespace.
     * @return the trimmed data as a String
     * @throws SerialPortException if the selected device is busy or not
     * connected
     * @throws SerialPortTimeoutException if no data arrives within the timeout
     */
    public String readData() throws SerialPortException, SerialPortTimeoutException {
        return new String(serialPort.readBytes(5, 1000)).trim();
    }

    /**
     * Write data to the device.
     * @param action string that will be written as raw bytes
     * @throws SerialPortException if the selected device is busy or not
     * connected
     */
    public void writeData(String action) throws SerialPortException {
        serialPort.writeBytes(action.getBytes());
    }
}
<file_sep>/src/controller/Main.java
/*
* Copyright (C) 2015 RivaSyafri
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package controller;
import java.awt.event.ActionEvent;
import javax.swing.Timer;
import view.Dashboard;
/**
 * Application entry point: applies the Windows look and feel when available,
 * shows the Dashboard on the event-dispatch thread, and refreshes it once
 * per second.
 *
 * @author RivaSyafri
 */
public class Main {
    public static void main(String args[]) {
        // Prefer the Windows look and feel; silently keep the default when
        // it is not installed (same fallback as the Nimbus boilerplate).
        try {
            for (javax.swing.UIManager.LookAndFeelInfo lafInfo : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Windows".equals(lafInfo.getName())) {
                    javax.swing.UIManager.setLookAndFeel(lafInfo.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Dashboard.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        // All Swing work happens on the event-dispatch thread.
        java.awt.EventQueue.invokeLater(() -> {
            Dashboard dashboard = new Dashboard();
            dashboard.setVisible(true);
            // Poll/refresh the dashboard once per second.
            Timer refreshTimer = new Timer(1000, (ActionEvent evt) -> dashboard.updateDashboard());
            refreshTimer.start();
        });
    }
}
| 185eeaf6a27d44129f5d30dab0cb9b0f14cbd337 | [
"Java"
] | 2 | Java | rivasyafri/PIDController | a803387a77bef1436849564c650e3c92c2808041 | 393b31236e4e64aeaded14e35c8e3d1d471ac7c1 |
refs/heads/master | <repo_name>MASabbe/awilelang<file_sep>/README.md
# awilelang
Aplikasi AWI Realtime Lelang Menggunakan Framework ExpressJS dan Realtime Database Firebase
AWI Realtime Auction Application Using ExpressJS as Server Framework, AngularJS as Client Framework and Firebase as Realtime Database
<file_sep>/functions/index.js
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const express = require('express');
const engines = require('consolidate');
const path = require('path');
const session = require('express-session');
const bodyParser = require('body-parser');
const cookieParser = require('cookie-parser');
const serviceAccount = require('./service_account.json');
const cors = require('cors')({origin: true});
const app = express();
// Initialize the Firebase Admin SDK with the service-account credentials.
const firebaseApp = admin.initializeApp({
    credential: admin.credential.cert(serviceAccount)
});
// Handlebars templating via consolidate.
app.engine('hbs', engines.handlebars);
app.set('views', './views');
app.set('view engine', 'hbs');
app.use(bodyParser.json()); // for parsing application/json
app.use(bodyParser.urlencoded({ extended: true })); // for parsing
app.use(cookieParser());
// CORS configured above with origin: true (reflects the request origin).
app.use(cors);
/**/
// Log an error for the given user and answer HTTP 500.
// BUG FIX: the original logged the undefined identifier `error` instead of
// the `err` parameter, which itself raised a ReferenceError.
// NOTE(review): `res` is not defined in this scope — it must be supplied by
// the caller's scope (or passed in) before this helper can actually send a
// response; the signature is left unchanged to keep call sites compatible.
const handleErrors = (username, err) => {
    console.error({User: username}, err);
    res.sendStatus(500);
    return;
}
// Log the outgoing response for `username` and send it: a JSON body with
// status 200 when `body` is provided, otherwise just the bare status code.
// NOTE(review): `res` is not defined in this scope — it has to come from the
// caller's environment for this helper to work; confirm the intended wiring.
// (The misspelled name "handleRespons" is kept so existing callers match.)
const handleRespons = (username, status, body) => {
    console.log({User: username}, {
        Response: {
            Status: status,
            Body: body,
        },
    });
    if (body) {
        res.status(200).json(body);
        return;
    }
    res.sendStatus(status);
    return;
}
/*
const validateFirebaseIdToken = async (req, res, next) => {
console.log('Check if request is authorized with Firebase ID token');
if ((!req.headers.authorization || !req.headers.authorization.startsWith('Bearer ')) &&
!(req.cookies && req.cookies.__session)) {
console.error('No Firebase ID token was passed as a Bearer token in the Authorization header.',
'Make sure you authorize your request by providing the following HTTP header:',
'Authorization: Bearer <Firebase ID Token>',
'or by passing a "__session" cookie.');
res.status(403).send('Unauthorized');
return;
}
let idToken;
if (req.headers.authorization && req.headers.authorization.startsWith('Bearer ')) {
console.log('Found "Authorization" header');
// Read the ID Token from the Authorization header.
idToken = req.headers.authorization.split('Bearer ')[1];
} else if(req.cookies) {
console.log('Found "__session" cookie');
// Read the ID Token from cookie.
idToken = req.cookies.__session;
} else {
// No cookie
res.status(403).send('Unauthorized');
return;
}
try {
const decodedIdToken = await admin.auth().verifyIdToken(idToken);
console.log('ID Token correctly decoded', decodedIdToken);
req.user = decodedIdToken;
next();
} catch (error) {
console.error('Error while verifying Firebase ID token:', error);
res.status(403).send('Unauthorized');
}
};
*/
// GET /admin/ — cached admin login page, rendered from views/index.hbs.
app.get('/admin/', (req, res)=>{
    res.set('Cache-Control', 'public, max-age=300, s-maxage=600');
    res.render('index')
});
// POST /admin/ — mint a Firebase custom token (carrying an `administrator`
// claim) for the uid supplied in the request body and return it as JSON.
app.post('/admin/', (req, res)=>{
    console.log('post login '+req.body.uid);
    var UID = req.body.uid;
    var additional = {administrator : true};
    admin.auth().createCustomToken(UID, additional)
        .then(function(customToken) {
            // BUG FIX: the original never responded, leaving the client to
            // hang until timeout. Send the token back as promised by the
            // original "Send token back to client" placeholder comment.
            res.status(200).json({ token: customToken });
        })
        .catch(function(error) {
            console.log("Error creating custom token:", error);
            // Also report the failure to the client instead of hanging.
            res.status(500).json({ error: 'Could not create custom token' });
        });
});
// GET /admin/home — cached admin dashboard page (views/dashboard.hbs).
app.get('/admin/home', (req, res)=>{
    res.set('Cache-Control', 'public, max-age=300, s-maxage=600');
    res.render('dashboard');
});
// GET / — cached public home page (views/home.hbs).
app.get('/', (req, res)=>{
    res.set('Cache-Control', 'public, max-age=300, s-maxage=600');
    res.render('home');
});
// Expose the whole Express app as one HTTPS Cloud Function named "app".
exports.app = functions.https.onRequest(app);
<file_sep>/public/javascripts/admin.js
// AngularJS bootstrap for the admin SPA: declares the `myAdmin` module and
// registers the /admin/ client-side route.
(function () {
    // BUG FIX: the original directive was the string 'usestrict', which does
    // NOT enable strict mode; it must read exactly 'use strict'.
    'use strict';
    var admin = angular.module("myAdmin", ["ngRoute"]);
    admin.config(["$routeProvider", "$locationProvider", function ($routeProvider, $locationProvider) {
        // BUG FIX: the original had a stray ';' inside the route object
        // literal ( templateUrl: "...". ; ), which is a SyntaxError and
        // prevented the whole script from loading.
        $routeProvider.when('/admin/', {
            templateUrl: "/admin/index.html"
        });
    }]);
})();
| 20e154e9722b602242ad4b3049144bc75601440e | [
"Markdown",
"JavaScript"
] | 3 | Markdown | MASabbe/awilelang | fddfa7eaaccfcbfa6438fc9102e1c2e38a649628 | 5c4896656ade92c4f8f05a4998685c8afb6736c7 |
refs/heads/master | <repo_name>aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados<file_sep>/src/main/java/com/challenge/entity/Challenge.java
package com.challenge.entity;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
/**
 * JPA entity for the "challenge" table: a coding challenge that can be used
 * by several accelerations and receive many submissions.
 */
@Entity
@EntityListeners(AuditingEntityListener.class)
@Table(name = "challenge")
@Getter
@Setter
// FIX: exclude the mutable, lazily-loaded collections from equals/hashCode
// as well (they were already excluded from toString). Including them made
// hashCode unstable and could touch uninitialised persistence proxies.
@EqualsAndHashCode(exclude = {"accelerations", "submissions"})
@ToString(exclude = {"accelerations", "submissions"})
public class Challenge implements Serializable {

    private static final long serialVersionUID = 1L;

    // Surrogate primary key, generated from a database sequence.
    @Id
    @GeneratedValue(strategy=GenerationType.SEQUENCE)
    private Integer id;

    // Human-readable challenge name (required, up to 100 chars).
    @Column(length=100)
    @NotNull
    @Size(max = 100)
    private String name;

    // URL-friendly identifier (required, up to 50 chars).
    @Column(length=50)
    @NotNull
    @Size(max = 50)
    private String slug;

    // Creation timestamp filled in by Spring auditing; never updated.
    @Column(updatable = false)
    @CreatedDate
    private Timestamp createdAt;

    // Submissions made for this challenge (joined via challenge_id FK).
    @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true)
    @JoinColumn(name = "challenge_id")
    private Set<Submission> submissions;

    // Accelerations referencing this challenge (inverse side of ManyToOne).
    @OneToMany(mappedBy = "challenge", cascade = CascadeType.ALL, orphanRemoval = true)
    private Set<Acceleration> accelerations;

    /** Required by JPA. */
    public Challenge() {
    }

    /** Convenience constructor for the simple column values. */
    public Challenge(Integer id, String name, String slug, Timestamp createdAt) {
        this.id = id;
        this.name = name;
        this.slug = slug;
        this.createdAt = createdAt;
    }
}
<file_sep>/README.md
# Criando Entidades de Banco de Dados em Java (ORM Java)
<p>
<img alt="GitHub top language" src="https://img.shields.io/github/languages/top/aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados">
<a href="https://github.com/aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados">
<img alt="Made by" src="https://img.shields.io/badge/made%20by-adriano%20avelino-gree">
</a>
<img alt="Repository size" src="https://img.shields.io/github/repo-size/aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados">
<a href="https://github.com/EliasGcf/readme-template/commits/master">
<img alt="GitHub last commit" src="https://img.shields.io/github/last-commit/aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados">
</a>
</p>
O Aceleradev é um treinamento da Codenation com duração de 10 semanas de imersão em programação — no meu caso, Java, online. Esse é o desafio do quarto módulo, que tem como conteúdo:
- Criação de banco de dados
- Modelagem de banco de dados
- JPA e Hibernate
## Desafio (Criando Entidades de Banco de Dados em Java)
Este desafio contém um projeto Maven pré-configurado com Springboot, Spring-JPA e banco de dados H2.
Abaixo você encontra um Modelo Entidade Relacionamento de um banco de dados utilizado pela Code Nation.
[Clique aqui para visualizar o modelo de entidade relacionamento](https://codenation-challenges.s3-us-west-1.amazonaws.com/java-9/codenation-sample.png)
Você utilizará o package `entity` para criar as entidades JPA para o modelo proposto.
Informações adicionais:
- O projeto deve `buildar` e `iniciar` sem erros
- Verifique os *logs* para ter certeza de que suas tabelas foram criadas corretamente
- Utilize o comando `mvnw spring-boot:run` para iniciar a aplicação
O que será avaliado:
- Colunas não nulas
- Tamanho das colunas
- Relacionamento entre tabelas (bidirecionalmente)
- Nome de tabelas e colunas
<file_sep>/src/main/java/com/challenge/entity/Acceleration.java
package com.challenge.entity;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityListeners;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import lombok.Getter;
import lombok.Setter;
/**
 * JPA entity for the "acceleration" table: an acceleration program that
 * points at one {@link Challenge} and owns a set of candidates.
 */
@Entity
@EntityListeners(AuditingEntityListener.class)
@Table(name = "acceleration")
@Getter
@Setter
public class Acceleration implements Serializable {

    private static final long serialVersionUID = 1L;

    // Surrogate primary key, generated from a database sequence.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE)
    private Integer id;

    // Human-readable acceleration name (required, up to 100 chars).
    @Column(length = 100)
    @NotNull
    @Size(max = 100)
    private String name;

    // URL-friendly identifier (required, up to 50 chars).
    @Column(length = 50)
    @NotNull
    @Size(max = 50)
    private String slug;

    // Creation timestamp filled in by Spring auditing; never updated.
    @Column(updatable = false)
    @CreatedDate
    private Timestamp createdAt;

    // Owning side of the bidirectional link to Challenge (challenge_id FK).
    @ManyToOne
    @JoinColumn(name = "challenge_id",
            foreignKey = @ForeignKey
    )
    private Challenge challenge;

    // Candidates enrolled in this acceleration (joined via acceleration_id FK).
    @OneToMany(cascade = CascadeType.ALL, orphanRemoval = true)
    @JoinColumn(name = "acceleration_id")
    private Set<Candidate> candidates ;

    /** Required by JPA. */
    public Acceleration() {
    }

    /** Convenience constructor for the simple column values. */
    public Acceleration(Integer id, String name, String slug, Timestamp createdAt) {
        this.id = id;
        this.name = name;
        this.slug = slug;
        this.createdAt = createdAt;
    }
}
| 0c0eab7c348aa9209241bd6a69e64488c88d5d02 | [
"Markdown",
"Java"
] | 3 | Java | aceleradev-java/desafio-aceleradev-java-criando-entidade-banco-dados | f8fcfab2bb5921d48c1be795c37b1887facb1449 | 10e1cb8fc989241cc143e18e0cc6d963f9e7281f |
refs/heads/master | <repo_name>rezafahlavi/perpus-latihan<file_sep>/src/dataperpus/login.java
package dataperpus;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.swing.JOptionPane;
/**
 * Login window for the library (perpus) application: asks for a username
 * and password and checks them against the `tblogin` table; on success it
 * opens the main menu window.
 */
public class login extends javax.swing.JFrame {

    /** Builds the form, centers the window on screen and sets its title. */
    public login() {
        initComponents();
        setLocationRelativeTo(null);
        setTitle("Menu Login");
    }

    /**
     * Checks the given credentials against the database; on a match this
     * window is disposed and the main menu is shown, otherwise an error
     * dialog is displayed. Uses a parameterized query (safe from SQL
     * injection for these inputs).
     *
     * NOTE(review): the password appears to be compared in plain text —
     * confirm whether hashing is expected. The statement and result set are
     * never closed here, and the shared connection stays open for the app's
     * lifetime.
     */
    public void validasi(String username, String pass){
        String query = "SELECT * FROM tblogin WHERE username = ? AND pass = ?";
        PreparedStatement statement;
        Connection connection;
        try {
            connection = koneksinya.GetConnection();
            statement = connection.prepareStatement(query);
            statement.setString(1, username);
            statement.setString(2, pass);
            ResultSet rs = statement.executeQuery();
            if (rs.next()) {
                // Credentials matched: close the login window, open the menu.
                this.dispose();
                new menu().setVisible(true);
            } else {
                JOptionPane.showMessageDialog(this, "Username dan Password tidak valid");
            }
        } catch (SQLException err) {
            JOptionPane.showMessageDialog(this, "Terjadi kesalahan : " + err.getMessage());
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel1 = new javax.swing.JPanel();
        jLabel3 = new javax.swing.JLabel();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        txtname = new javax.swing.JTextField();
        txtpass = new javax.swing.JPasswordField();
        btnlogin = new javax.swing.JButton();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);

        jPanel1.setBackground(new java.awt.Color(32, 214, 51));
        jPanel1.setBorder(new javax.swing.border.MatteBorder(null));

        jLabel3.setText("Menu Login");

        jLabel1.setText("Username");

        jLabel2.setText("Password");

        txtname.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                txtnameActionPerformed(evt);
            }
        });

        btnlogin.setText("Login");
        btnlogin.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnloginActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(59, 59, 59)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jLabel2)
                    .addComponent(jLabel1))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 31, Short.MAX_VALUE)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(btnlogin, javax.swing.GroupLayout.PREFERRED_SIZE, 78, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(txtpass, javax.swing.GroupLayout.PREFERRED_SIZE, 140, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(txtname, javax.swing.GroupLayout.PREFERRED_SIZE, 140, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(67, 67, 67))
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(140, 140, 140)
                .addComponent(jLabel3)
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jLabel3)
                .addGap(41, 41, 41)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(txtname, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(40, 40, 40)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(txtpass, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel2))
                .addGap(18, 18, 18)
                .addComponent(btnlogin)
                .addContainerGap(70, Short.MAX_VALUE))
        );

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Login button handler: validates that both fields are filled before
    // hitting the database via validasi().
    private void btnloginActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnloginActionPerformed
        String user = txtname.getText();
        String pass = String.valueOf(txtpass.getPassword());
        if(user.trim().equals("")){
            JOptionPane.showMessageDialog(this, "Username Tidak Boleh KOSONG");
        } else if(pass.trim().equals("")){
            JOptionPane.showMessageDialog(this, "Password T<PASSWORD>");
        } else {
            validasi(user, pass);
        }
    }//GEN-LAST:event_btnloginActionPerformed

    // Intentionally empty: generated handler for Enter in the username field.
    private void txtnameActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtnameActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_txtnameActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(login.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(login.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(login.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(login.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new login().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btnlogin;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JTextField txtname;
    private javax.swing.JPasswordField txtpass;
    // End of variables declaration//GEN-END:variables
}
<file_sep>/src/dataperpus/koneksinya.java
package dataperpus;
import com.mysql.jdbc.Driver;
import java.sql.DriverManager;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
*
// * @author Reza
*/
public class koneksinya {
private static Connection koneksi;
public static Connection GetConnection()throws SQLException{
if (koneksi==null){
new Driver();
koneksi=DriverManager.getConnection("jdbc:mysql://localhost:3306/databaseperpus","root","");
}
return koneksi;
}
}<file_sep>/nbproject/private/private.properties
compile.on.save=true
user.properties.file=/home/reza/.netbeans/8.2/build.properties
<file_sep>/src/dataperpus/Tbbuku.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dataperpus;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.Serializable;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Transient;
/**
*
* @author reza
*/
/**
 * JPA entity for the "tbbuku" (book) table, with JavaBeans property-change
 * support so Swing bindings can observe field updates.
 */
@Entity
@Table(name = "tbbuku", catalog = "databaseperpus", schema = "")
@NamedQueries({
    @NamedQuery(name = "Tbbuku.findAll", query = "SELECT t FROM Tbbuku t")
    , @NamedQuery(name = "Tbbuku.findBySeri", query = "SELECT t FROM Tbbuku t WHERE t.seri = :seri")
    , @NamedQuery(name = "Tbbuku.findByJudul", query = "SELECT t FROM Tbbuku t WHERE t.judul = :judul")
    , @NamedQuery(name = "Tbbuku.findByPenerbit", query = "SELECT t FROM Tbbuku t WHERE t.penerbit = :penerbit")})
public class Tbbuku implements Serializable {

    // Not persisted: notifies listeners whenever a setter changes a field.
    @Transient
    private PropertyChangeSupport changeSupport = new PropertyChangeSupport(this);

    private static final long serialVersionUID = 1L;

    // Primary key: the book's serial number.
    @Id
    @Basic(optional = false)
    @Column(name = "Seri")
    private String seri;

    // Book title (required).
    @Basic(optional = false)
    @Column(name = "Judul")
    private String judul;

    // Publisher name (required).
    @Basic(optional = false)
    @Column(name = "Penerbit")
    private String penerbit;

    /** Required by JPA. */
    public Tbbuku() {
    }

    public Tbbuku(String seri) {
        this.seri = seri;
    }

    public Tbbuku(String seri, String judul, String penerbit) {
        this.seri = seri;
        this.judul = judul;
        this.penerbit = penerbit;
    }

    public String getSeri() {
        return seri;
    }

    public void setSeri(String seri) {
        String oldSeri = this.seri;
        this.seri = seri;
        changeSupport.firePropertyChange("seri", oldSeri, seri);
    }

    public String getJudul() {
        return judul;
    }

    public void setJudul(String judul) {
        String oldJudul = this.judul;
        this.judul = judul;
        changeSupport.firePropertyChange("judul", oldJudul, judul);
    }

    public String getPenerbit() {
        return penerbit;
    }

    public void setPenerbit(String penerbit) {
        String oldPenerbit = this.penerbit;
        this.penerbit = penerbit;
        changeSupport.firePropertyChange("penerbit", oldPenerbit, penerbit);
    }

    @Override
    public int hashCode() {
        int hash = 0;
        hash += (seri != null ? seri.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Tbbuku)) {
            return false;
        }
        Tbbuku other = (Tbbuku) object;
        if ((this.seri == null && other.seri != null) || (this.seri != null && !this.seri.equals(other.seri))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        // FIX: the original reported the wrong package ("sementara.Tbbuku"),
        // a leftover from another project; this class lives in `dataperpus`.
        return "dataperpus.Tbbuku[ seri=" + seri + " ]";
    }

    public void addPropertyChangeListener(PropertyChangeListener listener) {
        changeSupport.addPropertyChangeListener(listener);
    }

    public void removePropertyChangeListener(PropertyChangeListener listener) {
        changeSupport.removePropertyChangeListener(listener);
    }
}
<file_sep>/README.md
# perpus-latihan
latihan repository
<p><marquee>
tes tesssssssssssssssssss</marquee></p>
| 11c0de315beafda6b24d6fb26854e7530e00570e | [
"Markdown",
"Java",
"INI"
] | 5 | Java | rezafahlavi/perpus-latihan | 809c77e539bf1a3b96bdfae0c07fe69eaa1857f4 | e880c30eabd18ca1328877c4f165dc5ebcefbbb5 |
refs/heads/master | <repo_name>akosma/Notitas<file_sep>/Classes/Helpers/MNONotifications.h
//
// MNONotifications.h
// Notitas
//
// Created by Adrian on 9/21/11.
// Copyright 2011 akosma software. All rights reserved.
//
// Names of the NSNotification events posted by the app; the constants
// themselves are defined in the matching .m file.
extern NSString * const MNOChangeColorNotification;
extern NSString * const MNOChangeFontNotification;
extern NSString * const MNOCoreDataManagerNoteImportedNotification;
<file_sep>/Classes/Helpers/MNOFontCode.h
//
// MNOFontCode.h
// Notitas
//
// Created by Adrian on 7/23/09.
// Copyright 2009 akosma software. All rights reserved.
//
// Persistable codes for the fonts a note can use; see fontNameForCode()
// below for the mapping to concrete font names.
typedef enum
{
    MNOFontCodeHelvetica = 0,
    MNOFontCodeTimes = 1,
    MNOFontCodeCourier = 2,
    MNOFontCodeComic = 3
} MNOFontCode;
// Maps a font code to the corresponding font name; any unknown code
// falls back to "Helvetica", exactly as the enum's default case.
static NSString *fontNameForCode(MNOFontCode code)
{
    switch (code)
    {
        case MNOFontCodeTimes:
            return @"TimesNewRomanPSMT";
        case MNOFontCodeCourier:
            return @"Courier";
        case MNOFontCodeComic:
            return @"MarkerFelt-Thin";
        case MNOFontCodeHelvetica:
        default:
            return @"Helvetica";
    }
}
<file_sep>/README.markdown
Notitas
=======
[Notitas][1] is a simple post-it application by [akosma software][2] for
iOS devices.
It is an iPhone-only app, currently being upgraded to iOS 5 as
a Universal binary.
Resources
---------
The corkboard texture is taken from [DeviantArt][3].
[1]:http://muchasnotitas.com/
[2]:http://akosma.com/
[3]:http://enchantedgal-stock.deviantart.com/art/Corkboard-Wood-Cork-Composite-49823242
<file_sep>/Classes/Helpers/MNOColorCode.h
//
// MNOColorCode.h
// Notitas
//
// Created by Adrian on 7/22/09.
// Copyright 2009 akosma software. All rights reserved.
//
// Persistable codes for the background colors a note can use.
typedef enum
{
    MNOColorCodeYellow = 0,
    MNOColorCodeGreen = 1,
    MNOColorCodeRed = 2,
    MNOColorCodeBlue = 3
} MNOColorCode;
| d5e3bf97683e36cc73bc2fcae2b719d066f95d15 | [
"Markdown",
"C"
] | 4 | C | akosma/Notitas | 37ecd54718a95537a26cfc2b26a0091dbbc50234 | 8809bc77b521ab2341c79fcc9dfd5316812ca056 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.