branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/main | <file_sep>#!/usr/bin/env python
import os
import sys
from subprocess import call
from io import BytesIO
import rospy
import rosgraph
from std_msgs.msg import String
from sensor_msgs.msg import Image as ROSImage
from ros_picam.srv import *
from time import sleep
from datetime import datetime
from picamera import PiCamera
from PIL import Image
import cv2
from cv_bridge import CvBridge
import numpy as np
import taskboard_detection as tb
# Module-level publisher for processed images, shared by the service handlers below.
pub = rospy.Publisher("picam_output",ROSImage,queue_size=10)
def setup_camera(label, w=1920, h=1080, fps=30, rotation=0, iso=500):
    """Create a PiCamera, let AGC settle, then freeze exposure and white balance.

    The `label` parameter is currently unused (annotation is disabled).
    Returns the configured PiCamera instance.
    """
    cam = PiCamera(resolution=(w, h), framerate=fps)
    cam.iso = iso
    cam.rotation = rotation
    # Give the automatic gain control time to settle before locking values.
    sleep(2)
    # Freeze exposure and white balance so every capture is consistent.
    cam.shutter_speed = cam.exposure_speed
    cam.exposure_mode = 'off'
    gains = cam.awb_gains
    cam.awb_mode = 'off'
    cam.awb_gains = gains
    return cam
def get_time_string(include_date=False):
    """Return a filename-safe timestamp; prefix the date when requested."""
    now = datetime.now()
    time_part = "{:02}-{:02}-{:02}-{:06}".format(now.hour, now.minute, now.second, now.microsecond)
    if not include_date:
        return time_part
    date_part = "{:02}-{:02}-{:02}".format(now.month, now.day, now.year - 2000)
    return "{}--{}".format(date_part, time_part)
def grab_still(req):
    """Service handler: capture req.number JPEG stills and save them locally.

    Uses the module-level `camera` and `NODE_NAME`. Returns GrabStillResponse.
    """
    count = max(req.number, 1)  # at least one capture
    tag = 'grab'  # make configurable?
    rospy.loginfo("GRAB STILL: {}".format(count))
    done = False
    for idx in range(count):
        buf = BytesIO()
        camera.capture(buf, format='jpeg')
        # "Rewind" the stream so PIL can read what was just written.
        buf.seek(0)
        shot = Image.open(buf)
        outname = '{}-{}-{}.jpg'.format(NODE_NAME, tag, get_time_string())
        shot.save(outname)
        rospy.loginfo("{} grabbing still {}/{} -> {}".format(NODE_NAME, idx + 1, count, outname))
    done = True
    print("")
    return GrabStillResponse(done)
def grab_taskboard(req):
    """Service handler: capture frames, extract the taskboard region, and
    publish and/or save the result.

    Args:
        req: GrabStill request; req.number is the capture count (minimum 1).
    Returns:
        GrabStillResponse(success)
    """
    success = False
    publish = True
    savelocal = False
    label = 'taskboard'  # make configurable?
    num = req.number
    if num < 1:
        num = 1
    rospy.loginfo("GRAB TASKBOARD: {}".format(num))
    for i in range(0, num):
        # Capture to an in-memory JPEG and decode into a numpy array.
        stream = BytesIO()
        camera.capture(stream, format='jpeg')
        # np.fromstring is deprecated for binary input; frombuffer is the
        # supported equivalent (and avoids a copy).
        data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
        np_image = cv2.imdecode(data, 1)
        np_image = np_image[:, :, ::-1]  # flip channel order (BGR <-> RGB)
        # Extract taskboard from image (threshold 120).
        taskboard = tb.process_taskboard(np_image, 120)
        if publish:
            bridge = CvBridge()
            image_message = bridge.cv2_to_imgmsg(taskboard, "bgr8")
            image_message.header.frame_id = NODE_NAME
            pub.publish(image_message)
            rospy.loginfo("Taskboard published.")
        if savelocal:
            # Save warped taskboard image.
            filename = '{}-{}-{}.png'.format(NODE_NAME, label, get_time_string())
            image = Image.fromarray(taskboard)
            image.save(filename)
            rospy.loginfo("{} grabbing taskboard {}/{} -> {}".format(NODE_NAME, i + 1, num, filename))
    success = True
    print("")
    return GrabStillResponse(success)
def start_recording(req):
    """Service handler: begin H.264 recording; auto-stop after req.seconds
    when positive, otherwise record until stop_recording is called."""
    global recording, camera
    length = "INDEFINITE" if req.seconds < 1 else "{} seconds".format(req.seconds)
    rospy.loginfo("START RECORDING")
    rospy.loginfo("Length: {}".format(length))
    rospy.loginfo("Resolution: {}".format(camera.resolution))
    rospy.loginfo("FPS: {}".format(camera.framerate))
    ok = False
    if recording:
        rospy.logwarn("Node is already recording.")
    else:
        tag = "rec"  # make configurable?
        outfile = "/video/{}-{}-{}.h264".format(NODE_NAME, tag, get_time_string())
        recording = True
        camera.start_recording(SAVE_DIR + outfile, format='h264')
        if req.seconds > 0:
            # Maybe implement timing manually
            camera.wait_recording(req.seconds)
            camera.stop_recording()
            recording = False
            rospy.loginfo("Done recording.")
        ok = True
    print("")
    return StartRecordingResponse(ok)
def stop_recording(req):
    """Service handler: stop an active recording, if any."""
    global recording, camera
    rospy.loginfo("STOP RECORDING")
    ok = False
    if recording:
        rospy.loginfo("Recording has been stopped.")
        camera.stop_recording()
        recording = False
        ok = True
    else:
        rospy.logwarn("Node was not recording.")
    print("")
    return StopRecordingResponse(ok)
def picam_client():
    """Node entry point: read parameters, build the camera, register the
    capture/recording services, then poll until the ROS master disappears."""
    global NODE_NAME, SAVE_DIR
    rospy.init_node(NODE_NAME)
    os.chdir(SAVE_DIR)
    # Read launch-file parameters (no defaults: missing params should fail loudly).
    enable_video = rospy.get_param("~enable_video")
    cam_w = rospy.get_param("~im_width")
    cam_h = rospy.get_param("~im_height")
    cam_fps = rospy.get_param("~fps")
    cam_iso = rospy.get_param("~iso")
    cam_rotation = rospy.get_param("~rotation")
    rospy.loginfo("NODE_NAME: \"{}\"".format(NODE_NAME))
    rospy.loginfo("SAVE_DIR: \"{}\"".format(SAVE_DIR))
    rospy.loginfo("Video:{} Dim:{}x{} FPS:{} Rotation:{}".format(enable_video, cam_w, cam_h, cam_fps, cam_rotation))
    global recording, camera
    recording = False
    camera = setup_camera(NODE_NAME, cam_w, cam_h, cam_fps, cam_rotation, cam_iso)
    rospy.loginfo("Camera initialized.")
    # TODO: load parameters
    rospy.Service(NODE_NAME + '/grab_still', GrabStill, grab_still)
    rospy.Service(NODE_NAME + '/grab_taskboard', GrabStill, grab_taskboard)
    if enable_video:
        rospy.Service(NODE_NAME + '/start_recording', StartRecording, start_recording)
        rospy.Service(NODE_NAME + '/stop_recording', StopRecording, stop_recording)
    rospy.loginfo("Services ready.\n")
    # Poll master liveness once per second instead of rospy.spin(), so the
    # node can shut itself down when the master goes away.
    rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        if not rosgraph.is_master_online():
            rospy.logwarn("Master has gone offline. Shutting down.")
            rospy.signal_shutdown("Master offline.")
        rate.sleep()
if __name__ == '__main__':
    # Derive NODE_NAME from roslaunch's "__name:=..." argument (second-to-last
    # argv entry, prefix stripped) and SAVE_DIR from the first argument sent
    # by the launch file. [DELICATE]
    try:
        NODE_NAME = sys.argv[len(sys.argv) - 2][8:]
        SAVE_DIR = sys.argv[1]
    except IndexError:
        # Narrowed from a bare except: only missing argv entries are expected here.
        rospy.logwarn("Something went wrong with arguments. Using Defaults.")
        NODE_NAME = 'picam1'
        SAVE_DIR = '/home/ubuntu/catkin_ws/src/ros_picam/captures/'
    picam_client()
    print("\n\nQuitting.")
<file_sep>#!/usr/bin/env python
import os
import sys
from subprocess import call
from io import BytesIO
import rospy
import rosgraph
from std_msgs.msg import String
from sensor_msgs.msg import Image as ROSImage
from ros_picam.srv import *
from time import sleep
from datetime import datetime
from picamera import PiCamera
from PIL import Image
import cv2
from cv_bridge import CvBridge
import numpy as np
import taskboard_detection as tb
class picam_client:
    """ROS node wrapping a Raspberry Pi camera.

    Reads configuration from private ROS parameters, then exposes still
    capture, taskboard capture, and (optionally) video recording as ROS
    services. Captured images can be published on a topic and/or saved
    to a local directory.
    """

    def __init__(self):
        # ROS Node Setup: node name comes from roslaunch's "__name:=..."
        # argument (second-to-last argv entry, prefix stripped).
        self.name = sys.argv[len(sys.argv) - 2][8:]
        rospy.init_node(self.name)
        rospy.loginfo("NODE_NAME: \"{}\"".format(self.name))
        # Output Parameters
        self.save_local = rospy.get_param("~save_local")
        self.save_dir = rospy.get_param("~save_dir")
        self.publish = rospy.get_param("~publish")
        self.publish_topic = rospy.get_param("~publish_topic")
        # Renamed from `str`, which shadowed the builtin.
        msg = "Save Local: {} ".format(self.save_local)
        if self.save_local:
            msg = msg + "Directory: {}".format(self.save_dir)
            os.chdir(self.save_dir)
        rospy.loginfo(msg)
        msg = "Publish: {} ".format(self.publish)
        if self.publish:
            msg = msg + "Topic: {}".format(self.publish_topic)
            self.pub = rospy.Publisher(self.publish_topic, ROSImage, queue_size=100)
        rospy.loginfo(msg)
        # Camera parameters are read (with defaults) inside setup_camera();
        # the duplicate reads that used to live here were redundant.
        self.camera = self.setup_camera()
        rospy.loginfo("Camera initialized.")
        # Setup Services
        rospy.loginfo("Enable Video Services: {}".format(self.enable_video))
        rospy.Service(self.name + '/grab_still', GrabStill, self.grab_still)
        rospy.Service(self.name + '/grab_taskboard', GrabStill, self.grab_taskboard)
        if self.enable_video:
            rospy.Service(self.name + '/start_recording', StartRecording, self.start_recording)
            rospy.Service(self.name + '/stop_recording', StopRecording, self.stop_recording)
        rospy.loginfo("Services ready.\n")

    def get_time_string(self, include_date=False):
        """Return a filename-safe timestamp (optionally date-prefixed)."""
        dt = datetime.now()
        time = "{:02}-{:02}-{:02}-{:06}".format(dt.hour, dt.minute, dt.second, dt.microsecond)
        if not include_date:
            return time
        date = "{:02}-{:02}-{:02}".format(dt.month, dt.day, dt.year - 2000)
        return "{}--{}".format(date, time)

    def grab_still(self, req):
        """Service: capture req.number stills; publish and/or save each one."""
        success = False
        label = 'grab'  # make configurable?
        num = max(req.number, 1)
        rospy.loginfo("GRAB STILL: {}".format(num))
        for i in range(num):
            rospy.loginfo("{} grabbing still {}/{}".format(self.name, i + 1, num))
            # Capture to an in-memory JPEG and decode into a numpy array.
            stream = BytesIO()
            self.camera.capture(stream, format='jpeg')
            # np.frombuffer replaces the deprecated np.fromstring.
            data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
            np_image = cv2.imdecode(data, 1)
            np_image = np_image[:, :, ::-1]  # flip channel order (BGR <-> RGB)
            if self.publish:
                bridge = CvBridge()
                image_message = bridge.cv2_to_imgmsg(np_image, "bgr8")
                image_message.header.frame_id = self.name
                self.pub.publish(image_message)
                rospy.loginfo("{} Image Published to \"{}\"".format(self.name, self.publish_topic))
            if self.save_local:
                filename = '{}-{}-{}.png'.format(self.name, label, self.get_time_string())
                image = Image.fromarray(np_image)
                image.save(filename)
                rospy.loginfo("{} Image Saved -> {}".format(self.name, filename))
        success = True
        print("")
        return GrabStillResponse(success)

    def grab_taskboard(self, req):
        """Service: capture req.number frames, extract the taskboard region
        from each, and publish and/or save the result."""
        success = False
        label = 'taskboard'  # make configurable?
        num = max(req.number, 1)
        rospy.loginfo("GRAB TASKBOARD: {}".format(num))
        for i in range(num):
            rospy.loginfo("{} grabbing taskboard {}/{}".format(self.name, i + 1, num))
            # Capture to an in-memory JPEG and decode into a numpy array.
            stream = BytesIO()
            self.camera.capture(stream, format='jpeg')
            # np.frombuffer replaces the deprecated np.fromstring.
            data = np.frombuffer(stream.getvalue(), dtype=np.uint8)
            np_image = cv2.imdecode(data, 1)
            np_image = np_image[:, :, ::-1]  # flip channel order (BGR <-> RGB)
            # Extract taskboard from image (threshold 80).
            taskboard = tb.process_taskboard(np_image, 80)
            if self.publish:
                bridge = CvBridge()
                image_message = bridge.cv2_to_imgmsg(taskboard, "bgr8")
                image_message.header.frame_id = self.name
                self.pub.publish(image_message)
                rospy.loginfo("{} Taskboard Published to \"{}\"".format(self.name, self.publish_topic))
            if self.save_local:
                # Save warped taskboard image.
                filename = '{}-{}-{}.png'.format(self.name, label, self.get_time_string())
                image = Image.fromarray(taskboard)
                image.save(filename)
                rospy.loginfo("{} Taskboard Saved -> {}".format(self.name, filename))
        success = True
        print("")
        return GrabStillResponse(success)

    def start_recording(self, req):
        """Service: start H.264 recording; auto-stop after req.seconds when > 0."""
        recording_time = "INDEFINITE" if req.seconds < 1 else "{} seconds".format(req.seconds)
        rospy.loginfo("START RECORDING")
        rospy.loginfo("Length: {}".format(recording_time))
        rospy.loginfo("Resolution: {}".format(self.camera.resolution))
        rospy.loginfo("FPS: {}".format(self.camera.framerate))
        success = False
        if not self.camera.recording:
            label = "rec"  # make configurable?
            filename = "/video/{}-{}-{}.h264".format(self.name, label, self.get_time_string())
            # The old local `recording = True/False` flags were dead code left
            # over from the module-global version; self.camera.recording is
            # the actual source of truth.
            self.camera.start_recording(self.save_dir + filename, format='h264')
            if req.seconds > 0:
                # Maybe implement timing manually
                self.camera.wait_recording(req.seconds)
                self.camera.stop_recording()
                rospy.loginfo("Done recording.")
            success = True
        else:
            rospy.logwarn("Node is already recording.")
        print("")
        return StartRecordingResponse(success)

    def stop_recording(self, req):
        """Service: stop recording if the camera is currently recording."""
        rospy.loginfo("STOP RECORDING")
        success = False
        if not self.camera.recording:
            rospy.logwarn("Node was not recording.")
        else:
            rospy.loginfo("Recording has been stopped.")
            self.camera.stop_recording()
            success = True
        print("")
        return StopRecordingResponse(success)

    def setup_camera(self):
        """Read camera parameters, build a PiCamera, and freeze exposure/AWB.

        There are more PiCamera parameters that can be implemented if needed:
        https://picamera.readthedocs.io/en/release-1.10/api_camera.html
        """
        self.enable_video = rospy.get_param("~enable_video", True)
        self.cam_w = rospy.get_param("~im_width", 1920)
        self.cam_h = rospy.get_param("~im_height", 1080)
        self.cam_fps = rospy.get_param("~fps", 30)
        self.cam_brightness = rospy.get_param("~brightness", 50)
        self.cam_contrast = rospy.get_param("~contrast", 0)
        self.cam_iso = rospy.get_param("~iso", 200)
        self.cam_rotation = rospy.get_param("~rotation", 0)
        rospy.loginfo("Creating PiCamera object...")
        rospy.loginfo("Dim: {}x{} FPS: {}".format(self.cam_w, self.cam_h, self.cam_fps))
        rospy.loginfo("Brightness: {} Contrast: {}".format(self.cam_brightness, self.cam_contrast))
        rospy.loginfo("ISO: {} Rotation: {}".format(self.cam_iso, self.cam_rotation))
        camera = PiCamera(resolution=(self.cam_w, self.cam_h), framerate=self.cam_fps)
        camera.brightness = self.cam_brightness
        camera.contrast = self.cam_contrast
        camera.iso = self.cam_iso
        camera.rotation = self.cam_rotation
        # Wait for the automatic gain control to settle, then fix the values
        # so every capture uses identical exposure and white balance.
        sleep(2)
        camera.shutter_speed = camera.exposure_speed
        camera.exposure_mode = 'off'
        g = camera.awb_gains
        camera.awb_mode = 'off'
        camera.awb_gains = g
        return camera
if __name__ == '__main__':
    # Build the client, then poll master liveness once per second so the
    # node shuts down cleanly when the master disappears.
    client = picam_client()
    rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        if not rosgraph.is_master_online():
            rospy.logwarn("Master has gone offline. Shutting down.")
            rospy.signal_shutdown("Master offline.")
        rate.sleep()
    # Release the camera hardware before exiting.
    client.camera.close()
    print("Camera object closed.")
    print("\n\nQuitting.")
<file_sep># ROS RASPBERRY PI CAMERAS (ROS_PICAM)
A package that enables the use of Raspberry Pis with camera module V2 as ROS-connected network cameras. Raspberry Pi setup is detailed below. The Picams can be set up to launch the client automatically and wait for a master roscore to come online. The client continually checks that the master is running; if not, it shuts itself down and continually tries to restart itself. This allows for a virtually hands-off approach: all you have to do is turn the Raspberry Pi on and wait for everything to start. (Automatic startup is not yet detailed.) The client uses ROS services to tell the camera to take still images or record video. Files are saved locally on the Pi and can be transferred elsewhere via FTP or some other means.
### Raspberry Pi Setup
#### Step 1: Install Ubiquity Robotics Raspberry Pi ROS Image
https://downloads.ubiquityrobotics.com/pi.html
Any Raspberry Pi with a working ROS installation should be able to work, but this image comes with everything already installed which makes the process a lot easier.
#### Step 2: Network / Hostname setup
Log into the Pi and connect to your desired network. If you intend on using multiple cameras, you should also set a unique hostname for each Pi.
[Ubiquity Network Setup Tutorial](https://learn.ubiquityrobotics.com/connect_network)
```
pifi set-hostname <hostname>
pifi add <ssid> <password>
```
#### Step 3: Setup ROS_MASTER_URI
I tend to add the ROS_MASTER_URI export line to the bottom of my ~/.bashrc so you don't need to type it repeatedly.
[ROS Network Setup](http://wiki.ros.org/ROS/NetworkSetup)
[ROS Tutorial: Multiple Machines](http://wiki.ros.org/ROS/Tutorials/MultipleMachines)
#### Step 4: Clone this repository to the Pi's ROS workspace
**NOTE:** Ubiquity Robotics also has a [raspicam_node repository](https://github.com/UbiquityRobotics/raspicam_node) that will stream camera data over ROS topics. Their repository might also be useful to clone, however this repository is not integrated with it in any way *yet*.
Make sure to build the workspace afterwards.
#### Step 5: Clone this repository to the master machine's workspace
This is so the services can be called from the master machine. Otherwise, it will see the services but will throw an error when calling them saying it doesn't know how the service is defined.
#### Step 6: [Optional] Configure the Pi to login automatically on startup
From [this forum post](https://forum.ubiquityrobotics.com/t/a-hack-to-autologin/236/4), the solution from jonovos worked well for me.
Create the following file:
```
sudo touch /etc/lightdm/lightdm.conf.d/10-autologin.conf
```
Edit the file:
```
sudo vim /etc/lightdm/lightdm.conf.d/10-autologin.conf
```
Add the following contents:
```
[Seat:*]
autologin-guest = false
autologin-user = ubuntu
autologin-user-timeout = 0
[SeatDefaults]
allow-guest = false
```
Reboot the raspberry pi and it should automatically log you back in.
#### Step 7: [Optional] Create a startup service for picam client
**Background:**
Adapted from instructions [here](https://risc.readthedocs.io/2-auto-service-start-afer-boot.html). However, following these instructions alone, the service was not launching the picam node. I found the [robot_upstart](http://wiki.ros.org/robot_upstart) package, and tried to use that to create the startup service, and after lots of troubleshooting, the service it created seemed to get a step further, but the node was still failing due to [an error like this](https://stackoverflow.com/questions/42583835/failed-to-open-vchiq-instance), and I couldn't find any good solutions that worked, so I started poking around to see how robot_upstart was doing things. The final implementation I ended up with is a Frankenstein between the first set of instructions and pieces of script generated by robot_upstart, but it seems to be working well and is straightforward to setup.
**Instructions:**
I recommend reading through *create_startup_service.sh*, *picam_autostart.service*, and *startup_launch.sh* to try to understand what's going on (especially *startup_launch.sh* because some values may need to be changed such as ROS_MASTER_URI). I also recommend briefly looking into [systemctl](https://www.commandlinux.com/man-page/man1/systemctl.1.html) if you aren't already somewhat familiar.
**ON THE RASPBERRY PI**, calling *create_startup_service.sh* and rebooting the machine should be all you have to do to create the service and set it to automatically start itself.
```
cd ~/catkin_ws/src/ros_picam/scripts
./create_startup_service.sh
sudo reboot
```
You can check the status of the service like so:
```
systemctl status picam_autostart
```
If you're seeing errors, you likely need to make some changes to *startup_launch.sh*, and you can apply any changes you make (or restart the service for any other reason) like so:
```
sudo systemctl restart picam_autostart
```
NOTE: For currently unknown reasons, text output from the node only makes it into systemctl status when the node is shutdown. Ideally there will be a way around this, or some other way of monitoring the nodes output in realtime for troubleshooting.
### Using ros_picam
The main script being used is **client_arducam.py**, the other python scripts in the src folder are just for testing purposes and will likely be removed at some point.
#### On the Pi
**client_arducam.launch** is used. The name for the camera and the directory to save captures can be specified. The *--wait* flag can be used to ensure it waits for roscore to be available before launching:
```
roslaunch ros_picam client_arducam.launch --wait
```
This node will continually try to restart itself if the connection to master is lost.
#### On the master machine
If the picam nodes are set to publish recorded images over a topic, **receiver.launch** will start a node that subscribes to that topic and saves published images to a specified directory. The save directory and topic can be set in the launch file.
```
roslaunch ros_picam receiver.launch
```
The currently implemented services are **grab_still**, **grab_taskboard**, **start_recording**, and **stop_recording**. They can be called via rosservice like:
```
rosservice call [node_name]/grab_still 3
```
However, the launch files provided will also call their respective services. The benefit of calling the services via launch files is that they can easily be edited to call the services from multiple Picams simultaneously if using a multi-camera setup.
Additional services can be defined and implemented as needed.
TODO: Recorded videos may all need to be converted using something like MP4Box (part of gpac). [This was the most relevant post I found](https://www.raspberrypi.org/forums/viewtopic.php?t=245875) regarding choppy/glitched video playback, and it seems like the data itself is fine, but it lacks some information for proper playback for some reason. Will likely address this when it becomes clearer how stills and videos are going to be handled over the network.
### Additional Notes
The scripts temp_check.sh and temp_monitor.sh can be copied into the **/usr/bin** folder so they may be called with the commands **tempcheck** and **tempmonitor** respectively to keep an awareness of the pi's core temperature under different conditions. Future plans include the ability to publish this information over ROS topics.
If you have a roscore running on your master machine and the raspberry pi can see the topics listed, **make sure the raspberry pi is actually receiving data on these topics**. If not, the IP address and hostname of the master machine need to be added to the Pi's **/etc/hosts** file. If the IP address of the master machine ever changes, these issues will pop up again. Resolve them by setting the correct IP address in the hosts file.
<file_sep>#!/usr/bin/env python
import os
import sys
import rospy
from std_msgs.msg import String
from sensor_msgs.msg import Image as ROSImage
from ros_picam.srv import *
from time import sleep
from datetime import datetime
from PIL import Image
import cv2
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
def get_time_string(include_date=False):
    """Return a filename-safe timestamp; prefix the date when requested."""
    now = datetime.now()
    stamp = "{:02}-{:02}-{:02}-{:06}".format(now.hour, now.minute, now.second, now.microsecond)
    if include_date:
        datepart = "{:02}-{:02}-{:02}".format(now.month, now.day, now.year - 2000)
        stamp = "{}--{}".format(datepart, stamp)
    return stamp
def callback(img):
    """Subscriber callback: save each received ROS Image to disk.

    The output filename embeds the sending node's name (taken from the
    message header's frame_id) and a timestamp.
    """
    node = img.header.frame_id
    rospy.loginfo("Image received from: {}".format(node))
    label = "taskboard"
    bridge = CvBridge()
    try:
        cv_image = bridge.imgmsg_to_cv2(img, "bgr8")
        filename = '{}-{}-{}.png'.format(node, label, get_time_string())
        Image.fromarray(cv_image).save(filename)
        rospy.loginfo("Image saved to: {}".format(filename))
    except CvBridgeError as e:
        print(e)
def picam_receiver():
    """Node entry point: subscribe to an image topic and save what arrives."""
    rospy.init_node('picam_receiver')
    rospy.loginfo("Starting picam receiver...")
    save_dir = rospy.get_param("~save_dir")
    topic = rospy.get_param("~topic")
    rospy.loginfo("Save Directory: {}".format(save_dir))
    rospy.loginfo("Topic: {}".format(topic))
    # Images are saved relative to the configured directory.
    os.chdir(save_dir)
    rospy.Subscriber(topic, ROSImage, callback)
    rospy.loginfo("Ready.")
    rospy.spin()
# Run the receiver node when executed directly.
if __name__ == '__main__':
    picam_receiver()
    print("\n\nQuitting.")
<file_sep>#!/bin/bash
# Log a message to syslog (and stderr via -s) at priority user.$1;
# remaining arguments form the message body.
function log() {
    logger -s -p user.$1 ${@:2}
}
# Source the ROS distribution and workspace environments.
log info "picam: Using workspace setup file /opt/ros/kinetic/setup.bash"
log info "picam: Using workspace setup file /home/ubuntu/catkin_ws/devel/setup.bash"
source /opt/ros/kinetic/setup.bash
source /home/ubuntu/catkin_ws/devel/setup.bash
JOB_FOLDER=/etc/ros/kinetic/picam.d
log_path="/tmp"
launch_path="/home/ubuntu/catkin_ws/src/ros_picam/launch"
# ROS networking: this host advertises its mDNS name; the master is expected
# at core-armada-1.local. Adjust ROS_MASTER_URI for a different master host.
export ROS_HOSTNAME=$(hostname).local
export ROS_MASTER_URI=http://core-armada-1.local:11311
export ROS_HOME=${ROS_HOME:=$(echo ~ubuntu)/.ros}
export ROS_LOG_DIR=$log_path
log info "picam: ROS_HOSTNAME=$ROS_HOSTNAME"
log info "picam: ROS_MASTER_URI=$ROS_MASTER_URI, ROS_HOME=$ROS_HOME"
log info "picam: LAUNCH_PATH=$launch_path, ROS_LOG_DIR=$log_path"
LAUNCH_FILENAME=$launch_path/client.launch
# Launch in the background (--wait blocks until roscore is up), record the
# PID so the service manager can find it, then block on the child.
roslaunch $LAUNCH_FILENAME --wait &
PID=$!
log info "picam: Started roslaunch as background process, PID $PID, ROS_LOG_DIR=$ROS_LOG_DIR"
echo "$PID" > $log_path/picam.pid
wait "$PID"
<file_sep>#!/usr/bin/env python
import os
import sys
from subprocess import call
from io import BytesIO
import rospy
import rosgraph
from std_msgs.msg import String
from sensor_msgs.msg import Image as ROSImage
from ros_picam.srv import *
from time import sleep
from datetime import datetime
from picamera import PiCamera
from picamera.array import PiRGBArray
from PIL import Image
import cv2
from cv_bridge import CvBridge
import numpy as np
import taskboard_detection as tb
import smbus
# I2C bus 0 handle; currently unused at call sites — the focus write in
# set_focus shells out to the i2cset CLI instead. TODO confirm it can be removed.
bus = smbus.SMBus(0)
class picam_client:
def __init__(self):
# ROS Node Setup
self.name = sys.argv[len(sys.argv)-2][8:]
rospy.init_node(self.name)
rospy.loginfo("NODE_NAME: \"{}\"".format(self.name))
# Output Parameters
self.save_local = rospy.get_param("~save_local")
self.save_dir = rospy.get_param("~save_dir")
self.publish = rospy.get_param("~publish")
self.publish_topic = rospy.get_param("~publish_topic")
str = "Save Local: {} ".format(self.save_local)
if (self.save_local):
str = str + "Directory: {}".format(self.save_dir)
os.chdir(self.save_dir)
rospy.loginfo(str)
str = "Publish: {} ".format(self.publish)
if (self.publish):
str = str + "Topic: {}".format(self.publish_topic)
self.pub = rospy.Publisher(self.publish_topic,ROSImage,queue_size=100)
rospy.loginfo(str)
# Camera Parameters
# self.enable_video = rospy.get_param("~enable_video", True)
# self.cam_w = rospy.get_param("~im_width", 1920)
# self.cam_h = rospy.get_param("~im_height", 1080)
# self.cam_fps = rospy.get_param("~fps", 30)
# self.cam_brightness = rospy.get_param("~brightness", 50)
# self.cam_contrast = rospy.get_param("~contrast", 0)
# self.cam_iso = rospy.get_param("~iso", 200)
# self.cam_rotation = rospy.get_param("~rotation", 0)
# Create PiCamera object
self.camera = self.setup_camera()
rospy.loginfo("Camera initialized.")
# Setup Services
rospy.loginfo("Enable Video Services: {}".format(self.enable_video))
srv1 = rospy.Service(self.name+'/grab_still', GrabStill, self.grab_still)
srv2 = rospy.Service(self.name+'/grab_taskboard', GrabStill, self.grab_taskboard)
if self.enable_video:
srv3 = rospy.Service(self.name+'/start_recording', StartRecording, self.start_recording)
srv4 = rospy.Service(self.name+'/stop_recording', StopRecording, self.stop_recording)
rospy.loginfo("Services ready.\n")
def get_time_string(self,include_date=False):
dt = datetime.now()
dt_string = ""
date = "{:02}-{:02}-{:02}".format(dt.month,dt.day,dt.year-2000)
time = "{:02}-{:02}-{:02}-{:06}".format(dt.hour,dt.minute,dt.second,dt.microsecond)
if include_date:
dt_string = "{}--{}".format(date,time)
else:
dt_string = time
return dt_string
def grab_still(self,req):
success = False
label = 'grab' # make configurable?
num = req.number
if num < 1:
num = 1
rospy.loginfo("GRAB STILL: {}".format(num))
for i in range(0,num):
rospy.loginfo("{} grabbing still {}/{}".format(self.name,i+1,num))
# Start capture and convert to numpy array
stream = BytesIO()
self.camera.capture(stream, format='jpeg')
data = np.fromstring(stream.getvalue(), dtype=np.uint8)
np_image = cv2.imdecode(data, 1)
np_image = np_image[:, :, ::-1]
if self.publish:
# Publish still image
bridge = CvBridge()
image_message = bridge.cv2_to_imgmsg(np_image, "bgr8")
image_message.header.frame_id = self.name
self.pub.publish(image_message)
rospy.loginfo("{} Image Published to \"{}\"".format(self.name,self.publish_topic))
if self.save_local:
# Save still image
filename = '{}-{}-{}.png'.format(self.name,label,self.get_time_string())
image = Image.fromarray(np_image)
image.save(filename)
rospy.loginfo("{} Image Saved -> {}".format(self.name,filename))
success = True
print("")
return GrabStillResponse(success)
def grab_taskboard(self,req):
success = False
label = 'taskboard' # make configurable?
num = req.number
if num < 1:
num = 1
rospy.loginfo("GRAB TASKBOARD: {}".format(num))
for i in range(0,num):
rospy.loginfo("{} grabbing taskboard {}/{}".format(self.name,i+1,num))
# Start capture and convert to numpy array
stream = BytesIO()
self.camera.capture(stream, format='jpeg')
data = np.fromstring(stream.getvalue(), dtype=np.uint8)
np_image = cv2.imdecode(data, 1)
np_image = np_image[:, :, ::-1]
# Extract taskboard from image
#taskboard = tb.process_taskboard(np_image,80)
taskboard = tb.process_taskboard(np_image,110)
if self.publish:
# Publish taskboard image
bridge = CvBridge()
image_message = bridge.cv2_to_imgmsg(taskboard, "bgr8")
image_message.header.frame_id = self.name
self.pub.publish(image_message)
rospy.loginfo("{} Taskboard Published to \"{}\"".format(self.name,self.publish_topic))
if self.save_local:
# Save warped taskboard image
filename = '{}-{}-{}.png'.format(self.name,label,self.get_time_string())
image = Image.fromarray(taskboard)
image.save(filename)
rospy.loginfo("{} Taskboard Saved -> {}".format(self.name,filename))
success = True
print("")
return GrabStillResponse(success)
def start_recording(self,req):
recording_time = "INDEFINITE" if req.seconds < 1 else "{} seconds".format(req.seconds)
rospy.loginfo("START RECORDING")
rospy.loginfo("Length: {}".format(recording_time))
rospy.loginfo("Resolution: {}".format(self.camera.resolution))
rospy.loginfo("FPS: {}".format(self.camera.framerate))
success = False
if not self.camera.recording:
label = "rec" # make configurable?
filename = "/video/{}-{}-{}.h264".format(self.name,label,self.get_time_string())
recording = True
self.camera.start_recording(self.save_dir+filename,format='h264')
if req.seconds > 0:
# Maybe implement timing manually
self.camera.wait_recording(req.seconds)
self.camera.stop_recording()
recording = False
rospy.loginfo("Done recording.")
success = True
else:
rospy.logwarn("Node is already recording.")
print("")
return StartRecordingResponse(success)
def stop_recording(self,req):
rospy.loginfo("STOP RECORDING")
success = False
if not self.camera.recording:
rospy.logwarn("Node was not recording.")
else:
rospy.loginfo("Recording has been stopped.")
self.camera.stop_recording()
recording = False
success = True
print("")
return StopRecordingResponse(success)
def setup_camera(self):
# There are more PiCamera Parameters that can be implemented if needed.
# Docs here: https://picamera.readthedocs.io/en/release-1.10/api_camera.html
self.enable_video = rospy.get_param("~enable_video", True)
self.cam_w = rospy.get_param("~im_width", 1920)
self.cam_h = rospy.get_param("~im_height", 1080)
self.cam_fps = rospy.get_param("~fps", 30)
self.cam_brightness = rospy.get_param("~brightness", 50)
self.cam_contrast = rospy.get_param("~contrast", 0)
self.cam_iso = rospy.get_param("~iso", 200)
self.cam_rotation = rospy.get_param("~rotation", 0)
self.cam_focus = rospy.get_param("~focus", 0)
rospy.loginfo("Creating PiCamera object...")
rospy.loginfo("Dim: {}x{} FPS: {}".format(self.cam_w,self.cam_h,self.cam_fps))
rospy.loginfo("Brightness: {} Contrast: {}".format(self.cam_brightness,self.cam_contrast))
rospy.loginfo("ISO: {} Rotation: {}".format(self.cam_iso,self.cam_rotation))
rospy.loginfo("Focus: {}".format(self.cam_focus))
camera = PiCamera(resolution=(self.cam_w, self.cam_h), framerate=self.cam_fps)
camera.brightness = self.cam_brightness
camera.contrast = self.cam_contrast
camera.iso = self.cam_iso
camera.rotation = self.cam_rotation
# Set camera focus
if self.cam_focus == 0:
self.autofocus(camera)
# Return to desired resolution
camera.resolution = (self.cam_w, self.cam_h)
elif self.cam_focus > 950:
self.set_focus(950)
elif self.cam_focus < 15:
self.set_focus(15)
else:
self.set_focus(self.cam_focus)
# Wait for the automatic gain control to settle
sleep(3)
# Now fix the values
camera.shutter_speed = camera.exposure_speed
camera.exposure_mode = 'off'
#g = camera.awb_gains
#camera.awb_mode = 'off'
#camera.awb_gains = g
camera.awb_mode = 'auto'
return camera
#Arducam Specific Functions
    def set_focus(self,val):
        """Drive the Arducam lens motor to absolute position ``val``.

        The 10-bit position is shifted into bits 4..13 and split into two
        bytes, which are written to the lens controller at I2C address
        0x0c via the external ``i2cset`` tool. Callers clamp ``val`` to the
        usable range (roughly 15..950) before calling.
        """
        # Pack the position into bits 4..13, then split into the two bytes
        # the lens controller expects.
        value = (val << 4) & 0x3ff0
        data1 = (value >> 8) & 0x3f
        data2 = value & 0xf0
        # time.sleep(0.5)
        rospy.loginfo("Focus Set: {}".format(val))
        # bus.write_byte_data(0x0c,data1,data2)
        os.system("i2cset -y 0 0x0c %d %d" % (data1,data2))
def calculate_focus(self,camera):
rawCapture = PiRGBArray(camera)
camera.capture(rawCapture,format="bgr", use_video_port=True)
image = rawCapture.array
rawCapture.truncate(0)
img_gray = cv2.cvtColor(image,cv2.COLOR_RGB2GRAY)
img_sobel = cv2.Laplacian(img_gray,cv2.CV_16U)
#img_sobel = cv2.Sobel(img_gray,cv2.CV_16U,1,1)
focus_score = cv2.mean(img_sobel)[0]
rospy.loginfo("Focus Score: {}".format(focus_score))
return focus_score
    def autofocus(self,camera):
        """Two-pass autofocus sweep for the Arducam motorized lens.

        Pass 1 scans the focal range in coarse steps of 15, stopping once
        sharpness has decreased for several consecutive frames (or the end
        of the range is reached). Pass 2 re-scans +/-25 around the best
        coarse position in steps of 5 and locks the lens at the sharpest
        position found.

        Side effect: drops the camera resolution to 640x480 for faster
        captures; the caller is responsible for restoring the working
        resolution afterwards.
        """
        camera.resolution = (640, 480)
        sleep(0.1)
        rospy.loginfo("Starting Autofocus...")
        max_index = 10
        max_value = 0.0
        last_value = 0.0
        dec_count = 0
        focal_distance = 10
        while True:
            # Set focus
            self.set_focus(focal_distance)
            sleep(0.1)
            # Take image and calculate clarity
            val = self.calculate_focus(camera)
            # Find maximum image clarity
            if val > max_value:
                max_index = focal_distance
                max_value = val
            # If clarity starts to decrease
            if val < last_value:
                dec_count += 1
            else:
                dec_count = 0
            # Stop once image clarity has decreased for several consecutive
            # frames -- we have passed the peak.
            if dec_count > 6:
                break
            last_value = val
            # Increase the focal distance
            focal_distance += 15
            if focal_distance > 1000:
                break
        rospy.loginfo("Max Index: {}, Max Value: {}\n".format(max_index,max_value))
        rospy.loginfo("Fine Tuning...")
        # Fine pass: search a +/-25 window around the coarse best position,
        # clamped to the lens's usable range.
        fine_lb = max_index - 25
        if fine_lb < 10: fine_lb = 10
        fine_ub = max_index + 25
        if fine_ub > 1000: fine_ub = 1000
        focal_distance = fine_lb
        max_index = fine_lb
        max_value = 0.0
        rospy.loginfo("Range: {}-{}".format(fine_lb,fine_ub))
        sleep(0.2)
        while focal_distance <= fine_ub:
            #Adjust focus
            self.set_focus(focal_distance)
            sleep(0.1)
            #Take image and calculate image clarity
            val = self.calculate_focus(camera)
            #Find the maximum image clarity
            if val > max_value:
                max_index = focal_distance
                max_value = val
            focal_distance += 5
        # Lock the lens at the sharpest position found in the fine pass.
        self.set_focus(max_index)
        rospy.loginfo("Max Index: {}, Max Value: {}\n".format(max_index,max_value))
        sleep(0.1)
if __name__ == '__main__':
    # Entry point: create the camera client node, then watch the ROS master
    # once a second and shut down cleanly if it disappears.
    # Create Picam Client
    client = picam_client()
    rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        master_online = rosgraph.is_master_online()
        if not master_online:
            rospy.logwarn("Master has gone offline. Shutting down.")
            rospy.signal_shutdown("Master offline.")
        rate.sleep()
    # Release the camera hardware so other processes can open it.
    client.camera.close()
    print("Camera object closed.")
    print("\n\nQuitting.")
<file_sep>#!/bin/bash
#TO BE RUN ON PI, NOT MASTER MACHINE
# Installs the picam_autostart systemd unit so the picam node starts on boot.
# Run from the directory containing picam_autostart.service; requires sudo.
# Copy service file to /lib/systemd/system
sudo cp picam_autostart.service /lib/systemd/system/picam_autostart.service
# Reload system control daemon
sudo systemctl daemon-reload
# Enable service on boot
sudo systemctl enable picam_autostart.service
echo "Picam startup service created. System reboot required."
| f3743d04623a837ec15b09b9c48dc4ea04ba192f | [
"Markdown",
"Python",
"Shell"
] | 7 | Python | pgavriel/ros_picam | db5dbf9a00139e7ce9e490744d69eb4fcdebfb0f | 7867f95b763d4157b6d595271b49c29cbe2d5d0e |
refs/heads/master | <file_sep># Singletony
Większość obiektów to singletony. Użycie jest banalne, pewnie wiecie:
``` C#
Class.GetInstance().funkcja();
```
W klasie
```C#
class Functions{} ;
```
Znajdują się przydatne funkcje, które mogą się przydać. Jedna zwraca tablicę 2D, która reprezentuje obraz. Druga natomiast na podstawie
wspomnianej tablicy 2D i obiektu Rect jest w stanie obliczyć histogram ze wskazanego wycinka.
Aby utworzyć podobną funkcję, należy napisać własną funkcję o strukturze:
``` C#
class Zwracanytyp{};// może być jakiś typ prosty
public Zwracanytyp funkcja(Rect rect, int[,] allArea){
Zwracanytyp result = new Zwracanytyp();
for (Int32 y = 0; y < rect.Height; ++y)
{
for (Int32 x = 0; x < rect.Width; ++x)
{
result[allArea[y + rect.Y, x + rect.X]]++;
               // tu jakieś działania na pixelach; należy pamiętać o dodawaniu
               //wartości lokalnych x,y do wartości rect.Y i rect.X
}
}
return result;
}
```
<file_sep>using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace FingerPrint
{
/// <summary>
/// Singleton
/// </summary>
class Binarization
{
private static Binarization instance = new Binarization();
private Binarization()
{
}
public static Binarization GetInstance()
{
return instance;
}
public Bitmap OtsuMethod(Bitmap Image, bool isTopThereshold)
{
int[] hist = new int[256];
hist = Histogram.GetInstance().CreateHistogram(Image, 0);
double p1, p2;
double[] VarianceBetweenTable = new double[256];
int MeanIAll = MeanIntensitiesK(0, 255, hist); //mean intensivities of pixels in the whole image.
for (int k = 1; k < 256; k++)
{
p1 = ProbabilityK(0, k, hist) * Math.Pow((MeanIntensitiesK(0, k, hist) - MeanIAll), 2);
p2 = ProbabilityK(k + 1, 255, hist) * Math.Pow((MeanIntensitiesK(k + 1, 255, hist) - MeanIAll), 2);
if (p1 == 0)
p1 = 1;
if (p2 == 0)
p2 = 1;
VarianceBetweenTable[k] = p1 + p2;
}
int point = FindMaximum(VarianceBetweenTable);
return BinarizeThereshol(Image, point, isTopThereshold);
}
private int ProbabilityK(int begin, int end, int[] hist)
{
int sum = 0;
for (int i = begin; i <= end; i++)
{
sum += hist[i];
}
return sum;
}
private int MeanIntensitiesK(int begin, int end, int[] hist)
{
int sum = 0;
for (int i = begin; i < end; i++)
{
sum += i * hist[i];
}
return sum;
}
private int FindMaximum(double[] list)
{
double max = 0;
int idx = 0;
for (int i = 0; i < list.Length; i++)
{
if (list[i] > max)
{
max = list[i];
idx = i;
}
}
return idx;
}
public Bitmap BinarizeThereshol(Bitmap Image, int point, Boolean isTopThereshold)
{
System.Drawing.Rectangle rect = new Rectangle(0, 0, Image.Width, Image.Height);
BitmapData bitmapData = Image.LockBits(rect, ImageLockMode.ReadWrite, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
IntPtr ptr = bitmapData.Scan0;// wskaźnik na pierwszą linię obrazka
int size = bitmapData.Width * bitmapData.Height * 3;
byte[] data = new byte[size];// tablica z wartościami RGB
for (int y = 0; y < bitmapData.Height; y++)
{
IntPtr mem = (IntPtr)((long)bitmapData.Scan0 + y * bitmapData.Stride);
Marshal.Copy(mem, data, y * bitmapData.Width * 3, bitmapData.Width * 3);// skopiuj całą bitmapę do tablicy
}
for (int i = 0; i < size; i += 3)// kazda komórka zawiera jedną z trzech wartości- więc przesuwa się co 3 pixele
{
if (data[i] > point)
{
data[i] = (isTopThereshold) ? (byte)255 : (byte)0;
data[i + 1] = (isTopThereshold) ? (byte)255 : (byte)0;
data[i + 2] = (isTopThereshold) ? (byte)255 : (byte)0;
}
else
{
data[i] = (isTopThereshold) ? (byte)0 : (byte)255;
data[i + 1] = (isTopThereshold) ? (byte)0 : (byte)255;
data[i + 2] = (isTopThereshold) ? (byte)0 : (byte)255;
}
}
for (int y = 0; y < bitmapData.Height; y++)
{
IntPtr mem = (IntPtr)((long)bitmapData.Scan0 + y * bitmapData.Stride);
Marshal.Copy(data, y * bitmapData.Width * 3, mem, bitmapData.Width * 3);// skopiuj całą bitmapę do tablicy
}
Image.UnlockBits(bitmapData);
MemoryStream stream = new MemoryStream();
Image.Save(stream, ImageFormat.Bmp);
return Image;
}
}
}
| e11c12ba4c84f6896a34c3328b9653e10b32be0e | [
"Markdown",
"C#"
] | 2 | Markdown | Pabblittto/FingerPrint | 1238589dfdecedc1c33f2ac7eadc42e2bcea69e3 | b648921626cc40fd1765ae9f70ea13f851bd937a |
refs/heads/master | <repo_name>crw-riviere/Workout5<file_sep>/app/controllers/dayController.js
// Controller for the "days" screen: CRUD for a program's training days and
// for the exercises (with rep/weight targets) attached to each day.
// View models wrap entities as { entity, operation, error } via resourceService.
wo5App.controller('DayController', function ($scope, $routeParams, resourceService, entityService) {
    init();
    // Load the routed program, its days, and the global exercise catalog;
    // seed empty view models for the "new day"/"new exercise" forms.
    function init() {
        entityService.getProgram(parseInt($routeParams.programId)).then(function (program) {
            $scope.program = resourceService.getViewModel(program);
            entityService.getDaysByProgram(program.id).then(function (days) {
                $scope.days = resourceService.getViewModelCollection(days);
            })
        });
        entityService.getAllExercises().then(function (exercises) {
            $scope.allExercises = exercises;
        });
        $scope.measurements = resourceService.getWeightMeasurements();
        $scope.newDay = resourceService.getViewModel();
        $scope.newExercise = resourceService.getViewModel({ name: null });
        $scope.exercise = { name: null };
        $scope.newTarget = { reps: 0, perform: 0, measurement: $scope.measurements[0] };
    };
    // Switch a day row into edit mode.
    $scope.editDay = function (day) {
        day.operation = resourceService.consts.op.update;
    };
    // Persist a new day under the current program and reset the form.
    $scope.addDay = function (day) {
        var entityDay = { name: day.entity.name, program: $scope.program.entity.id, exercises: [] };
        entityService.addDay(entityDay).then(function (day) {
            var newDay = { entity: day, operation: resourceService.consts.op.read, error: '' };
            $scope.days.push(newDay);
            $scope.newDay.entity = { entity: { name: '', program: $scope.program.entity.id, exercises: [] }, error: '' };
        });
    };
    // Persist edits to a day and switch the row back to read mode.
    $scope.saveDay = function (day) {
        day.operation = resourceService.consts.op.read;
        day.error = '';
        entityService.saveDay(day.entity).then(function () {
        });
    };
    // Delete a day and drop it from the list.
    $scope.deleteDay = function (day) {
        entityService.deleteDay(day.entity).then(function () {
            $scope.days.splice($scope.days.indexOf(day), 1);
        });
    };
    // Validation: a day just needs a non-empty name.
    $scope.validDay = function (day) {
        return resourceService.validViewModelName(day, null);
    };
    $scope.validDayFeedback = function (day) {
        day.error = $scope.validDay(day) ? '' : 'Day name cannot be empty.';
    };
    // Select the day being edited in the exercise modal.
    $scope.loadDay = function (day) {
        $scope.day = day;
    };
    // Select an exercise for editing; re-point its measurement at the shared
    // instance so the <select> binding matches by reference.
    $scope.loadExercise = function (exercise, day) {
        $scope.day = day;
        $scope.exercise = exercise;
        for (var i = 0; i < $scope.measurements.length; i++) {
            if ($scope.measurements[i] === exercise.target.measurement) {
                $scope.exercise.target.measurement = $scope.measurements[i];
                break;
            }
        }
    }
    // Attach an existing catalog exercise (with the entered target) to the day.
    $scope.addExercise = function (addedExercise) {
        $scope.day.entity.exercises.push({ id: addedExercise.id, name: addedExercise.name, target: { reps: $scope.newTarget.reps, perform: $scope.newTarget.perform, measurement: $scope.newTarget.measurement } });
        entityService.saveDay($scope.day.entity).then(function (day) {
            $scope.day = resourceService.getViewModel(day);
        })
    };
    // Create a brand-new exercise, attach it to the day, and add it to the
    // catalog list.
    $scope.createExercise = function (newExercise) {
        // NOTE(review): leftover debug logging -- consider removing.
        console.debug(newExercise);
        entityService.addExercise(newExercise.entity).then(function (exercise) {
            $scope.day.entity.exercises.push({ id: exercise.id, name: exercise.name, target: { reps: $scope.newTarget.reps, perform: $scope.newTarget.perform, measurement: $scope.newTarget.measurement } });
            entityService.saveDay($scope.day.entity).then(function (day) {
                $scope.day = resourceService.getViewModel(day);
                $scope.allExercises.push(exercise);
                $scope.newExercise = resourceService.getViewModel({ name: null });
            })
        });
    };
    // The edited exercise lives inside the day entity, so saving the day
    // persists the exercise too.
    $scope.updateExercise = function (updatedExercise) {
        $scope.saveDay($scope.day);
    }
    // Remove an exercise from the day by id, then persist the day.
    $scope.deleteExercise = function (updatedExercise) {
        var dayExercises = $scope.day.entity.exercises;
        for (var i = 0; i < dayExercises.length; i++) {
            if (dayExercises[i].id === updatedExercise.id) {
                dayExercises.splice(i, 1);
                break;
            }
        }
        $scope.saveDay($scope.day);
    }
    // Validation: exercise names must be unique across the catalog.
    $scope.validExercise = function (exercise) {
        return resourceService.validEntityName(exercise.entity, $scope.allExercises);
    };
    $scope.validExerciseFeedback = function (exercise) {
        exercise.error = $scope.validExercise(exercise) ? '' : 'An exercise with this name already exists.';
    };
});
// Controller for running a live workout: pick a program/day, start a
// session, then record sets per exercise with previous-session and
// personal-best context alongside.
wo5App.controller('WorkoutController', function ($scope, $routeParams, $q, resourceService, entityService) {
    init();
    // Load programs, preselect the first, and open the session picker modal.
    function init() {
        $scope.sessions = [];
        $scope.exercises = [];
        $scope.sets = [];
        entityService.getPrograms().then(function (programs) {
            $scope.programs = programs;
            if (programs[0]) {
                $scope.loadProgram(programs[0]);
            }
            $('#mdlSessions').modal('show')
        });
    };
    // Select a program and load its days (preselecting the first).
    $scope.loadProgram = function (program) {
        $scope.program = program;
        entityService.getDaysByProgram(program.id).then(function (days) {
            $scope.days = days;
            if (days[0]) {
                $scope.loadDay(days[0]);
            }
        })
    };
    // Select a day and load its past sessions.
    $scope.loadDay = function (day) {
        $scope.day = day;
        entityService.getSessionsByDay(day.id).then(function (sessions) {
            $scope.sessions = resourceService.getViewModelCollection(sessions);
        })
    };
    // Create a new session for the selected day, remembering the most recent
    // prior session (if any) so its sets can be shown for comparison.
    $scope.startSession = function () {
        $scope.exercises = [];
        var newSession = {
            name: getDateString(),
            program: $scope.program.id,
            day: $scope.day.id,
            exercises: $scope.day.exercises,
            date: getDate(),
            prevSession: $scope.sessions.length > 0 ? $scope.sessions[$scope.sessions.length - 1].entity.id : null
        }
        entityService.addSession(newSession).then(function (session) {
            $scope.session = resourceService.getViewModel(session);
            $scope.loadExercise($scope.day.exercises[0]);
        });
    };
    // Load this exercise's sets for the current session, the all-time best
    // set, and the previous session's sets.
    $scope.loadExercise = function (exercise) {
        $scope.sets = [];
        $scope.exercise = exercise;
        var sessionExercise = [$scope.session.entity.id, exercise.id];
        entityService.getSetsBySessionExercise(sessionExercise).then(function (sets) {
            // BUG FIX: the view models were built into a local that was never
            // published to the scope, so previously recorded sets for this
            // session never appeared in the view.
            $scope.sets = resourceService.getViewModelCollection(sets);
            if (exercise.id) {
                entityService.getSetByExercisePerformMax(exercise.id).then(function (sets) {
                    $scope.maxPerformSet = resourceService.getViewModel(sets);
                })
            }
            // BUG FIX: the old guard tested the [session, exercise] array,
            // which is always truthy; test the actual previous-session id so
            // the first-ever session doesn't query with a null key.
            if ($scope.session.entity.prevSession) {
                var prevSessionExercise = [$scope.session.entity.prevSession, exercise.id];
                entityService.getSetsBySessionExercise(prevSessionExercise).then(function (sets) {
                    $scope.prevSessionSets = resourceService.getViewModelCollection(sets);
                })
            }
        });
    };
    // Append a blank set (in edit mode) to the current exercise.
    $scope.addSet = function () {
        var set = {
            no: $scope.sets.length + 1,
            day: $scope.day.id,
            session: $scope.session.entity.id,
            exercise: $scope.exercise.id,
            reps: null,
            perform: null,
            measurement: $scope.exercise.target.measurement,
            date: $scope.session.entity.date
        };
        entityService.addSet(set).then(function (newSet) {
            var set = resourceService.getViewModel(newSet);
            set.operation = resourceService.consts.op.update;
            $scope.sets.push(set);
        })
    };
    // Switch a set row back into edit mode.
    $scope.editSet = function (set) {
        set.operation = resourceService.consts.op.update;
    };
    // Persist a set (defaulting blank fields to 0) and compute its
    // percentage of the exercise's target.
    $scope.saveSet = function (set) {
        if (!set.entity.reps) {
            set.entity.reps = 0;
        }
        if (!set.entity.perform) {
            set.entity.perform = 0;
        }
        entityService.saveSet(set.entity).then(function () {
            set.operation = resourceService.consts.op.read;
            set.performTargetPercent = resourceService.getPerformTargetPercantage(set.entity.perform, $scope.exercise.target.perform)
        });
    };
    // Delete a set and drop it from the list.
    $scope.deleteSet = function (set) {
        entityService.deleteSet(set.entity).then(function () {
            $scope.sets.splice($scope.sets.indexOf(set), 1);
        })
    };
    // Unit conversion helpers for the whole visible set list.
    $scope.convertToKg = function () {
        $scope.sets = resourceService.convertSetsViewModelToKg($scope.sets);
    }
    $scope.convertToLbs = function () {
        $scope.sets = resourceService.convertSetsViewModelToLbs($scope.sets);
    }
    // Cycle a single set's measurement through the known units.
    $scope.cycleMeasurement = function (set) {
        var weights = resourceService.getWeightMeasurements();
        var index = weights.indexOf(set.entity.measurement) + 1 < weights.length ? weights.indexOf(set.entity.measurement) + 1 : 0;
        set.entity.measurement = weights[index];
    }
    // "dd-mm-yy hh:mm:ss" timestamp for the session record.
    function getDate() {
        var date = resourceService.date();
        return date.day + '-' + date.month + '-' + date.yearShort + ' ' + date.hour + ':' + date.minute + ':' + date.second;
    };
    // "dd-mm-yy" label used as the session's display name.
    function getDateString() {
        var date = resourceService.date();
        return date.day + '-' + date.month + '-' + date.yearShort;
    };
});
// Reusable Bootstrap modal wrapper. Usage: <modal modalid="..."> ... </modal>;
// the transcluded content becomes the modal body and `modalid` is passed
// through to the template as the element id.
wo5App.directive("modal", function () {
    return {
        restrict: "E",
        scope: {
            modalid: '@'
        },
        replace: true,
        transclude: true,
        templateUrl: '/app/views/directives/modal.html',
        link: function (scope, element, attrs, controller) {
            // No link-time behavior needed; the template drives everything.
        }
    }
});
// d3 line chart of an exercise's best performance per session over time.
// Expects `data` = { target: {perform, ...}, sets: [{date: "dd-mm-yy hh:mm:ss",
// perform: Number}, ...] } with sets in chronological order.
// Re-renders on data changes and on window resize.
wo5App.directive("exerciseLineChart", function () {
    return {
        restrict: 'E',
        scope: {
            data: '='
        },
        link: function (scope, element, attrs) {
            // Deep-watch the bound data and redraw whenever it changes.
            scope.$watch('data', function (newValue, oldValue) {
                return scope.render(newValue);
            }, true);
            //var h = 500;
            //var w = 300;
            // Chart margins; width is computed from the element at render time.
            var m = { top: 30, right: 10, bottom: 100, left: 30 };
            var h = 500 - m.top - m.bottom;
            var chart = d3.select(element[0])
                .append('svg')
                .attr('width', '100%')
                .attr('height', h + m.top + m.bottom)
                .append('g')
                .attr('transform', 'translate(' + m.left + ',' + m.top + ')');
            // Trigger a digest on resize so the width watcher below fires.
            window.onresize = function () {
                scope.$apply();
            };
            scope.$watch(function () {
                return angular.element(window)[0].innerWidth;
            }, function () {
                scope.render(scope.data);
            });
            // Clear and redraw the whole chart from scratch.
            scope.render = function (data) {
                chart.selectAll('*').remove();
                if (!data || data.sets.length <= 0)
                { return; }
                var w = d3.select(element[0]).node().offsetWidth - m.left - m.right;
                var parseDate = d3.time.format("%d-%m-%y %H:%M:%S").parse;
                // X domain spans first..last set; Y spans observed performance
                // (stretched to include the target so it is always visible).
                var minDate = parseDate(data.sets[0].date),
                    maxDate = parseDate(data.sets[data.sets.length - 1].date);
                minPerform = d3.min(data.sets.map(function (d) { return d.perform; }))
                maxPerform = d3.max(data.sets.map(function (d) { return d.perform; }))
                if (maxPerform < data.target.perform) {
                    maxPerform = data.target.perform;
                }
                var x = d3.time.scale.utc().domain([minDate, maxDate])
                    .range([m.left, w]);
                var y = d3.scale.linear().domain([minPerform, maxPerform + 10])
                    // bottom / top
                    .range([h, 0]);
                // Point accessors for the data dots.
                function cx(d) {
                    return x(parseDate(d.date));
                }
                function cy(d) {
                    return y(d.perform);
                }
                var line = d3.svg.line()
                    .x(function (d) {
                        return x(parseDate(d.date));
                    })
                    .y(function (d) {
                        return y(d.perform);
                    })
                // Date axis with rotated tick labels to avoid overlap.
                var xAxis = d3.svg.axis().scale(x).tickFormat(d3.time.format("%d-%m-%y"));
                chart.append('g')
                    .attr('class', 'x axis')
                    .attr('transform', 'translate(0,' + (h + 10) + ')')
                    .selectAll("text")
                    .style("text-anchor", "end")
                    .attr("dx", "-0.8em")
                    .attr("dy", "-0.4em")
                    .attr("transform", function (d) {
                        return "rotate(-90)"
                    })
                    .call(xAxis);
                // Performance axis with full-width gridlines (negative tickSize).
                var yAxis = d3.svg.axis().scale(y).orient('left').tickSize(-w);
                chart.append("g")
                    .attr('class', 'y axis')
                    //.attr('transform', 'translate(25,0)')
                    .call(yAxis);
                // The trend line plus a dot per session.
                chart.append('svg:path').attr('d', line(data.sets));
                chart.selectAll("circle")
                    .data(data.sets)
                    .enter().append("circle")
                    .attr("fill", "red")
                    .attr("r", 5)
                    .attr("cx", cx)
                    .attr("cy", cy);
            }
        }
    }
});
// Firefox OS hosted-app bootstrap: check whether the app manifest is already
// installed and, if not, trigger installation.
var checkRequest = navigator.mozApps.checkInstalled("http://wo5.riviere.la/wo5.webapp");
checkRequest.onsuccess = function () {
    if (checkRequest.result) {
        // we're installed
    } else {
        // Not installed: start the install flow.
        // BUG FIX: the handlers below were attached to an undefined
        // variable `request` (the actual variable is `installRequest`),
        // which threw a ReferenceError and never reported the outcome.
        var installRequest = navigator.mozApps.install("http://wo5.riviere.la/wo5.webapp");
        installRequest.onsuccess = function () {
            window.alert('Installed Workout5.');
        };
        installRequest.onerror = function () {
            window.alert('Failed to install Workout5.');
        };
    }
};
checkRequest.onerror = function () {
    alert('Error checking installation status: ' + this.error.message);
};
// Controller for the progress screen: pick program -> day -> exercise and
// chart the per-session maximum performance against the exercise's target
// (rendered by the exercise-line-chart directive via $scope.data).
wo5App.controller('ProgressController', function ($scope, $routeParams, resourceService, entityService) {
    init();
    // Load programs, preselect the first, and open the selection modal.
    function init() {
        $scope.sessions = [];
        $scope.exercises = [];
        var programs = entityService.getPrograms().then(function (programs) {
            $scope.programs = programs;
            if (programs[0]) {
                $scope.loadProgram(programs[0]);
            }
            $('#mdlSessions').modal('show')
        });
    };
    // Select a program and load its days (preselecting the first).
    $scope.loadProgram = function (program) {
        $scope.program = program;
        entityService.getDaysByProgram(program.id).then(function (days) {
            $scope.days = days;
            if (days[0]) {
                $scope.loadDay(days[0]);
            }
        })
    };
    // Select a day and preselect its first exercise.
    $scope.loadDay = function (day) {
        $scope.day = day;
        if (day.exercises[0]) {
            $scope.loadExercise(day.exercises[0]);
        }
    };
    // Fetch the max set per session for this day/exercise and publish the
    // chart data (target + series).
    $scope.loadExercise = function (exercise) {
        $scope.exercise = exercise;
        entityService.getAllMaxSetsByDayExercise($scope.day.id, exercise.id).then(function (maxSets) {
            var data = { target: exercise.target, sets: maxSets };
            $scope.data = data;
        })
    }
});
// Shared constants and pure helpers: DB/store/index names, view-model
// wrappers ({ entity, operation, error }), validation, date parts, and
// kg/lbs conversion.
wo5App.service('resourceService', function () {
    var self = this;
    self.db = {}
    self.consts = {
        db: { wo5: 'WO5' },
        op: {
            create: 'create',
            read: 'read',
            update: 'update',
            rw: 'readwrite'
        },
        store: {
            program: 'Program',
            day: 'Day',
            exercise: 'Exercise',
            session: 'Session',
            set: 'Set'
        },
        index: {
            id: 'id',
            program: 'program',
            day: 'day',
            name: 'name',
            exercise: 'exercise',
            session: 'session',
            sessionExercise: 'sessionExercise',
            dayExercise: 'dayExercise'
        },
        measurement: {
            weight: {
                kgs: 'kgs',
                lbs: 'lbs'
            }
        }
    };
    // The supported weight units, in cycle order.
    self.getWeightMeasurements = function () {
        return [self.consts.measurement.weight.kgs, self.consts.measurement.weight.lbs];
    }
    // Current date/time split into zero-padded day/month and raw time parts.
    // NOTE(review): hour/minute/second are not zero-padded -- confirm the
    // timestamp consumers tolerate that before changing it.
    self.date = function () {
        var today = new Date();
        var dd = today.getDate();
        var mm = today.getMonth() + 1;
        var yyyy = today.getFullYear();
        var yy = ("" + yyyy).substr(2, 2);
        var h = today.getHours();
        var m = today.getMinutes()
        var s = today.getSeconds();
        if (dd < 10) { dd = '0' + dd }
        if (mm < 10) { mm = '0' + mm }
        return { year: yyyy, yearShort: yy, month: mm, day: dd, hour: h, minute: m, second: s };
    };
    // True when no OTHER view model in the collection has the same name
    // (case-insensitive).
    self.validViewModelEntity = function (valEntity, entityCollection) {
        var valid = true;
        angular.forEach(entityCollection, function (scopeEntity) {
            if (scopeEntity.entity.id !== valEntity.entity.id &&
                angular.lowercase(scopeEntity.entity.name) === angular.lowercase(valEntity.entity.name)) {
                valid = false;
            }
        });
        return valid;
    };
    // Same uniqueness check for raw entities (not view models).
    self.validEntity = function (valEntity, entityCollection) {
        if (entityCollection) {
            // BUG FIX: the loop bound was misspelled `lenght`, which is
            // undefined, so the loop never ran and duplicate names were
            // always accepted.
            for (var i = 0; i < entityCollection.length; i++) {
                if (entityCollection[i].id !== valEntity.id &&
                    angular.lowercase(entityCollection[i].name) === angular.lowercase(valEntity.name)) {
                    return false;
                }
            }
        }
        return true;
    };
    // Uniqueness + non-empty name, for view models and raw entities.
    self.validViewModelName = function (viewModel, viewModelCollection) {
        return self.validViewModelEntity(viewModel, viewModelCollection) && viewModel.entity.name !== '';
    }
    self.validEntityName = function (entity, entityCollection) {
        return self.validEntity(entity, entityCollection) && entity.name !== '';
    }
    // Wrap one entity / a collection in the { entity, operation, error } shape.
    self.getViewModel = function (entity, operation) {
        return { entity: entity || {}, operation: operation || self.consts.op.read, error: '' };
    };
    self.getViewModelCollection = function (entities, operation) {
        var entityCollection = [];
        angular.forEach(entities, function (entity) {
            entityCollection.push({ entity: entity, operation: operation || self.consts.op.read, error: '' });
        });
        return entityCollection;
    }
    // "NN.NN%" of target achieved, or 'N/A' when either side is missing/zero.
    self.getPerformTargetPercantage = function (perform, targetPerform) {
        if (perform > 0 && targetPerform > 0) {
            return ((perform / targetPerform) * 100).toFixed(2) + '%';
        }
        else {
            return 'N/A';
        }
    }
    // Convert any lbs sets to kgs in place (rounded to the nearest kg).
    self.convertSetsViewModelToKg = function (sets) {
        angular.forEach(sets, function (set) {
            if (set.entity.measurement === self.consts.measurement.weight.lbs) {
                var kgCalc = set.entity.perform * 0.45359237;
                var kgs = Math.round(kgCalc);
                set.entity.perform = kgs;
                set.entity.measurement = self.consts.measurement.weight.kgs;
            }
        });
        return sets;
    };
    // Convert any kgs sets to lbs in place.
    // NOTE(review): this floors while the kg conversion rounds -- confirm
    // the asymmetry is intentional (round-tripping loses up to 1 unit).
    self.convertSetsViewModelToLbs = function (sets) {
        angular.forEach(sets, function (set) {
            if (set.entity.measurement === self.consts.measurement.weight.kgs) {
                var kgs = set.entity.perform / 0.45359237;
                var lbs = Math.floor(kgs);
                set.entity.perform = lbs;
                set.entity.measurement = self.consts.measurement.weight.lbs
            }
        });
        return sets;
    };
});
// Controller for the per-day exercise screen: list/create/delete the
// exercises attached to the routed day, with name-uniqueness validation
// against the global exercise catalog.
wo5App.controller('ExerciseController', function ($scope, $routeParams, resourceService, entityService) {
    init();
    // Load the exercise catalog, the routed day, and the day's exercises.
    function init() {
        entityService.getAllExercises().then(function (exercises) {
            $scope.allExercises = resourceService.getViewModelCollection(exercises);
        });
        entityService.getDay(parseInt($routeParams.dayId)).then(function (day) {
            $scope.day = resourceService.getViewModel(day);
            entityService.getExercisesByDay(day).then(function (exercises) {
                $scope.dayExercises = resourceService.getViewModelCollection(exercises);
            })
        });
        $scope.newExercise = resourceService.getViewModel();
        $scope.emptyExercise = resourceService.getViewModel({}, resourceService.consts.op.create);
    };
    // Switch an exercise row into edit mode.
    $scope.editExercise = function (exercise) {
        exercise.operation = resourceService.consts.op.update;
    };
    // Create the exercise, link its id to the day, persist the day, then
    // show it in the list and reset the form.
    $scope.addExercise = function (exercise) {
        entityService.addExercise(exercise.entity).then(function (exercise) {
            //Add id to day.exercises
            $scope.day.entity.exercises.push({ id: exercise.id });
            entityService.saveDay($scope.day.entity).then(function () {
                //Add exercise to scope.exercises
                $scope.dayExercises.push(resourceService.getViewModel(exercise));
                $scope.newExercise.entity = { name: '' };
            })
        })
    };
    // Persist an edited exercise. The exercise data lives inside the day
    // entity, so saving the day persists it.
    // BUG FIX: the previous body referenced an undefined local `day`
    // (ReferenceError) and then re-saved the program for no reason; it also
    // left the row stuck in edit mode. Save the scoped day and flip the row
    // back to read mode.
    $scope.saveExercise = function (exercise) {
        entityService.saveDay($scope.day.entity).then(function () {
            exercise.operation = resourceService.consts.op.read;
            exercise.error = '';
        });
    };
    // Delete the exercise, unlink its id from the day, persist the day,
    // then drop it from the list.
    $scope.deleteExercise = function (exercise) {
        entityService.deleteExercise(exercise.entity).then(function () {
            //Remove id from day.exercises
            $scope.day.entity.exercises.splice($scope.day.entity.exercises.indexOf(exercise.entity.id), 1);
            entityService.saveDay($scope.day.entity).then(function () {
                //Remove exercise from scope.exercises
                $scope.dayExercises.splice($scope.dayExercises.indexOf(exercise), 1);
            })
        })
    };
    // Validation: exercise names must be unique across the catalog.
    $scope.validExercise = function (exercise) {
        return resourceService.validViewModelEntity(exercise, $scope.allExercises);
    }
    $scope.validExerciseFeedback = function (exercise) {
        exercise.error = $scope.validExercise(exercise) ? 'Name free!' : 'Name exists.';
    }
});
// Home screen controller: ensures the local database is initialized on load.
wo5App.controller('HomeController', function ($scope, dbService) {
    dbService.initDB();
})
// Placeholder controller for the settings screen (no behavior yet).
wo5App.controller('SettingsController', function ($scope) {
})
"JavaScript"
] | 9 | JavaScript | crw-riviere/Workout5 | cd55a6f7692242c078ef21bc9c1e14d05857ce04 | af1b86085069c0cc4ae6551bb96f70a681cdb017 |
refs/heads/master | <repo_name>ShiYiLiJack/Tip-App-with-Tesseract<file_sep>/TipResult/ViewController.swift
//
// ViewController.swift
// TipResult
//
// Created by Jack on 5/7/19.
// Copyright © 2019 Jack. All rights reserved.
//
import UIKit
import ProgressHUD
import TesseractOCR
import GoogleMobileAds
// Shared persistence handle (not currently read or written in this file).
let defaults = UserDefaults.standard
// Default tax and tip percentages; user-adjustable from the toolbar buttons.
var tax: Double = 9.75
var tip: Double = 15
// Main screen: photograph a receipt, OCR the prices with Tesseract, apply
// tax and tip per line item, and let the user tap line totals to build a
// personal running total.
class ViewController: UIViewController, UIAlertViewDelegate, G8TesseractDelegate, GADBannerViewDelegate, UINavigationControllerDelegate, UIImagePickerControllerDelegate, UITableViewDelegate, UITableViewDataSource{

    @IBOutlet weak var totalPersonalLabel: UILabel!
    @IBOutlet var adBannerView: GADBannerView!
    @IBOutlet weak var tipButton: UIBarButtonItem!
    @IBOutlet weak var taxButton: UIBarButtonItem!
    @IBOutlet weak var imageView: UIImageView!
    @IBOutlet weak var tipTableView: UITableView!

    // BUG FIX: this was the abstract UIGestureRecognizer, which never
    // transitions to the recognized state, so the programmatically attached
    // tap on the image preview never fired.
    let tapRec = UITapGestureRecognizer()
    let imagePicked = UIImagePickerController()
    let operationQueue = OperationQueue()
    // Flattened table rows: [price, tax, tip, total, price, tax, ...]
    var finalArray: [Double] = []
    // Numeric tokens extracted from the OCR text.
    var allNumArray: [String] = []
    // Running total of the line items the current person has tapped.
    var personalPay: Double = 0.00

    // Prompt for a new tip percentage, then recompute the table.
    @IBAction func tipButtonTapped(_ sender: UIBarButtonItem) {
        var textfield = UITextField()
        let alert = UIAlertController(title: "Tip Percent", message: "Enter Tip Percentage", preferredStyle: .alert)
        let action = UIAlertAction(title: "Confirm", style: .default) { (action) in
            if textfield.text != "" {
                tip = Double(textfield.text!)!
                self.tipButton.title = "Tip: \(tip)%"
                self.calcTip()
            }
        }
        alert.addTextField { (tipTextfield) in
            tipTextfield.keyboardType = .decimalPad
            tipTextfield.placeholder = "\(tip)"
            textfield = tipTextfield
        }
        alert.addAction(action)
        present(alert, animated: true, completion: nil)
    }

    //clear and zero the total label for the next person
    @IBAction func clearButtonTapped(_ sender: UIButton) {
        personalPay = 0.00
        totalPersonalLabel.text = "Total: \(personalPay)"
    }

    // Prompt for a new tax percentage, then recompute the table.
    @IBAction func taxButtonTapped(_ sender: UIBarButtonItem) {
        var textfield = UITextField()
        let alert = UIAlertController(title: "Tax Percent", message: "Enter Tax Percentage", preferredStyle: .alert)
        let action = UIAlertAction(title: "Confirm", style: .default) { (action) in
            if textfield.text != "" {
                tax = Double(textfield.text!)!
                self.taxButton.title = "Tax: \(tax)%"
                self.calcTip()
            }
        }
        alert.addTextField { (taxTextfield) in
            taxTextfield.keyboardType = .decimalPad
            taxTextfield.placeholder = "\(tax)"
            textfield = taxTextfield
        }
        alert.addAction(action)
        present(alert, animated: true, completion: nil)
    }

    // Run Tesseract OCR (digits only) on the receipt photo; on completion,
    // extract the prices and show the raw recognition result.
    func recognizeImageWithTesseract(image: UIImage) {
        guard let operation = G8RecognitionOperation(language: "eng") else {
            fatalError("Error in operation language")
        }
        //operation.tesseract.engineMode = .lstmOnly
        operation.tesseract.pageSegmentationMode = .autoOnly
        //operation.delegate = self
        operation.tesseract.charWhitelist = "0123456789"
        operation.tesseract.image = image
        operation.recognitionCompleteBlock = {(tesseract: G8Tesseract?) in
            let recognizedText = tesseract?.recognizedText
            if recognizedText != nil {
                self.splitString(detectedString: recognizedText!)
            }
            let alertController = UIAlertController(title: "OCR Result", message: recognizedText, preferredStyle: .alert)
            let alertAction = UIAlertAction(title: "Ok", style: .default)
            alertController.addAction(alertAction)
            self.present(alertController, animated: true)
        }
        operationQueue.addOperation(operation)
    }

    // Receive the photo the user took, show it, and hand it to the OCR.
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        guard let userPickedImage = info[UIImagePickerController.InfoKey.editedImage] as? UIImage else {
            fatalError("")
        }
        ProgressHUD.show()
        // Discard any tokens from a previous scan.
        allNumArray.removeAll()
        // (Removed a dead `UIImage(named: "Receipt")` local that was never used.)
        recognizeImageWithTesseract(image: userPickedImage)
        imageView.image = userPickedImage
        imagePicked.dismiss(animated: true, completion: nil)
    }

    // Tokenize the OCR output and keep only the numeric tokens.
    func splitString(detectedString: String){
        var textArray: [String] = []
        let seperator = CharacterSet(charactersIn: " ,\n,$")
        textArray = detectedString.components(separatedBy: seperator)
        var x = 0
        while(x < textArray.count){
            if Double(textArray[x]) != nil {
                allNumArray.append(textArray[x])
            }
            x += 1
        }
        calcTip()
    }

    public func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return finalArray.count
    }

    // Tapping a "Total" row (every 4th row) adds that line to the personal total.
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        if indexPath.row % 4 == 3 {
            personalPay = Double(round((personalPay + finalArray[indexPath.row]) * 100) / 100)
            totalPersonalLabel.text = "Total: \(personalPay)"
        }
        tableView.deselectRow(at: indexPath, animated: true)
    }

    // Rows cycle price / tax / tip / total for each detected line item.
    public func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let priceList = UITableViewCell(style: UITableViewCell.CellStyle.default, reuseIdentifier: "price")
        if indexPath.row % 4 == 0 {
            priceList.textLabel?.text = String(self.finalArray[indexPath.row])
        } else if indexPath.row % 4 == 1 {
            priceList.textLabel?.text = "\t Tax: \(self.finalArray[indexPath.row])"
        } else if indexPath.row % 4 == 2 {
            priceList.textLabel?.text = "\t Tip: \(self.finalArray[indexPath.row])"
        } else if indexPath.row % 4 == 3 {
            priceList.textLabel?.text = "\t Total: \(self.finalArray[indexPath.row])"
        }
        return priceList
    }

    // Rebuild finalArray (price, tax, tip, total per item) from the scanned
    // tokens. Integer tokens are skipped on the assumption that prices carry
    // a decimal point while quantities and item numbers do not.
    func calcTip(){
        finalArray.removeAll()
        var x = 0
        while(x < allNumArray.count) {
            if Int(allNumArray[x]) == nil {
                let regPrice = Double(allNumArray[x])!
                let taxPrice = Double(round(regPrice * tax)/100)
                let tipPrice = Double(round((regPrice + taxPrice) * tip)/100)
                let totalPrice = Double(round((taxPrice + tipPrice + regPrice) * 100)/100)
                self.finalArray.append(regPrice)
                self.finalArray.append(taxPrice)
                self.finalArray.append(tipPrice)
                self.finalArray.append(totalPrice)
            }
            x += 1
        }
        tipTableView.reloadData()
        ProgressHUD.dismiss()
    }

    @IBAction func cameraButtonTapped(_ sender: UIBarButtonItem) {
        present(imagePicked, animated: true, completion: nil)
    }

    // Open the full-screen receipt viewer.
    @IBAction func imageTapped(_ sender: UITapGestureRecognizer) {
        performSegue(withIdentifier: "reciptImage", sender: self)
    }

    //set image on imageViewController to the image selected
    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        let destinationVC = segue.destination as! ImageViewController
        destinationVC.selectedImage = imageView.image
    }

    // Pin the AdMob banner to the bottom of the screen.
    func addBannerViewToView(_ bannerView: GADBannerView) {
        bannerView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(bannerView)
        view.addConstraints(
            [NSLayoutConstraint(item: bannerView,
                                attribute: .bottom,
                                relatedBy: .equal,
                                toItem: bottomLayoutGuide,
                                attribute: .top,
                                multiplier: 1,
                                constant: 0),
             NSLayoutConstraint(item: bannerView,
                                attribute: .centerX,
                                relatedBy: .equal,
                                toItem: view,
                                attribute: .centerX,
                                multiplier: 1,
                                constant: 0)
            ])
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        adBannerView = GADBannerView(adSize: kGADAdSizeBanner)
        addBannerViewToView(adBannerView)
        //test banner
        adBannerView.adUnitID = "ca-app-pub-3940256099942544/2934735716"
        //App Banner
        //adBannerView.adUnitID = "ca-app-pub-8519552575120945/1025960537"
        adBannerView.rootViewController = self
        adBannerView.load(GADRequest())
        adBannerView.delegate = self
        imageView.isUserInteractionEnabled = true
        imageView.addGestureRecognizer(tapRec)
        // BUG FIX: the old string-based Selector(("imageTapped")) did not
        // match the one-argument imageTapped(_:) method; use a checked
        // #selector so the tap actually opens the full-screen viewer.
        tapRec.addTarget(self, action: #selector(imageTapped(_:)))
        imagePicked.delegate = self
        imagePicked.allowsEditing = true
        imagePicked.sourceType = .camera
        tipTableView.delegate = self
        tipTableView.dataSource = self
        tipTableView.rowHeight = 50
        taxButton.title = "Tax: \(tax)%"
        tipButton.title = "Tip: \(tip)%"
    }
}
<file_sep>/TipResult/ImageViewController.swift
//
// ImageViewController.swift
// TipResult
//
// Created by Jack on 5/17/19.
// Copyright © 2019 Jack. All rights reserved.
//
import UIKit
import GoogleMobileAds
class ImageViewController: UIViewController {
@IBOutlet weak var adBannerView: GADBannerView!
@IBOutlet weak var largeImage: UIImageView!
var selectedImage: UIImage?
override func viewDidLoad() {
super.viewDidLoad()
largeImage.image = selectedImage
adBannerView.adUnitID = "ca-app-pub-8519552575120945/1025960537"
adBannerView.rootViewController = self
adBannerView.load(GADRequest())
}
}
<file_sep>/README.md
# Tip-App-with-Tesseract
| 330ed7a51d2ce3efc8c4c17ecb906d121b15a793 | [
"Swift",
"Markdown"
] | 3 | Swift | ShiYiLiJack/Tip-App-with-Tesseract | a3fe2bac2ebfd6a29cae08b2db7f7faa8cf453d4 | 5baee46a6462c2ee19a3aa2fac8a3d515479f838 |
refs/heads/master | <repo_name>dgfigueroa29/GLTest<file_sep>/app/src/main/java/com/boa/gltest/global/di/ListComponent.java
package com.boa.gltest.global.di;
import com.boa.gltest.ui.feature.list.ListActivity;
import dagger.Component;
@Component(dependencies = RootComponent.class, modules = {ListModule.class, MainModule.class})
public interface ListComponent {
void inject(ListActivity activity);
}
<file_sep>/app/src/main/java/com/boa/gltest/global/di/RootComponent.java
package com.boa.gltest.global.di;
import com.boa.gltest.global.App;
import com.boa.gltest.ui.feature.detail.DetailActivity;
import com.boa.gltest.ui.feature.detail.DetailPresenter;
import com.boa.gltest.ui.feature.list.ListActivity;
import com.boa.gltest.ui.feature.list.ListPresenter;
import dagger.Component;
@Component(modules = MainModule.class)
public interface RootComponent {
void inject(App application);
//Inject activities dependencies from this Component
void inject(ListActivity activity);
void inject(DetailActivity activity);
void inject(ListPresenter presenter);
void inject(DetailPresenter presenter);
}
<file_sep>/build.gradle
buildscript {
ext.retrofit_version = "2.5.0"
ext.butterknife_version = "6.0.0"
ext.dagger_version = "2.15"
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:3.5.3"
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
<file_sep>/app/src/main/java/com/boa/gltest/usecase/ShowItemClicked.java
package com.boa.gltest.usecase;
import com.boa.gltest.global.model.Item;
public interface ShowItemClicked {
void show(Item item);
}
<file_sep>/app/src/main/java/com/boa/gltest/repository/cachepolicy/CachePolicy.java
package com.boa.gltest.repository.cachepolicy;
public interface CachePolicy {
boolean isCacheValid();
}
<file_sep>/app/src/main/java/com/boa/gltest/datasource/preferences/GetItemPrefImpl.java
package com.boa.gltest.datasource.preferences;
import android.content.Context;
import com.boa.gltest.global.model.Item;
import com.boa.gltest.repository.ItemRepository;
import com.boa.gltest.usecase.GetItem;
public class GetItemPrefImpl implements GetItem {
private Context context;
public GetItemPrefImpl(Context context) {
this.context = context;
}
@Override
public Item get() {
return bring(context);
}
@Override
public void getAsync(Listener listener) {
listener.onItemReceived(bring(context));
}
public static Item bring(Context context) {
return ItemRepository.bring(context);
}
}
<file_sep>/app/src/main/java/com/boa/gltest/interactor/GetItemInteractor.java
package com.boa.gltest.interactor;
import com.boa.gltest.global.model.Item;
import com.boa.gltest.usecase.GetItem;
public class GetItemInteractor implements Interactor, GetItem, GetItem.Listener {
GetItem.Listener listener;
GetItem getItem;
Executor executor;
MainThread mainThread;
public GetItemInteractor(GetItem getItem, Executor executor, MainThread mainThread) {
this.getItem = getItem;
this.executor = executor;
this.mainThread = mainThread;
}
@Override
public void run() {
getItem.getAsync(listener);
}
@Override
public Item get() {
return getItem.get();
}
@Override
public void getAsync(Listener listener) {
if (listener != null) {
this.listener = listener;
}
this.executor.run(this);
}
@Override
public void onItemReceived(Item item) {
listener.onItemReceived(item);
}
@Override
public void onError(Exception e) {
listener.onError(e);
}
@Override
public void onNoInternetAvailable() {
listener.onNoInternetAvailable();
}
}
<file_sep>/app/src/main/java/com/boa/gltest/ui/render/ItemRendererBuilder.java
package com.boa.gltest.ui.render;
import android.content.Context;
import com.boa.gltest.global.model.Item;
import com.pedrogomez.renderers.Renderer;
import com.pedrogomez.renderers.RendererBuilder;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
public class ItemRendererBuilder extends RendererBuilder<Item> {
public ItemRendererBuilder(Context context, ItemRenderer.OnItemClicked itemClicked) {
Collection<Renderer<Item>> prototypes = getPrototypes(context, itemClicked);
setPrototypes(prototypes);
}
private List<Renderer<Item>> getPrototypes(Context context, ItemRenderer.OnItemClicked itemClicked) {
List<Renderer<Item>> prototypes = new LinkedList<>();
ItemRenderer itemRenderer = new ItemRenderer(context, itemClicked);
prototypes.add(itemRenderer);
return prototypes;
}
@Override
protected Class getPrototypeClass(Item content) {
return ItemRenderer.class;
}
}
<file_sep>/app/src/main/java/com/boa/gltest/ui/render/ItemRenderer.java
package com.boa.gltest.ui.render;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.boa.gltest.R;
import com.boa.gltest.global.model.Item;
import com.pedrogomez.renderers.Renderer;
import com.squareup.picasso.Picasso;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
public class ItemRenderer extends Renderer<Item> {
private Context context;
private OnItemClicked listener;
public ItemRenderer(Context context, OnItemClicked listener) {
this.context = context.getApplicationContext();
this.listener = listener;
}
@InjectView(R.id.ivItem)
ImageView ivItem;
@InjectView(R.id.tvItemTitle)
TextView tvItemTitle;
@InjectView(R.id.tvItemDescription)
TextView tvItemDescription;
@InjectView(R.id.rlItem)
RelativeLayout rlItem;
@Override
protected void setUpView(View rootView) {
ButterKnife.inject(this, rootView);
}
@Override
protected void hookListeners(View rootView) {
}
@Override
protected View inflate(LayoutInflater inflater, ViewGroup parent) {
return inflater.inflate(R.layout.row_item, parent, false);
}
@OnClick(R.id.rlItem)
void onClickItem() {
listener.onRowClicked(getContent());
}
@Override
public void render() {
Item item = getContent();
renderDescription(item.getDescription());
renderTitle(item.getTitle());
renderImage(item.getImage());
}
private void renderImage(String image) {
Picasso.get()
.load(image)
.resizeDimen(R.dimen.user_thumbnail_w, R.dimen.user_thumbnail_h)
.error(R.mipmap.ic_launcher)
.placeholder(R.mipmap.ic_launcher)
.into(ivItem);
}
private void renderTitle(String title) {
tvItemTitle.setText(title);
}
private void renderDescription(String description) {
tvItemDescription.setText(description);
}
public interface OnItemClicked {
void onRowClicked(Item item);
}
}
<file_sep>/app/src/main/java/com/boa/gltest/global/di/ListModule.java
package com.boa.gltest.global.di;
import android.content.Context;
import com.boa.gltest.navigator.ShowItemClickedImpl;
import com.boa.gltest.usecase.ShowItemClicked;
import dagger.Module;
import dagger.Provides;
@Module
public class ListModule {
Context context;
ShowItemClicked showItemClicked;
public ListModule(final Context context) {
this.context = context;
showItemClicked = new ShowItemClickedImpl(context);
}
@Provides
public ShowItemClicked provideShowItemClicked() {
return showItemClicked;
}
}
<file_sep>/app/src/main/java/com/boa/gltest/ui/base/BaseActivity.java
package com.boa.gltest.ui.base;
import android.os.Bundle;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.boa.gltest.BuildConfig;
import com.boa.gltest.R;
import butterknife.ButterKnife;
public abstract class BaseActivity extends AppCompatActivity {
protected abstract int getLayoutId();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(getLayoutId());
ButterKnife.inject(this);
}
public void showLoading() {
}
public void hideLoading() {
}
public void showError(Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT).show();
if (BuildConfig.ShowLogs) {
System.out.println("ERROR: " + e.getMessage());
}
hideLoading();
}
public void showOfflineMessage() {
Toast.makeText(this, R.string.no_internet, Toast.LENGTH_SHORT).show();
hideLoading();
}
}
<file_sep>/app/src/main/java/com/boa/gltest/ui/feature/list/ListActivity.java
package com.boa.gltest.ui.feature.list;
import android.os.Bundle;
import android.os.PersistableBundle;
import android.view.LayoutInflater;
import android.widget.ProgressBar;
import androidx.annotation.Nullable;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.boa.gltest.R;
import com.boa.gltest.global.App;
import com.boa.gltest.global.di.DaggerListComponent;
import com.boa.gltest.global.di.ListComponent;
import com.boa.gltest.global.di.ListModule;
import com.boa.gltest.global.model.Item;
import com.boa.gltest.navigator.ShowItemClickedImpl;
import com.boa.gltest.ui.base.BaseActivity;
import com.boa.gltest.ui.render.ItemRenderer;
import com.boa.gltest.ui.render.ItemRendererBuilder;
import com.pedrogomez.renderers.ListAdapteeCollection;
import com.pedrogomez.renderers.RVRendererAdapter;
import java.util.ArrayList;
import java.util.List;
import butterknife.InjectView;
import static android.view.View.GONE;
import static android.view.View.VISIBLE;
public class ListActivity extends BaseActivity implements ListContract.View {
@InjectView(R.id.listProgressBar)
ProgressBar listProgressBar;
@InjectView(R.id.list)
RecyclerView list;
ListPresenter presenter;
RVRendererAdapter<Item> adapter;
private ListComponent component;
final ItemRenderer.OnItemClicked itemClicked = new ItemRenderer.OnItemClicked() {
@Override
public void onRowClicked(Item item) {
presenter.onItemClicked(item);
}
};
@Override
public void onCreate(@Nullable Bundle savedInstanceState, @Nullable PersistableBundle persistentState) {
super.onCreate(savedInstanceState, persistentState);
component().inject(this);
init();
prepare();
presenter.initialize();
showLoading();
}
@Override
protected void onResume() {
super.onResume();
prepare();
adapter.clear();
init();
presenter.resume();
}
private void init() {
if (presenter == null) {
presenter = new ListPresenter(this);
presenter.attach(this);
}
}
private void prepare() {
adapter = new RVRendererAdapter<>(
LayoutInflater.from(this),
new ItemRendererBuilder(this, itemClicked),
new ListAdapteeCollection<>(new ArrayList<Item>())
);
list.setHasFixedSize(true);
list.setLayoutManager(new LinearLayoutManager(this, RecyclerView.VERTICAL, false));
list.setAdapter(adapter);
}
@Override
protected int getLayoutId() {
return R.layout.activity_list;
}
@Override
public void showList(List<Item> itemList) {
for (Item item : itemList) {
adapter.add(item);
}
adapter.notifyDataSetChanged();
hideLoading();
}
@Override
public void showLoading() {
listProgressBar.setVisibility(VISIBLE);
}
@Override
public void hideLoading() {
listProgressBar.setVisibility(GONE);
}
@Override
public void goToDetail(Item item) {
new ShowItemClickedImpl(this).show(item);
}
@Override
public void showListError(Exception e) {
showError(e);
}
@Override
public void showNoInternetMessage() {
showOfflineMessage();
}
private ListComponent component() {
if (component == null) {
component = DaggerListComponent.builder()
.rootComponent(((App) getApplication()).getRootComponent())
.listModule(new ListModule(getApplicationContext()))
.mainModule(((App) getApplication()).getMainModule())
.build();
}
return component;
}
@Override
public void onBackPressed() {
finish();
}
}
<file_sep>/app/src/main/java/com/boa/gltest/global/di/MainModule.java
package com.boa.gltest.global.di;
import android.content.Context;
import com.boa.gltest.datasource.api.GetItemsApiImpl;
import com.boa.gltest.datasource.preferences.GetItemPrefImpl;
import com.boa.gltest.global.App;
import com.boa.gltest.interactor.GetItemInteractor;
import com.boa.gltest.interactor.GetItemsInteractor;
import com.boa.gltest.interactor.impl.MainThreadImpl;
import com.boa.gltest.interactor.impl.ThreadExecutor;
import com.boa.gltest.navigator.ShowItemClickedImpl;
import com.boa.gltest.repository.ItemRepository;
import com.boa.gltest.usecase.ShowItemClicked;
import javax.inject.Named;
import dagger.Module;
import dagger.Provides;
@Module
public class MainModule {
private App application;
GetItemsInteractor getItemsInteractor;
GetItemInteractor getItemInteractor;
ShowItemClickedImpl showItemClicked;
ItemRepository itemRepository;
public MainModule(App application) {
this.application = application;
getItemsInteractor = new GetItemsInteractor(new GetItemsApiImpl(),
new ThreadExecutor(),
new MainThreadImpl());
getItemInteractor = new GetItemInteractor(new GetItemPrefImpl(application),
new ThreadExecutor(),
new MainThreadImpl());
showItemClicked = new ShowItemClickedImpl(application);
itemRepository = new ItemRepository(application, getItemsInteractor);
}
@Provides
GetItemsInteractor provideGetItemsInteractor() {
return getItemsInteractor;
}
@Provides
ShowItemClicked provideShowItemClicked() {
return showItemClicked;
}
@Provides
GetItemInteractor provideGetItemInteractor() {
return getItemInteractor;
}
@Provides
ItemRepository provideItemRepository() {
return itemRepository;
}
@Provides
@Named("applicationContext")
Context provideApplicationContext() {
return application.getApplicationContext();
}
}
<file_sep>/app/src/main/java/com/boa/gltest/global/App.java
package com.boa.gltest.global;
import android.app.Application;
import androidx.annotation.VisibleForTesting;
import com.boa.gltest.global.di.DaggerRootComponent;
import com.boa.gltest.global.di.MainModule;
import com.boa.gltest.global.di.RootComponent;
public class App extends Application {
private RootComponent rootComponent;
private MainModule mainModule;
@Override
public void onCreate() {
super.onCreate();
mainModule = new MainModule(this);
rootComponent = DaggerRootComponent.builder()
.mainModule(mainModule)
.build();
rootComponent.inject(this);
}
public RootComponent getRootComponent() {
return rootComponent;
}
@VisibleForTesting
public void setRootComponent(RootComponent rootComponent) {
this.rootComponent = rootComponent;
}
public MainModule getMainModule() {
return mainModule;
}
public void setMainModule(MainModule mainModule) {
this.mainModule = mainModule;
}
}
<file_sep>/app/src/main/java/com/boa/gltest/interactor/GetItemsInteractor.java
package com.boa.gltest.interactor;
import com.boa.gltest.datasource.api.NullListener;
import com.boa.gltest.global.model.Item;
import com.boa.gltest.usecase.GetItems;
import java.util.List;
public class GetItemsInteractor implements Interactor, GetItems, GetItems.Listener {
GetItems.Listener listener = new NullListener();
GetItems getItems;
Executor executor;
MainThread mainThread;
public GetItemsInteractor(GetItems dataSource, Executor executor, MainThread mainThread) {
this.getItems = dataSource;
this.executor = executor;
this.mainThread = mainThread;
}
@Override
public void run() {
getItems.getAsync(listener);
}
@Override
public List<Item> get() {
return getItems.get();
}
@Override
public void getAsync(Listener listener) {
if (listener != null) {
this.listener = listener;
}
this.executor.run(this);
}
@Override
public void onItemsReceived(List<Item> items, boolean isCached) {
listener.onItemsReceived(items, isCached);
}
@Override
public void onError(Exception e) {
listener.onError(e);
}
@Override
public void onNoInternetAvailable() {
listener.onNoInternetAvailable();
}
}
<file_sep>/settings.gradle
include ':app'
rootProject.name='GLTest'
<file_sep>/README.md
# GLTest
## Mobile Technical Interview
Create an app that shows a simple list of items based on the following endpoint:
http://private-f0eea-mobilegllatam.apiary-mock.com/list
The app should **show the following data** :
* Title (at its full length, so take this into account when sizing your cells)
* Part of description
* A thumbnail for those who have a picture.
* Display a detail (with an image and full description) in a new screen when user
taps on an item
To do this please follow this **guidelines** :
* Use github as VCS and share the code with us.
* For Android apps, use Java. For iOS, use Swift 4.
* Support portrait and landscape mode.
Some **notes** :
* If you don’t have the time to complete the full exercise, don’t worry. Just do your
best in the time you have and then let us know.
* Keep in mind that some information from api could be wrong
* You can use any framework or library
* The assessment of the app won’t be only related to code quality but architecture
design too.<file_sep>/app/src/main/java/com/boa/gltest/ui/base/BaseContract.java
package com.boa.gltest.ui.base;
public class BaseContract {
public interface Presenter<T> {
void initialize();
void resume();
void pause();
void destroy();
void attach(T view);
}
public interface View {
}
}
<file_sep>/app/src/main/java/com/boa/gltest/ui/feature/detail/DetailContract.java
package com.boa.gltest.ui.feature.detail;
import com.boa.gltest.global.model.Item;
import com.boa.gltest.ui.base.BaseContract;
public class DetailContract {
interface View extends BaseContract.View {
void showItem(Item item);
void showItemError(Exception e);
void showNoInternetMessage();
}
interface Presenter extends BaseContract.Presenter<DetailContract.View> {
void getItem();
}
}
| c471ccac5ee139924b76f0b45bd8bca300860fe3 | [
"Markdown",
"Java",
"Gradle"
] | 19 | Java | dgfigueroa29/GLTest | 32301486328090c9dc945d5244ddf735c6a3a3ff | 1ea96cf3e7d10473ce53dc75f59f2856eb59f290 |
refs/heads/master | <repo_name>muditshamz/Reclette<file_sep>/src/data/MenuData.js
export const menuData = [
{ title: "Courses", icon: "/images/icons/courses.svg", link: "/courses" },
{ title: "About", icon: "/images/icons/book.svg", link: "/About" },
{ title: "Subscribe", icon: "/images/icons/email.svg", link: "/Subscribe" },
{ title: "", icon: "/images/icons/award.svg", link: "/Awards" },
{ title: "", icon: "/images/icons/downloads.svg", link: "/account" },
]
export const tooltipData = [
{ title: "Profile", icon: "/images/icons/profile.svg", link: "/profile" },
{ title: "Settings", icon: "/images/icons/settings.svg", link: "/settings" },
{ title: "Sign out", icon: "/images/icons/signout.svg", link: "/logout" },
]
| 3aba7a66f48c3aee8120cec7ae90da4290a82439 | [
"JavaScript"
] | 1 | JavaScript | muditshamz/Reclette | 6611bb65af6018ffdf1467edbbb1ef2b88b233ca | 5715a5b0853fe5a679af724023eb94d92eb377d8 |
refs/heads/master | <repo_name>SumyHu/xiuyu<file_sep>/js/index.js
//存储全局变量
var commentValue = {
hadFinishValue: 20
};
//需初始化的事件
var init = function() {
articleTopInit();
bindEvent();
};
//左侧导航栏事件处理
var asideBlockEvent = {
//被选中导航栏样式效果
selectedStyle: function(target) {
$(".aside-nav ul li").removeClass();
target.addClass("aside-nav-selected");
},
//显示被选中块的内容
showMainBlock: function(index) {
$(".main-block").css("display", "none");
$(".main-block")[index].style.display = "block";
}
};
//绑定左侧导航栏事件
var bindAsideNavEvent = function() {
$.each($(".aside-nav ul li"), function(index, liTarget) {
$(liTarget).on("click", function() {
asideBlockEvent.selectedStyle($(this));
asideBlockEvent.showMainBlock(index);
});
});
};
//article的top部分的事件处理
var articleTopEvent = {
//获取当前时间
getCurrentDate: function() {
return new Date().toLocaleString();
},
//实时更新当前时间
setDateEvent: function() {
var currentDate = articleTopEvent.getCurrentDate();
$(".date").text(currentDate);
setTimeout(articleTopEvent.setDateEvent, 1000);
},
//设置my-case里面的进度值
setMyCaseSpanValue: function() {
var str = commentValue.hadFinishValue + "%";
$(".my-case span").text(str);
},
//设置进度条
setProcessBar: function() {
$(".progress-bar").attr("aria-valuenow", commentValue.hadFinishValue);
$(".progress-bar").css("width", commentValue.hadFinishValue+"%");
$(".progress-bar").text(commentValue.hadFinishValue+"%");
}
};
//公告管理逻辑处理
var callBoardEvent = {
//点击公告栏查看情况的样式处理
selectedStyle: function(target) {
$(".aside-block nav a").removeClass();
target.addClass("aside-block-selected");
},
//点击公告栏查看情况显示情况
showCallBoardTitle: function(className) {
$(".call-board-title").css("display", "none");
$("." + className).css("display", "block");
},
//点击公告标题后公告标题处理
hadClickTitleStyle: function(target) {
$(".call-board-title a").css("color", "#595757");
target.css("color", "red");
if (target.parent().attr("class") === "call-board-title not-see") {
target.parent().attr("class", "call-board-title already-see")
}
$(".article").css("display", "none");
var className = target.attr("class");
$("." + className + "-article").css("display", "block");
},
//点击收藏或者删除
clickStarOrRemove: function(target) {
target.popover("show");
}
};
//公告管理事件绑定
var bindCallBoardEvent = function() {
$.each($(".aside-block nav a"), function(index, aTarget) {
$(aTarget).on("click", function() {
callBoardEvent.selectedStyle($(this));
if ($(aTarget).text() === "未看公告") {
callBoardEvent.showCallBoardTitle("not-see");
}
else if ($(aTarget).text() === "已看公告") {
callBoardEvent.showCallBoardTitle("already-see");
}
else {
$(".call-board-title").css("display", "block");
}
});
});
$.each($(".call-board-title a"), function(index, titleTarget) {
$(titleTarget).on("click", function() {
callBoardEvent.hadClickTitleStyle($(this));
});
});
$.each($(".star"), function(index, starTarget) {
$(starTarget).on("click", function() {
callBoardEvent.clickStarOrRemove($(this));
});
});
$.each($(".remove"), function(index, removeTarget) {
$(removeTarget).on("click", function() {
callBoardEvent.clickStarOrRemove($(this));
});
});
};
//article的top部分的需要初始化的事件
var articleTopInit = function() {
articleTopEvent.setDateEvent();
articleTopEvent.setMyCaseSpanValue();
articleTopEvent.setProcessBar();
};
//全局事件绑定
var bindEvent = function() {
bindCallBoardEvent();
bindAsideNavEvent();
};
$(function() {
init();
}); | 57ef35935c126c9f373fc3cd594f57764063a923 | [
"JavaScript"
] | 1 | JavaScript | SumyHu/xiuyu | 29d8faacadef703426bee01e935191f70d49ebb3 | c679897885c22e65844b3bca560dd3ebe156e939 |
refs/heads/master | <repo_name>monchito007/ClasePHP<file_sep>/index.php
<?php
class connection{
private $server="localhost";
private $user="root";
private $pwd="";
private $database="consumoelectrico";
private $con;
public function connect(){
$this->con = mysql_connect($this->server,$this->user,$this->pwd);
mysql_select_db($this->database,$this->con);
}
public function showData($query){
$res = mysql_query($query,$this->con);
mysql_close();
echo "<table border=1>";
while($row = mysql_fetch_row($res)){
echo "<tr>";
for($x=0;$x<count($row);$x++){
echo "<td>";
echo $row[$x];
echo "</td>";
}
echo "</tr>";
}
echo "</table>";
}
public function getTypeOfFields($tabla){
$query = "SELECT * FROM ".$tabla;
$res = mysql_query($query,$this->con);
//mysql_close();
$fields = mysql_num_fields($res);
$type_of_fields = array();
for ($i=1; $i < $fields; $i++) {
$type_of_fields[$i]['type'] = mysql_field_type($res, $i);
$type_of_fields[$i]['name'] = mysql_field_name($res, $i);
$type_of_fields[$i]['len'] = mysql_field_len($res, $i);
$type_of_fields[$i]['flags'] = mysql_field_flags($res, $i);
}
return $type_of_fields;
}
public function showForm($tabla){
$array_of_fields = $this->getTypeOfFields($tabla);
//Create form
echo "<form method='GET'>";
echo "<table>";
for ($i=1; $i<=count($array_of_fields); $i++) {
$type = $array_of_fields[$i]['type'];
$name = $array_of_fields[$i]['name'];
$len = $array_of_fields[$i]['len'];
$flags = $array_of_fields[$i]['flags'];
//echo $type . " " . $name . " " . $len . " " . $flags . "<br>";
echo "<tr>";
echo "<td>";
echo "<label>".$name."</label>";
echo "</td>";
echo "<td>";
//echo "<input type='text' name=".$name." maxlength=".$len." required>";
if($type=="int"){echo "<input type='number' name=".$name." min='0' maxlength='".$len."'>";}
else{echo "<input type='text' name=".$name." maxlength=".$len.">";}
echo "</td>";
echo "<td>";
echo "<i>".$type." ".$flags."</i>";
echo "</td>";
echo "</tr>";
}
echo "<tr>";
echo "<td>";
echo "<input type='submit' value='Send'>";
echo "</td>";
echo "</tr>";
echo "</table>";
echo "</form>";
}
private function _GenerateFieldsString($fields){
$string = "";
for($i=1;$i<count($fields);$i++){
if(($i+1)!=count($fields)){
$string .= $fields[$i]['name'].", ";
}else{
$string .= $fields[$i]['name'];
}
}
return $string;
}
private function _GenerateValuesString($fields,$values){
$string = "";
for($i=1;$i<count($fields);$i++){
if(($i+1)!=count($fields)){
if($fields[$i]['type']=='string'){
$string .= "'".$values[$fields[$i]['name']]."', ";
}else{
$string .= $values[$fields[$i]['name']].", ";
}
}else{
if($fields[$i]['type']=='string'){
$string .= "'".$values[$fields[$i]['name']]."'";
}else{
$string .= $values[$fields[$i]['name']];
}
}
}
return $string;
}
public function insertData($fields,$values,$table){
$str_fields = $this->_GenerateFieldsString($fields);
$str_values = $this->_GenerateValuesString($fields,$values);
$query = "INSERT INTO $table ($str_fields) VALUES ($str_values);";
//mysql_query($query,$this->con);
}
}
$con = new connection();
$con->connect();
$table = "clientes";
if($_REQUEST){
echo "Datos recibidos";
$fields = $con->getTypeOfFields($table);
$values = $_REQUEST;
$con->insertData($fields, $values,$table);
}
$query = "SELECT * FROM ".$table;
//$query = "SELECT a.Codigo,a.Nombre,a.Apellido,a.Apellido2,a.Calle,a.Numero,a.Piso,a.Metros,b.Poblacion,c.Provincia "
// . "FROM clientes as a, poblaciones as b, provincias as c WHERE a.CodigoPoblacion=b.CodigoPoblacion AND b.CodigoProvincia=c.CodigoProvincia";
$con->showData($query);
$con->showForm($table);
echo "<br>";
?> | a16e5d140c7f847fd0f95c6a5385cea9ef1f6a34 | [
"PHP"
] | 1 | PHP | monchito007/ClasePHP | 4fb657274a86e7e8666079006c9e7645a91f4da9 | 1175cf504191aed18dcb2ae9f2c5011971c28dcc |
refs/heads/master | <file_sep>server.port=8082
logging.level.com.github=debug
logging.level.org.springframework=info
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
spring.jackson.time-zone=Asia/Shanghai
endpoints.enabled=true
peacetrue.result.exclude-auto-convert-when-return=boolean
peacetrue.metadata.entity-class-map.Demo=com.github.peacetrue.demo.service.ll_Demo<file_sep>= maven 配置
.配置快照仓库
[source%nowrap,maven]
----
<repository>
<id>sonatype-snapshots</id>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
</repository>
----
.服务接口
[source%nowrap,maven]
----
<dependency>
<groupId>com.github.peacetrue.template</groupId>
<artifactId>peacetrue-demo-service-api</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
----
.服务mybatis实现
[source%nowrap,maven]
----
<dependency>
<groupId>com.github.peacetrue.template</groupId>
<artifactId>peacetrue-demo-service-mybatis</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
----
.控制器
[source%nowrap,maven]
----
<dependency>
<groupId>com.github.peacetrue.template</groupId>
<artifactId>peacetrue-demo-controller</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
----
.前端
[source%nowrap,maven]
----
<dependency>
<groupId>com.github.peacetrue.template</groupId>
<artifactId>peacetrue-demo-ui</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
----
<file_sep>rootProject.name = 'peacetrue-template-model-library'
include 'peacetrue-demo-service-api'
include 'peacetrue-demo-service-mybatis'
include 'peacetrue-demo-controller'
include 'peacetrue-demo-type'
include 'peacetrue-demo-sample'
<file_sep>package com.github.peacetrue.demo.service;
import com.github.pagehelper.autoconfigure.PageHelperAutoConfiguration;
import org.mybatis.spring.boot.autoconfigure.MybatisAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
/**
* @author xiayx
*/
@Configuration
@ImportAutoConfiguration(value = {
DataSourceAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class,
MybatisAutoConfiguration.class,
PageHelperAutoConfiguration.class,
ServiceDemoAutoConfiguration.class,
})
@PropertySource("classpath:application-demo-service-test.properties")
public class TestServiceDemoAutoConfiguration {
}
<file_sep>package com.github.peacetrue.demo.service;
import com.github.peacetrue.core.Range;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
* @author xiayx
*/
@Data
@NoArgsConstructor
public class DemoQuery implements Serializable {
public static final DemoQuery DEFAULT = new DemoQuery();
private static final long serialVersionUID = 0L;
private String code;
private String name;
private Range.Date createdTime;
}
<file_sep>peacetrue.demo.urls.base-path=/demos
peacetrue.demo.urls.add=
peacetrue.demo.urls.query=
peacetrue.demo.urls.get=
peacetrue.demo.urls.modify=
peacetrue.demo.urls.delete=<file_sep>plugins {
id "org.springframework.boot" version "1.5.20.RELEASE"
}
description '模板样章'
dependencies {
compile 'ch.qos.logback:logback-classic'
compile 'com.github.peacetrue:peacetrue-spring'
compile project(":peacetrue-demo-controller")
compile project(":peacetrue-demo-type")
compile project(":peacetrue-demo-service-mybatis")
compile project(path: ':peacetrue-demo-service-mybatis', configuration: 'testArtifacts')
compile 'com.github.peacetrue:peacetrue-result-web'
compile 'com.github.peacetrue:peacetrue-result-exception-support'
compile 'org.springframework.boot:spring-boot-starter-web'
compile 'org.springframework.boot:spring-boot-starter-actuator'
implementation 'com.github.peacetrue.module:peacetrue-module-controller'
implementation 'com.github.peacetrue.module:peacetrue-module-service-mybatis:1.0-SNAPSHOT'
}
| 5977da0e77579b6955102a513c650f5a5f76f3bf | [
"Java",
"AsciiDoc",
"INI",
"Gradle"
] | 7 | INI | peacetrue/peacetrue-template-model-library | 38041d7ce7ab8e1d333e5f1f5a5b7f8dd022cf85 | 5347e83dda558786a8e5d717113cac082e9274cf |
refs/heads/master | <repo_name>chi42/go_hangman<file_sep>/threaded_hangman.go
package main
import (
"fmt"
"os"
"container/list"
// "regexp"
// "strings"
// "rand"
// "time"
)
const (
BUF_SIZ = 100
BLANK= '*'
)
var (
word_list *list.List
)
// printers ...
func list_print(l *list.List) {
for e := l.Front(); e != nil; e = e.Next() {
fmt.Printf("%s\n", e)
}
}
func main() {
file_scan("dict")
// rg,err := regexp.Compile("...")
}
// bring the whole dictionary into main memory
// (based on the assumption that we will be guessing
// at multiple words throughout the lifetime of the program)
func file_scan(dict_name string) {
word_list = new(list.List)
f, err := os.Open(dict_name)
str := ""
num_bytes := 0
var read_buf [BUF_SIZ]byte
if f == nil {
fmt.Printf("File error: %s\n", err.String())
os.Exit(1)
}
num_bytes, err = f.Read(read_buf[:])
// loop over file, until no more bytes to be read
for ; num_bytes > 0; {
// loop over buffer, newline indicates new word
for i := 0; i < num_bytes; i++ {
if read_buf[i] == '\n' {
b := []byte(str)
word_list.PushBack(b)
str = ""
} else {
str += string(read_buf[i])
}
}
num_bytes, err = f.Read(read_buf[:])
}
}
<file_sep>/hangman.go
package main
import (
"fmt"
"os"
"container/list"
"strings"
"rand"
"time"
)
const (
BUF_SIZ = 100
BLANK= '*'
)
var (
l_lis *list.List
l_counts counters
l_so_far []byte
l_word string
l_list_size int
)
type counters struct {
uniq []uint
total []uint
pos [][26]uint
}
func main() {
if len(os.Args) > 1 {
try_word(strings.ToUpper(os.Args[1]))
}
}
// given a new word "word" we will now attempt to guess the word
// program will try until completion
func try_word (word string) {
total_tries := 0
bad_tries := 0
l_word = word
word_len := len(l_word)
l_so_far = make([]byte, word_len)
for i,_ := range l_so_far {
l_so_far[i] = BLANK
}
file_scan("dict", word_len)
char_count(word_len)
fmt.Printf("%2d %2d guess: \tleft: %6d\t", total_tries, bad_tries, l_list_size)
fmt.Printf("so far: %s\n", l_so_far)
for {
l, w := pick()
lg, wg := try_guess(l, w)
if !lg {
bad_tries++
}
total_tries += 1
if !updates (l, w) {
fmt.Printf("No such word in dictionary!\n")
return
}
fmt.Printf("%2d %2d guess: %c\tleft: %6d\t", total_tries, bad_tries, l, l_list_size)
fmt.Printf("so far: %s\n", l_so_far)
if wg {
//fmt.Printf("<%d %s\n>%d %s\n", total_tries, l_word, bad_tries, l_word)
break
}
}
fmt.Printf("\n")
}
// based on counters and current progress, attempt to guess a new
// letter or word
func pick () (byte, string) {
// a pseudo randomish seed, not perfect but better then
// an obviously deterministic random number generater
rand.Seed(time.Nanoseconds())
max_val := uint(0)
max_pos := uint(0)
// pick the "obvious" matches, i.e., certain letters
// are the only possible letters that can fit in a spot
// so we pick those first
for i, _ := range l_counts.pos {
for j, _ := range l_counts.pos[i] {
if l_counts.pos[i][j] == uint(l_list_size) {
max_pos = uint(j + 65)
//fmt.Printf("OBVIOUS!!!!\n")
break
} else {
if l_counts.pos[i][j] != 0 {
break
}
}
}
if max_pos > 0 {
break
}
}
// weren't able to pick an obvious match, so we pick the
// letter that occurs in the most words
if max_pos == 0 {
max_total := uint(0)
mod := 10.0
for i, v := range l_counts.uniq {
if v > max_val {
max_val = v
max_pos = uint(i)
max_total = l_counts.total[i]
}
// the tie breakers
if v == max_val {
// pick the letter randomly
if rand.Int() % int(mod) == 0 {
//fmt.Printf("***RAND SELECTED\n")
max_val = v
max_pos = uint(i)
max_total = l_counts.total[i]
// this number is choosen somewhat arbitrarily
// the intention is to randomly select the new letter
// with decreasing probability
mod += (float64(i) * 2.0)
// pick the letter that occurs overall the most
} else if l_counts.total[i] > max_total {
//fmt.Printf("***SWITCH MADE\n")
max_val = v
max_pos = uint(i)
max_total = l_counts.total[i]
}
}
}
max_pos += 65
}
return byte(max_pos), ""
}
// update counts and various other variables after a letter
// or word is guessed
func updates (l byte, w string) bool {
if l > 0 {
index := l - 65
l_counts.total[index] = 0
l_counts.uniq[index] = 0
for i, _ := range l_counts.pos {
l_counts.pos[i][index] = 0
}
var e_prev *list.Element
e_prev = l_lis.Front().Next()
for e := l_lis.Front(); e != nil; e = e.Next() {
for i, val := range e.Value.(string) {
// two kinds of words to remove:
// word does not contain the guessed letter at the same spot(s)
// or word contains the guessed letter, but not in the same spot(s)
if (l_so_far[i] != BLANK && l_so_far[i] != uint8(val)) ||
(l_so_far[i] != l && uint8(val) == l) {
word_removal_count(e.Value.(string))
l_list_size--
if l_list_size == 0 {
return false
}
l_lis.Remove(e)
e = e_prev
break
}
}
e_prev = e
}
// consider the instance of where we guessed a wrong, and guessed wrong
} else {
}
return true
}
// update the counters when a single word 'w' is stripped
func word_removal_count(w string) {
var temp [26]uint
var index int
// iterate across all letters in w
// for each letter update the counters
for pos, v := range w {
index = v - 65
temp[index] = 1
if l_counts.total[index] > 0 {
l_counts.total[index] -= 1
}
if l_counts.pos[pos][index] > 0 {
l_counts.pos[pos][index] -=1
}
}
for i, v := range temp {
if l_counts.uniq[i] > 0 {
l_counts.uniq[i] -= v
}
}
}
// evaluate the status of the guess that uses
// either the character 'l' or the word 'w'
// return:
// success of letter guess, overall success (i.e if word is completed)
func try_guess (l byte, w string) (bool, bool) {
l_match := false
if l > 0 {
for i,v := range l_word {
if byte(v) == l {
l_match = true
l_so_far[i] = l
}
}
}
for _, v := range l_so_far {
if v == BLANK {
return l_match, false
}
}
return true, true
}
// generate counters initially
func char_count(word_len int) {
var temp [26]uint
l_counts.uniq = make([]uint, 26)
l_counts.total = make([]uint, 26)
l_counts.pos = make([][26]uint, word_len)
// for each word
for e := l_lis.Front(); e != nil; e = e.Next() {
// (string) needed, as type assertion for interface type
// for each letter in word
for i, val := range e.Value.(string) {
l_counts.total[val - 65] += 1
temp[val - 65] = 1
l_counts.pos[i][val-65] += 1
}
// set and init
for i, v := range temp {
l_counts.uniq[i] += v
temp[i] = 0
}
}
}
// initial build, scan the dictionary and bring into
// main memory all the words of matching length
func file_scan(name string, word_len int) {
l_list_size = 0
l_lis = new(list.List)
f, err := os.Open(name, os.O_RDONLY, 0666)
str := ""
num_b := 0
i := 0
var store_a [BUF_SIZ]byte
if f == nil {
fmt.Printf("File error: %s\n", err.String())
os.Exit(1)
}
// loop over file, until no more bytes to be read
num_b, err = f.Read(store_a[:])
for ; num_b > 0; {
// iterate through byte array, break if end of line
for ; i < num_b; i++ {
if store_a[i] == '\n' {
if len(str) == word_len {
l_list_size++
l_lis.PushBack(str)
}
str = ""
i++
break
}
str += string(store_a[i])
}
// read more bytes if no more bytes in buffer
if i == num_b {
num_b, err = f.Read(store_a[:])
i = 0
}
}
}
<file_sep>/looper.sh
#!/bin/bash
while true
do
read -p ">> " INPUT
./8.out $INPUT
done
| 454dc667f4e6787a8fc6f1cc7b3ed135e1988204 | [
"Go",
"Shell"
] | 3 | Go | chi42/go_hangman | f13c3a7dca5ab613d008e8b3eaa1db3a0a2a752a | f5bc3f737e725ff57626edb7d4359be5eea96560 |
refs/heads/master | <repo_name>aivo0/frontend-transcriber<file_sep>/config.js
export const endpoint = `http://localhost:4444`;
export const prodEndpoint = `https://tekstiks.ee/api`;
//export const endpoint = `https://heli-yoga-prod.herokuapp.com/`;
/* export const wssEndpoint = `ws://localhost:4000`;
export const prodWssEndpoint = `wss://heli-yoga-prod.herokuapp.com/`; */
| 76758101e18fdf12a875f301aeace9cffc7b8d5a | [
"JavaScript"
] | 1 | JavaScript | aivo0/frontend-transcriber | 5e63842317a5ce70c10a1c60f4ce14be06910646 | c3b82e05f4bdab7840a2e9b7045b411ee80e0dc3 |
refs/heads/master | <repo_name>BradhamLab/vnet.pytorch<file_sep>/torchpmc/datasets/__init__.py
from .pmcdataset import PMC_Dataset
__all__ = ('PMCs')
<file_sep>/train.py
""" """#!/usr/bin/env python3
# from local import *
import time
import argparse
import torch
import numpy as np
import torch.nn as nn
import torch.nn.init as init
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
import sys
from torchpmc import loss as bioloss
from torchpmc import utils
from torchpmc import datasets as dset
import os
import sys
import math # change math imports to numpy
import shutil
import setproctitle
import vnet
import make_graph
from functools import reduce
import operator
# root_dir = "/home/mia/Desktop/GoogleDrive/Images/PMCLabels"
root_dir = "/home/mia/Desktop/Images/PMCLabels"
target_split = []
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv3d') != -1:
nn.init.kaiming_normal(m.weight)
m.bias.data.zero_()
def datestr():
now = time.gmtime()
return '{}{:02}{:02}_{:02}{:02}'.format(now.tm_year, now.tm_mon, now.tm_mday, now.tm_hour, now.tm_min)
def save_checkpoint(state, is_best, path, prefix, filename='checkpoint.pth.tar'):
prefix_save = os.path.join(path, prefix)
name = prefix_save + '_' + filename
torch.save(state, name)
if is_best:
shutil.copyfile(name, prefix_save + '_model_best.pth.tar')
def inference(args, loader, model, transforms):
src = args.inference
dst = args.save
model.eval()
nvols = reduce(operator.mul, target_split, 1)
# assume single GPU / batch size 1
for data in loader:
data, series, origin, spacing = data[0]
shape = data.size()
# convert names to batch tensor
if args.cuda:
data.pin_memory()
data = data.cuda()
data = Variable(data, volatile=True)
output = model(data)
_, output = output.max(1)
output = output.view(shape)
output = output.cpu()
# merge subvolumes and save
results = output.chunk(nvols)
results = map(lambda var : torch.squeeze(var.data).numpy().astype(np.int16), results)
volume = utils.merge_image([results], target_split)
print("save {}".format(series))
utils.save_updated_image(volume, os.path.join(dst, series + ".mhd"), origin, spacing)
# performing post-train inference:
# train.py --resume <model checkpoint> --i <input directory (*.mhd)> --save <output directory>
def noop(x):
return x
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--batchSz', type=int, default=1)
parser.add_argument('--dice', action='store_true')
parser.add_argument('--ngpu', type=int, default=1)
parser.add_argument('--nEpochs', type=int, default=300)
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help=' manual epoch number (useful on restarts)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('-i', '--inference', default='', type=str, metavar='PATH',
help='run inference on data set and save results')
# 1e-8 works well for lung masks but seems to prevent
# rapid learning for nodule masks
parser.add_argument('--weight-decay', '--wd', default=1e-8, type=float,
metavar='W', help='weight decay (default: 1e-8)')
parser.add_argument('--no-cuda', action='store_true')
parser.add_argument('--save')
parser.add_argument('--seed', type=int, default=1)
parser.add_argument('--opt', type=str, default='adam',
choices=('sgd', 'adam', 'rmsprop'))
args = parser.parse_args()
best_prec1 = 100.
args.cuda = not args.no_cuda and torch.cuda.is_available()
args.save = args.save or 'work/vnet.base.{}'.format(datestr())
nll = True
if args.dice:
nll = False
weight_decay = args.weight_decay
setproctitle.setproctitle(args.save)
torch.manual_seed(args.seed)
if args.cuda:
torch.cuda.manual_seed(args.seed)
print("build vnet")
model = vnet.VNet(elu=False, nll=nll)
batch_size = args.ngpu*args.batchSz
gpu_ids = range(args.ngpu)
model = nn.parallel.DataParallel(model, device_ids=gpu_ids)
if args.resume:
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
args.start_epoch = checkpoint['epoch']
best_prec1 = checkpoint['best_prec1']
model.load_state_dict(checkpoint['state_dict'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.evaluate, checkpoint['epoch']))
else:
print("=> no checkpoint found at '{}'".format(args.resume))
else:
model.apply(weights_init)
if nll:
train = train_nll
test = test_nll
class_balance = True
else:
train = train_dice
test = test_dice
class_balance = False
print(' + Number of params: {}'.format(
sum([p.data.nelement() for p in model.parameters()])))
if args.cuda:
model = model.cuda()
if os.path.exists(args.save):
shutil.rmtree(args.save)
os.makedirs(args.save, exist_ok=True)
masks = None
if args.inference != '':
if not args.resume:
print("args.resume must be set to do inference")
exit(1)
kwargs = {'num_workers': 1} if args.cuda else {}
src = args.inference
dst = args.save
inference_batch_size = args.ngpu
root = os.path.dirname(src)
images = os.path.basename(src)
dataset = dset.PMC_Dataset(root=root, images=root, transform=testTransform,
split=target_split, mode="infer")
loader = DataLoader(dataset, batch_size=inference_batch_size,
shuffle=False, collate_fn=noop, **kwargs)
inference(args, loader, model)
# inference(args, loader, model, trainTransform)
return
kwargs = {'num_workers': 1, 'pin_memory': True} if args.cuda else {}
print("loading training set")
trainSet = dset.PMC_Dataset(root=root_dir, images=root_dir, targets=root_dir,
mode="train", class_balance= None, split= None, test_fraction=0.25)
trainLoader = DataLoader(trainSet, batch_size=batch_size, shuffle=True, **kwargs)
print("loading test set")
testSet = dset.PMC_Dataset(root=root_dir, images=root_dir, targets=root_dir,
mode="test", split=target_split)
testLoader = DataLoader(testSet, batch_size=batch_size, shuffle=False, **kwargs)
target_mean = trainSet.target_mean()
bg_weight = target_mean / (1. + target_mean)
fg_weight = 1. - bg_weight
print("bg_weight:",bg_weight)
class_weights = torch.FloatTensor([bg_weight, fg_weight])
if args.cuda:
class_weights = class_weights.cuda()
if args.opt == 'sgd':
optimizer = optim.SGD(model.parameters(), lr=1e-1,
momentum=0.99, weight_decay=weight_decay)
elif args.opt == 'adam':
optimizer = optim.Adam(model.parameters(), weight_decay=weight_decay)
elif args.opt == 'rmsprop':
optimizer = optim.RMSprop(model.parameters(), weight_decay=weight_decay)
trainF = open(os.path.join(args.save, 'train.csv'), 'w')
testF = open(os.path.join(args.save, 'test.csv'), 'w')
err_best = 100.
for epoch in range(1, args.nEpochs + 1):
print(args)
adjust_opt(args.opt, optimizer, epoch)
train(args, epoch, model, trainLoader, optimizer, trainF, class_weights)
err = test(args, epoch, model, testLoader, optimizer, testF, class_weights)
is_best = False
if err < best_prec1:
is_best = True
best_prec1 = err
save_checkpoint({'epoch': epoch,
'state_dict': model.state_dict(),
'best_prec1': best_prec1},
is_best, args.save, "vnet")
os.system('./plot.py {} {} &'.format(len(trainLoader), args.save))
trainF.close()
testF.close()
def train_nll(args, epoch, model, trainLoader, optimizer, trainF, weights):
model.train()
nProcessed = 0
nTrain = len(trainLoader.dataset)
for batch_idx, (data, target) in enumerate(trainLoader):
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
# forward
output = model(data)
target = target.view(target.numel())
loss = F.nll_loss(output, target, weight=weights)
dice_loss = bioloss.dice_error(output, target)
# make_graph.save('/tmp/t.dot', loss.creator); assert(False)
loss.backward()
optimizer.step()
nProcessed += len(data)
pred = output.data.max(1)[1] # get the index of the max log-probability
incorrect = pred.ne(target.data).cpu().sum()
err = 100.*incorrect/target.numel()
partialEpoch = epoch + batch_idx / len(trainLoader) - 1
print('Train Epoch: {:.2f} [{}/{} ({:.0f}%)]\tLoss: {:.4f}\tError: {:.3f}\t Dice: {:.6f}'.format(
partialEpoch, nProcessed, nTrain, 100. * batch_idx / len(trainLoader),
loss.data[0], err, dice_loss))
trainF.write('{},{},{}\n'.format(partialEpoch, loss.data[0], err))
trainF.flush()
def test_nll(args, epoch, model, testLoader, optimizer, testF, weights):
model.eval()
test_loss = 0
dice_loss = 0
incorrect = 0
numel = 0
for data, target in testLoader:
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data, volatile=True), Variable(target)
target = target.view(target.numel())
numel += target.numel()
output = model(data)
test_loss += F.nll_loss(output, target, weight=weights).data[0]
dice_loss += bioloss.dice_error(output, target)
pred = output.data.max(1)[1] # get the index of the max log-probability
incorrect += pred.ne(target.data).cpu().sum()
test_loss /= len(testLoader) # loss function already averages over batch size
dice_loss /= len(testLoader)
err = 100.*incorrect/numel
print('\nTest set: Average loss: {:.4f}, Error: {}/{} ({:.3f}%) Dice: {:.6f}\n'.format(
test_loss, incorrect, numel, err, dice_loss))
testF.write('{},{},{}\n'.format(epoch, test_loss, err))
testF.flush()
return err
def train_dice(args, epoch, model, trainLoader, optimizer, trainF, weights):
model.train()
nProcessed = 0
nTrain = len(trainLoader.dataset)
for batch_idx, (data, target) in enumerate(trainLoader):
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
output = model(data)
loss = bioloss.dice_loss(output, target)
# make_graph.save('/tmp/t.dot', loss.creator); assert(False)
loss.backward()
optimizer.step()
nProcessed += len(data)
err = 100.*(1. - loss.data[0])
partialEpoch = epoch + batch_idx / len(trainLoader) - 1
print('Train Epoch: {:.2f} [{}/{} ({:.0f}%)]\tLoss: {:.8f}\tError: {:.8f}'.format(
partialEpoch, nProcessed, nTrain, 100. * batch_idx / len(trainLoader),
loss.data[0], err))
trainF.write('{},{},{}\n'.format(partialEpoch, loss.data[0], err))
trainF.flush()
def test_dice(args, epoch, model, testLoader, optimizer, testF, weights):
model.eval()
test_loss = 0
incorrect = 0
for data, target in testLoader:
if args.cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data, volatile=True), Variable(target)
output = model(data)
loss = bioloss.dice_loss(output, target).data[0]
test_loss += loss
incorrect += (1. - loss)
test_loss /= len(testLoader) # loss function already averages over batch size
nTotal = len(testLoader)
err = 100.*incorrect/nTotal
print('\nTest set: Average Dice Coeff: {:.4f}, Error: {}/{} ({:.0f}%)\n'.format(
test_loss, incorrect, nTotal, err))
testF.write('{},{},{}\n'.format(epoch, test_loss, err))
testF.flush()
return err
def adjust_opt(optAlg, optimizer, epoch):
if optAlg == 'sgd':
if epoch < 150:
lr = 1e-1
elif epoch == 150:
lr = 1e-2
elif epoch == 225:
lr = 1e-3
else:
return
for param_group in optimizer.param_groups:
param_group['lr'] = lr
## Transform need to changed to be given stack wide and not image specific ##
# Cropping x3
class RandomCrop(object):
"""
Crop randomly the image in a sample.
Args:
output_size (tuple or int): Desired output size. If int, square crop
is made.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
if isinstance(output_size, int):
self.output_size = (output_size, output_size)
else:
assert len(output_size) == 2
self.output_size = output_size
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
h, w = image.shape[:2]
new_h, new_w = self.output_size
top = np.random.randint(0, h - new_h)
left = np.random.randint(0, w - new_w)
image = image[top: top + new_h,
left: left + new_w]
landmarks = landmarks - [left, top]
return {'image': image, 'landmarks': landmarks}
class RandomRotation(object):
"""
Rotation (More to come....maybe)
"""
def __init__(self, degrees, resample=False, expand=False, center=None):
if isinstance(degrees, numbers.Number):
if degrees < 0:
raise ValueError("If degrees is a single number, it must be positive.")
self.degrees = (-degrees, degrees)
else:
if len(degrees) != 2:
raise ValueError("If degrees is a sequence, it must be of len 2.")
self.degrees = degrees
self.resample = resample
self.expand = expand
self.center = center
def __call__(self, img):
"""
The landmarks need to be rotated with the image so if (x,y) is a
point in the image the corresponding rotated point
will be (x', y') where:
x' = x*cos(degrees) - y*sin(degrees)
y' = y*cos(degrees) + x*sin(degrees)
math.sin(x) - returns the sine of x radians
math.cos(x) - returns the cosine of x radians
math.radians(x) - converts degrees to radians
"""
image, landmarks = sample['image'], sample['landmarks']
angle = self.degrees
rads = math.radians(angle)
rotMatrix = [[np.cos(rads),-1*np.sin(rads)],[np.sin(rads), np.cos(rads)]]
for i in range(0,len(landmarks)):
x = landmarks[i][0]
y = landmarks[i][1]
merp = [[x],[y]]
new_points = np.matmul(rotMatrix,merp)
new_x = new_points[0]
new_y = new_points[1]
landmarks[i][0] = new_x
landmarks[i][1] = new_y
return {'image': image, 'landmarks': landmarks}
class ToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
# swap color axis because
# numpy image: H x W x C
# torch image: C X H X W
image = image.transpose((2, 0, 1))
return {'image': torch.from_numpy(image),
'landmarks': torch.from_numpy(landmarks)}
def hist_match(source, template):
"""
Adjust the pixel values of a grayscale image such that its histogram
matches that of a target image
Arguments:
-----------
source: np.ndarray
Image to transform; the histogram is computed over the flattened
array
template: np.ndarray
Template image; can have different dimensions to source
Returns:
-----------
matched: np.ndarray
The transformed output image
From:
--------
https://stackoverflow.com/questions/32655686/histogram-matching-of-two-images-in-python-2-x
"""
oldshape = source.shape
source = source.ravel()
template = template.ravel()
# get the set of unique pixel values and their corresponding indices and
# counts
s_values, bin_idx, s_counts = np.unique(source, return_inverse=True,
return_counts=True)
t_values, t_counts = np.unique(template, return_counts=True)
# take the cumsum of the counts and normalize by the number of pixels to
# get the empirical cumulative distribution functions for the source and
# template images (maps pixel value --> quantile)
s_quantiles = np.cumsum(s_counts).astype(np.float64)
s_quantiles /= s_quantiles[-1]
t_quantiles = np.cumsum(t_counts).astype(np.float64)
t_quantiles /= t_quantiles[-1]
# interpolate linearly to find the pixel values in the template image
# that correspond most closely to the quantiles in the source image
interp_t_values = np.interp(s_quantiles, t_quantiles, t_values)
return interp_t_values[bin_idx].reshape(oldshape)
if __name__ == '__main__':
main()
<file_sep>/torchpmc/__init__.py
from torchpmc import datasets
from torchpmc import utils
from torchpmc import transforms
from torchpmc import loss
<file_sep>/torchpmc/datasets/o_pmcdataset.py
import numpy as np
import torch
import torch.utils.data as data
from torchpmc import utils
import glob
import os
import os.path
import SimpleITK as sitk
import pandas as pd
from skimage.draw import polygon
from PIL import Image
import json
image_dict = {}
label_dict = {}
mask_dict = {}
stats_dict = {}
test_split = []
train_split = []
def train_test_split(full, positive, test_fraction):
negative = full - positive
test_neg_count = int(np.ceil(len(negative)*test_fraction))
test_pos_count = int(np.ceil(len(positive)*test_fraction))
negative_list = list(negative)
positive_list = list(positive)
np.random.shuffle(positive_list)
np.random.shuffle(negative_list)
test_positive = set()
for i in range(test_pos_count):
test_positive |= set([positive_list[i]])
train_positive = positive - test_positive
if test_neg_count > 1:
test_negative = set()
for i in range(test_neg_count):
test_negative |= set([negative_list[i]])
train_negative = negative - test_negative
train = list(train_positive | train_negative)
test = list(test_positive | test_negative)
else:
train = list(train_positive)
test = list(test_positive)
np.random.shuffle(train)
np.random.shuffle(test)
return (train, test)
def load_image(root, series):
img_file = series + ".png"
itk_img = sitk.ReadImage(img_file)
img = sitk.GetArrayFromImage(itk_img)
y, x = np.shape(img)
img = img.reshape((1, y, x))
return img
def load_label(root, series):
img_file = series + ".png"
json_file = series + ".json"
im = sitk.ReadImage(img_file)
im_array = sitk.GetArrayFromImage(im)
columns = len(im_array)
rows = len(im_array[1])
img = np.zeros((rows, columns))
labels_n_points = obtainpoints(json_file)
for label in labels_n_points:
points = labels_n_points[label]
r = []
c = []
for i in range(0,len(points)):
r.append(points[i][0])
c.append(points[i][1])
rr, cc = polygon(r, c)
img[rr, cc] == 1
return img
def obtainpoints(json_file):
label_n_points = dict()
x = open(json_file)
json_file2 = json.load(x)
x.close()
nlabels = len(json_file2["shapes"])
for i in range(0,nlabels):
label = json_file2["shapes"][i]["label"]
points = json_file2["shapes"][i]["points"]
label_n_points[label] = points
return label_n_points
def full_dataset(root_dir, images):
image_list = []
image_files = []
for (root,dirs,files) in os.walk(root_dir):
root_embroyo = root
files_embroyo = files
for i in range(0,len(files_embroyo)):
if(files_embroyo[i] == 'metadata.json'):
embroyo_metadata = os.path.join(root_embroyo,files_embroyo[i])
channel = obtainchannel(embroyo_metadata)
image_directory = os.path.join(root_embroyo, channel) + "/IntensityImages"
image_file_list = glob.glob(image_directory + "/*png")
for img in image_file_list:
base = os.path.splitext(img)[0]
image_list.append(base)
return image_list
def make_dataset(root_dir, images, targets, seed, train, class_balance, partition, nonempty, test_fraction, mode):
global image_dict, label_dict, test_split, train_split
zero_tensor = None
train = mode == "train"
label_list = []
for (root,dirs,files) in os.walk(root_dir):
for i in range(0,len(files)):
if(files[i] == 'metadata.json'):
embroyo_metadata = os.path.join(root,files[i])
channel = obtainchannel(embroyo_metadata)
image_directory = os.path.join(root, channel) + "/IntensityImages"
json_file_list = glob.glob(image_directory + "/*json")
for anno in json_file_list:
base = os.path.splitext(anno)[0]
label_list.append(base)
zero = label_list[1]
sample_label = load_label(root_dir, zero)
shape = np.shape(sample_label)
if len(test_split) == 0:
zero_tensor = np.zeros(shape, dtype=np.uint8)
image_list = []
file_list=[]
for (root,dirs,files) in os.walk(root_dir):
root_embroyo = root
files_embroyo = files
for i in range(0,len(files_embroyo)):
if(files_embroyo[i] == 'metadata.json'):
embroyo_metadata = os.path.join(root_embroyo,files_embroyo[i])
channel = obtainchannel(embroyo_metadata)
image_directory = os.path.join(root_embroyo, channel) + "/IntensityImages"
image_file_list = glob.glob(image_directory + "/*png")
for img in image_file_list:
base = os.path.splitext(img)[0]
file_list.append(base)
image_list = label_list
np.random.seed(seed)
full = set(image_list)
positives = set(label_list) & full
train_split, test_split = train_test_split(full, positives, test_fraction)
if train:
print(len(label_list))
print(len(train_split))
keys = train_split
else:
print(len(test_split))
keys = test_split
y, x = shape
result = []
target_means = []
for key in keys:
target = load_label(root_dir, key)
target_means.append(np.mean(target))
result.append(key)
target_mean = np.mean(target_means)
return (result, target_mean)
def obtainchannel(metad):
x = open(metad)
metad = json.load(x)
x.close()
for key in metad:
if key[:-1] == "Channel":
if metad[key]["Stain"] == "PMC":
channel = key
return (channel)
class PMC_Dataset(data.Dataset):
def __init__(self, root='.', images=None, targets=None, transform=None,
target_transform=None, co_transform=None, mode="train", seed=1,
class_balance=False, split=None, masks=None, nonempty=True,
test_fraction=0.25):
self.mode = mode
self.root = root
if masks is not None:
self.masks = os.path.join(self.root, masks)
if targets is not None:
self.targets = os.path.join(self.root)
self.images = images
self.transform = transform
self.target_transform = target_transform
self.co_transform = co_transform
if images is None:
raise(RuntimeError("images must be set"))
if targets is None and mode != "infer":
raise(RuntimeError("both images and targets must be set if mode is not 'infer'"))
if mode == "infer":
imgs = full_dataset(root, images)
else:
imgs, target_mean = make_dataset(root, images, targets, seed, mode,
class_balance, split, nonempty,
test_fraction, mode)
self.data_mean = target_mean
if len(imgs) == 0:
raise(RuntimeError("Found 0 images: " + os.path.join(root + "\n")))
self.imgs = imgs
self.masks = None
self.split = split
def target_mean(self):
return self.data_mean
def __getitem__(self, index):
if self.mode == "train" or self.mode == "test":
return self.__getitem_dev(index)
elif self.mode == "infer":
return self.__getitem_prod(index)
def __getitem_prod(self, index):
series = self.imgs[index]
image = load_image(self.images, series)
origin, spacing = stats_dict[series]
image = image.astype(np.float32)
if self.split is not None:
batches = utils.partition_image(image, self.split)
else:
batches = [image]
if self.transform is not None:
batches = map(self.transform, batches)
batches = [batches]
batches = torch.cat(batches)
return batches, series, origin, spacing
def __getitem_dev(self, index):
series = self.imgs[index]
target = load_label(self.root, series)
target = torch.from_numpy(target.astype(np.int64))
img = load_image(self.images, series)
if self.transform is not None:
img = self.transform(image)
if self.target_transform is not None:
target = self.target_transform(target)
if self.co_transform is not None:
img, target = self.co_transform(img, target)
return img, target
def __len__(self):
return len(self.imgs)
| c04abe40caf11d5e08d139251d05ab7abfccaf61 | [
"Python"
] | 4 | Python | BradhamLab/vnet.pytorch | c63313a30ab844b14e2e2904a31f38dcc7211830 | 00f1d8406994abcfe345360c210779f2f8507430 |
refs/heads/master | <file_sep>import { Injectable } from '@angular/core';
import { getRequestParams } from '../utils/utils';
import { HttpClient } from '../../../node_modules/@angular/common/http';
import { UserInfomation } from '../model/userInfomation';
@Injectable({
  providedIn: 'root'
})
export class RegistrationServiceComponent {
  // REST endpoint used for both saving and fetching registration data.
  API_URL = 'api/registration';

  constructor(
    private _http: HttpClient
  ) {}

  /**
   * Persist the given user information via POST.
   *
   * The model object itself is sent as the request body. (The previous
   * version also built a URL-encoded form via getSearchParams() but never
   * used it — that dead call and its debug logging were removed.)
   *
   * @param params the user information to store
   * @returns a promise resolving with the server response
   */
  setUserInfomation(params: UserInfomation) {
    return this._http.post(this.API_URL, params).toPromise();
  }

  /**
   * Fetch the stored user information via GET.
   *
   * @returns a promise resolving with the server response
   */
  getUserInfomation() {
    return this._http.get(this.API_URL).toPromise();
  }
}
<file_sep>import { Component } from "../../../../node_modules/@angular/core";
import { UserService } from "../../service/user.service";
import { Router } from "../../../../node_modules/@angular/router";
import { AuthGuard } from "../../service/authGuard";
import { UserInfomation } from "../../model/userInfomation";
@Component({
selector: 'app-login',
templateUrl: 'login.component.html'
})
export class LoginComponent {
name: string;
password: string;
constructor(
private _userService: UserService,
private route: Router,
private _auth: AuthGuard
) { }
login() {
const params = {
name: this.name,
password: <PASSWORD>
};
this._userService.login(params).then(res => {
if (res) {
this._auth.getObservable().next(<UserInfomation>res);
this.route.navigate(['myApp/registration']);
} else {
window.alert('名前かパスワードが違います');
}
});
}
}
<file_sep>import { getRequestParams } from '../utils/utils';
export class UserInfomation {
hurigana: string;
name: string;
password: string;
constructor(userInfo) {
this.hurigana = userInfo.hurigana;
this.name = userInfo.name;
this.password = userInfo.password;
}
getSearchParams() {
const params = [];
if (this.hurigana) {
params['hurigana'] = this.hurigana;
}
if (this.name) {
params['name'] = this.name;
}
if (this.password) {
params['password'] = <PASSWORD>.password;
}
return getRequestParams(params);
}
}
<file_sep>import { Component, OnInit, EventEmitter, Output } from '@angular/core';
import { UserInfomation } from '../../../model/userInfomation';
import { HttpClient } from '@angular/common/http';
@Component({
selector: 'app-input-infomation',
templateUrl: './input-infomation.component.html',
styleUrls: ['./input-infomation.component.css']
})
export class InputInfomationComponent implements OnInit {
@Output() confirmUserInfo: EventEmitter<UserInfomation> = new EventEmitter();
hurigana: string;
name: string;
password: string;
errorFlg = false;
error = {
huriganaError: false,
nameError: false,
passwordError: false,
};
date = new Date();
constructor(private http: HttpClient) { }
ngOnInit() {
console.log('サーバーに送る');
this.http.get('api/character').toPromise().then((res) => {
console.log(res);
});
}
changeHurigana(hurigana: string) {
console.log(typeof hurigana);
const validation = hurigana.match(/^[\u3040-\u309f ]+$/) ? true : false;
if (validation) {
this.hurigana = hurigana;
} else {
this.error.huriganaError = true;
this.hurigana = undefined;
}
}
changeName(name: string) {
const validation = name.match(/^[\u30e0-\u9fcf\u3040-\u309f ]+$/) ? true : false;
if (validation) {
this.name = name;
} else {
this.error.nameError = true;
this.name = undefined;
}
}
changePassword(password) {
const validation = password.match(/^[a-zA-Z\d]+$/) ? true : false;
if (validation) {
this.password = <PASSWORD>;
} else {
this.error.passwordError = true;
this.password = undefined;
}
}
confirm() {
if (this.hurigana === undefined ||
this.name === undefined ||
this.password === undefined) {
this.errorFlg = true;
return;
}
const tmp = {
hurigana: this.hurigana,
name: this.name,
password: this.password
};
console.log(tmp);
const userInfo = new UserInfomation(tmp);
this.confirmUserInfo.emit(userInfo);
}
}
<file_sep>import * as express from 'express';
import * as bodyParser from 'body-parser';
import * as cookieParser from 'cookie-parser';
import * as serveStatic from 'serve-static';
import * as session from 'express-session';
import { request } from 'https';
const path = require('path');
const app = express();
// Session store backed by a SQLite file (connect-sqlite3 wraps express-session).
const SQLiteStore = require('connect-sqlite3')(session);

// Serve the compiled client from ../public.
app.use(express.static(path.join(__dirname, '/../public')));

const server = app.listen(3000, function() {
  console.log(__dirname);
  // console.log(path.join(__dirname, '..dist'));
  console.log('node.js is listening to PORT:' + server.address().port);
});

// Cookie-based sessions persisted to ./sessions.sqlite3.
// NOTE(review): maxAge is only 30 seconds — confirm this short lifetime is intended.
app.use(session({
  store: new SQLiteStore({
    db: './sessions.sqlite3'
  }),
  secret: 'secret',
  resave: false,
  saveUninitialized: false,
  cookie: {
    httpOnly: true,
    secure: false,
    maxAge: 1000 * 30
  }
}));
app.use(bodyParser.json());
app.use(cookieParser());

// Deep links handled by the Angular router: serve the SPA shell for them.
const URL_ROUTES = [
  '/myApp(/[^/]+)?',
];
URL_ROUTES.forEach(routerName => {
  app.use(routerName, serveStatic(path.join(__dirname, '/../public')));
});

// REST routers: each name maps ./routes/<name> onto /api/<name>.
const ROUTES = [
  'character',
  'registration',
  'login',
  'logout'
];
ROUTES.forEach(routerName => {
  console.log(routerName);
  const router = require('./routes/' + routerName);
  app.use('/api/' + routerName, router);
});
// const a = require('./routes/character');
// app.use('/api/character', a);

// NOTE(review): sessionData is never used below — candidate for removal.
const sessionData = {};
// get sessiondata
// var session = JSON.parse(sessionData[request.cookie['connect.sid']]);
// // set sessiondata
// sessionData[request.cookie['connect.sid']] = {
//   data: JSON.stringify(somedata),
//   expireted: 2018/10/22 13:44:00,
// };
<file_sep>import { Injectable } from '../../../node_modules/@angular/core';
import { CanActivate, Router } from '../../../node_modules/@angular/router';
import { BehaviorSubject } from '../../../node_modules/rxjs';
import { UserInfomation } from '../model/userInfomation';
import { HttpClient } from '../../../node_modules/@angular/common/http';
import { UserService } from './user.service';
@Injectable({
  providedIn: 'root',
}
)
export class AuthGuard implements CanActivate {
  API_URL_LOGIN = 'api/login/check';
  // Empty user record used as the "not logged in" sentinel.
  userinfo = {hurigana: '', name: '', password: ''};
  // Streams the current login state to interested components.
  private userSubject = new BehaviorSubject<UserInfomation>(new UserInfomation(this.userinfo));

  constructor(
    private _http: HttpClient,
    private route: Router,
    private _userService: UserService
  ) {
  }

  /**
   * Route guard: allow navigation only when the server session holds a
   * user (non-empty name); otherwise redirect to the login page.
   */
  async canActivate() {
    const login: UserInfomation = await this._userService.isAuthricated();
    console.log(login);
    if ( login.name === '') {
      this.route.navigate(['myApp/login']);
      return false;
    }
    return true;
  }

  // async checkAsync() {
  //   const login: UserInfomation = await this._http.get(this.API_URL_LOGIN).toPromise()
  //   .then(res => {
  //     console.log(res);
  //     return new UserInfomation(res);
  //   });
  //   console.log('login', login);
  //   this.userSubject.next(login);
  //   return login;
  // }

  /** Subject carrying the logged-in user; LoginComponent pushes into it. */
  getObservable() {
    return this.userSubject;
  }
}
<file_sep><div class="center">
<!-- Member list: one row per registered user.
     NOTE(review): the password column renders passwords in plain text —
     confirm this is intended for this demo. -->
<table>
  <thead class="thead">
    <tr class="thead-tr">
      <th>
        ふりがな
      </th>
      <th>
        名前
      </th>
      <th>
        パスワード
      </th>
    </tr>
  </thead>
  <tbody class="tbody">
    <tr class="tbody-tr" *ngFor="let member of members">
      <td>
        {{member.hurigana}}
      </td>
      <td>
        {{member.name}}
      </td>
      <td>
        {{member.password}}
      </td>
    </tr>
  </tbody>
</table>
</div><file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { ConfirmInfomationComponent } from './confirm-infomation.component';
// Auto-generated CLI spec: verifies the component can be instantiated.
describe('ConfirmInfomationComponent', () => {
  let component: ConfirmInfomationComponent;
  let fixture: ComponentFixture<ConfirmInfomationComponent>;

  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ ConfirmInfomationComponent ]
    })
    .compileComponents();
  }));

  beforeEach(() => {
    fixture = TestBed.createComponent(ConfirmInfomationComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
<file_sep>import * as express from 'express';
import * as fs from 'fs';
import * as path from 'path';
const router = express.Router();

/**
 * POST /api/login — authenticate against the JSON user store.
 * Responds with the matched user record (or an empty body when no user
 * matches) and stores the match on the session.
 */
router.post('/', (req, res, next) => {
  const jsonUserDatas = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../userData/userData.json'), 'utf8'));
  // Array#find stops at the first match instead of scanning the whole list.
  const matchUser = jsonUserDatas.List.find(userData =>
    userData.name === req.body.name && userData.password === req.body.password);
  if (matchUser) {
    req.session.user = matchUser;
    console.log('req.session.user', req.session.user);
  }
  res.json(matchUser);
});

/**
 * GET /api/login/check — return the session user, or an empty user
 * object when nobody is logged in.
 */
router.get('/check', (req, res, next) => {
  console.log('check', req.session.user);
  console.log('session', req.session);
  if (req.session.user) {
    res.json(req.session.user);
  } else {
    res.json({hurigana: '', name: '', password: ''});
  }
});

module.exports = router;
<file_sep>import { ModuleWithProviders, NgModule } from '@angular/core';
import {Routes, RouterModule} from '@angular/router';
import { LoginComponent } from './component/login/login.component';
// const myRoutes = [
// {path: 'registration', component: MainComponent},
// {path: 'member', component: MemberInfomationComponent}
// ];
// function loadMyApp() {
// return Promise.resolve(require('./routing/myApp/myApp.routing.module')['MyAppRoutingModule']);
// }
// Top-level routes: the app proper lives under /myApp (lazy-loaded feature
// module); /login renders the login screen directly.
const myRoutes: Routes = [
  {path: 'myApp', loadChildren: './routing/myApp/myApp.routing.module#MyAppRoutingModule'},
  {path: 'login', component: LoginComponent}
];
// export const MY_ROUTES: ModuleWithProviders = RouterModule.forRoot(myRoutes);
@NgModule({
  imports: [RouterModule.forRoot(myRoutes, {enableTracing: false})],
  exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>// import * as express from 'express';
// import { RegiInfo } from '../models/mongoDBmodel';
// const router = express.Router();
// router.get('/', (req, res , next) => {
// RegiInfo.find((err, doc) => {
// if (err) {
// return res.json(err);
// }
// return res.json(doc);
// });
// });
// router.post('/', (req, res) => {
// const regiInfo = new RegiInfo({
// regiInfo: req.body
// });
// regiInfo.save((err, result) => {
// if (err) {
// return res.json(err);
// }
// });
// });
// module.exports = router;
<file_sep>import * as express from 'express';
import * as fs from 'fs';
import * as path from 'path';
import { utf8Encode } from '../../node_modules/@angular/compiler/src/util';
const router = express.Router();

// Shape of the on-disk user store: a single List of user records.
interface UserData {
  List: any[];
}

// The JSON file acting as the user database.
const USER_DATA_FILE = path.join(__dirname, '../../../userData/userData.json');

/**
 * POST /api/registration — append the posted user to the JSON store.
 * An empty file is treated as an empty store; the previously duplicated
 * read/push/write branches are unified into one code path.
 */
router.post('/', (req, res, next) => {
  console.log(req.body);
  const userDatas: UserData = { List: [] };
  const jsonUserDatas = fs.readFileSync(USER_DATA_FILE, 'utf8');
  if (jsonUserDatas !== '') {
    userDatas.List = JSON.parse(jsonUserDatas).List;
  }
  userDatas.List.push(req.body);
  fs.writeFileSync(USER_DATA_FILE, JSON.stringify(userDatas));
  res.json('完了');
});

/** GET /api/registration — return the whole user store. */
router.get('/', (req, res, next) => {
  const userData = JSON.parse(fs.readFileSync(USER_DATA_FILE, 'utf8'));
  res.json(userData);
});

module.exports = router;
<file_sep>import * as express from 'express';
const router = express.Router();

// Static demo data served by GET /api/character.
const CHARACTERS = [
  { name: 'anira', type: 'fire' },
  { name: 'yuel', type: 'fire' },
];

router.get('/', (req, res) => {
  res.json(CHARACTERS);
});

module.exports = router;
<file_sep>import { Component, OnInit, ChangeDetectionStrategy } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Component({
  selector: 'app-complete-registration',
  templateUrl: './complete-registration.component.html',
  styleUrls: ['./complete-registration.component.css'],
  changeDetection: ChangeDetectionStrategy.OnPush,
})
export class CompleteRegistrationComponent implements OnInit {
  constructor(private http: HttpClient) {
    // Kick off the demo request as soon as the component is constructed.
    this.sendserver();
  }

  ngOnInit() {
    console.log('aaa');
  }

  // Fire-and-forget GET used as a connectivity/demo check.
  sendserver() {
    console.log('サーバーに送る');
    this.http.get('api/character').toPromise().then((res) => {
      console.log(res);
    });
  }
}
<file_sep>import { Injectable } from "../../../node_modules/@angular/core";
import { HttpClient } from "../../../node_modules/@angular/common/http";
import { Router } from "../../../node_modules/@angular/router";
@Injectable({
  providedIn: 'root'
})
export class UserService {
  // Authentication REST endpoints.
  API_URL_LOGIN = 'api/login';
  API_URL_LOGOUT = 'api/logout';
  API_URL_CHECK = 'api/login/check';
  // Currently logged-in user; empty strings mean "not logged in".
  loginUser: any = {name: '', hurigana: '', password: ''};

  constructor(
    private _http: HttpClient,
    private router: Router,
  ) {}

  /** POST credentials; caches and resolves with the server's response. */
  login(userInfo) {
    return this._http.post(this.API_URL_LOGIN, userInfo).toPromise().then(res => {
      console.log('res', res);
      this.loginUser = res;
      return res;
    });
  }

  /** Log out server-side, clear the cached user, return to registration. */
  logout() {
    return this._http.get(this.API_URL_LOGOUT).toPromise().then(res => {
      this.loginUser = {name: '', hurigana: '', password: ''};
      window.alert(res);
      this.router.navigate(['myApp/registration']);
    });
  }

  // NOTE(review): name is a typo for "isAuthenticated", but it is part of the
  // public API (used by AuthGuard and RegistrationResolver), so it is kept.
  /** Fetch the session user from the server, cache it, and resolve with it. */
  async isAuthricated() {
    await this._http.get(this.API_URL_CHECK).toPromise().then(res => {
      console.log(res);
      this.loginUser = res;
    });
    return this.loginUser;
  }
}
<file_sep>import { NgModule } from '../../../../node_modules/@angular/core';
import { RegistrationMainComponent } from '../../component/registration-main/main.component';
import { InputInfomationComponent } from '../../component/registration-main/input-infomation/input-infomation.component';
import { ConfirmInfomationComponent } from '../../component/registration-main/confirm-infomation/confirm-infomation.component';
import { CompleteRegistrationComponent } from '../../component/registration-main/complete-registration/complete-registration.component';
import { CurrencyStatusComponent } from '../../component/registration-main/currency-status/currency-status.component';
import { MemberInfomationComponent } from '../../component/member-infomation/member-infomation';
import { BrowserModule } from '../../../../node_modules/@angular/platform-browser';
import { FormsModule } from '../../../../node_modules/@angular/forms';
import { HttpClientModule } from '../../../../node_modules/@angular/common/http';
import { MyAppRoutingModule } from './myApp.routing.module';
import { MyAppComponent } from './myApp.component';
import { Location } from '../../../../node_modules/@angular/common';
import { RegistrationComponent } from '../../component/registration/registration.component';
import { RouteReuseStrategy } from '../../../../node_modules/@angular/router';
import { AuthGuard } from '../../service/authGuard';
import { MultiLineChartComponent } from '../../component/chart/chart.component';
// Feature module for everything under /myApp: the registration wizard,
// member list, login screen and the demo chart.
@NgModule({
  declarations: [
    MyAppComponent,
    RegistrationMainComponent,
    InputInfomationComponent,
    ConfirmInfomationComponent,
    CompleteRegistrationComponent,
    CurrencyStatusComponent,
    MemberInfomationComponent,
    RegistrationComponent,
    MultiLineChartComponent,
  ],
  imports: [
    BrowserModule,
    FormsModule,
    HttpClientModule,
    MyAppRoutingModule,
  ],
  providers: [
  ],
  // entryComponents: [MyAppComponent],
  bootstrap: [MyAppComponent]
})
export class MyAppModule { }
<file_sep>import { Component } from '@angular/core';
// Root component of the lazily-loaded /myApp feature.
@Component({
  selector: 'myApp',
  templateUrl: 'myApp.component.html',
})
export class MyAppComponent {
  // Page heading ("member registration").
  title = '会員登録';
}
<file_sep>import { Injectable } from "../../../../node_modules/@angular/core";
import { UserService } from "../../service/user.service";
import { ActivatedRouteSnapshot, RouterStateSnapshot, Resolve } from "../../../../node_modules/@angular/router";
@Injectable({
  providedIn: 'root'
})
// Route resolver: fetches the session user before the route activates,
// so the route's data carries the login state.
export class RegistrationResolver implements Resolve<any> {
  constructor(
    private _userService: UserService
  ) {}

  resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
    return this._userService.isAuthricated();
  }
}
<file_sep>import { HttpParams } from '@angular/common/http';
import { HttpParamsOptions } from '@angular/common/http/src/params';
/** Build an HttpParams instance from a plain key/value object. */
export function getRequestParams(object: any): HttpParams {
  return new HttpParams(<HttpParamsOptions>{ fromObject: object });
}
<file_sep>import * as d3 from "d3";
import { Component, ElementRef, ViewChild, ChangeDetectorRef } from "../../../../node_modules/@angular/core";
// import { select } from "../../../../node_modules/@types/d3-selection";
// import { min, max } from "../../../../node_modules/@types/d3-array";
// import { scaleLinear } from "../../../../node_modules/@types/d3-scale";
// import { line } from "../../../../node_modules/@types/d3";
// import {svg, select, scaleLinear} from "../../../../node_modules/@types/d3";
// export interface ChartPoint {
// x: number;
// y: number;
// }
// export type ChartData = ChartPoint[];
// export interface ChartOptions {
// width?: number | string;
// height?: number | string;
// strokeColor?: string;
// margin?: number | [number, number , number, number];
// animateDuration?: number;
// }
@Component({
  selector: 'app-chart',
  templateUrl: 'chart.component.html'
})
export class MultiLineChartComponent {
  // @ViewChild('chart') svgElement: any;
  // d3 selection of the appended <svg> root.
  svg;
  // Sample y-values plotted as a single line.
  datas = [10, 70, 50, 60 , 20, 80, 40, 60, 80, 100];
  // Inner padding (px) between the line and the 100x100 svg edge.
  margin = 2;

  constructor(private cd: ChangeDetectorRef) {
    // NOTE(review): runs at construction time, before the view exists;
    // d3.select('#chart') may find nothing at this point — confirm.
    this.init();
  }

  /** Append a 100x100 svg to #chart and draw the data as one path. */
  init(): void {
    this.svg = d3.select('#chart')
      .append('svg')
      .attr('width', 100)
      .attr('height', 100);
    console.log('##########');
    console.log(this.svg);
    const yMin = d3.min(this.datas);
    const yMax = d3.max(this.datas);
    // x maps index -> horizontal px; y is inverted (svg y grows downward).
    const xscale = d3.scaleLinear().domain([0, this.datas.length]).range([this.margin, 100 - this.margin]);
    const yscale = d3.scaleLinear().domain([yMin, yMax]).range([100 - this.margin, this.margin]);
    console.log(xscale(3));
    console.log(yscale(30));
    const d3Line = d3.line()
      .x((d, i) => xscale(d[0]))
      .y((d, i) => yscale(d[1]));
    // Pair each value with its index: [[0, y0], [1, y1], ...].
    const listData = [];
    for (let i = 0; i < this.datas.length; i++) {
      listData.push([i, this.datas[i]]);
    }
    this.svg.append('path')
      .attr('d', d3Line(listData))
      .style('stroke', 'black')
      .style('fill', 'none');
  }
}
<file_sep>import { Component, OnInit, ChangeDetectorRef } from '../../../../node_modules/@angular/core';
import { RegistrationServiceComponent } from '../../service/registration.service';
import { UserInfomation } from '../../model/userInfomation';
import { forEach } from '../../../../node_modules/@angular/router/src/utils/collection';
@Component({
  selector: 'app-member-infomation',
  templateUrl: 'member-infomation.html',
  styleUrls: ['member-infomation.scss'],
})
export class MemberInfomationComponent implements OnInit {
  /** Rows displayed by the member table. */
  members;

  constructor(
    private _registrationService: RegistrationServiceComponent,
    private _cdRef: ChangeDetectorRef
  ) {
  }

  /** Load the user store and render it once it arrives. */
  ngOnInit(): void {
    this._registrationService.getUserInfomation().then(res => {
      this.setUser(res);
      this._cdRef.detectChanges();
    });
  }

  /** Copy the store's List into the component state. */
  setUser(userDatas) {
    console.log(userDatas);
    this.members = [...userDatas.List];
  }
}
<file_sep>import { Component, OnInit, ChangeDetectionStrategy } from '@angular/core';
import { UserInfomation } from '../../model/userInfomation';
@Component({
  selector: 'app-main',
  templateUrl: './main.component.html',
  styleUrls: ['./main.component.css'],
  changeDetection: ChangeDetectionStrategy.OnPush,
})
// Registration wizard shell: switches between the input, confirm and
// complete pages by changing selectedPage.
export class RegistrationMainComponent implements OnInit {
  selectedPage: number;
  userInfo: UserInfomation;
  // Wizard page identifiers, in display order.
  DISPLAY_PAGE = {
    INPUT_INFOMATION: 1,
    COMFIRM: 2,
    COMPLETE_MEMBER_JOIN: 3
  };

  constructor() { }

  ngOnInit() {
    this.selectedPage = this.DISPLAY_PAGE.INPUT_INFOMATION;
  }

  /** Store the entered user info and advance to the confirm page. */
  openConfirmPage(userInfo) {
    console.log(userInfo);
    this.userInfo = userInfo;
    this.selectedPage = this.DISPLAY_PAGE.COMFIRM;
  }

  /** Go back to the input page (Cancel from the confirm page). */
  openInputInfomationPage() {
    this.selectedPage = this.DISPLAY_PAGE.INPUT_INFOMATION;
  }

  /** Show the completion page after a successful registration. */
  openCompletePage() {
    this.selectedPage = this.DISPLAY_PAGE.COMPLETE_MEMBER_JOIN;
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { RegistrationComponent } from '../../component/registration/registration.component';
import { AuthGuard } from '../../service/authGuard';
import { LoginComponent } from '../../component/login/login.component';
import { RegistrationResolver } from '../../component/registration/registration-resolver';
// Child routes of the lazily-loaded /myApp module.
const myRoutes = [
  {path: '', component: RegistrationComponent, resolve: {'registration': RegistrationResolver}},
  {path: 'member', component: RegistrationComponent, canActivate: [AuthGuard]},
  {path: 'login', component: LoginComponent},
  // Fixed: resolve key was misspelled 'reqistration', so resolved data would
  // land under a different key than the default route uses.
  {path: ':routerParam', component: RegistrationComponent, resolve: {'registration': RegistrationResolver}},
  // {path: 'registration', component: MainComponent},
  // {path: 'member', component: MemberInfomationComponent}
];

@NgModule({
  imports: [RouterModule.forChild(myRoutes)],
  exports: [RouterModule]
})
export class MyAppRoutingModule {}
<file_sep>import { Component, ChangeDetectorRef, OnInit, ChangeDetectionStrategy } from "../../../../node_modules/@angular/core";
import { Location, NgSwitchCase } from "../../../../node_modules/@angular/common";
import { Router, ActivatedRoute, NavigationEnd, ActivatedRouteSnapshot } from "../../../../node_modules/@angular/router";
import { routerNgProbeToken } from "../../../../node_modules/@angular/router/src/router_module";
import { AuthGuard } from "../../service/authGuard";
import { UserService } from "../../service/user.service";
@Component({
  selector: 'app-registration',
  templateUrl: 'registration.component.html',
  changeDetection: ChangeDetectionStrategy.OnPush,
})
// Page frame hosting the registration wizard: shows the title, navigation
// buttons and the logged-in state.
export class RegistrationComponent implements OnInit {
  title = '会員登録';
  currentPath = 'registration';
  // Logged-in user info (snapshot of UserService.loginUser).
  loginInfo;

  constructor(
    public _location: Location,
    private _router: Router,
    private _auth: AuthGuard,
    private _userService: UserService,
    private _cdRef: ChangeDetectorRef,
  ) {
    // _router.routeReuseStrategy.shouldReuseRoute = () => {
    //   return false;
    // };
    // _auth.getObservable().subscribe(loginInfo => {
    //   this.loginInfo = loginInfo;
    // });
    console.log(this._userService.loginUser);
    this.loginInfo = this._userService.loginUser;
  }

  ngOnInit(): void {
    // this._router.events.subscribe(event => {
    //   if (!(event instanceof NavigationEnd)) {
    //     return;
    //   }
    // });
  }

  /**
   * Extract the last URL segment from a path, with any query string
   * stripped (used by the template to highlight the active page).
   */
  currentUrl(path: string) {
    const currentPath = path.split('/').pop();
    console.log('test1', currentPath);
    if (currentPath.indexOf('?') !== -1) {
      return currentPath.substring(0, currentPath.indexOf('?'));
    }
    console.log('test', currentPath === 'registration');
    return currentPath;
    // this._cdRef.markForCheck();
  }

  moveMemberPanel() {
    this._router.navigate(['myApp/member']);
  }

  moveLoginPanel() {
    this._router.navigate(['myApp/login']);
  }

  /** Log out, then refresh the displayed login state. */
  logout() {
    this._userService.logout().then(() => {
      this.loginInfo = this._userService.loginUser;
      this._cdRef.detectChanges();
    });
  }
}
<file_sep>import * as mongoose from 'mongoose';
// Mongoose model storing a registration payload as a single string field.
export const RegiInfo = mongoose.model('regiInfo', new mongoose.Schema({
  regiInfo: {type: String}
}));
<file_sep>import { Component, OnInit, Input, Output, EventEmitter, ChangeDetectionStrategy } from '@angular/core';
import { UserInfomation } from '../../../model/userInfomation';
import { HttpClient, HttpParams } from '../../../../../node_modules/@angular/common/http';
import { RegistrationServiceComponent } from '../../../service/registration.service';
@Component({
  selector: 'app-confirm-infomation',
  templateUrl: './confirm-infomation.component.html',
  styleUrls: ['./confirm-infomation.component.css'],
  changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ConfirmInfomationComponent implements OnInit {
  /** User info entered on the previous wizard page. */
  @Input() userInfo: UserInfomation;
  /** Fired after the info has been persisted successfully. */
  @Output() onclickCompleteButton: EventEmitter<void> = new EventEmitter();
  /** Fired when the user wants to go back and edit. */
  @Output() onclickBack: EventEmitter<void> = new EventEmitter();

  constructor(
    private _registrationService: RegistrationServiceComponent
  ) { }

  ngOnInit() {
  }

  /** Persist the confirmed info, then advance the wizard. */
  complete() {
    const req = new UserInfomation(this.userInfo);
    this._registrationService.setUserInfomation(req).then(res => {
      console.log(res);
      this.onclickCompleteButton.emit();
    });
  }

  backInputInfomation() {
    this.onclickBack.emit();
  }
}
| 2c01a4276403b690e4e46507c991e2004a8300eb | [
"TypeScript",
"HTML"
] | 26 | TypeScript | wizemorikawa/registration-form | 695259506eebccaf0b3e9445d1ef1e6780b5cd50 | 5ce5fef16a9d25dc095dc8177e7b753f994a08dc |
refs/heads/master | <repo_name>t4d-classes/react_10142020<file_sep>/redux-hooks/src/utils.js
// Display helper: NaN renders as an empty string; real numbers pass through.
export const nanToValue = (x) => (isNaN(x) ? '' : x);
// Inverse of nanToValue: an empty input string means "no number" (NaN);
// anything else is coerced with Number().
export const valueToNaN = (x) => {
  if (x.length === 0) {
    return NaN;
  } else {
    return Number(x);
  };
}<file_sep>/demo-app/src/functional/ShowArchive.js
import React from "react";
// Presentational checkbox that toggles display of archived colors;
// the onSetShowArchive prop is renamed locally to setShowArchive.
export function ShowArchive({ showArchive, onSetShowArchive: setShowArchive }) {
return (
<div>
<input
type="checkbox"
checked={showArchive}
onChange={() => setShowArchive(!showArchive)}
/>
Show Archive
</div>
);
}
<file_sep>/demo-app/exercises/assessment.md
# Assessment
Implement the code below with both class-based and functional (with hooks) components.
1. Using the "create-react-app" tool, create a new React application named "color-tool".
2. The "color-tool" application should provide the following features:
- Display a header with the following text "My Favorite Colors".
- Below the header, display a list of colors, each color list item should display a name and hexcode.
- Below the list of colors, display a form with two fields: name and hexcode. Provide a button such that when the button is clicked, the data entered into the fields is added to the list of colors, and the fields are cleared.
3. The header, list of colors and form should not be in the same component. But all three can be called from the same parent component.
4. Ensure it works!
<file_sep>/demo-app/exercises/exercise-2.md
# Exercise 2
1. Add a new button to each list item with a label of "Edit".
2. When the "Edit" button is clicked, display two input fields for the name and hexcode of the color in the list item.
3. When the input fields are displayed, the "Edit" and "Archive" buttons should not be shown. Instead, display two buttons "Save" and "Cancel". The save button saves the changes, the cancel button cancels the changes.
4. After saving or canceling, remove the input controls.
5. Ensure it works!
<file_sep>/demo-app/src/class/ColorList.js
import React, { Component } from "react";
// Presentational list of colors; each row shows name/hexcode and an
// Archive button that reports the color's id via props.onArchive.
export class ColorList extends Component {
render() {
return (
<ul>
{this.props.colors.map((color) => (
<li key={color.id}>
{color.name} {color.hexcode}
<button
type="button"
onClick={() => this.props.onArchive(color.id)}
>
Archive
</button>
</li>
))}
</ul>
);
}
}
ColorList.defaultProps = {
colors: [],
};
<file_sep>/demo-app/src/functional/ColorListContainer.js
import React from "react";
import { ShowArchive } from "./ShowArchive";
import { ColorList } from "./ColorList";
import { useColorToolStoreContext } from "../contexts/colorToolStoreContext";
// Container: wires the color-tool store context into the presentational
// ShowArchive toggle and ColorList table.
export function ColorListContainer() {
  const {
    colors,
    editColorId,
    showArchive,
    editColor,
    archiveColor,
    saveColor,
    cancelColor,
    setShowArchive,
  } = useColorToolStoreContext();

  return (
    <>
      <ShowArchive
        showArchive={showArchive}
        onSetShowArchive={setShowArchive}
      />
      {/* Archived colors are hidden unless the toggle is on. */}
      <ColorList
        colors={colors.filter((c) => !c.archive || showArchive)}
        editColorId={editColorId}
        onEdit={editColor}
        onArchive={archiveColor}
        onSave={saveColor}
        onCancel={cancelColor}
      />
    </>
  );
}
<file_sep>/perf-car-tool-app-starter/src/models/CarsSort.ts
import { CarKeys } from './Car';
// Sort-direction constants used by the CarsSort type below.
export const SORT_ASC = 'asc';
export const SORT_DESC = 'desc';
export type CarsSort = {
col: CarKeys,
dir: 'asc' | 'desc',
}
export type ColHeaders = { id: number; col: CarKeys; caption: string }[];<file_sep>/demo-app/src/hocs/withColorTool.js
import React, { Component } from "react";
// HOC: wraps a presentational component with color-tool state (colors list,
// archive toggle) and its mutation handlers, loading colors on mount.
export const withColorTool = (PresentationalComponent) => {
return class ColorToolContainer extends Component {
state = { colors: [], showArchive: false };
// componentDidMount() {
// const that = this;
// return fetch("http://localhost:3060/colors")
// .then(function resJSON(res) {
// return res.json();
// })
// .then(function updateColor(colors) {
// that.setState({ colors });
// });
// }
// componentDidMount() {
// return fetch("http://localhost:3060/colors")
// .then((res) => {
// return res.json();
// })
// .then((colors) => {
// this.setState({ colors });
// });
// }
async componentDidMount() {
const res = await fetch("http://localhost:3060/colors");
const colors = await res.json();
this.setState({ colors });
}
addColor = (color) => {
this.setState({
colors: [
...this.state.colors,
{
...color,
archive: false,
id: Math.max(...this.state.colors.map((c) => c.id), 0) + 1,
},
],
});
};
toggleShowArchive = () => {
this.setState({
showArchive: !this.state.showArchive,
});
};
archiveColor = (colorId) => {
const colorIndex = this.state.colors.findIndex((c) => c.id === colorId);
const newColors = [...this.state.colors];
newColors[colorIndex] = { ...newColors[colorIndex], archive: true };
this.setState({ colors: newColors });
};
render() {
return (
<PresentationalComponent
colors={this.state.colors.filter(
(c) => !c.archive || this.state.showArchive
)}
onAddColor={this.addColor}
showArchive={this.state.showArchive}
onArchiveColor={this.archiveColor}
onToggleShowArchive={this.toggleShowArchive}
/>
);
}
};
};
<file_sep>/redux-hooks/src/containers/ColorToolContainer.js
import React, { useMemo, useEffect } from 'react';
import { bindActionCreators } from 'redux';
import { useSelector, useDispatch } from 'react-redux';
import {
refreshColors, addColor, deleteColor,
} from '../actions/colorToolActions';
import { ColorTool } from '../components/ColorTool';
// Redux container: selects the whole store state, binds the color-tool
// action creators, refreshes colors on mount, and renders ColorTool.
export const ColorToolContainer = () => {
const stateProps = useSelector(state => state);
const dispatch = useDispatch();
const dispatchProps = useMemo(() => bindActionCreators({
onRefreshColors: refreshColors,
onAddColor: addColor,
onDeleteColor: deleteColor,
}, dispatch), [dispatch]);
useEffect(() => {
dispatchProps.onRefreshColors();
}, [dispatchProps]);
return <ColorTool {...dispatchProps} {...stateProps} />;
};<file_sep>/redux-hooks/src/components/CarViewRow.js
import React from 'react';
import { carPropTypes } from '../propTypes/cars';
import { nanToValue } from '../utils';
// Read-only table row for one car, with Edit/Delete buttons that report
// the car's id to the parent via callbacks.
export const CarViewRow = ({
car,
onEditCar,
onDeleteCar,
}) => {
return (
<tr>
<td>{car.id}</td>
<td className="text">{car.make}</td>
<td className="text">{car.model}</td>
<td>{nanToValue(car.year)}</td>
<td>{car.color}</td>
<td className="number">{nanToValue(car.price)}</td>
<td>
<button type="button"
onClick={() => onEditCar(car.id)}>Edit</button>
<button type="button"
onClick={() => onDeleteCar(car.id)}>Delete</button>
</td>
</tr>
);
};
CarViewRow.propTypes = {
car: carPropTypes,
};<file_sep>/demo-app/exercises/exercise-3.md
# Exercise 3
1. Update the class version of Color Tool to use download the colors from the REST API on the initial render.
http://localhost:3060/colors
2. Ensure it works.
<file_sep>/chat-app-starter/client-app-js/src/App.js
import React, { useState, useCallback } from "react";
import { useChatAppStore } from "./useChatAppStore";
import "./App.css";
// Chat client root: renders the message list and a controlled input form
// that clears itself after sending a message.
export function App() {
const { messages, sendMessage } = useChatAppStore();
const [messageInput, setMessageInput] = useState("");
const submitMessageForm = useCallback(
function submitMessageForm(e) {
e.preventDefault();
sendMessage(messageInput);
setMessageInput("");
},
[sendMessage, messageInput]
);
return (
<>
<ul>
{messages.map((message) => (
<li key={message.id}>{message.content}</li>
))}
</ul>
<form onSubmit={submitMessageForm}>
<input
autoComplete="off"
value={messageInput}
onChange={(e) => setMessageInput(e.target.value)}
/>
<button>Send</button>
</form>
</>
);
}
<file_sep>/redux-hooks/src/actions/calcToolActions.js
// Calc-tool action types and their creators; the arithmetic actions all
// carry the operand as payload.value.
export const ADD_ACTION = 'ADD';
export const SUBTRACT_ACTION = 'SUBTRACT';
export const MULTIPLY_ACTION = 'MULTIPLY';
export const DIVIDE_ACTION = 'DIVIDE';
export const CLEAR_ACTION = 'CLEAR';
export const DELETE_HISTORY_ENTRY_ACTION = 'DELETE_HISTORY_ENTRY';
export const createAddAction = value => ({ type: ADD_ACTION, payload: { value } });
export const createSubtractAction = value => ({ type: SUBTRACT_ACTION, payload: { value } });
export const createMultiplyAction = value => ({ type: MULTIPLY_ACTION, payload: { value } });
export const createDivideAction = value => ({ type: DIVIDE_ACTION, payload: { value } });
export const createClearAction = () => ({ type: CLEAR_ACTION });
export const createDeleteHistoryEntryAction = historyEntryId =>
({
type: DELETE_HISTORY_ENTRY_ACTION,
payload: { historyEntryId },
});<file_sep>/demo-app/src/functional/ColorFormContainer.js
import React from "react";
import { ColorForm } from "./ColorForm";
import { useColorToolStoreContext } from "../contexts/colorToolStoreContext";
// Container: feeds the store's addColor handler into the ColorForm.
export function ColorFormContainer() {
const { addColor } = useColorToolStoreContext();
return <ColorForm buttonText="Add Color" onSubmitColor={addColor} />;
}
<file_sep>/demo-app/src/hooks/useColorTool.js
import { useState, useDebugValue, useCallback } from "react";
import { useList } from "./useList";
// Custom hook bundling color-tool state (colors via useList, edit id,
// archive toggle) with add/edit/save/cancel/archive operations.
export function useColorTool() {
const [colors, appendColor, replaceColor] = useList(
useCallback(function getColors() {
return fetch("http://localhost:3060/colors").then((res) => {
return res.json();
});
}, [])
);
const [editColorId, setEditColorId] = useState(-1);
useDebugValue("colors: " + JSON.stringify(colors));
const [showArchive, setShowArchive] = useState(false);
useDebugValue("showArchive: " + showArchive);
const addColor = (color) => {
appendColor({
...color,
archive: false,
});
};
const saveColor = (color) => {
replaceColor(color);
setEditColorId(-1);
};
const cancelColor = () => setEditColorId(-1);
const archiveColor = (colorId) => {
const colorIndex = colors.findIndex((c) => c.id === colorId);
replaceColor({ ...colors[colorIndex], archive: true });
};
return {
colors,
editColorId,
showArchive,
addColor,
editColor: setEditColorId,
archiveColor,
saveColor,
cancelColor,
setShowArchive,
};
}
<file_sep>/demo-app/src/functional/ColorTool.js
import React from "react";
import { ColorListContainer } from "./ColorListContainer";
import { ColorFormContainer } from "./ColorFormContainer";
export function ColorTool() {
return (
<>
<ColorListContainer />
<ColorFormContainer />
</>
);
}
<file_sep>/redux-hooks/src/components/ItemList.js
import React from 'react';
import PropTypes from 'prop-types';
export const ItemList = ({ items, keyFn, contentFn, actionButtonText, onAction }) => {
return (
<ul>
{items.map(item => <li key={keyFn(item)}>
{contentFn(item)}
{onAction && <button type="button" onClick={() => onAction(keyFn(item))}>
{actionButtonText}
</button>}
</li>)}
</ul>
);
};
ItemList.defaultProps = {
items: [],
keyFn: item => item.id,
};
ItemList.propTypes = {
items: PropTypes.arrayOf(PropTypes.shape({
id: PropTypes.oneOfType([ PropTypes.string, PropTypes.number ]).isRequired,
})),
keyFn: PropTypes.func,
contentFn: PropTypes.func.isRequired,
actionButtonText: PropTypes.string,
onAction: PropTypes.func,
};
<file_sep>/redux-hooks/src/actions/carToolActions.js
export const REFRESH_CARS_REQUEST_ACTION = 'REFRESH_CARS_REQUEST';
export const REFRESH_CARS_DONE_ACTION = 'REFRESH_CARS_DONE';
export const ADD_CAR_REQUEST_ACTION = 'ADD_CAR_REQUEST';
export const SAVE_CAR_REQUEST_ACTION = 'SAVE_CAR_REQUEST';
export const DELETE_CAR_REQUEST_ACTION = 'DELETE_CAR_REQUEST';
export const EDIT_CAR_ACTION = 'EDIT_CAR';
export const CANCEL_CAR_ACTION = 'CANCEL_CAR';
export const createRefreshCarsRequestAction = () => ({
type: REFRESH_CARS_REQUEST_ACTION,
});
export const createRefreshCarsDoneAction = cars => ({
type: REFRESH_CARS_DONE_ACTION,
cars,
});
export const refreshCars = () => {
return dispatch => {
dispatch(createRefreshCarsRequestAction());
return fetch('http://localhost:3060/cars')
.then(res => res.json())
.then(cars => dispatch(createRefreshCarsDoneAction(cars)));
};
};
export const createAddCarRequestAction = car =>
({ type: ADD_CAR_REQUEST_ACTION, car });
export const addCar = car => {
return dispatch => {
dispatch(createAddCarRequestAction(car));
return fetch('http://localhost:3060/cars', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(car),
})
.then(() => dispatch(refreshCars()));
};
};
export const createSaveCarRequestAction = car =>
({ type: SAVE_CAR_REQUEST_ACTION, car });
export const saveCar = car => {
return dispatch => {
dispatch(createSaveCarRequestAction(car));
return fetch('http://localhost:3060/cars/' + encodeURIComponent(car.id), {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(car),
})
.then(() => dispatch(refreshCars()));
};
}
export const createDeleteCarRequestAction = carId =>
({ type: DELETE_CAR_REQUEST_ACTION, carId });
export const deleteCar = carId => {
return dispatch => {
dispatch(createDeleteCarRequestAction(carId));
return fetch('http://localhost:3060/cars/' + encodeURIComponent(carId), {
method: 'DELETE',
})
.then(() => dispatch(refreshCars()));
};
};
export const createEditCarAction = carId =>
({ type: EDIT_CAR_ACTION, carId });
export const createCancelCarAction = () =>
({ type: CANCEL_CAR_ACTION });<file_sep>/redux-hooks/src/components/Position.js
import React from 'react';
export const Position = () => {
return (
<div className="container">
<div>A</div>
<div>B</div>
<div>C</div>
<div>D</div>
</div>
);
};<file_sep>/redux-hooks/src/containers/CarToolContainer.js
import React, { useMemo, useEffect } from 'react';
import { bindActionCreators } from 'redux';
import { useSelector, useDispatch } from 'react-redux';
import {
addCar, saveCar, deleteCar, createEditCarAction,
createCancelCarAction, refreshCars,
} from '../actions/carToolActions';
import { CarTool } from '../components/CarTool';
import { LoadingModal } from '../components/LoadingModal';
export const CarToolContainer = () => {
const stateProps = useSelector(state => state);
const dispatch = useDispatch();
const dispatchProps = useMemo(() => bindActionCreators({
onRefreshCars: refreshCars,
onAddCar: addCar,
onSaveCar: saveCar,
onDeleteCar: deleteCar,
onEditCar: createEditCarAction,
onCancelCar: createCancelCarAction,
}, dispatch), [ dispatch ]);
useEffect(() => {
dispatchProps.onRefreshCars();
}, [ dispatchProps ]);
return <>
<CarTool {...dispatchProps} {...stateProps} />
<LoadingModal isLoading={stateProps.isLoading} />
</>;
};<file_sep>/demo-app/exercises/exercise-1.md
# Exercise 1.
1. In the Color Tool application, add a button to each item in the list. The button should have the label "Archive".
2. When the archive button is clicked, the color should disappear from the list, but should still be in the array colors.
3. Add a check box to top of the list with a label of "Show Archived". When the button is clicked, show the archived items in a slightly lighter font color in the list along with the non-archived items.
4. Ensure it works!
<file_sep>/redux-hooks/src/components/FormControls.js
import React from 'react';
import { NumberInput } from './NumberInput';
export const StringFormControl = (props) => {
return (
<div>
<label htmlFor={props.name + '-input'}>{props.caption}</label>
<input type="text" id={props.name + '-input'} name={props.name}
value={props.value} onChange={props.onChange} />
</div>
);
}
export const NumberFormControl = (props) => {
return (
<div>
<label htmlFor={props.name + '-input'}>{props.caption}</label>
<NumberInput name={props.name}
value={props.value} onChange={props.onChange} />
</div>
);
}<file_sep>/demo-app/src/functional/ColorList.js
import React from "react";
import { ColorViewListItem } from "./ColorViewListItem";
import { ColorEditListItem } from "./ColorEditListItem";
export function ColorList({
colors,
editColorId,
onEdit: edit,
onArchive: archive,
onSave: save,
onCancel: cancel,
}) {
return (
<ul>
{colors.map((color) =>
color.id === editColorId ? (
<ColorEditListItem
key={color.id}
color={color}
onSave={save}
onCancel={cancel}
/>
) : (
<ColorViewListItem
key={color.id}
color={color}
onEdit={edit}
onArchive={archive}
/>
)
)}
</ul>
);
}
ColorList.defaultProps = {
colors: [],
};
<file_sep>/demo-app/src/App.js
import React from "react";
import { ColorToolStoreProvider } from "./contexts/colorToolStoreContext";
import { ColorTool as ColorToolFunc } from "./functional/ColorTool";
import { ColorTool as ColorToolClass } from "./class/ColorTool";
function App() {
return (
<div className="App">
<div>
<h2>Functional</h2>
<ColorToolStoreProvider>
<ColorToolFunc />
</ColorToolStoreProvider>
</div>
<div>
<h2>Class-based</h2>
<ColorToolClass />
</div>
</div>
);
}
export default App;
<file_sep>/demo-app/src/class/ColorTool.js
import React, { Component } from "react";
import { ColorList } from "./ColorList";
import { ColorForm } from "./ColorForm";
import { withColorTool } from "../hocs/withColorTool";
class BaseColorTool extends Component {
render() {
return (
<>
<div>
<input
type="checkbox"
checked={this.props.showArchive}
onChange={this.props.onToggleShowArchive}
/>
Show Archive
</div>
<ColorList
colors={this.props.colors}
onArchive={this.props.onArchiveColor}
/>
<ColorForm
buttonText="Add Color"
onSubmitColor={this.props.onAddColor}
/>
</>
);
}
}
export const ColorTool = withColorTool(BaseColorTool);
<file_sep>/redux-hooks/src/reducers/colorToolReducers.js
import { combineReducers } from 'redux';
import { REFRESH_COLORS_DONE_ACTION } from '../actions/colorToolActions';
export const colorsReducer = (colors = [], action) => {
if (action.type === REFRESH_COLORS_DONE_ACTION) {
return action.colors;
}
return colors;
};
export const isLoadingReducer = (isLoading = false, action) => {
if (action.type.endsWith('_REQUEST')) {
return true;
}
if (action.type.endsWith('_DONE')) {
return false;
}
return isLoading;
};
export const colorToolReducer = combineReducers({
isLoading: isLoadingReducer,
colors: colorsReducer,
});<file_sep>/redux-hooks/src/index.js
import React from "react";
import ReactDOM from "react-dom";
import { Provider } from "react-redux";
import { BrowserRouter as Router, Switch, Route, Link } from "react-router-dom";
import { colorToolStore } from "./stores/colorToolStore";
import { carToolStore } from "./stores/carToolStore";
import { calcToolStore } from "./stores/calcToolStore";
import { ColorToolContainer } from "./containers/ColorToolContainer";
import { CarToolContainer } from "./containers/CarToolContainer";
import { CalcToolContainer } from "./containers/CalcToolContainer";
import { Layout } from "./components/Layout";
ReactDOM.render(
<Router>
<Layout>
<header id="page-header">
<h1>App Tools</h1>
</header>
<nav id="menu">
<ul>
<li>
<Link to="/">Home</Link>
</li>
<li>
<Link to="/color-tool">Color Tool</Link>
</li>
<li>
<Link to="/car-tool">Car Tool</Link>
</li>
<li>
<Link to="/calc-tool">Calc Tool</Link>
</li>
</ul>
</nav>
<main id="content">
<Switch>
<Route path="/" exact>
<div>Home</div>
</Route>
<Route path="/color-tool">
<Provider store={colorToolStore}>
<ColorToolContainer />
</Provider>
</Route>
<Route path="/car-tool">
<Provider store={carToolStore}>
<CarToolContainer />
</Provider>
</Route>
<Route path="/calc-tool">
<Provider store={calcToolStore}>
<CalcToolContainer />
</Provider>
</Route>
</Switch>
</main>
<aside id="sidebar">Sidebar (No Content)</aside>
<footer id="page-footer">
<small>A Cool Company, Inc.</small>
</footer>
</Layout>
</Router>,
document.querySelector("#root")
);
<file_sep>/demo-app/src/class/ColorForm.js
import React, { Component } from "react";
// extending provides us stateful logic
// and because of the instance we will have
// life cycle methods
export class ColorForm extends Component {
// state = {
// name: "",
// hexcode: "",
// };
constructor(props) {
super(props);
// set the state on the component
// instance
this.state = {
name: "",
hexcode: "",
};
// most people really have no idea what's going on here...
this.change = this.change.bind(this);
this.submitColor = this.submitColor.bind(this);
}
change(e) {
// dive into the topic of the value of "this"
this.setState({ [e.target.name]: e.target.value });
}
// is not valid JavaScript, but the arrow function means lexical scope,
// so the reasoning goes that the function is defined in the lexical scope
// of the class, so the value of this should always be the instance of that class
// change = (e) => {
// // dive into the topic of the value of "this"
// this.setState({ name: e.target.value });
// }
submitColor() {
this.props.onSubmitColor({ ...this.state });
this.setState({
name: "",
hexcode: "",
});
}
render() {
console.log(this.state);
return (
<form>
<div>
<label htmlFor="color-name-input">Color Name:</label>
<input
type="text"
id="color-name-input"
name="name"
value={this.state.name}
onChange={this.change}
/>
</div>
<div>
<label htmlFor="color-hexcode-input">Color Hexcode:</label>
<input
type="text"
id="color-hexcode-input"
name="hexcode"
value={this.state.hexcode}
onChange={this.change}
/>
</div>
<button type="button" onClick={this.submitColor}>
{this.props.buttonText}
</button>
</form>
);
}
}
ColorForm.defaultProps = {
buttonText: "Submit Color",
};
<file_sep>/demo-app/exercises/exercise-6.md
# Exercise 6
1. Upgrade the Color Tool app, to utilize two container components. Wrap one container component around the Color List, and one around the Color Form. Each container component should only use the data and actions which are needed for the particular presentational subtree under it.
2. Remove the `useColorToolStoreContext` hook from the `ColorTool` component.
3. Ensure it works!
<file_sep>/redux-hooks/src/components/CarTable.js
import React from 'react';
import { carsPropTypes } from '../propTypes/cars';
import { CarViewRow } from './CarViewRow';
import { CarEditRow } from './CarEditRow';
export const CarTable = ({
cars, editCarId,
onEditCar: editCar,
onDeleteCar: deleteCar,
onSaveCar: saveCar,
onCancelCar: cancelCar,
}) => {
return (
<table>
<thead>
<tr>
<th>Id</th>
<th>Make</th>
<th>Model</th>
<th>Year</th>
<th>Color</th>
<th>Price</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{!cars.length
? <tr><td colSpan="6">There are no cars.</td></tr>
: cars.map(car => car.id === editCarId
? <CarEditRow key={car.id} car={car}
onSaveCar={saveCar} onCancelCar={cancelCar} />
: <CarViewRow key={car.id} car={car}
onEditCar={editCar} onDeleteCar={deleteCar} />)}
</tbody>
</table>
);
};
CarTable.defaultProps = {
cars: [],
};
CarTable.propTypes = {
cars: carsPropTypes,
};<file_sep>/redux-hooks/src/components/ColorTool.js
import React from 'react';
import { ToolHeader } from './ToolHeader';
import { ItemList } from './ItemList';
import { ColorForm } from './ColorForm';
import './ColorTool.css';
export const ColorTool = ({
colors,
onAddColor: addColor,
onDeleteColor: deleteColor,
}) => {
return (
<div className="color-tool">
<ToolHeader headerText={ 'Color Tool' } />
<ItemList items={colors}
contentFn={color => color.name + ' ' + color.hexcode}
actionButtonText="X" onAction={deleteColor} />
<ColorForm buttonText="Add Color" onSubmitColor={addColor} />
</div>
);
};
<file_sep>/redux-hooks/src/components/CarTool.js
import React from 'react';
import { ToolHeader } from './ToolHeader';
import { CarTable } from './CarTable';
import { CarForm } from './CarForm';
import "./CarTool.css";
export const CarTool = ({
cars, editCarId,
onAddCar: addCar, onSaveCar: saveCar,
onDeleteCar: deleteCar, onEditCar: editCar,
onCancelCar: cancelCar,
}) => {
return (
<div className="car-tool">
<ToolHeader headerText="Car Tool" />
<CarTable cars={cars} editCarId={editCarId}
onEditCar={editCar} onDeleteCar={deleteCar}
onSaveCar={saveCar} onCancelCar={cancelCar} />
<CarForm buttonText="Add Car" onSubmitCar={addCar} />
</div>
);
};<file_sep>/demo-app/src/hooks/useList.js
import { useState, useEffect } from "react";
export const useList = (getInitialItems) => {
const [items, setItems] = useState([]);
useEffect(
function useListMount() {
getInitialItems().then((items) => setItems(items));
},
[getInitialItems]
);
const appendItem = (item) => {
setItems([
...items,
{
...item,
id: Math.max(...items.map((c) => c.id), 0) + 1,
},
]);
};
const replaceItem = (item) => {
const itemIndex = items.findIndex((c) => c.id === item.id);
const newItems = [...items];
newItems[itemIndex] = item;
setItems(newItems);
};
const removeItem = (itemId) => {
setItems(items.filter((i) => i.id !== itemId));
};
return [items, appendItem, replaceItem, removeItem];
};
<file_sep>/redux-hooks/src/reducers/carToolReducers.js
import { combineReducers } from "redux";
import {
EDIT_CAR_ACTION, CANCEL_CAR_ACTION, REFRESH_CARS_DONE_ACTION,
} from '../actions/carToolActions';
export const carsReducer = (cars = [], action) => {
if (action.type === REFRESH_CARS_DONE_ACTION) {
return action.cars;
}
return cars;
};
export const editCarIdReducer = (editCarId = -1, action) => {
if (action.type === EDIT_CAR_ACTION) {
return action.carId;
}
if ([
CANCEL_CAR_ACTION,
REFRESH_CARS_DONE_ACTION,
].includes(action.type)) {
return -1;
}
return editCarId;
};
export const isLoadingReducer = (isLoading = false, action) => {
if (action.type.endsWith('_REQUEST')) {
return true;
}
if (action.type.endsWith('_DONE')) {
return false;
}
return isLoading;
};
export const carToolReducer = combineReducers({
isLoading: isLoadingReducer,
cars: carsReducer,
editCarId: editCarIdReducer,
}); | 7409317c19e460d1ad7fbb997c9974df2c044631 | [
"JavaScript",
"TypeScript",
"Markdown"
] | 34 | JavaScript | t4d-classes/react_10142020 | 15ec9f791dbb758383b3d708969bbe90acf4dc18 | 47fbd976f72991b8462f45c2e561537ff4807653 |
refs/heads/master | <file_sep># -*-coding:utf-8-*-
"""
Author: <NAME>
"""
import h5py
import sys
import argparse
from PIL import Image
import numpy
def get_box_data(index, hdf5_data):
meta_data = dict()
meta_data['height'] = []
meta_data['label'] = []
meta_data['left'] = []
meta_data['top'] = []
meta_data['width'] = []
def print_attrs(name, obj):
vals = []
if obj.shape[0] == 1:
vals.append(obj[0][0])
else:
for k in range(obj.shape[0]):
vals.append(int(hdf5_data[obj[k][0]][0][0]))
meta_data[name] = vals
box = hdf5_data['/digitStruct/bbox'][index]
hdf5_data[box[0]].visititems(print_attrs)
return meta_data
def get_name(index, hdf5_data):
name = hdf5_data['digitStruct/name']
return ''.join([chr(v[0]) for v in hdf5_data[name[index][0]].value])
def read_mat(file_name, begin, end):
f = h5py.File(file_name, 'r')
names = list()
boxes = list()
for i in range(begin, end):
pic = get_name(i, f)
names.append(pic)
box = get_box_data(i, f)
boxes.append(box)
f.close()
return names, boxes
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument("--dataset_dir", default='../data/SVHN/test')
parser.add_argument("--dataset_txt",
default='../data/SVHN/quick_test_data.txt')
flags = parser.parse_args()
digitStruct = flags.dataset_dir + "/digitStruct.mat"
dataset = h5py.File(digitStruct, 'r')
name = dataset['digitStruct']['name']
data_size = name.size
with open(flags.dataset_txt, "w") as f:
names, boxes = read_mat(digitStruct, 0, data_size)
for i in range(data_size):
name = flags.dataset_dir + "/" + names[i]
class_ids = boxes[i]["label"]
image = Image.open(name)
image_width, image_height = image.size[0], image.size[1]
image = numpy.array(image)
image_pad = image
if image_width > image_height:
padding_size = (image_width - image_height) // 2
image_pad = numpy.pad(image, ((padding_size, padding_size), (0, 0), (0, 0)), "constant")
if image_height > image_width:
padding_size = (image_height - image_width) // 2
image_pad = numpy.pad(image, ((0, 0), (padding_size, padding_size), (0, 0)), "constant")
image = Image.fromarray(image_pad)
file_name = "../data/SVHN/PaddingTest/" + str(i + 1) + ".png"
image.save(file_name)
f.write(file_name + " ")
for j in range(len(class_ids)):
left = boxes[i]["left"][j]
top = boxes[i]["top"][j]
if image_width > image_height:
top += padding_size
if image_height > image_width:
left += padding_size
f.write(str(left) + " ")
f.write(str(top) + " ")
f.write(str(left + boxes[i]["width"][j]) + " ")
f.write(str(top + boxes[i]["height"][j]) + " ")
if j == len(class_ids) - 1:
f.write(str(int(class_ids[j]) - 1) + "\n")
else:
f.write(str(int(class_ids[j]) - 1) + " ")
if __name__ == "__main__":
main(sys.argv[1:])
<file_sep>#! /usr/bin/env python
# coding=utf-8
import tensorflow as tf
from core import utils, yolov3
from core.dataset import dataset, Parser
from PIL import Image
sess = tf.Session()
IMAGE_H, IMAGE_W = 416, 416
BATCH_SIZE = 1
SHUFFLE_SIZE = 200
CLASSES = utils.read_coco_names("data/SVHN/SVHN.names")
ANCHORS = utils.get_anchors("data/SVHN/SVHN_anchors.txt", IMAGE_H, IMAGE_W)
NUM_CLASSES = len(CLASSES)
test_tfrecord = "data/SVHN/tfrecords/quick_test_data.tfrecords"
parser = Parser(IMAGE_H, IMAGE_W, ANCHORS, NUM_CLASSES)
testset = dataset(parser, test_tfrecord, BATCH_SIZE, shuffle=None)
is_training = tf.placeholder(tf.bool)
example = testset.get_next()
images, *y_true = example
model = yolov3.yolov3(NUM_CLASSES, ANCHORS)
with tf.variable_scope('yolov3'):
pred_feature_map = model.forward(images, is_training=is_training)
loss = model.compute_loss(pred_feature_map, y_true)
y_pred = model.predict(pred_feature_map)
saver = tf.train.Saver()
saver.restore(sess, "data/SVHN/checkpoint5/yolov3.ckpt-4000")
acc = 0
STEPS = 13068
for step in range(STEPS):
run_items = sess.run([y_pred, y_true], feed_dict={is_training: False})
if step == 5:
acc = utils.compute_accuracy(run_items[0], run_items[1])
y_pred_data = run_items[0]
pred_boxes = y_pred_data[0][0]
pred_confs = y_pred_data[1][0]
pred_probs = y_pred_data[2][0]
pred_boxes, pred_scores, pred_labels = utils.cpu_nms(pred_boxes, pred_confs * pred_probs, NUM_CLASSES,
score_thresh=0.3, iou_thresh=0.5)
img = Image.open("data/SVHN/PaddingTest/" + str(step + 1) + ".png")
image = utils.draw_boxes(img, pred_boxes, pred_scores, pred_labels, CLASSES, [IMAGE_H, IMAGE_W], show=False)
# if acc == 1:
# image.save("data/SVHN/RightRecognition/" + str(step + 1) + ".png")
# else:
# image.save("data/SVHN/WrongRecognition/" + str(step + 1) + ".png")
print("=> STEP %10d [VALID]:\tacc:%7.4f" % (step+1, acc))
acc += acc * BATCH_SIZE
acc /= 13068
print("精度为%7.4f" % acc)
| 797eaf099c87e24d4e5c9fcf6d75209da4e99468 | [
"Python"
] | 2 | Python | InvokerLiu/tensorflow-yolov3 | 66ce04858a5a5f7c1aee8b93f8b7071df29a3837 | 3780e612654be24b94595f2ec43e773cb915a9c9 |
refs/heads/master | <file_sep>
/* -----------------------------------------------------------
PROGRAM gomoku 1.0
----------------------------------------------------------- */
/* -----------------------------------------------------------
<NAME> (C) 2007
Zezwalam na dowolna modyfikacje tego kodu.
Prosze tylko nie usuwanie tego naglowka
i o dopisanie, ze dokonano zmian bez mojej wiedzy.
----------------------------------------------------------- */
/* -----------------------------------------------------------
Program napisany w ramach kursu:
-----------------------------------------
METODY I ALGORYTMY SZTUCZNEJ INTELIGENCJI
-----------------------------------------
Prowadzacy: dr inz. <NAME>
-----------------------------------------
Politechnika Wroclawska
Wydzial Elektroniki
----------------------------------------------------------- */
// -----------------------------------------------------------
// --- BEGIN: INCLUDES
#include <QApplication>
#include <QStatusBar>
#include <QString>
#include <QVBoxLayout>
#include <QHBoxLayout>
#include "gomoku.hpp"
// ----- END: INCLUDES
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class MyButton
// Board-cell button: remembers its (i,j) board coordinates and the
// logical state of the cell it represents.
MyButton::MyButton(int i, int j, QString str,
                   QWidget *wRodzic): QPushButton(wRodzic)
{
  setText(str);        // caption shown on the button
  _stan = ZADEN;       // a fresh cell is empty
  _i = i;
  _j = j;
  // forward the parameterless clicked() through sendClicked(), which
  // re-emits it together with this button's coordinates
  connect(this, SIGNAL(clicked()), this, SLOT(sendClicked()));
}
// Slot: translates the parameterless clicked() signal into
// isClicked(i, j) so listeners learn which board cell was pressed.
void MyButton::sendClicked()
{
  emit isClicked(_i, _j);
}
// Setter: record the logical cell state (ZADEN / KRZYZYK / KOLKO).
// Only the stored state changes; caption and palette are untouched.
void MyButton::setStan(int stan)
{
  _stan = stan;
}
// Getter: current logical state of this board cell.
int MyButton::stan() const
{
  return _stan;
}
// ----- END: class MyButton
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class Kanwa
// Main game canvas: builds the (SIZE x SIZE) button board with
// coordinate rulers, the options panel (turn indicator, force-mark
// checkboxes, move counter, "Komputer" and "Nowa gra" buttons) and
// wires up all signal/slot connections.
Kanwa::Kanwa(QWidget *wRodzic): QWidget(wRodzic)
{
  // initialise attributes: X opens the game, no winner yet
  ruch = KRZYZYK;
  endgame = false;
  // Per-mark palettes: x = yellow (X cells), o = white (O cells),
  // n = gray (empty cells), w = black-with-white-text (used outside
  // this constructor -- presumably to highlight the winning line;
  // confirm against endGame()).
  // NOTE(review): these four QPalette objects are new'ed and never
  // deleted -- QPalette has no QObject parent, so they live until
  // process exit; harmless for a single Kanwa, verify otherwise.
  x = new QPalette (QBrush(Qt::yellow), // windowText
		    QBrush(Qt::yellow), // button
		    QBrush(Qt::yellow), // light
		    QBrush(Qt::yellow), // dark
		    QBrush(Qt::yellow), // mid
		    QBrush(Qt::black),  // text
		    QBrush(Qt::yellow), // bright_text
		    QBrush(Qt::yellow), // base
		    QBrush(Qt::yellow)); // window
  o = new QPalette (QBrush(Qt::white), // windowText
		    QBrush(Qt::white), // button
		    QBrush(Qt::white), // light
		    QBrush(Qt::white), // dark
		    QBrush(Qt::white), // mid
		    QBrush(Qt::black), // text
		    QBrush(Qt::white), // bright_text
		    QBrush(Qt::white), // base
		    QBrush(Qt::white)); // window
  n = new QPalette (QBrush(Qt::gray), // windowText
		    QBrush(Qt::gray), // button
		    QBrush(Qt::gray), // light
		    QBrush(Qt::gray), // dark
		    QBrush(Qt::gray), // mid
		    QBrush(Qt::black), // text
		    QBrush(Qt::gray), // bright_text
		    QBrush(Qt::gray), // base
		    QBrush(Qt::gray)); // window
  w = new QPalette (QBrush(Qt::black), // windowText
		    QBrush(Qt::black), // button
		    QBrush(Qt::black), // light
		    QBrush(Qt::black), // dark
		    QBrush(Qt::black), // mid
		    QBrush(Qt::white), // text
		    QBrush(Qt::black), // bright_text
		    QBrush(Qt::black), // base
		    QBrush(Qt::black)); // window
  // main skeleton is an HBOX: board grid on the left, options on the right
  QHBoxLayout *szkielet = new QHBoxLayout;
  QGridLayout *siatka = new QGridLayout;
  siatka->setSpacing(0);
  siatka->setSizeConstraint(QLayout::SetFixedSize);
  // row 0: disabled buttons acting as a column-number ruler
  for(int j=0; j<=SIZE; j++) {
    QString temp;
    pole[0][j] = new MyButton(0,j,temp.setNum(j),this);
    pole[0][j]->setFixedSize(25,25);
    pole[0][j]->setDisabled(true);
    siatka->addWidget(pole[0][j],0,j);
  }
  // column 0: disabled buttons acting as a row-number ruler
  for(int i=1; i<=SIZE; i++) {
    QString temp;
    pole[i][0] = new MyButton(i,0,temp.setNum(i),this);
    pole[i][0]->setFixedSize(25,25);
    pole[i][0]->setDisabled(true);
    siatka->addWidget(pole[i][0],i,0);
  }
  // playable cells: blank caption, gray (empty) palette, each click
  // routed to setButtons(i,j)
  for(int i=1; i<=SIZE; i++)
    for(int j=1; j<=SIZE; j++) {
      pole[i][j] = new MyButton(i, j, tr(" "), this);
      pole[i][j]->setFixedSize(25,25);
      pole[i][j]->setFocusPolicy(Qt::NoFocus);
      pole[i][j]->setPalette(*n);
      siatka->addWidget(pole[i][j],i,j);
      connect(pole[i][j], SIGNAL(isClicked(int,int)),
	      this, SLOT(setButtons(int,int)));
    }
  // turn indicator: a disabled button showing whose move it is
  bruch = new QPushButton("X",this);
  bruch->setDisabled(true);
  bruch->setPalette(*x);
  bruch->setFixedSize(25,25);
  QLabel *nastepny = new QLabel(tr("Ruch "),this);
  QGroupBox *opcje = new QGroupBox(tr("Opcje"));
  QSpacerItem *sprezyna = new QSpacerItem(0,0,
					  QSizePolicy::Expanding,
					  QSizePolicy::Expanding);
  // force-mark checkboxes (mutually exclusive, see zmienO/zmienX)
  setx = new QCheckBox(tr("Ustaw X"),this);
  seto = new QCheckBox(tr("Ustaw O"),this);
  // move counter display
  lcd = new QLCDNumber(5,this);
  lcd->display(0);
  lcd->setFixedSize(75,30);
  nowa = new QPushButton(tr("Nowa gra"),this);
  nowa->setFixedSize(75,30);
  komp = new QPushButton(tr("Komputer"),this);
  komp->setFixedSize(75,30);
  // "Ruch" label + indicator on one row
  QHBoxLayout *kolejny = new QHBoxLayout;
  kolejny->addWidget(nastepny);
  kolejny->addWidget(bruch);
  // manual turn-override buttons
  QPushButton *bsetx = new QPushButton(tr("Ruch X"),this);
  QPushButton *bseto = new QPushButton(tr("Ruch O"),this);
  bsetx->setFixedSize(75,30);
  bseto->setFixedSize(75,30);
  // vertical stack inside the "Opcje" group box
  QVBoxLayout *pion = new QVBoxLayout;
  pion->addLayout(kolejny);
  pion->addWidget(setx);
  pion->addWidget(seto);
  pion->addWidget(bsetx);
  pion->addWidget(bseto);
  pion->addItem(sprezyna);
  pion->addWidget(lcd);
  pion->addWidget(komp);
  pion->addWidget(nowa);
  opcje->setLayout(pion);
  szkielet->addLayout(siatka);
  szkielet->addWidget(opcje);
  setLayout(szkielet);
  // control wiring
  connect(nowa, SIGNAL(clicked()),
	  this, SLOT(setNewGame()));
  connect(komp, SIGNAL(clicked()),
	  this, SLOT(autoGame()));
  connect(this, SIGNAL(zwiekszLCD()),
	  this, SLOT(lcdPP()));
  connect(bsetx, SIGNAL(clicked()),
	  this, SLOT(setRuchX()));
  connect(bseto, SIGNAL(clicked()),
	  this, SLOT(setRuchO()));
  // checking one force-mark box unchecks the other
  connect(setx, SIGNAL(stateChanged(int)),
	  this, SLOT(zmienO(int)));
  connect(seto, SIGNAL(stateChanged(int)),
	  this, SLOT(zmienX(int)));
}
// Slot: fired when "Ustaw X" changes state.  When it becomes checked,
// uncheck "Ustaw O" so the two force-mark boxes stay mutually exclusive.
void Kanwa::zmienO(int state)
{
  if(state)
    seto->setCheckState(Qt::Unchecked);
}
// Slot: fired when "Ustaw O" changes state.  When it becomes checked,
// uncheck "Ustaw X" -- mirror of zmienO().
void Kanwa::zmienX(int state)
{
  if(state)
    setx->setCheckState(Qt::Unchecked);
}
// Slot: hand the move over to O (the human side).  Updates the turn
// indicator and disables the "Komputer" button -- the engine in this
// program only plays X.
void Kanwa::setRuchO()
{
  komp->setDisabled(true);   // engine plays X only
  bruch->setText("O");
  bruch->setPalette(*o);
  ruch = KOLKO;
}
// Slot: hand the move over to X.  Updates the turn indicator and
// re-enables the "Komputer" button, since the engine plays X.
void Kanwa::setRuchX()
{
  komp->setDisabled(false);  // engine may move again
  bruch->setText("X");
  bruch->setPalette(*x);
  ruch = KRZYZYK;
}
// Write `typ` (ZADEN/KRZYZYK/KOLKO) into the board cell addressed by
// t_pole.  Only the logical state changes -- the button's caption and
// palette are untouched, which lets the search code place and retract
// hypothetical marks invisibly.
void Kanwa::ustawPole(TPole t_pole, int typ)
{
  pole[t_pole.i][t_pole.j]->setStan(typ);
}
// Slot: a board cell (i,j) was clicked.
// Normally places the mark of the side whose turn it is; when one of
// the "Ustaw X"/"Ustaw O" boxes is ticked (and a turn is in progress)
// the click forces that mark instead.  After placing, scans the board
// for a five-in-a-row of the placed mark and ends the game on a win.
void Kanwa::setButtons(int i, int j)
{
  if(endgame)
    return;                                  // board frozen after a win

  const bool forceX = (setx->checkState() == Qt::Checked);
  const bool forceO = (seto->checkState() == Qt::Checked);

  // decide which mark this click places (ZADEN = ignore the click)
  int znak = ZADEN;
  if(!forceX && !forceO)
    znak = ruch;                             // regular alternating play
  else if(ruch != ZADEN && forceX)
    znak = KRZYZYK;                          // user paints an X
  else if(ruch != ZADEN && forceO)
    znak = KOLKO;                            // user paints an O

  std::list<Event> wygrane;                  // winning lines found
  if(znak == KRZYZYK) {
    setX(i, j);
    checkEvent("XXXXX", PIATKA, wygrane);
    endGame(wygrane, KRZYZYK);
  } else if(znak == KOLKO) {
    setO(i, j);
    checkEvent("OOOOO", PIATKA, wygrane);
    endGame(wygrane, KOLKO);
  }
}
// Defence against O threats.
// Stage 1: if any O "four" pattern exists (one move from five), the
//   reply is forced -- return the single empty cell of the first match.
// Stage 2: otherwise scan for O "open three" patterns and, via
//   wywolajDlaKazdego, try each candidate blocking cell, keeping the
//   first one that leaves no unanswerable O follow-up.
// Returns a Ruch with error=false and the chosen (i,j), or error=true
// when no defensive move was found.  The pattern order below is the
// search priority and is therefore behaviour, not style.
Ruch Kanwa::obronaPrzedO()
{
  Ruch punkt; Event temp;
  std::list<Event> zagrozenia;
  std::list<Event>::iterator pos;
  // fours: exactly one empty cell completes a five
  checkEvent("OOOO ",CZTERY5,zagrozenia);
  checkEvent(" OOOO",CZTERY5,zagrozenia);
  checkEvent("O OOO",CZTERY5,zagrozenia);
  checkEvent("OO OO",CZTERY5,zagrozenia);
  checkEvent("OOO O",CZTERY5,zagrozenia);
  checkEvent(" OOOO ",CZTERY6,zagrozenia);
  if(zagrozenia.size()) {
    int p;
    temp = *(zagrozenia.begin());
    // find the first empty cell of the pattern -- the forced block.
    // NOTE(review): if a matched pattern contained no empty cell this
    // would index past temp.size; every pattern above has a space, so
    // presumably unreachable -- confirm against checkEvent().
    for(p=0; p<temp.size && temp.thread[p].stan!=ZADEN; p++);
    punkt.i = temp.thread[p].i;
    punkt.j = temp.thread[p].j;
    punkt.error = false;
    return punkt;
  }
  zagrozenia.clear();
  // open threes: two moves from a five, still dangerous
  checkEvent("  OOO  ",TRZY7,zagrozenia);
  checkEvent(" OOO  ",TRZY6,zagrozenia);
  checkEvent("  OOO ",TRZY6,zagrozenia);
  checkEvent(" OO O ",TRZY6,zagrozenia);
  checkEvent(" O OO ",TRZY6,zagrozenia);
  for(pos=zagrozenia.begin(); pos!=zagrozenia.end(); pos++) {
    punkt = wywolajDlaKazdego(*pos,KRZYZYK,KOLKO);
    if(!punkt.error)
      return punkt;
  }
  punkt.error = true;
  return punkt;
}
// Defence against X threats -- structural mirror of obronaPrzedO()
// with the marks swapped (consider folding the two into one
// parameterised routine).
// Stage 1: a matched X "four" forces the block at its empty cell.
// Stage 2: X "open threes" are probed through wywolajDlaKazdego.
// NOTE(review): wywolajDlaKazdego internally re-checks threats with
// obronaPrzedO() regardless of the roles passed here -- looks
// asymmetric for the X-defence case; confirm intent.
Ruch Kanwa::obronaPrzedX()
{
  Ruch punkt; Event temp;
  std::list<Event> zagrozenia;
  std::list<Event>::iterator pos;
  // fours: one move from a five
  checkEvent("XXXX ",CZTERY5,zagrozenia);
  checkEvent(" XXXX",CZTERY5,zagrozenia);
  checkEvent("X XXX",CZTERY5,zagrozenia);
  checkEvent("XX XX",CZTERY5,zagrozenia);
  checkEvent("XXX X",CZTERY5,zagrozenia);
  checkEvent(" XXXX ",CZTERY6,zagrozenia);
  if(zagrozenia.size()) {
    int p;
    temp = *(zagrozenia.begin());
    // first empty cell of the pattern is the forced block
    for(p=0; p<temp.size && temp.thread[p].stan!=ZADEN; p++);
    punkt.i = temp.thread[p].i;
    punkt.j = temp.thread[p].j;
    punkt.error = false;
    return punkt;
  }
  zagrozenia.clear();
  // open threes
  checkEvent("  XXX  ",TRZY7,zagrozenia);
  checkEvent(" XXX  ",TRZY6,zagrozenia);
  checkEvent("  XXX ",TRZY6,zagrozenia);
  checkEvent(" XX X ",TRZY6,zagrozenia);
  checkEvent(" X XX ",TRZY6,zagrozenia);
  for(pos=zagrozenia.begin(); pos!=zagrozenia.end(); pos++) {
    punkt = wywolajDlaKazdego(*pos,KOLKO,KRZYZYK);
    if(!punkt.error)
      return punkt;
  }
  punkt.error = true;
  return punkt;
}
// For every empty cell of `zdarzenie` (a matched threat pattern):
// tentatively place `znak` there, then for each remaining empty cell
// place the opponent's `vsznak` and probe whether a defensive reply
// still exists.  The first placement that yields a non-error probe
// result is returned; all hypothetical marks are retracted before
// returning in every path.
//
// Parameters:
//   zdarzenie - the pattern (cells + states) under examination
//               (passed by value: its thread[] edits are local,
//               board edits go through ustawPole and are undone)
//   znak      - mark of the side a move is being sought for
//   vsznak    - mark of the opposing side
// Returns: a Ruch with error=false and the probe's (i,j) on success,
//   or error=true when no placement survives.
Ruch Kanwa::wywolajDlaKazdego(Event zdarzenie, int znak, int vsznak)
{
  Ruch punkt;
  punkt.error = true;   // FIX: was left uninitialized -- when a pattern
                        // had only one empty cell the inner loop never
                        // ran and `punkt.error` was read as garbage (UB)
  for(int p=0; p<zdarzenie.size; p++) {
    if(zdarzenie.thread[p].stan==ZADEN) {
      // hypothetically place our mark at cell p
      zdarzenie.thread[p].stan=znak;
      ustawPole(zdarzenie.thread[p],znak);
      for(int k=0; k<zdarzenie.size; k++)
	if(zdarzenie.thread[k].stan==ZADEN) {
	  // opponent answers at cell k; does a defence still exist?
	  ustawPole(zdarzenie.thread[k],vsznak);
	  // NOTE(review): always probes obronaPrzedO(), even when the
	  // caller (obronaPrzedX) passed the roles swapped -- presumably
	  // should probe the matching defence routine; confirm intent.
	  punkt = obronaPrzedO();
	  ustawPole(zdarzenie.thread[k],ZADEN);
	  if(!punkt.error) break;
	}
      // retract our hypothetical mark
      zdarzenie.thread[p].stan=ZADEN;
      ustawPole(zdarzenie.thread[p],ZADEN);
      if(!punkt.error) {
	return punkt;
      }
    }
  }
  punkt.error = true;
  return punkt;
}
// Slot ("Komputer" button): the engine's move for X, chosen by a
// fixed priority cascade.  Each tier runs only when every earlier
// tier matched nothing (so `zagrozenia` is empty on entry to each):
//   1. own fours            -> TSS (threat-space search, go for the win)
//   2. opponent fours       -> forced defence (obronaPrzedO)
//   3. own open threes      -> TSS
//   4. own broken threes    -> createThread (build a new threat)
//   5. opponent open threes -> defence
// If the chosen tier produced no move, fall back to createThread(0);
// failing that, report an error on the status bar.
void Kanwa::autoGame()
{
  //printf("AUTOGAME\n");
  std::list<Event> zagrozenia;
  lcd->display(0);
  Ruch punkt;
  punkt.error = true;
  // 1. look for my own (X) threats - fours
  checkEvent("XXXX ",CZTERY5,zagrozenia);
  checkEvent(" XXXX",CZTERY5,zagrozenia);
  checkEvent(" XXXX ",CZTERY6,zagrozenia);
  checkEvent("X XXX",CZTERY5,zagrozenia);
  checkEvent("XX XX",CZTERY5,zagrozenia);
  checkEvent("XXX X",CZTERY5,zagrozenia);
  if(zagrozenia.size())
    punkt = TSS(); // threat-space search: convert the four into a win
  else {
    // 2. opponent (O) fours -- must be answered immediately
    checkEvent("OOOO ",CZTERY5,zagrozenia);
    checkEvent(" OOOO",CZTERY5,zagrozenia);
    checkEvent(" OOOO ",CZTERY6,zagrozenia);
    checkEvent("O OOO",CZTERY5,zagrozenia);
    checkEvent("OO OO",CZTERY5,zagrozenia);
    checkEvent("OOO O",CZTERY5,zagrozenia);
    if(zagrozenia.size())
      punkt = obronaPrzedO(); // DEFENCE
    else {
      // 3. own open threes -- attack continues via TSS
      checkEvent("  XXX  ",TRZY7,zagrozenia);
      checkEvent(" XXX  ",TRZY6,zagrozenia);
      checkEvent("  XXX ",TRZY6,zagrozenia);
      checkEvent(" XX X ",TRZY6,zagrozenia);
      checkEvent(" X XX ",TRZY6,zagrozenia);
      if(zagrozenia.size())
        punkt = TSS(); // threat-space search
      else {
        // 4. own broken/half-open threes -- try to build a threat
        checkEvent(" XXX ",CZTERY5,zagrozenia);
        checkEvent("X XX ",CZTERY5,zagrozenia);
        checkEvent("XX X ",CZTERY5,zagrozenia);
        checkEvent("XXX  ",CZTERY5,zagrozenia);
        checkEvent("  XXX",CZTERY5,zagrozenia);
        checkEvent(" X XX",CZTERY5,zagrozenia);
        checkEvent(" XX X",CZTERY5,zagrozenia);
        checkEvent(" XXX ",CZTERY5,zagrozenia);
        checkEvent(" XXX  ",CZTERY6,zagrozenia);
        checkEvent(" X XX ",CZTERY6,zagrozenia);
        checkEvent(" XX X ",CZTERY6,zagrozenia);
        checkEvent("  XXX ",CZTERY6,zagrozenia);
        if(zagrozenia.size())
          punkt = createThread(0);
        else {
          // 5. opponent open threes -- defend before they grow
          checkEvent("  OOO  ",TRZY7,zagrozenia);
          checkEvent(" OOO  ",TRZY6,zagrozenia);
          checkEvent("  OOO ",TRZY6,zagrozenia);
          checkEvent(" OO O ",TRZY6,zagrozenia);
          checkEvent(" O OO ",TRZY6,zagrozenia);
          if(zagrozenia.size())
            punkt = obronaPrzedO(); // DEFENCE
        }
      }
    }
  }
  if(!punkt.error)
    setButtons(punkt.i, punkt.j); // play the chosen move
  else {
    // no tier produced a move -- last resort: try to open a new thread
    //printf("WYWOLANIE CREATE\n");
    punkt = createThread(0);
    if(!punkt.error)
      setButtons(punkt.i, punkt.j);
    else
      emit setStatus("GOMOKU; autor: <NAME> - ERROR autoGAME2");
  }
}
/*
 * Threat-creation search (recursion depth g).
 *
 * Tries to turn an existing X pattern into a forcing threat: for every
 * pattern found, tentatively place an X on one of its empty cells, let
 * the opponent answer with O on each remaining empty cell, and recurse
 * (TSS()/createThread()) to test whether a win is still forced.  Every
 * tentative stone is removed again before returning, so the board is
 * left exactly as it was.
 *
 * Stage 1 uses patterns one stone short of a four (depth capped at 2);
 * stage 2 uses weaker two-stone patterns (depth capped at 5).
 *
 * Returns: a Ruch with error==false and the chosen (i,j) on success,
 *          or error==true when no forcing move was found.
 */
Ruch Kanwa::createThread(int g)
{
    Ruch punkt;
    // FIX: punkt.error used to be read uninitialized when a pattern
    // contained no free cell (the inner loops never assigned it).
    punkt.error = true;
    std::list<Event> zagrozenia;
    std::list<Event>::iterator pos;
    // stage 1: patterns one stone away from a four
    checkEvent(" XXX ",CZTERY5,zagrozenia);
    checkEvent("X XX ",CZTERY5,zagrozenia);
    checkEvent("XX X ",CZTERY5,zagrozenia);
    checkEvent("XXX ",CZTERY5,zagrozenia);
    checkEvent(" XXX",CZTERY5,zagrozenia);
    checkEvent(" X XX",CZTERY5,zagrozenia);
    checkEvent(" XX X",CZTERY5,zagrozenia);
    checkEvent(" XXX ",CZTERY5,zagrozenia);
    checkEvent(" XXX ",CZTERY6,zagrozenia);
    checkEvent(" X XX ",CZTERY6,zagrozenia);
    checkEvent(" XX X ",CZTERY6,zagrozenia);
    checkEvent(" XXX ",CZTERY6,zagrozenia);
    // FIX: std::list::size() returns an unsigned type; passing it to %d
    // is undefined behaviour -- cast explicitly.
    printf("CREATE %3d\t%3d\n", g, (int)zagrozenia.size());
    if(zagrozenia.size())
        for(pos=zagrozenia.begin(); pos!=zagrozenia.end(); pos++) {
            for(int p=0; p<pos->size; p++) {
                if((*pos).thread[p].stan==ZADEN) {
                    // try placing X on this empty cell
                    (*pos).thread[p].stan=KRZYZYK;
                    ustawPole((*pos).thread[p],KRZYZYK);
                    // let O answer on every other empty cell of the pattern
                    for(int k=0; k<pos->size; k++)
                        if((*pos).thread[k].stan==ZADEN) {
                            ustawPole((*pos).thread[k],KOLKO);
                            punkt = TSS();
                            if(punkt.error && g<2)
                                punkt = createThread(g+1);
                            ustawPole((*pos).thread[k],ZADEN);
                            if(!punkt.error)
                                break;
                        }
                    // undo the tentative X
                    (*pos).thread[p].stan=ZADEN;
                    ustawPole((*pos).thread[p],ZADEN);
                    if(!punkt.error) {
                        punkt.i = (*pos).thread[p].i;
                        punkt.j = (*pos).thread[p].j;
                        printf("WYJSCIE OK\n");
                        emit setStatus("GOMOKU; autor: <NAME> - Create");
                        return punkt;
                    }
                }
            }
        }
    zagrozenia.clear();
    // stage 2: weaker two-stone patterns that could grow into threes
    checkEvent(" XX ",TRZY7,zagrozenia);
    checkEvent(" X X ",TRZY7,zagrozenia);
    checkEvent(" XX ",TRZY7,zagrozenia);
    checkEvent(" XX ",TRZY6,zagrozenia);
    checkEvent(" X X ",TRZY6,zagrozenia);
    checkEvent(" XX ",TRZY6,zagrozenia);
    checkEvent(" XX ",TRZY6,zagrozenia);
    checkEvent(" X X ",TRZY6,zagrozenia);
    checkEvent(" X X ",TRZY6,zagrozenia);
    if(zagrozenia.size())
        for(pos=zagrozenia.begin(); pos!=zagrozenia.end(); pos++) {
            for(int p=0; p<pos->size; p++) {
                if((*pos).thread[p].stan==ZADEN) {
                    (*pos).thread[p].stan=KRZYZYK;
                    ustawPole((*pos).thread[p],KRZYZYK);
                    for(int k=0; k<pos->size; k++)
                        if((*pos).thread[k].stan==ZADEN) {
                            ustawPole((*pos).thread[k],KOLKO);
                            punkt = TSS();
                            if(punkt.error && g<5)
                                punkt = createThread(g+1);
                            ustawPole((*pos).thread[k],ZADEN);
                            if(!punkt.error)
                                break;
                        }
                    (*pos).thread[p].stan=ZADEN;
                    ustawPole((*pos).thread[p],ZADEN);
                    if(!punkt.error) {
                        punkt.i = (*pos).thread[p].i;
                        punkt.j = (*pos).thread[p].j;
                        printf("WYJSCIE OK\n");
                        emit setStatus("GOMOKU; autor: <NAME> - Create");
                        return punkt;
                    }
                }
            }
        }
    emit setStatus("GOMOKU; autor: <NAME> - Error Create");
    //printf("WYJSCIE ERROR\n");
    punkt.error = true;
    return punkt;
}
// Slot: bump the value shown on the LCD counter by one.
void Kanwa::lcdPP()
{
    const double nastepnaWartosc = lcd->value() + 1;
    lcd->display(nastepnaWartosc);
}
/*
 * One Threat-Space Search step: looks for an immediate forcing
 * continuation for X.  Fours (a five is one stone away) are checked
 * first, then open threes.  The first matching pattern's first empty
 * cell is returned as the move.
 *
 * Returns: a Ruch with error==false and the completing cell (i,j),
 *          or error==true when no threat pattern is present.
 */
Ruch Kanwa::TSS()
{
    emit zwiekszLCD();   // count visited search nodes on the LCD
    emit setStatus("GOMOKU; autor: <NAME> - TSS");
    Ruch punkt;
    Event temp;
    std::list<Event> zagrozenia;
    // fours: one empty cell completes a five
    checkEvent("XXXX ",CZTERY5,zagrozenia);
    checkEvent(" XXXX",CZTERY5,zagrozenia);
    checkEvent(" XXXX ",CZTERY6,zagrozenia);
    checkEvent("X XXX",CZTERY5,zagrozenia);
    checkEvent("XX XX",CZTERY5,zagrozenia);
    checkEvent("XXX X",CZTERY5,zagrozenia);
    // FIX: std::list::size() is unsigned; %d expects int -- cast explicitly.
    printf("TSS4 \t%3d\n", (int)zagrozenia.size());
    if(zagrozenia.size()) {
        int i;
        temp = *(zagrozenia.begin());
        // find the first empty cell of the pattern
        for(i=0; i<temp.size && temp.thread[i].stan!=ZADEN; i++);
        // FIX: guard i<temp.size -- without it thread[i] was read one
        // past the end of the array when the pattern had no empty cell.
        if(i<temp.size && temp.thread[i].stan==ZADEN) {
            punkt.i = temp.thread[i].i;
            punkt.j = temp.thread[i].j;
            printf("4: %d - %d\n",punkt.i, punkt.j);
            punkt.error = false;
        } else {
            punkt.error = true;
        }
        return punkt;
    }
    // open threes
    checkEvent(" XXX ",TRZY7,zagrozenia);
    checkEvent(" XXX ",TRZY6,zagrozenia);
    checkEvent(" XXX ",TRZY6,zagrozenia);
    checkEvent(" XX X ",TRZY6,zagrozenia);
    checkEvent(" X XX ",TRZY6,zagrozenia);
    printf("TSS3 \t%3d\n", (int)zagrozenia.size());
    if(zagrozenia.size()) {
        int i;
        temp = *(zagrozenia.begin());
        // scan interior cells only (skip the pattern's border cells)
        for(i=1; i<temp.size-1 && temp.thread[i].stan!=ZADEN; i++);
        if(temp.thread[i].stan==ZADEN) {
            punkt.i = temp.thread[i].i;
            punkt.j = temp.thread[i].j;
            printf("3: %d - %d\n",punkt.i, punkt.j);
            // debug dump of the matched pattern cells
            for(int p=0; p<temp.size; p++)
                printf("|%c|%d,%d",temp.thread[p].stan,
                       temp.thread[p].i,temp.thread[p].j);
            printf("|\n");
            punkt.error = false;
        } else {
            punkt.error = true;
        }
        return punkt;
    }
    punkt.error = true;
    return punkt;
}
/*
 * Slot: reset the board to a fresh game.
 * Clears every cell, gives the move to X, updates the turn indicator
 * and re-enables the computer-move button.
 */
void Kanwa::setNewGame()
{
    for(int i=1; i<=SIZE; i++)
        for(int j=1; j<=SIZE; j++) {
            pole[i][j]->setPalette(*n);
            pole[i][j]->setText(" ");
            pole[i][j]->setDisabled(false);
            pole[i][j]->setStan(ZADEN);
        }
    // FIX: the turn/indicator setup below is loop-invariant; it used to
    // be executed SIZE*SIZE times inside the inner loop.
    ruch = KRZYZYK;
    bruch->setPalette(*x);
    bruch->setText("X");
    emit setStatus("GOMOKU; autor: <NAME>");
    endgame = false;
    komp->setDisabled(false);
}
void Kanwa::setX(int i, int j)
{
pole[i][j]->setText("X");
pole[i][j]->setStan(KRZYZYK);
pole[i][j]->setDisabled(true);
pole[i][j]->setPalette(*x);
ruch = KOLKO;
bruch->setPalette(*o);
bruch->setText("O");
komp->setDisabled(true);
}
void Kanwa::setO(int i, int j)
{
pole[i][j]->setText("O");
pole[i][j]->setStan(KOLKO);
pole[i][j]->setDisabled(true);
pole[i][j]->setPalette(*o);
ruch = KRZYZYK;
bruch->setPalette(*x);
bruch->setText("X");
komp->setDisabled(false);
}
/*
 * Scans the whole board for the pattern `str` (characters 'X', 'O' and
 * ' ') and appends one Event per occurrence to `lista`.  `typ` is the
 * pattern length (5..7).  Each row, column and diagonal is copied into
 * a character buffer, searched with strstr(), and the board coordinates
 * of every matched cell are reconstructed into the Event.  Overlapping
 * matches are found because the search resumes at wsk+1.
 */
void Kanwa::checkEvent(const char *str, int typ,
                       std::list<Event> &lista)
{
    char odczyt[SIZE+1], *temp, *wsk;
    Event ptr;
    int i, j;
    // horizontal rows
    for(i=1; i<=SIZE; i++) {
        for(j=1; j<=SIZE; j++) {
            odczyt[j-1] = (char) pole[i][j]->stan();
        }
        odczyt[j-1] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                ptr.thread[p].i = i;
                ptr.thread[p].j = wsk - odczyt + p + 1;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: rows
    // vertical columns
    for(j=1; j<=SIZE; j++) {
        for(i=1; i<=SIZE; i++) {
            odczyt[i-1] = (char) pole[i][j]->stan();
        }
        odczyt[i-1] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                ptr.thread[p].i = wsk - odczyt + p + 1;
                ptr.thread[p].j = j;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: columns
    // anti-diagonals, upper-left triangle (cells with i+j == q+1)
    for(int q=1; q<=SIZE; q++) {
        for(i=1,j=q;i<=q;i++,j--)
            odczyt[i-1] = (char) pole[i][j]->stan();
        odczyt[i-1] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                ptr.thread[p].j = q - (wsk - odczyt) - p;
                ptr.thread[p].i = q - ptr.thread[p].j + 1;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: anti-diagonals 1.1
    // anti-diagonals, lower-right triangle (cells with i+j == SIZE+q)
    for(int q=2; q<=SIZE; q++) {
        // FIX: the buffer index was i-q-1, which wrote odczyt[-1] on the
        // first iteration (buffer underflow); offset on this diagonal is i-q.
        for(i=q,j=SIZE;i<=SIZE;i++,j--)
            odczyt[i-q] = (char) pole[i][j]->stan();
        odczyt[i-q] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                // FIX: coordinates were computed as i = q+offset+2 and
                // j = q-i (which is negative); the cell at string offset
                // o on this diagonal lies at (q+o, SIZE-o).
                ptr.thread[p].i = q + (wsk - odczyt) + p;
                ptr.thread[p].j = SIZE - (wsk - odczyt) - p;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: anti-diagonals 1.2
    // main diagonals starting in row 1 (cells with j-i == q-1)
    for(int q=1; q<=SIZE; q++) {
        // FIX: the loop bound was i<=SIZE-q, which dropped the last cell
        // of every diagonal; the diagonal from (1,q) has SIZE-q+1 cells.
        for(i=1,j=q;i<=SIZE-q+1;i++,j++)
            odczyt[i-1] = (char) pole[i][j]->stan();
        odczyt[i-1] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                ptr.thread[p].i = wsk - odczyt + p + 1;
                ptr.thread[p].j = q + ptr.thread[p].i - 1;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: main diagonals 2.1
    // main diagonals starting in column 1 (cells with i-j == q-1)
    for(int q=2; q<=SIZE; q++) {
        for(i=q,j=1;i<=SIZE;i++,j++)
            odczyt[j-1] = (char) pole[i][j]->stan();
        odczyt[j-1] = '\0';
        temp = odczyt;
        while((wsk = strstr(temp,str))!=NULL) {
            ptr.size = typ;
            for(int p=0; p<typ; p++) {
                ptr.thread[p].j = wsk - odczyt + p + 1;
                ptr.thread[p].i = q + ptr.thread[p].j - 1;
                ptr.thread[p].stan = wsk[p];
            }
            lista.push_back(ptr);
            temp = wsk + 1;
        }
    } // end: main diagonals 2.2
}
void Kanwa::endGame(std::list<Event> &lista, int stan)
{
Event ptr;
int i, j;
if(lista.size()) {
ptr = *(lista.begin());
for(int p=0; p<ptr.size; p++) {
i = ptr.thread[p].i;
j = ptr.thread[p].j;
pole[i][j]->setPalette(*w);
}
ruch = ZADEN;
if(stan == KRZYZYK)
emit setStatus("GOMOKU; autor: <NAME> - WYGRAL KRZYZYK");
else
emit setStatus("GOMOKU; autor: <NAME> - WYGR<NAME>");
komp->setDisabled(true);
endgame = true;
}
}
// ----- END: class Kanwa
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class OknoGlowne
// Main-window constructor: creates the board widget, the status bar,
// fixes the window size and wires the board's status signal to the
// status-bar slot.
OknoGlowne::OknoGlowne(QWidget *wRodzic): QMainWindow(wRodzic)
{
    // initialise the main grid (the game board widget)
    glownyGrid = new Kanwa(this);
    glownyGrid->setFixedHeight(375);
    // initialise the status bar
    setStatusBar(new QStatusBar());
    setFixedSize(555,425);
    // make the board the central widget
    setCentralWidget(glownyGrid);
    // signals and slots: board status messages go to the status bar
    connect(glownyGrid, SIGNAL(setStatus(const QString &)),
            this, SLOT(odbierzNapisStatusu(const QString &)));
    odbierzNapisStatusu("GOMOKU; autor: <NAME>");
}
// Slot: show the received text on the status bar.
void OknoGlowne::odbierzNapisStatusu(const QString &Napis)
{
    // refresh the status-bar message
    statusBar()->showMessage(Napis);
}
// ----- END: class OknoGlowne
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: MAIN
int main( int argc, char * argv[] )
{
QApplication App(argc,argv);
OknoGlowne *Okno;
Okno = new OknoGlowne;
Okno->show();
return App.exec();
}
// ----- END: MAIN
// -----------------------------------------------------------
<file_sep>
#ifndef IRVIEW_HPP
#define IRVIEW_HPP
/* -----------------------------------------------------------
PROGRAM gomoku 1.0
----------------------------------------------------------- */
/* -----------------------------------------------------------
<NAME> (C) 2007
Zezwalam na dowolna modyfikacje tego kodu.
Prosze tylko nie usuwanie tego naglowka
i o dopisanie, ze dokonano zmian bez mojej wiedzy.
----------------------------------------------------------- */
/* -----------------------------------------------------------
Program napisany w ramach kursu:
-----------------------------------------
METODY I ALGORYTMY SZTUCZNEJ INTELIGENCJI
-----------------------------------------
Prowadzacy: dr inz. <NAME>
-----------------------------------------
Politechnika Wroclawska
Wydzial Elektroniki
----------------------------------------------------------- */
// -----------------------------------------------------------
// --- BEGIN: INCLUDES
#include <QWidget> // QT 4.3
#include <QMainWindow>
#include <QLabel>
#include <QPushButton>
#include <QSlider>
#include <QComboBox>
#include <QLineEdit>
#include <QThread>
#include <QProgressBar>
#include <QCheckBox>
#include <QLCDNumber>
#include <QGroupBox>
#include <QFile>
#include <QByteArray>
#include <list> // STL
#include <cstdio> // C
#include <cstring>
// ----- END: INCLUDES
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: DEFINES
#define SIZE 15 // board size (cells per side)
#define TSIZE 7 // maximum threat-pattern length
#define KRZYZYK 'X' // cell states: cross
#define KOLKO 'O' // circle
#define ZADEN ' ' // empty
#define ZAGROZENIE 'Z' // threat marker
#define TRZY5 5 // threat-pattern lengths: "three" variants
#define TRZY6 6
#define TRZY7 7
#define CZTERY6 6 // "four" variants
#define CZTERY5 5
#define PIATKA 5 // five in a row (winning length)
// ----- END: DEFINES
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: struct Ruch
// A single move proposal returned by the search routines.
struct Ruch
{
    int i, j;     // board coordinates of the move (1-based)
    bool error;   // true when no valid move could be found
};
// ----- END: struct Ruch
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: struct TPole
// One board cell as seen by the threat search.
struct TPole
{
    int i, j; // cell coordinates (1-based)
    int stan; // cell state: KRZYZYK, KOLKO or ZADEN
};
// ----- END: struct TPole
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: struct Event
// One matched threat pattern: the cells it covers and their count.
struct Event
{
    TPole thread[TSIZE]; // cells of the matched pattern (first `size` used)
    int size;            // pattern length, <= TSIZE
};
// ----- END: struct Event
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class MyButton
// A single board cell: a push button that knows its (i,j) position and
// its state ('X', 'O' or empty) and reports clicks with its coordinates.
class MyButton: public QPushButton {
    Q_OBJECT
    int _i;     // row (1-based)
    int _j;     // column (1-based)
    int _stan;  // cell state: KRZYZYK, KOLKO or ZADEN
public:
    MyButton(int i, int j, QString str, QWidget *wRodzic = 0L);
    void setStan(int stan);  // set the cell state
    int stan() const;        // current cell state
public slots:
    void sendClicked();      // slot bound to clicked(); re-emits with (i,j)
signals:
    void isClicked(int i, int j);  // click notification carrying coordinates
};
// ----- END: class MyButton
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class Kanwa
// The game-board widget: a SIZE x SIZE grid of MyButton cells plus the
// control buttons, colour palettes and the threat-space-search (TSS)
// machinery that implements the computer player.
class Kanwa: public QWidget {
    Q_OBJECT
private:
    MyButton *pole[SIZE+1][SIZE+1]; // board cells, 1-based indexing
    QPushButton *bruch;             // "whose move" indicator button
    QPushButton *nowa;              // "new game" button
    QPushButton *komp;              // "computer move" button
    QPalette *x;                    // palette for X cells
    QPalette *o;                    // palette for O cells
    QPalette *n;                    // palette for empty cells
    QPalette *w;                    // palette for the winning line
    QLCDNumber *lcd;                // TSS call counter display
    QCheckBox *setx;                // X/O mode checkboxes -- exact role not
    QCheckBox *seto;                //   visible in this chunk (see zmienX/zmienO)
    int ruch;                       // whose move: KRZYZYK, KOLKO or ZADEN
    bool endgame;                   // true once somebody has won
    void setX(int i, int j);        // place X on (i,j) and pass the move
    void setO(int i, int j);        // place O on (i,j) and pass the move
public:
    Kanwa(QWidget *wRodzic = 0L);
    // collect all occurrences of pattern `str` (length `stan`) into `lista`
    void checkEvent(const char *str, int stan,
                    std::list<Event> &lista);
    // highlight the winning line from `lista` and stop the game
    void endGame(std::list<Event> &lista, int stan);
    // algorithm (threat-space search)
    Ruch TSS();                         // one TSS step: find a forcing move
    void ustawPole(TPole t_pole, int typ);  // set a board cell to `typ`
    Ruch createThread(int);             // recursive threat-creation search
    Ruch obronaPrzedO();                // defence against O threats
    Ruch obronaPrzedX();                // defence against X threats
    Ruch wywolajDlaKazdego(Event zdarzenie,
                           int znak, int vsznak);
public slots:
    void setButtons(int i, int j);  // play a move on cell (i,j)
    void setNewGame();              // reset the board
    void autoGame();                // computer move
    void lcdPP();                   // increment the LCD counter
    void setRuchX();
    void setRuchO();
    void zmienX(int state);
    void zmienO(int state);
signals:
    void setStatus(const QString &);  // status-bar message
    void zwiekszLCD();                // request LCD counter increment
};
// ----- END: class Kanwa
// -----------------------------------------------------------
// -----------------------------------------------------------
// --- BEGIN: class OknoGlowne
// Main application window: owns the board widget (Kanwa) and shows the
// status messages it emits.
class OknoGlowne: public QMainWindow {
    Q_OBJECT
public:
    /**
     * \brief Builds the window and initialises the widget manager (board).
     */
    OknoGlowne(QWidget *wRodzic = 0L);
    Kanwa *glownyGrid;  // the central board widget
public slots:
    /**
     * \brief Shows the received text on the status bar.
     */
    void odbierzNapisStatusu(const QString &);
};
// ----- END: class OknoGlowne
// -----------------------------------------------------------
#endif
<file_sep>################################################################################
# Copyright (c) 2007 <NAME>, wszystkie prawa zastrzezone
#
# Niniejszy plik zostal utworzony jako przyklad na potrzebe nauki
# programowania z wykorzystaniem biblioteki Qt w ramach kursu
# "Wizualizacja danych sensorycznych" prowadzonym na specjalnosci Robotyka,
# kierunku Automatyka i Robotyka, Wydzialu Elektroniki Politechniki
# Wroclawskiej.
# Plik ten mozna wykorzystywac we wlasnych projektach pod warunkiem, ze
# utworzone oprogramowanie bedzie objete licencja GNU General Public License.
# Jezeli w tym pliku wprowadzone zostana wlasne zmiany, to nalezy rowniez
# usunac niniejszy naglowek.
#
# Niniejszy plik nie jest objety zadnymi gwarancjami, ze bedzie mogl on
# byc wykorzystany do innych celow nie zwiazanych z nauka programowania.
#
################################################################################
# --- Build configuration -------------------------------------------------
# Application name and the command used to launch it after building.
NAZWA_APLIKACJI=gomoku
URUCHOMIENIE_APLIKACJI=./gomoku
# Sources handed to qmake when generating the project file.
ZRODLA_PROJEKTU=src/*.cpp inc/*.hpp
# Extra qmake variable assignments: object/moc output dirs, include path.
PODSTAWIENIA=OBJECTS_DIR=${KATALOG_OBJ} INCLUDEPATH=inc\
	MOC_DIR=${KATALOG_MOC}
KATALOG_OBJ=./obj
KATALOG_MOC=./moc
QMAKE=qt4-qmake
# Stamp file marking that the Qt 4 installation has been verified.
SLAD_SPRAWDZENIA=.qt_juz_sprawdzone

# Default goal: verify Qt, build the application and run it.
__start__: ${SLAD_SPRAWDZENIA} ${NAZWA_APLIKACJI}
	rm -f core*; ${URUCHOMIENIE_APLIKACJI}

# Check that qmake exists and is version 4.x; create the stamp on success.
${SLAD_SPRAWDZENIA}:
	@if which $(QMAKE) > /dev/null; then exit 0;\
	else\
	echo; echo " Brak programu qmake."\
	" Prawdopodobnie biblioteka Qt nie zostala zainstalowana.";\
	echo; exit 1;\
	fi
	@if $(QMAKE) -v | grep 'ver.*4\.[0-9]*\.[0-9]*' > /dev/null;\
	then exit 0;\
	else echo; echo " Brak biblioteki Qt w wersji 4.x.x";\
	echo; exit 1;\
	fi
	touch ${SLAD_SPRAWDZENIA}

# Build the application through the qmake-generated Makefile.app.
${NAZWA_APLIKACJI}: Makefile.app __sprawdz__
__sprawdz__:
	make -f Makefile.app

# Regenerate Makefile.app from the project file when it is out of date.
Makefile.app: ${NAZWA_APLIKACJI}.pro
	$(QMAKE) -o Makefile.app ${NAZWA_APLIKACJI}.pro

# Generate the qmake project file from the configured sources.
${NAZWA_APLIKACJI}.pro:
	rm -f ${NAZWA_APLIKACJI}
	$(QMAKE) -project -nopwd -o ${NAZWA_APLIKACJI}.pro\
	${PODSTAWIENIA} ${ZRODLA_PROJEKTU}

# Force creation of a fresh project file.
project: __usun_pro__ ${NAZWA_APLIKACJI}.pro
__usun_pro__:
	rm -f ${NAZWA_APLIKACJI}.pro

# Remove build products (keeps the binary).
# NOTE(review): "rm -r" fails when the stamp file does not exist yet;
# "rm -fr" would make this target idempotent.
clean:
	make -f Makefile.app clean
	rm -r ${SLAD_SPRAWDZENIA}

# Remove build products and the binary itself.
cleanall: clean
	rm -f ${NAZWA_APLIKACJI}

# Remove editor backup copies.
clean_copies:
	find . -name \*~ -exec rm {} \;
	find . -name \*.bak -exec rm {} \;

# Remove everything except the sources and this Makefile.
cleantotally: clean_copies cleanall
	rm -f ${NAZWA_APLIKACJI}.pro Makefile.app
	rm -fr ${KATALOG_MOC} ${KATALOG_OBJ}

# Remove documentation generated by doxygen.
cleandocs:
	rm -rf doc/*

# Show the available targets (messages kept in the original Polish).
help:
	@echo
	@echo "Dzialania:"
	@echo
	@echo " project - wymusza utworzenie nowego projektu"
	@echo " clean - usuwa wszystkie produkty kompilacji i konsolidacji"
	@echo " cleanall - usuwa produkty kompilacji wraz z aplikacja"
	@echo " cleantotally - usuwa wszystko oprocz zrodel i pliku Makefile"
	@echo " cleandocs - usuwa dokumentacje wygenerowana doxygen"
	@echo " help - wyswietla niniejsza pomoc"
	@echo
<file_sep>An attempt to implement Thread Space Search (TSS) algorithm for Gomoku game (in C++ & QT 4.3 library) based on "Go-Moku and Threat-Space Search" by <NAME>, <NAME>, <NAME>. Course Methods and Algorithms Artificial Intelligence by <NAME> and dr eng. <NAME> (Poland) 20-06-2007.
Original website: http://rab.ict.pwr.wroc.pl/~abogdzie/gomoku/
Copyright © 20-06-2007 <NAME> and dr eng. <NAME> Email: <EMAIL> Country: Poland






Tags: Five in a Row, Tic Tac Toe, TicTacToe, 5 in a Row, Go-Moku, Connect, Connect5, Connect6, Caro, Noughts and Crosses, Gomoku, Renju, Pente, Piskvork, Amoba, Kółko i Krzyżyk, Gomocup, AI, Engine, Artificial Intelligence, Brain, Pbrain, Gra, Game, Source Code Files, Program, Programming, Github, Board, Coding. | 6ffb5639054c5be95b06f78f060d5d8ee637634d | [
"Markdown",
"Makefile",
"C++"
] | 4 | C++ | keselasela/Gomoku-Thread-Space-Search | 8bad5497536a74ebbc76a1b53d3bdd0bcb3ae6b8 | bf5aeb91668eaecd6e5fec10a77590c3563d2dc1 |
refs/heads/master | <file_sep>package com.course.localization.exactumpositioner;
import android.app.IntentService;
import android.content.Context;
import android.content.Intent;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.widget.Toast;
import com.course.localization.exactumpositioner.domain.WifiFingerPrint;
import java.io.Serializable;
import java.util.List;
/**
* An {@link IntentService} subclass for handling asynchronous task requests in
* a service on a separate handler thread.
* <p/>
* TODO: Customize class - update intent actions and extra parameters.
*/
/**
 * {@link IntentService} that exports every stored {@link WifiFingerPrint}
 * to a text file in the app-specific external documents directory and
 * reports the outcome via a broadcast.
 */
public class DataExportService extends IntentService {
    private static final String TAG = IntentService.class.getSimpleName();
    /** Intent action requesting a full export. */
    public static final String ACTION_EXPORT_DATA = "com.course.localization.exactumpositioner.action.EXPORT_FINGERPRINTS";
    /** Intent extra carrying the target file name. */
    public static final String PARAM_FILENAME = "com.course.localization.exactumpositioner.param.FILENAME";
    // Values placed under CommonConstants.SERVICE_RESPONSE_KEY in the response broadcast.
    public static final String RESPONSE_SUCCESS = "successDataExport";
    public static final String RESPONSE_FAILURE = "failureDataExport";
    /** Extra key holding the error text on failure. */
    public static final String ERROR_MESSAGE_KEY = "error";

    public DataExportService() {
        super("DataExportService");
    }

    /**
     * Handles {@link #ACTION_EXPORT_DATA}: loads all fingerprints ordered
     * by position (x, y, z), writes them to the requested file and
     * broadcasts the result under {@link CommonConstants#ACTION_RESP}.
     */
    @Override
    protected void onHandleIntent(Intent intent) {
        if (intent != null) {
            final String action = intent.getAction();
            if (ACTION_EXPORT_DATA.equals(action)) {
                long startTime = System.currentTimeMillis();   // for the duration log below
                final String fileName = intent.getStringExtra(PARAM_FILENAME);
                //List<WifiFingerPrint> prints = WifiFingerPrint.listAll(WifiFingerPrint.class);
                List<WifiFingerPrint> prints =
                        WifiFingerPrint.find(
                                WifiFingerPrint.class, //type
                                null, //where clause
                                null, //where args
                                null, //group by
                                "x, y, z", //order by
                                null //limit
                        );
                Log.d(TAG, "fetched all the fingerprints");
                // An empty result string signals success; otherwise it carries the error text.
                String result = DataExporter.writePrintsToFile(
                        prints,
                        getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS),
                        fileName
                );
                Intent broadcastIntent = new Intent();
                broadcastIntent.setAction(CommonConstants.ACTION_RESP);
                broadcastIntent.addCategory(Intent.CATEGORY_DEFAULT);
                if( result.isEmpty() ){
                    broadcastIntent.putExtra(CommonConstants.SERVICE_RESPONSE_KEY, RESPONSE_SUCCESS);
                }else{
                    broadcastIntent.putExtra(CommonConstants.SERVICE_RESPONSE_KEY, RESPONSE_FAILURE);
                    broadcastIntent.putExtra(ERROR_MESSAGE_KEY, result);
                }
                sendBroadcast(broadcastIntent);
                Long endTime = System.currentTimeMillis();
                Log.d(TAG, "Export took " + (endTime - startTime)/1000 + " seconds, " + prints.size() + " records");
            }
        }
    }

    /**
     * Convenience helper: starts this service for the given file name.
     * Shows a toast and does nothing when fileName is null.
     *
     * @param fileName target file name (must not be null)
     * @param context  any context able to start a service
     */
    public static void exportAllData(String fileName, Context context){
        if( fileName != null){
            Toast.makeText(context, "Started export", Toast.LENGTH_SHORT).show();
            Intent intent = new Intent(context, DataExportService.class);
            intent.putExtra(DataExportService.PARAM_FILENAME, fileName);
            intent.setAction(DataExportService.ACTION_EXPORT_DATA);
            context.startService(intent);
        }else{
            Toast.makeText(context, "You must specify a file name", Toast.LENGTH_SHORT).show();
        }
    }

    // Helper for posting a toast from this worker thread.
    // NOTE(review): currently unused within the visible code.
    private void showToast(final String message, final int length){
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(getApplicationContext(), message, length).show();
            }
        });
    }
}
<file_sep>package com.course.localization.exactumpositioner.activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.PointF;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.View;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.course.localization.exactumpositioner.CommonConstants;
import com.course.localization.exactumpositioner.CustomImageView;
import com.course.localization.exactumpositioner.DataExportService;
import com.course.localization.exactumpositioner.DbService;
import com.course.localization.exactumpositioner.OptionProgressDialog;
import com.course.localization.exactumpositioner.PositionMapDrawer;
import com.course.localization.exactumpositioner.R;
import com.course.localization.exactumpositioner.Utils;
import com.course.localization.exactumpositioner.domain.WifiFingerPrint;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
public class Calibration extends AppCompatActivity
implements NavigationView.OnNavigationItemSelectedListener {
public static final String TAG = Calibration.class.getSimpleName();
private CustomImageView imageView;
private WifiManager mainWifi;
private PowerManager.WakeLock wakeLock;
private WifiReceiver receiverWifi;
private List<ScanResult> wifiList;
static StringBuilder fingerprint;
static StringBuilder macs;
static StringBuilder rssi;
//Popup dialog that displays progress (also helps detect if the user has aborted the training process)
static OptionProgressDialog progressDialog;
//The maximum number of fingerprints we want to record (for a ballpark figure, assume approx. 1 fingerprint/second on current Android devices)
static final int MAXPRINTS = CommonConstants.NUMBER_OF_SCANS;
//Asynchronous task (thread). We capture the initialization so we can control it after it's started
//AsyncTask<Integer, String, Hashtable<String, List<Integer>>> task = new RecordFingerprints();
//Root directory of the phone's SD card. We delve into subfolders from here
static final File PATH = Environment.getExternalStorageDirectory();
private List<WifiFingerPrint> prints;
private BroadcastReceiver receiverDbAction;
private long timestamp;
private ProgressDialog exportProgressDialog;
    /**
     * Builds the calibration screen: toolbar, navigation drawer, the floor
     * map view, Wi-Fi/power managers, the recording progress dialog and
     * the broadcast receiver for DB/export service responses.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_calibration);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        setTitle(R.string.title_activity_calibration);
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
                this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close);
        drawer.setDrawerListener(toggle);
        toggle.syncState();
        NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view);
        navigationView.setNavigationItemSelectedListener(this);
        // Map view showing the currently selected floor (starts on floor 1).
        imageView = (CustomImageView) findViewById(R.id.imageView);
        int floorNumber = 1;
        String[] arr = new String[1];
        arr[0] = String.valueOf(floorNumber);   // NOTE(review): arr is never used afterwards
        imageView.setImageViewDrawer(new PositionMapDrawer(floorNumber, imageView));
        //Initializations
        mainWifi = (WifiManager) getSystemService(Context.WIFI_SERVICE);
        PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
        // Wake lock intended to keep the screen (dimmed) on during recording.
        // NOTE(review): it is never acquire()d in the visible code.
        wakeLock = pm.newWakeLock(
                PowerManager.SCREEN_DIM_WAKE_LOCK, "My wakelock");
        receiverWifi = new WifiReceiver();
        setupProgressDialog();
        fingerprint = new StringBuilder();
        prints = new ArrayList<>();
        // Listen for responses broadcast by the DB/export services.
        IntentFilter filter = new IntentFilter(CommonConstants.ACTION_RESP);
        filter.addCategory(Intent.CATEGORY_DEFAULT);
        receiverDbAction = new ResponseReceiver();
        registerReceiver(receiverDbAction, filter);
    }
    /**
     * Creates the recording progress dialog shown while fingerprints are
     * being captured.  The dialog offers three actions: Save (persist the
     * collected prints), View (open the list screen) and Dismiss; closing
     * it in any way discards the in-memory print buffer.
     */
    private void setupProgressDialog(){
        progressDialog= new OptionProgressDialog(Calibration.this);
        progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
        progressDialog.setCancelable(true);
        progressDialog.setTitle("RECORDING");
        progressDialog.setMax(MAXPRINTS);
        progressDialog.setButton(ProgressDialog.BUTTON_POSITIVE, "Save", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                // Toast.makeText(CalibrationActivity.this, "Saving " + prints.size() + " fingerprints...", Toast.LENGTH_SHORT).show();
                savePrints();
            }
        });
        progressDialog.setButton(ProgressDialog.BUTTON_NEUTRAL, "View", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                progressDialog.dismiss();
                // show the freshly recorded (not yet saved) prints
                startListActivity(prints, true);
            }
        });
        progressDialog.setButton(ProgressDialog.BUTTON_NEGATIVE, "Dismiss", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                progressDialog.dismiss();
            }
        });
        progressDialog.setOnDismissListener(new DialogInterface.OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface dialog) {
                // Dropping the buffer here means dismissing always discards
                // unsaved prints (Save posts them to a service first).
                prints = null;
            }
        });
    }
private void startListActivity(List<WifiFingerPrint> fingerPrints, boolean newRecords){
Intent intent = new Intent(Calibration.this, ScanResults.class);
if(fingerPrints != null){
intent.putExtra(CommonConstants.FINGERPRINT_KEY, (Serializable) fingerPrints);
}
intent.putExtra(CommonConstants.NEW_RECORDS, newRecords);
startActivity(intent);
}
/* public void saveRecord(View v){
if(imageView != null && ((PositionMapDrawer) imageView.getDrawer()).getLastChosenPointImgCoords() != null){
PointF point = ((PositionMapDrawer) imageView.getDrawer()).getLastChosenPointImgCoords();
Log.d(TAG, "last point: " + point.toString());
WifiFingerPrint fp = new WifiFingerPrint(point.x, point.y, ((PositionMapDrawer) imageView.getDrawer()).getFloorNumber(), null, null, null);
fp.save();
}
}*/
private void savePrints(){
/*Log.d(TAG, "saving prints...");
if(prints != null){
Intent intent = new Intent(this, DbService.class);
intent.putExtra(CommonConstants.FINGERPRINT_KEY, (Serializable) prints);
intent.setAction(DbService.ACTION_SAVE_ALL);
this.startService(intent);
}else{
Toast.makeText(this, "No prints to save", Toast.LENGTH_SHORT).show();
}*/
Utils.saveAll(prints, this, "No prints to save");
/*if(prints != null){
for( WifiFingerPrint print : prints ){
print.save();
}
}
prints = null;*/
}
    /**
     * Toggles drawing of the stored fingerprints for the currently shown
     * floor on the map; warns when the floor has none recorded yet.
     */
    public void toggleShowFingerPrints(){
        if(imageView != null){
            int floorNumber = ((PositionMapDrawer) imageView.getDrawer()).getFloorNumber();
            //List<WifiFingerPrint> prints = WifiFingerPrint.find(WifiFingerPrint.class, "z= ?", String.valueOf(floorNumber));
            // One entry per distinct location rather than per raw scan row.
            List<WifiFingerPrint> prints = DbService.findPrintsGrouppedByLocation(floorNumber);
            if(prints == null || prints.isEmpty()){
                Toast.makeText(this, "No fingerprints recorded yet for this floor!", Toast.LENGTH_LONG).show();
            }
            ((PositionMapDrawer) imageView.getDrawer()).toggleShowFingerPrints(prints, imageView);
        }
    }
public void startScan(View view){
if( ((PositionMapDrawer) imageView.getDrawer()).getLastChosenPointImgCoords() != null ){
registerReceiver(receiverWifi, new IntentFilter(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION));
mainWifi.startScan();
timestamp = System.currentTimeMillis();
progressDialog.show();
}else{
Toast.makeText(this, "Please choose your position first by clicking the map", Toast.LENGTH_SHORT).show();
}
/* //Task has been initialized but not run a single time yet
if(task.getStatus()== AsyncTask.Status.PENDING){
//Show the progress dialog
progressDialog.setTitle("TRAINING CELL ");
progressDialog.show();
//Start the recording
task.execute(0);
}
//Task has been allowed to finish
if(task.getStatus()== AsyncTask.Status.FINISHED){
//Re-initialize
task = new RecordFingerprints();
progressDialog.setTitle("TRAINING");
progressDialog.show();
task.execute(0);
}*/
}
private void removeReceiver(BroadcastReceiver receiver){
try{
unregisterReceiver(receiver);
}catch (Exception e){
Log.e(TAG, "tried to unregister receiver again");
}
}
/* private void listAll(){
// List<WifiFingerPrint> allPrints = WifiFingerPrint.findAll(WifiFingerPrint.class);
//List<WifiFingerPrint> allPrints = WifiFingerPrint.find(WifiFingerPrint.class, null, null);
List<WifiFingerPrint> allPrints =
WifiFingerPrint.findWithQuery(
WifiFingerPrint.class,
CommonConstants.QUERY_LIMIT_PRINTS,
String.valueOf(CommonConstants.DEFAULT_LIMIT),
"0");
startListActivity(allPrints, false);
}*/
    /**
     * Stops listening for Wi-Fi scan results and DB-service broadcasts
     * while the activity is not in the foreground.
     */
    @Override
    protected void onPause() {
        removeReceiver(receiverWifi);
        removeReceiver(receiverDbAction);
        super.onPause();
    }
    @Override
    protected void onResume() {
        // NOTE(review): receiverDbAction is unregistered in onPause() but is
        // only registered in onCreate(), so after the first pause/resume
        // cycle DB-service broadcasts are no longer received.  Consider
        // moving the registration here (and out of onCreate()).
        super.onResume();
    }
@Override
public void onBackPressed() {
DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
if (drawer.isDrawerOpen(GravityCompat.START)) {
drawer.closeDrawer(GravityCompat.START);
} else {
super.onBackPressed();
}
}
    /** Inflates the calibration action-bar menu. */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.calibration, menu);
        return true;
    }
    /**
     * Dispatches action-bar selections: settings (no-op yet), toggling the
     * fingerprint overlay, delete-all confirmation, the list screen and a
     * full data export with a dismissible progress dialog.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }/*else if(id == R.id.action_save){
            // saveRecord(null);
        }*/else if(id == R.id.action_show_all){
            toggleShowFingerPrints();
        }else if(id == R.id.action_delete_all){
            // ask for confirmation before wiping all fingerprints
            AlertDialog diaBox = ConfirmDelete();
            diaBox.show();
        }else if(id == R.id.action_list_all){
            startListActivity(null, false);
        }else if( id == R.id.action_export_all_data){
            String fileName = "prints.txt";
            // export runs in DataExportService; the dialog is informational only
            DataExportService.exportAllData(fileName, this);
            exportProgressDialog = new ProgressDialog(this);
            exportProgressDialog.setIndeterminate(true);
            exportProgressDialog.setMessage("Exporting all fingerprints into " + fileName + "... " +
                    "Feel free to dismiss this window, export will continue in the background."
            );
            exportProgressDialog.setTitle("Exporting");
            exportProgressDialog.show();
        }
        return super.onOptionsItemSelected(item);
    }
    /**
     * Navigation-drawer selections: switch the map drawer to the chosen
     * floor (basement = 0 .. fourth floor = 4), then close the drawer.
     */
    @SuppressWarnings("StatementWithEmptyBody")
    @Override
    public boolean onNavigationItemSelected(MenuItem item) {
        // Handle navigation view item clicks here.
        int id = item.getItemId();
        if (id == R.id.basement) {
            ((PositionMapDrawer) imageView.getDrawer()).setFloorNumber(imageView, 0);
        } else if (id == R.id.firstFloor) {
            ((PositionMapDrawer) imageView.getDrawer()).setFloorNumber(imageView, 1);
        }else if (id == R.id.secondFloor) {
            ((PositionMapDrawer) imageView.getDrawer()).setFloorNumber(imageView, 2);
        } else if (id == R.id.thirdFloor) {
            ((PositionMapDrawer) imageView.getDrawer()).setFloorNumber(imageView, 3);
        } else if (id == R.id.fourthFloor) {
            ((PositionMapDrawer) imageView.getDrawer()).setFloorNumber(imageView, 4);
        }
        DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout);
        drawer.closeDrawer(GravityCompat.START);
        return true;
    }
private AlertDialog ConfirmDelete()
{
AlertDialog myQuittingDialogBox =new AlertDialog.Builder(this)
//set message, title, and icon
.setTitle("Delete")
.setMessage("Do you want to delete all fingerprints?")
.setIcon(R.drawable.ic_delete_white_24dp)
.setPositiveButton("Delete", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
WifiFingerPrint.deleteAll(WifiFingerPrint.class);
dialog.dismiss();
}
})
.setNegativeButton("cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.create();
return myQuittingDialogBox;
}
class WifiReceiver extends BroadcastReceiver {
public final String TAG = WifiReceiver.class.getSimpleName();
/*
What to do when our BroadcastReceiver (or in this case, the WifiReceiver that implements it) returns its result
*/
public void onReceive(Context c, Intent intent) {
Log.d("FINGER","Scan received");
if(prints == null){
prints = new ArrayList<>();
}
if(progressDialog.getProgress()<progressDialog.getMax()){
wifiList = mainWifi.getScanResults();
rssi = new StringBuilder();
macs = new StringBuilder();
for(int j=0;j<wifiList.size();j++){
PointF point = ((PositionMapDrawer) imageView.getDrawer()).getLastChosenPointImgCoords();
WifiFingerPrint fp = new WifiFingerPrint(
point.x,
point.y,
((PositionMapDrawer) imageView.getDrawer()).getFloorNumber(),
wifiList.get(j).level,
wifiList.get(j).BSSID,
wifiList.get(j).SSID,
timestamp);
prints.add(fp);
macs.append(wifiList.get(j).BSSID);
if(j<wifiList.size()-1){
macs.append(",");
}
rssi.append(wifiList.get(j).level);
if(j<wifiList.size()-1){
rssi.append(",");
}
}
fingerprint.append(macs);
fingerprint.append("\n");
fingerprint.append(rssi);
fingerprint.append("\n");
progressDialog.incrementProgressBy(1);
mainWifi.startScan();
timestamp = System.currentTimeMillis();
Log.d("FINGER", "Scan initiated");
Log.d(TAG, "progress: " + progressDialog.getProgress());
}else{
// progressDialog.incrementProgressBy(-MAXPRINTS);
removeReceiver(receiverWifi);
}
}
}
public class ResponseReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if(exportProgressDialog != null){
exportProgressDialog.dismiss();
}
String response = intent.getStringExtra(CommonConstants.SERVICE_RESPONSE_KEY);
if(response.equals(DbService.RESPONSE_SUCCESS)){
Toast.makeText(Calibration.this, "Prints saved...", Toast.LENGTH_SHORT).show();
}else if(response.equals(DbService.RESPONSE_FAILURE)){
Toast.makeText(Calibration.this, response, Toast.LENGTH_SHORT).show();
}else if(response.equals(DataExportService.RESPONSE_SUCCESS)){
Toast.makeText(Calibration.this, "Data exported", Toast.LENGTH_SHORT).show();
}else if(response.equals(DataExportService.RESPONSE_FAILURE)){
String failure = intent.getStringExtra(DataExportService.ERROR_MESSAGE_KEY);
Toast.makeText(Calibration.this, failure, Toast.LENGTH_SHORT).show();
}
}
}
/*
//Asynchronous task runs in background so we don't make the UI wait
private class RecordFingerprints extends AsyncTask<Integer, String, Hashtable<String,List<Integer>>> {
boolean running = true;
protected Hashtable<String,List<Integer>> doInBackground(Integer... params) {
progressDialog.setProgress(0);
mainWifi.startScan();
fingerprint = new StringBuilder();
wakeLock.acquire();
while(running){
//Store the recorded fingerprint in a file named after the cell in which it was recorded
if(!progressDialog.isShowing() || progressDialog.getProgress()>=progressDialog.getMax()){
File file = new File(PATH, "/fingerprints/"+params[0]+".txt");
try{
OutputStream os = new FileOutputStream(file,false);
os.write(fingerprint.toString().getBytes());
os.close();
}
catch(Exception e){Log.d("HELP","Need somebody");}
progressDialog.dismiss();
return null;
}
else{
publishProgress("");
}
Thread.currentThread();
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
wakeLock.release();
return null;
}
protected void onProgressUpdate(String... progress) {
//progressDialog.incrementProgressBy(1);
}
@SuppressWarnings("unused")
protected void onPostExecute(ArrayList<Integer> result) {
running = true;
}
@Override
protected void onCancelled() {
progressDialog.dismiss();
running = false;
return;
}
}*/
}
<file_sep>package com.course.localization.exactumpositioner.activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.AbsListView;
import android.widget.Button;
import android.widget.ExpandableListView;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.course.localization.exactumpositioner.CommonConstants;
import com.course.localization.exactumpositioner.DbService;
import com.course.localization.exactumpositioner.ExpandableListAdapter;
import com.course.localization.exactumpositioner.FreshDataHolder;
import com.course.localization.exactumpositioner.R;
import com.course.localization.exactumpositioner.Title;
import com.course.localization.exactumpositioner.Utils;
import com.course.localization.exactumpositioner.domain.WifiFingerPrint;
import java.lang.reflect.Array;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.ArrayList;
public class ScanResults extends AppCompatActivity {
public static final String TAG = ScanResults.class.getSimpleName();
private List<WifiFingerPrint> prints;
private Snackbar bar;
private BroadcastReceiver receiver;
private ExpandableListView listView;
private int offSet;
private int limit;
private boolean newRecords;
private ArrayList<Title> titles;
private ArrayList<ArrayList<WifiFingerPrint>> childItems;
private ExpandableListAdapter listAdapter;
private Button btnLoadMore;
private AsyncTask updateDataTask;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_view_finger_prints);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
newRecords = getIntent().getBooleanExtra(CommonConstants.NEW_RECORDS, false);
titles = new ArrayList<>();
childItems = new ArrayList<>();
prints = (ArrayList<WifiFingerPrint>) getIntent().getSerializableExtra(CommonConstants.FINGERPRINT_KEY); //todo change this to user the fresh data holder class
if( prints != null ){
Collections.sort(prints, new Comparator<WifiFingerPrint>() {
@Override
public int compare(WifiFingerPrint o1, WifiFingerPrint o2) {
return o1.getTimeStamp().compareTo(o2.getTimeStamp());
}
});
updateDataTask = new UpdateDataTask();
updateDataTask.execute(new ArrayList[] {new ArrayList<>(prints)});
//updateData(prints);
}else{
prints = new ArrayList<>();
}
listAdapter = new ExpandableListAdapter(titles, childItems, this);
listView = ((ExpandableListView) findViewById(R.id.fingerprintList));
listView.setAdapter(listAdapter);
listView.setClickable(true);
listView.expandGroup(0);
findViewById(R.id.loadingLayout).bringToFront(); //make sure the view is not hidden behind the load more button
if( !newRecords ){
offSet = 0;
limit = CommonConstants.DEFAULT_LIMIT;
// Creating a button - Load More
btnLoadMore = new Button(this);
btnLoadMore.setText(getResources().getString(R.string.load_more));
btnLoadMore.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
loadMore(v);
}
});
listView.addFooterView(btnLoadMore);
loadMore(null);
fab.hide();
//setTitle((offSet + limit) + " latest prints");
}else {
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
bar = Snackbar.make(view, "Would you like to save the fingerprints?", Snackbar.LENGTH_LONG)
.setAction("Save", new View.OnClickListener() {
@Override
public void onClick(View v) {
bar.dismiss();
savePrints();
}
});
bar.setActionTextColor(ContextCompat.getColor(ScanResults.this, android.R.color.holo_green_light));
bar.show();
}
});
}
IntentFilter filter = new IntentFilter(CommonConstants.ACTION_RESP);
filter.addCategory(Intent.CATEGORY_DEFAULT);
receiver = new ResponseReceiver();
registerReceiver(receiver, filter);
if( getSupportActionBar() != null){
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
}
public void updateData(List<WifiFingerPrint> newPrints){
if(titles == null){
titles = new ArrayList<>();
}
if(childItems == null){
childItems = new ArrayList<>();
}
Long currentTimeStamp = null;
for(WifiFingerPrint print: newPrints){
if( !print.getTimeStamp().equals(currentTimeStamp) ){
//titles.add(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(print.getTimeStamp())));
childItems.add(new ArrayList<WifiFingerPrint>());
currentTimeStamp = print.getTimeStamp();
}
ArrayList<WifiFingerPrint> prints = childItems.get(childItems.size()-1);
prints.add(print);
}
}
private void showLoadingState(){
if(btnLoadMore != null ){
btnLoadMore.setEnabled(false);
}
findViewById(R.id.loadingLayout).setVisibility(View.VISIBLE);
}
private void showNormalState(){
if( btnLoadMore != null){
btnLoadMore.setEnabled(true);
}
LinearLayout layout = (LinearLayout) findViewById(R.id.loadingLayout);
layout.setVisibility(View.INVISIBLE);
}
private void savePrints(){
Utils.saveAll(prints, this, "it seems that saving prints is in progress" );
}
private void removeReceiver(){
try{
unregisterReceiver(receiver);
}catch (Exception e){
Log.e(TAG, "tried to unregister receiver again");
}
}
@Override
protected void onPause() {
if(updateDataTask != null ){
updateDataTask.cancel(true);
}
removeReceiver();
super.onPause();
}
public void loadMore(View view) {
if( !prints.isEmpty() ){
offSet = offSet + limit;
}
showLoadingState();
DbService.findPrintsOrderByTimeStamp(limit, offSet, ScanResults.this);
}
public class ResponseReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String actionPerformed = intent.getStringExtra(CommonConstants.SERVICE_ACTION_PERFORMED);
if( actionPerformed.equals(DbService.ACTION_LOAD_ORDER_BY_TIMESTAMP) ){
//ArrayList<WifiFingerPrint> nextPrints = (ArrayList<WifiFingerPrint>) intent.getSerializableExtra(CommonConstants.FINGERPRINT_KEY);
List<WifiFingerPrint> nextPrints = FreshDataHolder.getInstance().getLatestFetchedPrints();
prints.addAll(nextPrints);
updateDataTask = new UpdateDataTask();
List<WifiFingerPrint>[] arr = new ArrayList[1];
arr[0] = nextPrints;
updateDataTask.execute(arr);
}else{
String response = intent.getStringExtra(CommonConstants.SERVICE_RESPONSE_KEY);
if(response.equals(DbService.RESPONSE_SUCCESS)){
Toast.makeText(ScanResults.this, "Prints saved...", Toast.LENGTH_SHORT).show();
}else{
Toast.makeText(ScanResults.this, response, Toast.LENGTH_SHORT).show();
}
onBackPressed();
}
}
}
private class UpdateDataTask extends AsyncTask<List<WifiFingerPrint>, String, Integer> {
List<WifiFingerPrint> newPrints;
@Override
protected Integer doInBackground(List<WifiFingerPrint>... params) {
newPrints = params[0];
if(titles == null){
titles = new ArrayList<>();
}
if(childItems == null){
childItems = new ArrayList<>();
}
Long currentTimeStamp = null;
Float currentX = null;
Float currentY = null;
Float currentZ = null;
for(WifiFingerPrint print: newPrints){
if( isCancelled() ){
Log.d(TAG, "async task cancelled");
break;
}
if( !print.getTimeStamp().equals( currentTimeStamp )
|| (currentX == null || currentX != print.getX())
|| (currentY == null || currentY != print.getY())
|| (currentZ == null || currentZ != print.getZ())
){
Title title = new Title(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(print.getTimeStamp())),
String.valueOf(print.getX()), String.valueOf(print.getY()), String.valueOf((int) print.getZ()));
titles.add(title);
childItems.add(new ArrayList<WifiFingerPrint>());
currentTimeStamp = print.getTimeStamp();
currentX = print.getX();
currentY = print.getY();
currentZ = print.getZ();
}
ArrayList<WifiFingerPrint> prints = childItems.get(childItems.size()-1);
prints.add(print);
}
return newPrints.size();
}
protected void onPostExecute(Integer numberofNewPrints){
if(numberofNewPrints == limit){
setTitle((offSet + limit) + " latest prints");
}else{
setTitle((offSet + numberofNewPrints) + " latest prints");
}
listAdapter.notifyDataSetChanged();
showNormalState();
updateDataTask = null;
}
}
}
<file_sep>package com.course.localization.exactumpositioner;
import android.app.Activity;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PointF;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.widget.ImageView;
import android.widget.TextView;
import com.course.localization.exactumpositioner.domain.WifiFingerPrint;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by Pete on 23.11.2015.
*/
public class PositionMapDrawer implements ImageViewDrawer{
public static final String TAG = PositionMapDrawer.class.getSimpleName();
private static final int CIRCLE_RADIUS = 20;
private List<WifiFingerPrint> fingerPrints;
private boolean showFingerPrints;
private int floorNumber;
private static final Map<Integer, Integer> floorPlans;
static {
Map<Integer, Integer> aMap = new HashMap<>();
aMap.put(0, R.drawable.basement_0_exactum);
aMap.put(1, R.drawable.floor_1_exactum);
aMap.put(2, R.drawable.floor_2_exactum);
aMap.put(3, R.drawable.floor_3_exactum);
aMap.put(4, R.drawable.floor_4_exactum);
floorPlans = Collections.unmodifiableMap(aMap);
}
private PointF lastChosenPointImgCoords;
public PositionMapDrawer( int floorNumber, ImageView view){
// this.fingerPrints = fingerPrints;
setFloorNumber(view, floorNumber);
}
public void onDraw(Canvas canvas, CustomImageView view) {
if(fingerPrints != null && !fingerPrints.isEmpty() && showFingerPrints){
for (WifiFingerPrint fingerPrint : fingerPrints){
PointF point = imageCoordsToScreenCoords(new PointF(fingerPrint.getX(), fingerPrint.getY()), view);
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.RED);
canvas.drawCircle(point.x, point.y, CIRCLE_RADIUS, paint);
}
lastChosenPointImgCoords = null;
}else{
if(isPointInImage(view.getLastPoint(), view)){
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.RED);
canvas.drawCircle(view.getLastPoint().x, view.getLastPoint().y, CIRCLE_RADIUS, paint);
PointF point = view.translateCoordinatesBack(view.getLastPoint());
TextView xView = (TextView) ((Activity) view.getContext()).findViewById(R.id.xCoordinate);
xView.setText(view.getContext().getResources().getString(R.string.xCoordinateLabelBase) + " " + point.x);
TextView yView = (TextView) ((Activity) view.getContext()).findViewById(R.id.yCoordinate);
yView.setText(view.getContext().getResources().getString(R.string.yCoordinateLabelBase) + " " + point.y);
lastChosenPointImgCoords = point;
}else{
lastChosenPointImgCoords = null;
}
}
}
public PointF imageCoordsToScreenCoords(PointF point, CustomImageView view){
float[] pts = {point.x, point.y};
Matrix matrix = view.getImageMatrix();
matrix.mapPoints(pts);
return new PointF(pts[0], pts[1]);
}
public boolean isPointInImage(PointF point, CustomImageView view){
int width = view.getDrawable().getIntrinsicWidth();
int height = view.getDrawable().getIntrinsicHeight();
point = view.translateCoordinatesBack(point);
return point.x >= 0 && point.y >= 0 && point.x <= width && point.y <= height;
}
public void toggleShowFingerPrints(List<WifiFingerPrint> fingerPrints, ImageView view){
this.fingerPrints = fingerPrints;
this.showFingerPrints = !showFingerPrints;
view.invalidate();
}
public void setFloorNumber(ImageView view, int floorNumber){
if(floorPlans.get(floorNumber) != null){
this.floorNumber = floorNumber;
this.fingerPrints = DbService.findPrintsGrouppedByLocation(floorNumber);
//this.fingerPrints = WifiFingerPrint.find(WifiFingerPrint.class, "z= ?", String.valueOf(floorNumber));
TextView header = (TextView) ((Activity) view.getContext()).findViewById(R.id.floorNumberTitle);
header.setText(view.getContext().getResources().getString(R.string.floor_base) + floorNumber);
view.setImageDrawable(ContextCompat.getDrawable(view.getContext(), floorPlans.get(floorNumber)));
}
}
public int getFloorNumber(){
return floorNumber;
}
public PointF getLastChosenPointImgCoords(){
return lastChosenPointImgCoords;
}
}
<file_sep>package com.course.localization.exactumpositioner;
import android.app.ProgressDialog;
import android.content.Context;
import android.widget.Button;
/**
* Created by Pete on 25.11.2015.
*/
public class OptionProgressDialog extends ProgressDialog {
public OptionProgressDialog(Context context) {
super(context);
}
public OptionProgressDialog(Context context, int theme) {
super(context, theme);
}
@Override
public void show(){
super.show();
hideButtons();
}
private void showButtons(){
getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(Button.VISIBLE);
getButton(ProgressDialog.BUTTON_NEUTRAL).setVisibility(Button.VISIBLE);
}
private void hideButtons(){
getButton(ProgressDialog.BUTTON_POSITIVE).setVisibility(Button.INVISIBLE);
getButton(ProgressDialog.BUTTON_NEUTRAL).setVisibility(Button.INVISIBLE);
}
@Override
public void incrementProgressBy(int num){
if( getProgress() + num == getMax()){
showButtons();
}
super.incrementProgressBy(num);
}
@Override
public void dismiss(){
incrementProgressBy(-1 * getProgress());
hideButtons();
super.dismiss();
}
}
<file_sep># exactumPositioner
<file_sep>package com.course.localization.exactumpositioner;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseExpandableListAdapter;
import android.widget.CheckedTextView;
import android.widget.ImageButton;
import android.widget.TextView;
import com.course.localization.exactumpositioner.domain.WifiFingerPrint;
import java.util.ArrayList;
/**
* Created by Pete on 13.12.2015.
*/
public class ExpandableListAdapter extends BaseExpandableListAdapter {
public static final String TAG = ExpandableListAdapter.class.getSimpleName();
private ArrayList<Title> titles;
private ArrayList<ArrayList<WifiFingerPrint>> childItems;
private Context context;
public ExpandableListAdapter(ArrayList<Title> titles, ArrayList<ArrayList<WifiFingerPrint>> childItems, Context context){
this.titles = titles;
this.childItems = childItems;
this.context = context;
}
@Override
public int getGroupCount() {
return titles.size();
}
@Override
public int getChildrenCount(int groupPosition) {
if( childItems.size() == 0){
return 0;
}
return childItems.get(groupPosition).size();
}
@Override
public Object getGroup(int groupPosition) {
return null;
}
@Override
public Object getChild(int groupPosition, int childPosition) {
return null;
}
@Override
public long getGroupId(int groupPosition) {
return 0;
}
@Override
public long getChildId(int groupPosition, int childPosition) {
return 0;
}
@Override
public boolean hasStableIds() {
return false;
}
@Override
public View getGroupView(final int groupPosition, boolean isExpanded, View convertView, ViewGroup parent) {
if( convertView == null){
LayoutInflater inflater = (LayoutInflater) context
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
convertView = inflater.inflate(R.layout.group, null);
}
ImageButton btn = (ImageButton) convertView.findViewById(R.id.discardBtn);
btn.setFocusable(false);
btn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Log.d(TAG, "group with time " + titles.get(groupPosition) + " clicked!");
}
});
CheckedTextView tw = (CheckedTextView) convertView.findViewById(R.id.checkedTW);
Title title = titles.get(groupPosition);
tw.setText(title.getDate());
tw.setChecked(isExpanded);
TextView coordsView = (TextView) convertView.findViewById(R.id.coordsView);
coordsView.setText("Floor: " + title.getZ() + ". Position [" + title.getX() + ", " + title.getY() + "]");
return convertView;
}
@Override
public View getChildView(int groupPosition, int childPosition, boolean isLastChild, View convertView, ViewGroup parent) {
ArrayList<WifiFingerPrint> prints = childItems.get(groupPosition);
ChildViewHolder holder;
TextView tw;
if( convertView == null){
LayoutInflater inflater = (LayoutInflater) context
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
convertView = inflater.inflate(R.layout.list_item, null);
holder = new ChildViewHolder();
holder.networkName = (TextView) convertView.findViewById(R.id.networkNameTW);
holder.mac = (TextView) convertView.findViewById(R.id.macTW);
holder.rssi = (TextView) convertView.findViewById(R.id.rssiTW);
convertView.setTag(holder);
}else{
holder = (ChildViewHolder) convertView.getTag();
}
holder.networkName.setText(prints.get(childPosition).getNetworkName());
holder.mac.setText(prints.get(childPosition).getMac());
holder.rssi.setText(String.valueOf(prints.get(childPosition).getRssi()));
return convertView;
}
private static class ChildViewHolder{
public TextView networkName;
public TextView mac;
public TextView rssi;
}
@Override
public boolean isChildSelectable(int groupPosition, int childPosition) {
return true;
}
@Override
public boolean areAllItemsEnabled() {
return true;
}
}
<file_sep>package com.course.localization.exactumpositioner.domain;
import com.orm.SugarRecord;
import java.io.Serializable;
/**
* Created by Pete on 22.11.2015.
*/
public class WifiFingerPrint extends SugarRecord<WifiFingerPrint> implements Serializable{
private Float x;
private Float y;
private Float z;
private int rssi;
private String mac;
private String networkName;
private Long timeStamp;
public WifiFingerPrint(){
}
public WifiFingerPrint(float x, float y, float z, Integer rssi, String mac, String networkName, Long timeStamp){
this.x = x;
this.y = y;
this.z = z;
this.rssi = rssi;
this.mac = mac;
this.networkName = networkName;
this.timeStamp = timeStamp;
}
public float getY() {
return y;
}
public float getX() {
return x;
}
public float getZ() {
return z;
}
public int getRssi() {
return rssi;
}
public String getMac() {
return mac;
}
public String getNetworkName() {
return networkName;
}
public Long getTimeStamp() {
return timeStamp;
}
@Override
public String toString(){
String ret = networkName + ": (" + x + ", " + y + ", " + z + "), " + rssi + ", " + mac;
return ret;
}
/* @Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
WifiFingerPrint that = (WifiFingerPrint) o;
if (Float.compare(that.x, x) != 0) return false;
if (Float.compare(that.y, y) != 0) return false;
if (Float.compare(that.z, z) != 0) return false;
if (rssi != that.rssi) return false;
if (mac != null ? !mac.equals(that.mac) : that.mac != null) return false;
return !(networkName != null ? !networkName.equals(that.networkName) : that.networkName != null);
}
@Override
public int hashCode() {
int result = (x != +0.0f ? Float.floatToIntBits(x) : 0);
result = 31 * result + (y != +0.0f ? Float.floatToIntBits(y) : 0);
result = 31 * result + (z != +0.0f ? Float.floatToIntBits(z) : 0);
result = 31 * result + rssi;
result = 31 * result + (mac != null ? mac.hashCode() : 0);
result = 31 * result + (networkName != null ? networkName.hashCode() : 0);
return result;
}*/
}
| 29a4af37fa5c2fd714aa39ef2f1fa443003f0907 | [
"Markdown",
"Java"
] | 8 | Java | pcmakine/exactumPositioner | 7dfeb430373b62d6579dfc1e81b8e2323cd56d22 | 067acae311955e322ac2d112a70b95a8dadfc2c4 |
refs/heads/master | <repo_name>xiaopangzhi795/zlblog<file_sep>/src/main/webapp/js/system/yaodian/checkList.js
// Grid instance for the proofreading list; assigned in the ready handler below.
var listGrid;
var mainTab = frameElement.tab;// tab object of the parent frame (used to open/close tab pages)
/*
 * Page bootstrap: build the medicine proofreading list grid and wire the
 * search/reset toolbar buttons. Runs once the DOM is ready.
 */
$(function() {
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 100 } ,
            { display: '类别', name: 'type', width: 150 },
            { display: '编码', name: 'code', width: 150 },
            // Chinese name renders as a link that opens the property-check tab.
            { display: '中文名', name: 'cnFormatName', minWidth: 200, render:function(rowData,index,data){
                return '<a href="javascript:showProp('+rowData.id + ',\'' + rowData.cnFormatName + '\',\'' + rowData.code +'\')" title="查看药品属性">' + data + '</a>';
            }},
            { display: '拼音', name: 'pinYin', width: 200, },
            { display: '英文名', name: 'enName', width: 100, },
            { display: '指派校对人', name: 'responsePersonName', width: 100, },
            // Check status: null = not checked yet, 1 = done, anything else = in progress.
            { display: '状态', name: 'checkStatus', width: 100,render:function(rowData,index,data){
                if(data == null){
                    return '未校对';
                }else if(data == 1){
                    return '校对完成';
                }else{
                    return '校对中';
                }
            } },
            { display: '校对人', name: 'checkPersonName', width: 100, },
            // Check time arrives as a millisecond timestamp; blank when not checked.
            { display: '校对时间', name: 'checkTime', width: 100, render: function(rowdata,index,data) {
                if(data != null) {
                    return new Date(data).format("yyyy-MM-dd hh:mm:ss");
                }else{
                    return '';
                }
            }},
            { display: '页码', name: 'bookStartPage', width: 50,},
            // NOTE(review): this column reuses name 'bookStartPage' (duplicate of the
            // page-number column above). The render only reads rowData so display works,
            // but sorting/exporting this column targets bookStartPage — confirm intended.
            { display: 'PDF', name: 'bookStartPage', width: 100, render:function(rowData,index,data){
                var pdflink = rootPath + '/yaodian/getPdf.shtml?bookStart=' + rowData.bookStartPage + '&bookEnd=' + rowData.bookEndPage;
                return '<a href="javascript:showPdf(\''+rowData.cnFormatName + '\',\''+pdflink+'\')" >查看PDF</a>';
            }},
            // Raw description: show the first 50 chars, linked to the full-text dialog.
            { display: '原始描述', name: 'medicineText', minWidth: 100, render: function (rowData,index,data) {
                return '<a href="javascript:showDetail('+rowData.id+')" title="查看完整原始描述">' + data.substring(0,50) + '...' + '</a>';
            }},
        ],
        url: rootPath + '/yaodian/findByPageForCheck.shtml',
        sortName: 'pdfStartPage',
        sortOrder: 'asc',
        fixedCellHeight: true,
    });
    $("#pageloading").hide();
    // Fix: the original used $(el).click("click", handler), which passes the string
    // "click" as jQuery event *data* (the .click(data, handler) overload) instead of
    // binding by event type; .on("click", handler) is the intended, equivalent
    // binding — event.data was never read by the handlers.
    $("#searchButton").on("click", function(event) {
        event.preventDefault();
        search();
    });
    $("#resetButton").on("click", function(event) {
        event.preventDefault();
        reset();
    });
});
/**
 * Serialize the search form and ask the grid to re-query with those params.
 */
function search(){
    listGrid.search($("#searchForm").serializeJson());
}
/**
 * Clear the search form, drop the grid's stored params and reload everything.
 */
function reset(){
    var searchForm = $("#searchForm");
    searchForm[0].reset();
    listGrid.clearParm();
    listGrid.reloadAll();
}
/**
 * Open a dialog showing the full original description of a medicine.
 * @param id primary key of the medicine record
 */
function showDetail(id){
    var dialogOptions = {
        title: '详情',
        width: 500,
        height: 600,
        url: rootPath + '/yaodian/getDetail.shtml',
        urlParms: {id: id}
    };
    $.ligerDialog.open(dialogOptions);
}
/**
 * Open a dialog that embeds the PDF page(s) for the given medicine.
 * @param name    dialog title (medicine name)
 * @param pdfLink URL of the PDF resource to load
 */
function showPdf(name,pdfLink){
    var dialogOptions = {
        title: name,
        url: pdfLink,
        width: 1048,
        height: 800
    };
    $.ligerDialog.open(dialogOptions);
}
// Open (or re-open) the proofreading tab for one medicine's properties.
function showProp(id,name,code){
    var tabid = "yaoDianCheckDetail";
    var options = {
        tabid: tabid,
        text: '校对-'+name,
        url: rootPath + '/yaodian/medicineCheck.shtml?id=' + id + '&code=' + code,
        callback: function () {
            // Callback after the tab page is added; custom logic that fills
            // the tab with content can be placed here.
        }
    };
    // Re-use a single check tab: close any existing one before opening the new one.
    if(mainTab.isTabItemExist(tabid)){
        mainTab.removeTabItem(tabid);
    }
    mainTab.addTabItem(options);
}<file_sep>/src/main/webapp/js/fund/crash_apply/list.js
var listGrid; // grid instance; created in the ready handler below
// Column definitions for the withdrawal-application list grid.
// 'export_name' points at a pre-formatted server-side field used when exporting.
var columns = [
    // Document number, rendered as a link that opens the detail dialog.
    {display: '单据编号', name: 'number', width: 300, show: true,
        render: function(rowdata,index,data){
            return '<a href="#" onclick="detail('+rowdata.id+')">'+data+'</a>';
        }
    },
    // Document date arrives as a millisecond timestamp; shown as yyyy-MM-dd.
    {display: '单据日期', name: 'create_date', width: 300, show: true, export_name:'create_date_str',
        render: function(rowdata,index,data){
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    // Document state: 0 draft, 1 pending audit, 2 approved, 3 rejected.
    {display: '单据状态', name: 'state', width: 150, show: true, export_name:'state_str',
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'草稿'},{ key:1,text:'待审核'},{ key:2,text:'审核通过'},{ key:3,text:'审核不通过'}],
        render:function(rowdata,index,data){
            return {0 : '草稿', 1 : '待审核',2:'审核通过',3:'审核不通过'}[data]
        }
    },
    // Payment state: 0 unpaid, 1 paying, 2 paid.
    {display: '付款状态', name: 'payment_state', width: 150, show: true, export_name:'payment_state_str',
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'待付款'},{ key:1,text:'付款中'},{ key:2,text:'已付款'}],
        render:function(rowdata,index,data){
            return {0 : '待付款', 1 : '付款中', 2:'已付款'}[data]
        }
    },
    {display: '申请组织', name: 'apply_org', width: 300, show: true},
    {display: '部门负责人', name: 'dept_charge_user', width: 300, show: true},
    {display: '提现申请金额', name: 'apply_money', width: 300, show: true},
    {display: '审批金额', name: 'approval_money', width: 300, show: true},
    {display: '实付金额', name: 'fact_money', width: 300, show: true},
    {display: '收款户名', name: 'receipt_user', width: 300, show: true},
    {display: '收款银行类别', name: 'receipt_bank_type', width: 300, show: true},
    {display: '收款行名', name: 'receipt_bank_name', width: 300, show: true},
    {display: '收款帐号', name: 'receipt_account', width: 300, show: true},
    {display: '收款行号', name: 'receipt_bank_no', width: 300, show: true},
    {display: '制单人', name: 'create_user', width: 300, show: true},
    // Transaction type: only 1 (withdrawal application) exists here.
    {display: '业务类型', name: 'tran_type', width: 300, show: true, export_name:'tran_type_str',
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:1,text:'提现申请单'}],
        render:function(rowdata,index,data){
            return {1 : '提现申请单'}[data]
        }
    },
    {display: '所属公司', name: 'company', width: 300, show: true},
    {display: '备注', name: 'remark', width: 300, show: true},
];
var mainTab = frameElement.tab;// tab object of the parent frame
var tab_id = frameElement.id;
var mainData = frameElement.openerData; // mainData object passed in by the parent frame
/*
 * Page bootstrap: build the filterable withdrawal-application grid (with a
 * summary row, custom-column configuration and "senior search") and wire the
 * toolbar buttons. Runs once the DOM is ready.
 */
$(function () {
    listGrid = $("#gridArea").ligerFilterGrid({
        columns: columns,
        url: rootPath + '/fund_crash_apply/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
        originalColumns: columns , // keep the original column definitions
        summaryUrl: rootPath + '/common/summary.shtml', // totals (summary) url
        summaryTable: 'fund_crash_apply', // summary table name (the DB table, not the java entity)
        summaryShowRecordCount : true, // whether to show the record count
        summary: [ // fields to total
            {text:'合计提现申请金额',field:'apply_money'},
            {text:'合计审批金额',field:'approval_money'},
            {text:'合计实付金额',field:'fact_money'}
        ],
        customColumn : { // form (custom column) configuration
            listUrl : rootPath + '/fund_custom_column/list.shtml', // custom-column query url
            submitUrl: rootPath + '/fund_custom_column/submit.shtml', // custom-column submit url
            clazz: 'FundCrashApply', // unique key under which the custom columns are stored
            id: 'formManage', // custom-column plugin id
            dialogTitle: '表单配置', // dialog title
            leftId: 'listBox-left', // left list id (has a default; optional)
            leftTitle: '隐藏的列', // left list title (has a default; optional)
            rightId: 'listBox-right', // right list id (has a default; optional)
            rightTitle: '显示的列', // right list title (has a default; optional)
            callback : function(){ // called after the custom columns are saved successfully
                mainTab.reload(tab_id);
            }
        },
        seniorSearch: { // senior search; independent of ligerGrid's built-in advanced search
            id: 'seniorSearchButton', // senior-search plugin id
            searchSight: { // saved search "scene" configuration
                id: 'search_sight', // scene plugin id
                listUrl: rootPath + '/fund_search_sight/listJson.shtml', // scene list url
                submitUrl: rootPath + '/fund_search_sight/addEntity.shtml', // scene submit url
                clazz: 'FundCrashApply', // unique key under which scenes are stored
                callback: function(form){ // called after a scene is saved successfully
                    form.reset();
                    mainTab.reload(tab_id);
                }
            }
        }
    });
    $("#pageloading").hide();
    $("#search").bind("keydown", function (event) {// quick-search box: query on Enter
        //event.preventDefault();
        if(event.keyCode == "13") {
            search($(this).val());
        }
    });
    // add
    $("#add").click("click", function () {
        add();
    });
    // edit
    $("#edit").click("click", function () {
        edit();
    });
    // delete
    $("#delete").click("click", function () {
        del();
    });
    // submit for audit
    $("#submit").click("click", function () {
        submit();
    });
    // revert (withdraw submission)
    $("#revert").click("click", function () {
        revert();
    });
    // audit: approve
    $("#approval_pass").click("click", function () {
        approval_pass();
    });
    // audit: reject
    $("#approval_reject").click("click", function () {
        approval_reject();
    });
    // un-audit (reverse an audit)
    $("#anti_audit").click("click", function () {
        anti_audit();
    });
    // generate the payment document
    $("#create_crash_pay").click("click", function () {
        create_crash_pay(this);
    });
    // export
    $("#export").click(function () {
        export_data();
    });
});
/**
 * Column filter hook (currently a no-op stub).
 * @param column the grid column being filtered
 * @param grid   the grid instance
 */
function columnFilter(column,grid){
}
/**
 * Quick search: build a custom rule set from the toolbar input and apply it
 * to the grid. A non-empty value becomes an "equal" match on apply_money;
 * an empty value clears the filter.
 * @param value raw text typed in the quick-search box
 */
function search(value) {
    var ruleList = value
        ? [{field: 'apply_money', op: 'equal', value: value, type: 'float'}]
        : [];
    listGrid.custom_rules = {rules: ruleList};
    listGrid.searchData();
}
/**
 * Open the "create withdrawal application" dialog, handing it the grid
 * (so it can refresh the list) and the parent frame's mainData.
 */
function add() {
    var dialogOptions = {
        title: '新增提现申请单',
        width: 800,
        height: 640,
        url: rootPath + '/fund_crash_apply/add.shtml',
        data: {
            grid: listGrid,
            mainData: mainData
        }
    };
    $.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for exactly one selected row.
 * Only documents in state 0 (draft) or 3 (rejected) may be edited.
 */
function edit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // states other than 0 (draft) and 3 (rejected) are not editable
        if (item.state != 0 && item.state != 3){
            exist_error = true;
            return; // NOTE: only skips this iteration; the flag is already set
        }
    });
    if(!exist_error){
        $.ligerDialog.open({
            title: '编辑',
            url: rootPath + '/fund_crash_apply/edit.shtml?id=' + selected[0].id,
            width: 800,
            height: 640,
            data: {
                grid: listGrid // hand the grid to the dialog so it can reload after save
            }
        });
    }
    else{
        layer.msg("单据状态不合法,不能修改");
    }
}
/**
 * Open a read-only detail dialog for the given document id.
 * @param id primary key of the withdrawal application
 */
function detail(id){
    var dialogOptions = {
        title: '详情',
        url: rootPath + '/fund_crash_apply/detail.shtml?id=' + id,
        width: 800,
        height: 640,
        data: {grid: listGrid} // pass the grid so the dialog can refresh it
    };
    $.ligerDialog.open(dialogOptions);
}
/**
 * Delete the selected documents. Only draft documents (state == 0)
 * may be deleted; any other state aborts the whole operation.
 */
function del() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // only draft (0) documents are deletable
        if (item.state != 0){
            exist_error = true;
            return; // skips to next iteration; flag already set
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否删除?', function (yes) {
            if (yes == true) {
                var url = rootPath + '/fund_crash_apply/delete.shtml?ids=' + ids;
                // synchronous ajax helper; returns the server's verdict string
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('删除成功');
                } else {
                    layer.msg('删除失败');
                }
            }
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能删除");
    }
}
/**
 * Submit the selected documents for approval.
 * Only draft documents (state == 0) may be submitted.
 */
function submit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待提交的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // only draft (0) documents are submittable
        if (item.state != 0){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否提交?', function (yes) {
            if (yes == true) {
                var url = rootPath + '/fund_crash_apply/submit.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('提交成功');
                } else {
                    layer.msg('提交失败');
                }
            }
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能提交");
    }
}
/**
 * Withdraw (un-submit) the selected documents.
 * Only submitted documents (state == 1) may be withdrawn.
 */
function revert() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待撤销的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // only submitted (1) documents can be withdrawn
        if (item.state != 1){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否撤销?', function (yes) {
            if (yes == true) {
                var url = rootPath + '/fund_crash_apply/revert.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('撤销成功');
                } else {
                    layer.msg('撤销失败');
                }
            }
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能撤销");
    }
}
/**
 * Approve the selected documents. Only submitted documents (state == 1)
 * are eligible; opens the approval dialog to capture approved amounts.
 */
function approval_pass() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待审核的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // only submitted (1) documents can be approved
        if (item.state != 1){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        showApprovalDialog(selected);
    }
    else{
        layer.msg("存在状态不合法的单据,不能审核");
    }
}
/**
 * Show the approval dialog: an editable grid where the reviewer may lower
 * the approved amount per document (it must not exceed the applied amount),
 * then posts the {id, approval_money} pairs to the server.
 * @param selected rows chosen in the main list grid
 */
function showApprovalDialog(selected){
    // default each approved amount to the applied amount
    $.each(selected, function(index,item){
        item.approval_money = item.apply_money;
    });
    var content = $('<div id="approval_grid" style="margin: 0; padding: 0"></div>');
    var dialog = $.ligerDialog.open({
        title: '审核',
        width: 1200,
        height: 600,
        content: content,
        buttons:[
            {
                text:'确定',
                cls:'btn btn-success',
                onclick:function(){
                    // reject if any approved amount exceeds the applied amount
                    var exist_error = false;
                    $.each(approval_grid.records, function(index,item){
                        if(item.approval_money > item.apply_money){
                            exist_error = true;
                            return;
                        }
                    });
                    if(exist_error){
                        layer.msg('存在审批金额大于提现申请金额的数据,审核失败');
                        return;
                    }
                    // collect only the fields the server needs
                    var data = [];
                    $.each(approval_grid.records, function(index,item){
                        var record = {};
                        record.id = item.id;
                        record.approval_money = item.approval_money;
                        data.push(record);
                    });
                    var result = CommnUtil.ajax( rootPath + '/fund_crash_apply/approval_pass.shtml', {
                        'data' : JSON2.stringify(data)
                    }, "json");
                    if (result == "success") {
                        layer.msg('审核成功');
                        // let the toast show briefly before closing and reloading
                        setTimeout(function(){
                            dialog.close();
                            mainTab.reload(tab_id);
                        },1000);
                    } else {
                        layer.msg('审核失败,' + result);
                    }
                }
            },
            {
                text:'取消',
                onclick:function(){
                    dialog.close();
                }
            }
        ],
    });
    // editable grid inside the dialog; only approval_money is editable
    var approval_grid = $("#approval_grid").ligerGrid(
        {
            checkbox: false,
            enabledEdit:true,
            height: 500,
            columns: [
                {display: '单据编号', name: 'number', width: 300},
                {display: '单据日期', name: 'create_date', width: 180},
                {display: '申请组织', name: 'apply_org', width: 180},
                {display: '提现申请金额', name: 'apply_money', width: 180,type:'float',},
                {display: '审批金额', name: 'approval_money', width: 180,type:'float', editor: { type: 'float', precision: 0.00}}
            ],
            data:{'records': selected}
        }
    );
}
/**
 * Reject the selected documents. Only submitted documents (state == 1)
 * are eligible; prompts for a review opinion before posting.
 */
function approval_reject() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待审核的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // only submitted (1) documents can be rejected
        if (item.state != 1){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        // inline form: a single textarea for the review opinion
        var content = '<div class="row" style="margin: 40px 0px;">' +
            '<div class="form-group col-xs-12">'+
            '<label class="control-label col-xs-2">审批意见:</label>'+
            '<div class="col-xs-10">'+
            '<textarea class="form-control" name="opinion" style="resize: none;height: 100px;" />'+
            '</div>'+
            '</div>'+
            '</div>';
        var dialog = $.ligerDialog.open({
            title: '审核不通过',
            width: 600,
            height: 400,
            content: content,
            buttons:[
                {
                    text:'确定',
                    cls:'btn btn-success',
                    onclick:function(){
                        console.log(dialog);
                        var opinion = $(dialog.element).find("textarea[name=opinion]").val();
                        var result = CommnUtil.ajax( rootPath + '/fund_crash_apply/approval_reject.shtml', {
                            'ids' : ids,
                            'opinion' : opinion
                        }, "json");
                        if (result == "success") {
                            layer.msg('审核成功');
                            // let the toast show briefly before closing and reloading
                            setTimeout(function(){
                                dialog.close();
                                mainTab.reload(tab_id);
                            },1000);
                        } else {
                            layer.msg('审核失败,' + result);
                        }
                    }
                },
                {
                    text:'取消',
                    onclick:function(){
                        dialog.close();
                    }
                }
            ],
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能审核");
    }
}
/**
 * Reverse a completed approval. Eligible only when the document is
 * approved (state == 2) and not yet paid (payment_state == 0);
 * prompts for a review opinion before posting.
 */
function anti_audit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待反审核的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // must be approved (2) and unpaid (payment_state 0)
        if (item.state != 2 || item.payment_state != 0){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        // inline form: a single textarea for the review opinion
        var content = '<div class="row" style="margin: 40px 0px;">' +
            '<div class="form-group col-xs-12">'+
            '<label class="control-label col-xs-2">审批意见:</label>'+
            '<div class="col-xs-10">'+
            '<textarea class="form-control" name="opinion" style="resize: none;height: 100px;" />'+
            '</div>'+
            '</div>'+
            '</div>';
        var dialog = $.ligerDialog.open({
            title: '反审核',
            width: 600,
            height: 400,
            content: content,
            buttons:[
                {
                    text:'确定',
                    cls:'btn btn-success',
                    onclick:function(){
                        console.log(dialog);
                        var opinion = $(dialog.element).find("textarea[name=opinion]").val();
                        var result = CommnUtil.ajax( rootPath + '/fund_crash_apply/anti_audit.shtml', {
                            'ids' : ids,
                            'opinion' : opinion
                        }, "json");
                        if (result == "success") {
                            layer.msg('反审核成功');
                            // let the toast show briefly before closing and reloading
                            setTimeout(function(){
                                dialog.close();
                                mainTab.reload(tab_id);
                            },1000);
                        } else {
                            layer.msg('反审核失败,' + result);
                        }
                    }
                },
                {
                    text:'取消',
                    onclick:function(){
                        dialog.close();
                    }
                }
            ],
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能反审核");
    }
}
/**
 * Generate a payment document from the selected applications.
 * Eligible only when approved (state == 2) and unpaid (payment_state == 0);
 * navigates to the payment-creation tab with the selected ids.
 * @param button the clicked toolbar button (currently unused)
 */
function create_crash_pay(button){
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中生成付款单的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        // must be approved (2) and unpaid (payment_state 0)
        if (item.state != 2 || item.payment_state != 0){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        jumpToCrashPayAddTab(ids);
    }
    else{
        layer.msg("存在状态不合法的单据,不能生成付款单");
    }
}
/**
 * Jump to (or create) the "crash pay add" tab, preloaded with the
 * selected withdrawal-application ids.
 * @param ids array (or comma string) of fund_crash_apply ids
 */
function jumpToCrashPayAddTab(ids){
    var tab_id = "crash_pay_add";
    var url = rootPath + '/fund_crash_pay/add.shtml?crash_apply_ids=' + ids;
    if (!mainTab.isTabItemExist(tab_id)) {
        // first visit: create the tab
        mainTab.addTabItem({
            tabid: tab_id,
            text: '提现付款单编制',
            url: url,
            callback: function () {
                // post-add hook: custom content could be injected into the tab here
            }
        });
        return;
    }
    // tab already open: retarget, focus and refresh it
    mainTab.setTabItemSrc(tab_id, url);
    mainTab.selectTabItem(tab_id);
    mainTab.reload(tab_id);
}
/**
 * Export the current grid contents via the server-side export endpoint.
 */
function export_data() {
    listGrid.exportData('/fund_crash_apply/export.shtml')
}<file_sep>/src/main/resources/sqlFiles/1.0.7-update-quota-report-collect-wx-menu.sql
-- Quota report-collect menu: point the existing resource at the list page --
SELECT @fund_quota_report_collect_id := id FROM ly_resources WHERE resKey = 'quota_report__collect_manage' AND type = 1;
update ly_resources set resUrl='/quota_report_collect/list.shtml' where id=@fund_quota_report_collect_id;
<file_sep>/src/main/resources/sqlFiles/1.0.1-fengpy-insert-fund-fare-method.sql
-- Fee type seed data. sn codes: 0 storage, 1 third-party cold chain, 2 utilities/AC,
-- 3 delivery service, 4 rent, 5 property management, 6 wages, 7 social security, 8 housing fund --
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '仓储费', '0', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '三方冷链费', '1', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '水电空调费', '2', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '送货服务费', '3', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '房租费', '4', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '物业费', '5', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '工资费', '6', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '社保费', '7', 1);
INSERT INTO fund_fare_method ( methodName, sn,state) VALUES ( '公积金费', '8', 1);
<file_sep>/src/main/webapp/js/system/role/edit.js
// Handles to this dialog window and to the grid of the opener page
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // role list grid in the opener
// Custom jquery-validate rule: the roleKey must be 3..10 characters long
jQuery.validator.addMethod("checkRole", function(value, element) {
    return this.optional(element) || ((value.length <= 10) && (value.length>=3));
}, "roleKey由3至10位字符组合构成");
// Page init: wire up form validation and the ajax submit for role editing.
$(function() {
    $("form").validate({
        // submitHandler must precede the rules, otherwise the ajax submit is skipped
        submitHandler : function(form) {
            ly.ajaxSubmit(form,{ // check whether the update succeeded
                type : "post",
                dataType:"json",
                success : function(data) {
                    if (data=="success") {
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('更新成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!');
                    }
                }
            });
        },
        errorPlacement : function(error, element) { // custom error placement
            $(".l_err").show();
            $(".l_err").html(error.html());
        },
        success: function(label) { // hide the error area once validation passes
            $(".l_err").hide();
        }
    });
});
/**
 * Populate the #roleType <select> with the role-type options,
 * pre-selecting the entry matching the given roleType.
 * @param roleType current role type ('1' basic role, '2' tiered admin)
 */
function getRoleTypeSelect(roleType) {
    var options = [
        {name: '基础角色', id: '1'},
        {name: '分级管理员', id: '2'}
    ];
    var selectedId = parseInt(roleType, 10);
    var html = options.map(function (opt) {
        var selectedAttr = parseInt(opt.id, 10) == selectedId ? " selected='selected'" : "";
        return "<option value='" + opt.id + "'" + selectedAttr + ">" + opt.name + "</option>";
    }).join("");
    $("#roleType").html(html);
}
<file_sep>/src/main/webapp/js/system/organization/list.js
var pageii = null; // handle to the layer popup opened by permissions()/allotUser()
var grid = null; // organization tree grid instance (built in the ready handler)
// Page init: build the organization tree grid and wire up the toolbar buttons.
$(function() {
    grid = $("#gridArea").ligerGrid({
        columns: [
            {
                display : "id",
                name : "id",
                hide : true
            }, {
                display : "组织名称",
                name : "orgName",
                id:"orgName",
                align: 'left',
            },{
                display : "组织编码",
                name : "orgCode",
            }, {
                display : "组织属性",
                name : "orgType",
                // map the numeric orgType to its display name
                render: function(rowdata,index,data) {
                    var orgTypeName="";
                    switch (rowdata.orgType){
                        case 0:orgTypeName="集团";break;
                        case 1:orgTypeName="公司";break;
                        case 2:orgTypeName="职能部门";break;
                        case 3:orgTypeName="业务部门";break;
                        case 4:orgTypeName="岗位";break;
                        default:orgTypeName="其它";break;
                    }
                    return orgTypeName;
                }
            }, {
                display : "状态",
                name : "locked",
                minWidth : '90px',
                // locked == 1 means disabled
                render: function(rowdata,index,data){
                    return data == 1 ? '禁用' : '启用';
                }
            },{
                display : "创建时间",
                name : "createTime",
                render: function(rowdata,index,data) {
                    return new Date(data).format("yyyy-MM-dd hh:mm:ss");
                }
            },
            { display: '描述', name: 'description', minWidth: 200}
        ],
        url: rootPath + '/organization/treelists.shtml',
        usePager : false, // tree view: load the whole hierarchy at once
        tree: {
            columnId: 'orgName'
        },
        autoCheckChildren:false
    });
    $("#pageloading").hide();
    $("#search").click("click", function() { // bind the search button
        search();
    });
    $("#reset").click("click", function() { // bind the reset button
        reset();
    });
    $("#addFun").click("click", function() {
        addAccount();
    });
    $("#editFun").click("click", function() {
        editAccount();
    });
    $("#delFun").click("click", function() {
        delAccount();
    });
    $("#permissions").click("click", function() {
        permissions();
    });
    $("#allotUserFun").click("click", function() {
        allotUser();
    });
});
/**
 * Open the "add organization" dialog; the grid is passed in so the
 * dialog can reload it after a successful save.
 */
function addAccount() {
    var options = {
        title: '新增',
        url: rootPath + '/organization/addUI.shtml',
        width: 800,
        height: 560,
        data: {grid: grid}
    };
    $.ligerDialog.open(options);
}
/**
 * Open the edit dialog for exactly one selected organization.
 */
function editAccount() {
    var selected = grid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: '编辑',
        url: rootPath + '/organization/editUI.shtml?id=' + selected[0].id,
        width: 800,
        height: 420,
        data: {
            grid: grid // hand the grid to the dialog so it can reload after save
        }
    });
}
/**
 * Delete the selected organizations after a confirmation prompt.
 */
function delAccount() {
    var selected = grid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待删除的项");
        return;
    }
    var ids = $.map(selected,function(item){return item.id;});
    $.ligerDialog.confirm('确定是否删除?', function(yes) {
        if(yes == true) {
            var url = rootPath + '/organization/deleteEntity.shtml?ids=' + ids.join(',');
            // synchronous ajax helper; returns the server's verdict string
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                grid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
/**
 * Open the "assign roles" layer for exactly one selected organization.
 */
function permissions() {
    var selected = grid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待分配角色的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行分配角色");
        return;
    }
    // type 2 = iframe layer; handle kept in pageii so the child can close it
    pageii = layer.open({
        title : "分配角色",
        type : 2,
        area : [ "600px", "80%" ],
        content : rootPath + '/organization/permissionsUI.shtml?id=' + selected[0].id,
    });
}
/**
 * Open the "assign users" layer for exactly one selected organization.
 */
function allotUser() {
    var rows = grid.getSelectedRows();
    if (rows.length != 1) {
        // zero selected vs. more than one selected get different hints
        layer.msg(rows.length == 0 ? "请选中待分配用户的项" : "只能选中一项进行分配用户");
        return;
    }
    // type 2 = iframe layer; handle kept in pageii so the child can close it
    pageii = layer.open({
        title : "分配用户",
        type : 2,
        area : [ "600px", "80%" ],
        content : rootPath + '/organization/allotUserUI.shtml?id=' + rows[0].id,
    });
}
<file_sep>/src/main/webapp/js/fund/cost_deducted/add.js
// Handles to this dialog window and to the grid of the opener page
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // list grid in the opener
var form_validator; // jquery-validate instance, assigned in the ready handler
// Page init: wire up the department combobox, form validation and ajax submit.
$(function() {
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    // applicable department multi-select (companyDepts supplied by the page)
    $("#orgName").ligerComboBox(
        {
            isMultiSelect:true,
            width : 200,
            data:companyDepts,
            initIsTriggerEvent: false,
            textField: 'orgName',
            valueFieldID:'library_num',
            onSelected: function (value)
            {
            }
        }
    );
    form_validator = $("#form").validate({
        errorClass:'error-msg',
        // submitHandler must precede the rules, otherwise the ajax submit is skipped
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the insert succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "orgName" : {
                required : true,
                remote : { // async check that no rule exists yet for this org/dept
                    type : "POST",
                    url : rootPath + '/fund_cost_deducted/orgAndDepNotExist.shtml',
                    data : {
                        orgId : function() {
                            return $("#company_id").val();
                        },
                        deptId : function() {
                            return $("#library_num").val();
                        }
                    }
                },
                validateNullOrWhiteSpace: true
            },
            "deductedHand" : {
                required : true,
                validateNullOrWhiteSpace: true
            },
            "deductedFill" : {
                required : true,
                validateNullOrWhiteSpace: true
            },
            "remark":{
                maxlength:255
            }
        },
        messages : {
            "orgName" : {
                required : "适用部门不能为空",
                remote:"所选部门有已存在有费用票抵扣规则",
                validateNullOrWhiteSpace: "适用部门不能为空"
            },
            "deductedHand" : {
                required : "应交费用票抵扣规则不能为空",
                validateNullOrWhiteSpace: "应交费用票抵扣规则不能为空"
            },
            "deductedFill" : {
                required : "应补现金汇总抵扣规则不能为空",
                validateNullOrWhiteSpace: "应补现金汇总抵扣规则不能为空"
            },
            "remark":{
                maxlength:"备注过长"
            }
        },
        // place each error label in its own row under the field
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-6');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {
            label.parent().remove();
        },
    });
});
/**
 * Reset the add form and clear the hidden org id field.
 */
function resetForm(){
    $("#form")[0].reset();
    $("#apply_org_id").val(''); // NOTE(review): id looks copied from another page — confirm it exists in this form
}<file_sep>/src/main/resources/sqlFiles/1.0.11-qzl-add-repay-menus.sql
-- Add the missing "export" button under the loan/repay management menu --
SELECT @fund_loan_repay_manage_id := id FROM ly_resources WHERE resKey='loan_repay_manage' AND type=1;
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="export" class="btn btn-primary marR10">导出</button>', null, 0, 60, '导出', @fund_loan_repay_manage_id, 'export', '', '2');
-- Fix the URL of the loan/repay management menu --
UPDATE ly_resources SET resUrl='/fund_lend_repay/list.shtml' WHERE id = @fund_loan_repay_manage_id;<file_sep>/src/main/resources/sqlFiles/1.0.7-liubb-quota.sql
-- Normalise historical data: mark all organizations and users active --
update ly_organization set state = 1;
update ly_user SET status = 1;
-- Top-level menu: quota reporting management --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES ( 0, '指标填报管理', 'fa-desktop', 0, 160, '指标填报管理', 0, 'quota', '', '0');
SELECT @fund_quota_id := id FROM ly_resources WHERE resKey = 'quota' AND type = 0;
-- Sub-menu: quota management --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '指标管理', null, 0, 10, '指标管理', @fund_quota_id, 'quota_manage', '', '1');
-- Sub-menu: quota scene management --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '指标场景管理', null, 0, 20, '指标场景管理', @fund_quota_id, 'quota_scene_manage', '', '1');
-- Sub-menu: quota report management --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '指标填报管理', null, 0, 30, '指标填报管理', @fund_quota_id, 'quota_report_manage', '', '1');
-- Sub-menu: quota data report --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '指标数据报表', null, 0, 40, '指标数据报表', @fund_quota_id, 'quota_report__collect_manage', '', '1');<file_sep>/src/main/resources/sqlFiles/1.0.13-liubb-add-default-person-to-admin.sql
-- Give the admin account a default person record tied to the root organization
select @orgName := orgName,@orgCode := orgCode,@id := id from ly_organization where parentOrgId = 0;
INSERT INTO ly_person (code,name,sex,id_number,organization,organization_code,organization_id,status,creator_name,source,gmt_create) VALUES ('admin','系统管理员',1,'111111',@orgName,@orgCode,@id,1,'系统管理员',1,'2018-01-01 00:00:00');
select @person_id := id from ly_person where code = 'admin';
update ly_user set personId = @person_id where accountName = 'admin';
<file_sep>/src/main/webapp/js/fund/enum/add.js
// Handles to this dialog window and to the grid of the opener page
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // list grid in the opener
// Page init: load the company list, build the multi-select combobox,
// the date picker, and wire up form validation with ajax submit.
$(function() {
    // initialise the selects to empty; keep the hidden text field in sync
    if( $("#apply_org_id")){
        $("#apply_org_id").bind('change',function(event){
            $("#apply_org").val($(this).find("option:selected").text());
        }).val('');
    }
    if($("#bank_type_id")){
        $("#bank_type_id").bind('change',function(event){
        }).val('');
    }
    var org_url = rootPath + '/fund_enum/findLyOrganizationList.shtml';
    var comboBox_org= CommnUtil.ajax(org_url, null,"json");
    if (comboBox_org.records.length>0) {
        // NOTE(review): comboBox_org_data is an implicit global — confirm intentional
        comboBox_org_data = comboBox_org.records;
    } else {
        layer.msg("没有适合条件的公司!");
        return;
    }
    // var ceshi = [];
    // for(var i=0;i<comboBox_org_data.length;i++){
    // var object =comboBox_org_data[i];
    // object["ischecked"]=true;
    // ceshi.push(object);
    // }
    // company multi-select combobox
    var manager = $("#companyName").ligerComboBox({
        width : 250,
        selectBoxWidth: 230,
        selectBoxHeight: 300,
        valueField: 'id',
        textField: 'orgName',
        valueFieldID:'companyId',
        isShowCheckBox: true,
        isMultiSelect: true,
        data:comboBox_org_data,
        split:','
    });
    // var selectid= $("#companyName").ligerGetComboBoxManager();
    // manager.selectValue([30,50]);
    //
    // manager.setText("民生药业集团有限公司,天地民生集团本部,河南医药有限公司");
    // account-opening date picker
    $("#accountOpeningDate").datetimepicker({
        language: 'zh-CN', // Chinese locale
        format: 'yyyy-mm-dd ', // display format
        startView: 'month',
        minView: "month", // month is the finest granularity
        autoclose: true, // close on select
        todayBtn: true, // show the "today" button
    });
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        errorClass:'error-msg',
        // submitHandler must precede the rules, otherwise the ajax submit is skipped
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the insert succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "companyName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "methodName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            }
        },
        messages : {
            "companyName" : {
                required : "请选择适用公司",
                validateNullOrWhiteSpace: "适用公司不能为空白字符",
            },
            "methodName" : {
                required : "请输入类型名称",
                validateNullOrWhiteSpace: "类型名称不能为空白字符",
            }
        },
        // place each error label in its own row under the field
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-8');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {
            label.parent().remove();
        },
    });
});
/**
 * Reset the add form to its initial state.
 */
function resetForm(){
    $("#form")[0].reset();
}<file_sep>/src/main/webapp/js/system/mdm_supplier/list.js
// the main list grid instance (built in the ready handler below)
var listGrid;
// Page init: build the supplier grid and wire up all toolbar buttons.
$(function () {
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            {
                // name rendered as a link that opens the detail dialog
                display: '名称', name: 'name', width: 300, show: true, render: function (rowData, index, data) {
                    return '<a href="javascript:showView(' + rowData.id + ');" title="查看明细">' + data + '</a>'
                }
            },
            {
                display: '当前版本', name: 'version', width: 60, show: true
            },
            {display: '编码', name: 'code', width: 150, show: true},
            {display: '税号', name: 'dutyParagraph', width: 150, show: true},
            {display: '法人代表', name: 'legalPerson', width: 150, show: true},
            {display: '冻结状态', name: 'status', width: 100, show: true},
            {display: '备注', name: 'remark', minWidth: 150, show: true},
        ],
        url: rootPath + '/mdm_supplier/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
    });
    $("#pageloading").hide();
    $("#searchButton").click("click", function (event) { // bind search button
        event.preventDefault();
        search();
    });
    $("#seniorSearchButton").click("click", function (event) { // bind senior search
        event.preventDefault();
        seniorSearch();
    });
    $("#resetButton").click("click", function (event) { // bind reset button
        event.preventDefault();
        reset();
    });
    // add
    $("#addButton").click("click", function () {
        add();
    });
    // edit
    $("#editButton").click("click", function () {
        edit();
    });
    // batch edit
    $("#muchEditButton").click("click", function () {
        muchEdit();
    });
    // delete
    $("#delButton").click("click", function () {
        del();
    });
    // import
    $("#importButton").click("click", function () {
        importList();
    });
    // export
    $("#exportButton").click("click", function () {
        exportList();
    });
    // freeze
    $("#disableButton").click(function () {
        statusChange('disable');
    });
    // unfreeze
    $("#enableButton").click(function () {
        statusChange('enable');
    });
    // dispatch
    $("#dispatchButton").click(function () {
        dispatchData();
    })
});
/**
 * Run a grid search with the serialized values of the search form.
 */
function search() {
    listGrid.search($("#searchForm").serializeJson());
}
/**
 * Clear the search form, any senior-search filter and the grid's
 * extra parameters, then reload everything.
 */
function reset() {
    $("#searchForm")[0].reset();
    var grid = listGrid;
    grid.filter.reset();
    grid.clearParm();
    grid.reloadAll();
}
/**
 * Open the "add supplier" dialog.
 */
function add() {
    $.ligerDialog.open({
        title: '新增',
        url: rootPath + '/mdm_supplier/addUI.shtml',
        width: 800,
        height: 640,
        data: {
            grid: listGrid // hand the grid to the dialog so it can reload after save
        }
    });
}
/**
 * Open the edit dialog for exactly one selected supplier.
 */
function edit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: '编辑',
        url: rootPath + '/mdm_supplier/editUI.shtml?id=' + selected[0].id,
        width: 800,
        height: 640,
        data: {
            grid: listGrid // hand the grid to the dialog so it can reload after save
        }
    });
}
/**
 * Open the batch-edit dialog for all selected suppliers.
 */
function muchEdit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    var ids = $.map(selected, function (item) {
        return item.id;
    });
    ids = ids.join(',');
    $.ligerDialog.open({
        title: '批量修改',
        url: rootPath + '/mdm_supplier/muchEditUI.shtml?ids=' + ids,
        width: 400,
        height: 200,
        data: {
            grid: listGrid // hand the grid to the dialog so it can reload after save
        }
    });
}
/**
 * Delete the selected suppliers after a confirmation prompt.
 */
function del() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var ids = $.map(selected, function (item) {
        return item.id;
    });
    ids = ids.join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/mdm_supplier/deleteEntity.shtml?ids=' + ids;
            // synchronous ajax helper; returns the server's verdict string
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
// Open the import dialog; the grid is passed so the dialog can reload it after import.
function importList() {
    var importOptions = {
        width: 600,
        height: 400,
        title: '导入',
        url: rootPath + '/mdm_supplier/importUI.shtml',
        data: {grid: listGrid}
    };
    $.ligerDialog.open(importOptions);
}
// Export the current grid contents via the server-side export endpoint.
function exportList() {
    listGrid.exportData('/mdm_supplier/export.shtml')
}
/**
 * Freeze or unfreeze the selected suppliers.
 * @param status 'disable' to freeze, 'enable' to unfreeze
 */
function statusChange(status) {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待操作的项");
        return;
    }
    var ids = $.map(selected, function (item) {
        return item.id;
    });
    ids = ids.join(',');
    $.ligerDialog.confirm('是否确定' + (status == 'disable' ? '冻结' : '解冻') + '?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/mdm_supplier/statusChange.shtml';
            // synchronous ajax helper; non-"success" results carry an error text
            var result = CommnUtil.ajax(url, {
                ids: ids,
                status: status,
            }, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('操作成功');
            } else {
                layer.msg(result);
            }
        }
    });
}
/**
 * Manually dispatch (push) the selected supplier records downstream.
 */
function dispatchData(){
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待操作的项");
        return;
    }
    var ids = $.map(selected, function (item) {
        return item.id;
    });
    ids = ids.join(',');
    $.ligerDialog.confirm('是否确定分发?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/mdm_supplier/dispatchData.shtml';
            // synchronous ajax helper; non-"success" results carry an error text
            var result = CommnUtil.ajax(url, {
                ids: ids
            }, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('操作成功');
            } else {
                layer.msg(result);
            }
        }
    });
}
// Open a read-only detail dialog for the given supplier id.
function showView(id) {
    var detailOptions = {
        width: 800,
        height: 600,
        title: '详情',
        url: rootPath + '/mdm_supplier/viewUI.shtml',
        urlParms: {id: id}
    };
    $.ligerDialog.open(detailOptions);
}
/**
 * Senior search: lazily build a filter dialog from the visible grid
 * columns; subsequent calls just re-show the cached dialog.
 */
function seniorSearch() {
    // dialog already built — just show it again
    if (listGrid.winfilter) {
        listGrid.winfilter.show();
        return;
    }
    var filtercontainer = $('<div id="' + listGrid.id + '_filtercontainer"></div>').width(700).height(300).hide();
    // derive filter fields from visible columns (TODO: extract a shared helper)
    var fields = [];
    $(listGrid.columns).each(function () {
        if (this.show == true) {
            var o = {name: this.name, display: this.display};
            var isNumber = this.type == "int" || this.type == "number" || this.type == "float";
            var isDate = this.type == "date";
            if (isNumber) o.type = "number";
            if (isDate) o.type = "date";
            if (this.editor) {
                o.editor = this.editor;
            }
            fields.push(o);
        }
    });
    listGrid.filter = filtercontainer.ligerMdmFilter({
        fields: fields,
        showGroup: false,
        atLeastOne: true,
        headAlign: 'center',
        widths: ['180px', '120px', '180px', '100px'],
        texts: ['字段', '运算符', '值', '运算关系'],
    });
    //filter.addRule($(filter.element.lastChild));
    // cache the dialog on the grid so it is only built once
    return listGrid.winfilter = $.ligerDialog.open({
        title: '自定义查询',
        width: 800, height: 400,
        target: filtercontainer, isResize: false, top: 50,
        buttons: [
            {
                text: '确定', onclick: function (item, dialog) {
                    listGrid.search({where: JSON2.stringify(listGrid.filter.getData())});
                    dialog.hide();
                }
            },
            {
                text: '取消', onclick: function (item, dialog) {
                    dialog.hide();
                }
            },
            {
                text: '重置', onclick: function () {
                    listGrid.filter.reset();
                }
            }
        ]
    });
}
<file_sep>/src/main/webapp/js/fund/inter_dept_transfer/list.js
var mainTab = frameElement.tab; // tab container of the hosting frame
var tab_id = frameElement.id; // id of this tab
var mainData = frameElement.openerData; // data handed over by the opener
var listGrid; // list grid instance (built in the ready handler)
// Column definitions, shared between the grid and the senior-search filter.
var columns = [
    {display: '单据编号', name: 'number', type: "int", width: 200, show: true},
    {display: '所属公司', name: 'company.orgName', width: 200, align: 'left', show: true},
    {display: '转出部门', name: 'turnOutDept.orgName', width: 200, align: 'left', show: true},
    {display: '转移金额', name: 'transferAmount', type: "float", width: 100, show: true},
    {display: '转入部门', name: 'transferToDept.orgName', width: 200, align: 'left', show: true},
    {display: '转移类型', name: 'transferType', width: 100, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{key: 1, text: "部门转到部门"}],
        // map the numeric code to its display text
        render:function(rowdata, index, data) {
            return {1:"部门转到部门"}[data];
        }
    },
    {display: '转移款项类型', name: 'transferAmountType', width: 100, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{key: 1, text: "器械"}, {key: 2, text: "特药"}, {key: 3, text: "普药"}],
        render:function(rowdata, index, data) {
            return {1:"器械", 2:"特药", 3:"普药"}[data];
        }
    },
    {display: '单据状态', name: 'status', width: 100, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{key: 0, text:"草稿"}, {key: 1, text:"待确认"}, {key: 2, text:"待审核"}, {key: 3, text:"已驳回"}, {key: 4, text:"审核通过"}, {key: 5, text:"审核不通过"}],
        render:function(rowdata, index, data) {
            return {0:"草稿", 1:"待确认", 2:"待审核", 3:"已驳回", 4:"审核通过", 5:"审核不通过"}[data];
        }
    },
    {display: '单据日期', name: 'createDate', width: 100, show: true, export_name:'create_date_str',
        render: function(rowdata, index, data){
            return new Date(data).format("yyyy-MM-dd");
        }
    }
];
// Page init: build the filter grid (with summary, custom columns and
// senior search) and wire up all toolbar buttons.
$(function () {
    listGrid = $("#gridArea").ligerFilterGrid({
        columns: columns,
        url: rootPath + '/fund/inter_dept_transfer/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
        originalColumns: columns , // keep the original column definitions
        summaryUrl: rootPath + '/common/summary.shtml', // summary endpoint
        summaryTable: 'fund_inter_dept_transfer', // summary table (DB table, not Java entity)
        summaryShowRecordCount : true, // show the record count
        summary: [ // summarised fields
            {text:'合计转移金额',field:'transfer_amount'}
        ],
        customColumn : { // custom column (form) configuration
            listUrl : rootPath + '/fund_custom_column/list.shtml', // custom column query url
            submitUrl: rootPath + '/fund_custom_column/submit.shtml', // custom column submit url
            clazz: 'FundInterDeptTransfer', // storage key for the custom columns
            id: 'formManage', // plugin id
            dialogTitle: '表单配置', // dialog title
            leftId: 'listBox-left', // left list id (has a default)
            leftTitle: '隐藏的列', // left list title (has a default)
            rightId: 'listBox-right', // right list id (has a default)
            rightTitle: '显示的列', // right list title (has a default)
            callback : function(){ // fired after the configuration is saved
                mainTab.reload(tab_id);
            }
        },
        seniorSearch: { // senior search, independent of ligerGrid's built-in one
            id: 'seniorSearchButton', // plugin id
            searchSight: { // saved-scene configuration
                id: 'search_sight', // plugin id
                listUrl: rootPath + '/fund_search_sight/listJson.shtml', // scene list url
                submitUrl: rootPath + '/fund_search_sight/addEntity.shtml', // scene submit url
                clazz: 'FundInterDeptTransfer', // storage key for the scenes
                callback: function(form){ // fired after a scene is saved
                    form.reset();
                    mainTab.reload(tab_id);
                }
            }
        }
    });
    $("#pageloading").hide();
    // quick search on Enter
    $("#search").bind("keydown", function (event) {
        if(event.keyCode == "13") {
            search($(this).val());
        }
    });
    // add
    $("#add").bind("click", function () {
        add();
    });
    // edit
    $("#edit").bind("click", function () {
        edit();
    });
    // delete
    $("#del").bind("click", function () {
        del();
    });
    // submit
    $("#commit").bind("click", function () {
        submit();
    });
    // withdraw
    $("#revoke").bind("click", function () {
        revert();
    });
    // confirm
    $("#confirm").bind("click", function () {
        confirmSumit();
    });
    // reject
    $("#reject").bind("click", function () {
        reject();
    });
    // approve
    $("#approval").bind("click", function () {
        approval();
    });
    // reverse approval
    $("#anti_audit").bind("click", function () {
        antiAudit();
    });
    // export
    $("#export").bind("click", function () {
        exportData();
    });
});
/**
 * Open the "new inter-department transfer" dialog.
 */
function add() {
    $.ligerDialog.open({
        title: '新增',
        url: rootPath + '/fund/inter_dept_transfer/add.shtml',
        width: 800,
        height: 600,
        data: {
            grid: listGrid, // hand the grid to the dialog so it can reload after save
            mainData: mainData
        }
    });
}
/**
 * Open the edit dialog for exactly one selected transfer document.
 * Editable states: 0 (draft), 3 (rejected), 5 (approval failed).
 */
function edit() {
    var selectedRows = listGrid.getSelectedRows();
    if (selectedRows.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selectedRows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    var exist_error = false;
    $.each(selectedRows, function(index, item){
        // only draft (0), rejected (3) and approval-failed (5) are editable
        if (item.status != 0 && item.status != 3 && item.status != 5) {
            exist_error = true;
            return; // skips to next iteration; flag already set
        }
    });
    if(!exist_error){
        $.ligerDialog.open({
            title: '编辑',
            url: rootPath + '/fund/inter_dept_transfer/edit.shtml?id=' + selectedRows[0].id,
            width: 800,
            height: 640,
            data: {
                grid: listGrid // hand the grid to the dialog so it can reload after save
            }
        });
    }
    else{
        layer.msg("单据状态不合法,不能修改");
    }
}
/**
 * Check whether any selected row is NOT in the expected status.
 * @param selectedRows rows returned by listGrid.getSelectedRows()
 * @param status the only status value allowed for the requested operation
 * @returns {boolean} true when at least one row has a different status
 */
function opVerify(selectedRows, status) {
    // Array.prototype.some stops at the first offending row. The previous
    // $.each loop used a bare `return;`, which does NOT break the iteration
    // (only `return false` does) — result was the same but every row was
    // visited; this version also drops the jQuery dependency.
    return selectedRows.some(function (item) {
        return item.status != status;
    });
}
/**
 * Delete the selected documents; only drafts (status 0) may be deleted.
 */
function del() {
    var selectedRows = listGrid.getSelectedRows();
    if (selectedRows.length == 0) {
        // Fixed copy-pasted prompt: this action deletes, it does not submit.
        layer.msg("请选中待删除的项!");
        return;
    }
    var exist_error = opVerify(selectedRows, 0);
    if (!exist_error) {
        var ids = $.map(selectedRows, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否删除?', function (yes) {
            if (yes == true) {
                var url = rootPath + '/fund/inter_dept_transfer/delete.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('删除成功');
                } else {
                    layer.msg('删除失败');
                }
            }
        });
    } else {
        layer.msg("存在状态不合法的单据,不能删除");
    }
}
/**
 * Submit all selected draft documents (status 0) to the server.
 */
function submit() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待提交的项!");
        return;
    }
    if (opVerify(rows, 0)) {
        layer.msg("存在状态不合法的单据,不能提交!");
        return;
    }
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否提交?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/fund/inter_dept_transfer/submit.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('提交成功');
        } else {
            layer.msg('提交失败');
        }
    });
}
/**
 * Withdraw the selected documents; only submitted ones (status 1) qualify.
 */
function revert() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待撤销的项!");
        return;
    }
    if (opVerify(rows, 1)) {
        layer.msg("存在状态不合法的单据,不能撤消!");
        return;
    }
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否撤消?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/fund/inter_dept_transfer/revert.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('撤消成功!');
        } else {
            layer.msg('撤消失败!');
        }
    });
}
/**
 * Confirm the selected documents; only submitted ones (status 1) qualify.
 */
function confirmSumit() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待确认的项!");
        return;
    }
    if (opVerify(rows, 1)) {
        layer.msg("存在状态不合法的单据,不能确认!");
        return;
    }
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('是否确认?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/fund/inter_dept_transfer/confirm.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('确认成功!');
        } else {
            layer.msg('确认失败!');
        }
    });
}
/**
 * Reject the selected documents; only confirmed ones (status 2) qualify.
 */
function reject() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待驳回的项!");
        return;
    }
    if (opVerify(rows, 2)) {
        layer.msg("存在状态不合法的单据,不能驳回!");
        return;
    }
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否驳回?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/fund/inter_dept_transfer/reject.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('驳回成功!');
        } else {
            layer.msg('驳回失败!');
        }
    });
}
/**
 * Audit: show a dialog with the total transfer amount and an opinion box,
 * then post the decision — pass = status 4, fail = status 5. Only documents
 * in status 2 (confirmed) may be audited.
 */
function approval() {
    var selectedRows = listGrid.getSelectedRows();
    if (selectedRows.length == 0) {
        layer.msg("请选中待审核的项!");
        return;
    }
    var exist_error = opVerify(selectedRows, 2);
    if (!exist_error) {
        var ids = [];
        var amount = 0.00;
        $.each(selectedRows, function (index, item) {
            // NOTE(review): plain float addition — the displayed total can
            // show binary rounding artifacts; confirm whether the server
            // should supply the aggregated amount instead.
            amount += item.transferAmount;
            ids.push(item.id);
        });
        ids = ids.join(',');
        // Read-only total amount row.
        var content = '<div class="row">' +
            '<div class="form-group col-xs-12">' +
            '<label class="control-label col-xs-3" style="text-align: right">转款金额:</label>' +
            '<div class="col-xs-9">' +
            '<input class="form-control" value="' + amount + '" readonly />' +
            '</div>' +
            '</div>' +
            '</div>';
        // Opinion textarea. Fixed: <textarea/> is not a void element and must
        // not be self-closed — browsers would otherwise swallow following
        // markup as its content. An explicit closing tag is required.
        content += '<div class="row">' +
            '<div class="form-group col-xs-12">' +
            '<label class="control-label col-xs-3" style="text-align: right">审批意见:</label>' +
            '<div class="col-xs-9">' +
            '<textarea class="form-control" name="opinion" style="resize: none; height: 60px;"></textarea>' +
            '</div>' +
            '</div>' +
            '</div>';
        var dialog = $.ligerDialog.open({
            title: '审核',
            width: 480,
            height: 360,
            content: content,
            buttons: [
                {
                    text: '审核通过',
                    cls: 'btn btn-success',
                    onclick: function () {
                        var opinion = $(dialog.element).find("textarea[name=opinion]").val();
                        // status 4 = audit passed
                        var result = CommnUtil.ajax(rootPath + '/fund/inter_dept_transfer/approval.shtml', {
                            'ids': ids,
                            'opinion': opinion,
                            'status': 4
                        }, "json");
                        if (result == "success") {
                            layer.msg('审核成功');
                            setTimeout(function () {
                                dialog.close();
                                mainTab.reload(tab_id);
                            }, 1000);
                        } else {
                            layer.msg('审核失败,' + result);
                        }
                    }
                },
                {
                    text: '审核不通过',
                    onclick: function () {
                        var opinion = $(dialog.element).find("textarea[name=opinion]").val();
                        // status 5 = audit rejected
                        var result = CommnUtil.ajax(rootPath + '/fund/inter_dept_transfer/approval.shtml', {
                            'ids': ids,
                            'opinion': opinion,
                            'status': 5
                        }, "json");
                        if (result == "success") {
                            layer.msg('审核成功!');
                            setTimeout(function () {
                                dialog.close();
                                mainTab.reload(tab_id);
                            }, 1000);
                        } else {
                            layer.msg('审核失败,' + result);
                        }
                    }
                },
                {
                    text: '取消',
                    onclick: function () {
                        dialog.close();
                    }
                }
            ]
        });
    } else {
        layer.msg("存在状态不合法的单据,不能审核");
    }
}
/**
 * Reverse the audit of the selected documents; only audited ones (status 4).
 */
function antiAudit() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待反审核的项!");
        return;
    }
    if (opVerify(rows, 4)) {
        layer.msg("存在状态不合法的单据,不能反审核!");
        return;
    }
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否反审核?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/fund/inter_dept_transfer/antiAudit.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('反审核成功!');
        } else {
            layer.msg('反审核失败!');
        }
    });
}
/**
 * Custom quick search on the transfer amount.
 * @param value amount typed into the search box (empty clears the rule)
 */
function search(value) {
    var customRules = { rules: [] };
    if (value) {
        customRules.rules.push({
            field: 'transferAmount', op: 'equal', value: value, type: 'float'
        });
    }
    listGrid.custom_rules = customRules;
    listGrid.searchData();
}
/**
 * Export: ask the grid to download the current list via the export endpoint.
 */
function exportData() {
    listGrid.exportData('/fund/inter_dept_transfer/export.shtml')
}<file_sep>/src/main/webapp/js/fund/cost_account/import.js
// Grab this popup's dialog object and the grid the opener page passed in.
var thisDialog = frameElement.dialog; // current popup window
var targetGrid = thisDialog.get('data').grid; // caller's list grid (refreshed after import)
/*jslint unparam: true, regexp: true */
/*global window, $ */
$(function () {
'use strict';
// Change this to the location of your server-side upload handler:
var url = rootPath + '/common_file_upload/upload.shtml';
var uploadButton = $('<button/>')
.addClass('upload btn btn-primary')
.prop('disabled', true)
.text('处理中...')
.on('click', function () {
var $this = $(this), data = $this.data();
$this.off('click').text('取消').on('click', function () {
$this.remove();
data.abort();
});
//开始上传
data.submit().always(function () {
$this.remove();
var cancle = data.context.find('button.cancel');
if (cancle) {
cancle.prop('disabled', true);
}
});
});
var cancelButton = $('<button/>')
.addClass('cancel close')
.html('×')
.prop('aria-hidden', true)
.on('click', function () {
//取消上传,删除文件
var $this = $(this), data = $this.data().data, index = $this.data().index;
data.abort();
data.files.splice(index);
data.context.empty();
data.context = null;
//修改选择文件按钮的状态
if (data.files && data.files.length >= 1) {
$('#fileupload').prop('disabled', true).parent('span.btn').addClass('disabled');
} else {
$('#fileupload').prop('disabled', false).parent('span.btn').removeClass('disabled');
}
});
$('#fileupload').fileupload({
url: url,
dataType: 'json',
autoUpload: false,
acceptFileTypes: /(\.|\/)(xls)$/i,
maxFileSize: 10485760,//1024*1024*10 = 10M
minFileSize: 1,
messages: {
maxNumberOfFiles: '文件数量超限',
acceptFileTypes: '只允许上传.xls的EXCEL文件',
maxFileSize: '文件大小超过10M',
minFileSize: '文件大小必须大于0'
}
}).on('fileuploadadd', function (e, data) {
//添加上传按钮和删除按钮
data.context = $('<div/>').appendTo('#files');
$.each(data.files, function (index, file) {
var node = $('<p/>').append($('<h3/>').text(file.name).append(cancelButton.clone(true).data({
index: index,
data: data
})));
if (!index) {
node.append('<br>')
.append(uploadButton.clone(true).data(data));
}
node.appendTo(data.context);
});
}).on('fileuploadprocessalways', function (e, data) {
//修改选择文件按钮的状态
if (data.files && data.files.length >= 1) {
$('#fileupload').prop('disabled', true).parent('span.btn').addClass('disabled');
} else {
$('#fileupload').prop('disabled', false).parent('span.btn').removeClass('disabled');
}
//校验完成,显示上传按钮
var index = data.index,
file = data.files[index],
node = $(data.context.children()[index]);
if (file.preview) {
node.prepend('<br>').prepend(file.preview);
}
if (file.error) {
node.append('<br>').append($('<span class="text-danger"/>').text(file.error));
}
if (index + 1 === data.files.length) {
data.context.find('button.upload')
.text('上传')
.prop('disabled', !!data.files.error);
}
}).on('fileuploadprogressall', function (e, data) {
var progress = parseInt(data.loaded / data.total * 100, 10);
$('#progress .progress-bar').css(
'width',
progress + '%'
);
}).on('fileuploaddone', function (e, data) {
$.each(data.result, function (index, file) {
var node = $(data.context.children()[index]);
if (file.success) {
var message = $('<span class="text-success"/>').text('上传成功!');
node.find('button.cancel').remove();
node.append('<br>').append(message);
beginToImport(file);
} else {
var error = $('<span class="text-danger"/>').text(file.message);
node.append('<br>').append(error);
}
});
}).on('fileuploadfail', function (e, data) {
$.each(data.files, function (index) {
var error = $('<span class="text-danger"/>').text('File upload failed.');
$(data.context.children()[index])
.append('<br>')
.append(error);
});
}).prop('disabled', !$.support.fileInput)
.parent().addClass($.support.fileInput ? undefined : 'disabled');
$('#resetButton').click(function (event) {
self.location.reload();
});
$('#cancelButton').click(function (event) {
thisDialog.close();
});
});
// Kick off the server-side import once the file has been uploaded.
function beginToImport(file) {
    var costType = $("#costType").val();
    addSuccessMessage('正在导入...');
    var onImportDone = function (data) {
        if (!(data && data.success == true)) {
            $.ligerDialog.error('导入失败!', data.message);
            addErrorMessage('导入失败!' + data.message);
            return;
        }
        $.ligerDialog.warn('导入完成!请检查结果!');
        addSuccessMessage('导入完成!');
        addSuccessMessage('共解析' + data.allCount + '条,成功导入:' + data.successCount + '条,导入失败:' + data.failCount + '条');
        if (data.message != "") {
            addErrorMessage("出现错误:" + data.message);
        }
        if (data.failDetails.length > 0) {
            addErrorMessage("失败详情:");
            $.each(data.failDetails, function (index, detail) {
                addErrorMessage(detail.sheetName + ",行号:" + detail.rowNumber + ",错误:" + detail.message);
            });
        }
        targetGrid.reloadAll();
    };
    ly.ajax({
        type: "post",
        dataType: "json",
        url: rootPath + '/fund_cost_accounting/importCostAccount.shtml?costType=' + costType,
        data: file,
        success: onImportDone
    });
}
// Append a green status line to the file list area.
function addSuccessMessage(message) {
    var node = $('<span class="text-success"/>');
    node.text(message).append('<br/>').appendTo($('#files'));
}
function addErrorMessage(message) {
$('<span class="text-danger"/>').text(message).append('<br/>').appendTo($('#files'));
}<file_sep>/src/main/webapp/ligerui/components/mdmLigerFilter.js
/**
* jQuery ligerUI 1.3.3
*
* http://ligerui.com
*
* Author daomi 2015 [ <EMAIL> ]
*
*/
(function ($)
{
    // jQuery plugin entry point for the MDM filter widget.
    $.fn.ligerMdmFilter = function ()
    {
        return $.ligerui.run.call(this, "ligerMdmFilter", arguments);
    };
    // Retrieve an existing MdmFilter manager bound to the element.
    $.fn.ligerGetMdmFilterManager = function ()
    {
        return $.ligerui.run.call(this, "ligerGetMdmFilterManager", arguments);
    };
    // Default options: four columns (field, operator, value, logical
    // relation), layered on top of the stock Filter defaults.
    $.ligerDefaults.MdmFilter = $.extend(true,{
        showGroup : true,
        widths:['180px', '120px', '180px','100px'],
        texts:['字段','运算符', '值', '运算关系' ],
        headAlign: 'center',
        atLeastOne:true,
    },$.ligerDefaults.Filter);
    // Interface/method extension point. NOTE: "ligerMethos" is the library's
    // own (misspelled) namespace — do not rename it.
    $.ligerMethos.MdmFilter = $.ligerMethos.MdmFilter || {};
    $.ligerDefaults.MdmFilterString = $.extend(true,{
    },$.ligerDefaults.FilterString);
    // Filter widget constructor — delegates to the base Filter constructor.
    $.ligerui.controls.MdmFilter = function (element, options)
    {
        $.ligerui.controls.MdmFilter.base.constructor.call(this, element, options);
    };
    $.ligerui.controls.MdmFilter.ligerExtend($.ligerui.controls.Filter, {
        __getType: function ()
        {
            return 'MdmFilter'
        },
        __idPrev: function ()
        {
            return 'MdmFilter';
        },
        // Render the widget and wire the add/delete group/rule click handlers.
        _render: function ()
        {
            var g = this, p = this.options;
            g.set(p);
            // Event: delegate clicks for add-group / delete-group /
            // add-rule / delete-rule buttons (matched by class name).
            $(g.element).bind("click", function (e)
            {
                e.preventDefault();
                var jthis = $((e.target || e.srcElement));
                var cn = jthis.get(0).className;
                if (cn.indexOf("addgroup") >= 0)
                {
                    var jtable = jthis.parent().parent().parent().parent();
                    g.addGroup(jtable);
                }
                else if (cn.indexOf("deletegroup") >= 0)
                {
                    var jtable = jthis.parent().parent().parent().parent();
                    g.deleteGroup(jtable);
                }
                else if (cn.indexOf("addrule") >= 0)
                {
                    var jtable = jthis.parent().parent().parent().parent();
                    g.addRule(jtable);
                }
                else if (cn.indexOf("deleterole") >= 0)
                {
                    var rulerow = jthis.parent().parent();
                    g.deleteRule(rulerow);
                }
            });
            if(p.atLeastOne){
                g.addRule($(g.element.lastChild));
            }
        },
        // Add one rule row to a group.
        // param [jgroup] jQuery object of the group table (defaults to root).
        addRule: function (jgroup)
        {
            var g = this, p = this.options;
            jgroup = jgroup || g.group;
            var lastrow = $(">tbody:first > tr:last", jgroup);
            var rulerow = $(g._bulidRuleRowHtml());
            lastrow.before(rulerow);
            if (p.fields.length)
            {
                // If the first field declares a custom editor, mount it now.
                g.appendEditor(rulerow, p.fields[0]);
            }
            // Event: when the field select changes, swap the operator list
            // and the value editor to match the new field's type.
            $("select.fieldsel", rulerow).bind('change', function ()
            {
                var jopsel = $(this).parent().next().find("select:first");
                var fieldName = $(this).val();
                if (!fieldName) return;
                var field = g.getField(fieldName);
                // Field type handling: rebuild the operator options only
                // when the type actually changed.
                var fieldType = field.type || "string";
                var oldFieldtype = rulerow.attr("fieldtype");
                if (fieldType != oldFieldtype)
                {
                    jopsel.html(g._bulidOpSelectOptionsHtml(fieldType,field.operator ));
                    rulerow.attr("fieldtype", fieldType);
                }
                // Editor required by the newly selected field.
                var editorType = null;
                // Editor currently mounted on the row (if any).
                var oldEditorType = rulerow.attr("editortype");
                if (g.enabledEditor(field)) editorType = field.editor.type;
                if (oldEditorType)
                {
                    // Tear down the previous editor.
                    g.removeEditor(rulerow);
                }
                if (editorType)
                {
                    // The selected field defines a custom editor — mount it.
                    g.appendEditor(rulerow, field);
                } else
                {
                    rulerow.removeAttr("editortype").removeAttr("editorid");
                    $("td.l-filter-value:first", rulerow).html('<input type="text" class="valtxt form-control" style="width:180px;" />');
                }
            });
            // Deferred: refresh the logical-relation column's enabled state
            // once the DOM has settled.
            setTimeout(function(){
                g.adjustGop();
            },100);
            return rulerow;
        },
        // Reset: drop all rule rows (keeping the first when atLeastOne).
        reset: function(){
            var g = this;
            var p = this.options;
            if(p.atLeastOne){
                $("table.l-filter-group tbody tr.l-filter-column").not(":first-child").remove();
            }
            else{
                $("table.l-filter-group tbody tr.l-filter-column").remove();
            }
            g.adjustGop();
        },
        // Enable/disable the logical-relation selects: only the last rule
        // row's relation select is disabled (it has no following rule).
        adjustGop: function(){
            $("table.l-filter-group tbody tr.l-filter-column td.l-filter-gop select").attr("disabled",false);
            $("table.l-filter-group tbody tr.l-filter-column:last td.l-filter-gop select").attr("disabled",true);
        },
        // Remove one rule row (unbinding its handlers and editor first).
        deleteRule: function (rulerow)
        {
            var p = this.options;
            var g = this;
            $("select.fieldsel", rulerow).unbind();
            this.removeEditor(rulerow);
            $(rulerow).remove();
            g.adjustGop();
        },
        // Mount a custom value editor into the rule row's value cell.
        appendEditor: function (rulerow, field)
        {
            var g = this, p = this.options;
            if (g.enabledEditor(field))
            {
                var container = $("td.l-filter-value:first", rulerow).html("");
                var editor = p.editors[field.editor.type];
                var editorTag = ++g.editorCounter;
                var editParm = {
                    filter: g
                };
                editParm.field = $.extend(true, {}, field);
                editParm.field.name = field.name + "_" + editorTag;
                g.editors[editorTag] = editor.create.call(this, container, editParm.field);
                rulerow.attr("editortype", field.editor.type).attr("editorid", editorTag);
            }
        },
        // Collect the group's data (nested groups + rules) as a plain object.
        getData: function (group)
        {
            var g = this, p = this.options;
            group = group || g.group;
            var groupData = {};
            $("> tbody > tr", group).each(function (i, row)
            {
                var rowlast = $(row).hasClass("l-filter-rowlast");
                var rowgroup = $(row).hasClass("l-filter-rowgroup");
                if (rowgroup)
                {
                    var groupTable = $("> td:first > table:first", row);
                    if (groupTable.length)
                    {
                        if (!groupData.groups) groupData.groups = [];
                        groupData.groups.push(g.getData(groupTable));
                    }
                }
                else if (rowlast)
                {
                    groupData.op = $(".groupopsel:first", row).val();
                }
                else
                {
                    var fieldName = $("select.fieldsel:first", row).val();
                    var field = g.getField(fieldName);
                    var op = $(".opsel:first", row).val();
                    var value = g._getRuleValue(row, field);
                    var type = $(row).attr("fieldtype") || "string";
                    var gop = $(".groupopsel:first", row).val();
                    if (!groupData.rules) groupData.rules = [];
                    if (value != null)
                    {
                        groupData.rules.push({
                            field: fieldName, op: op, value: value, type: type,gop:gop
                        });
                    }
                }
            });
            return groupData;
        },
        // Build the HTML of one group table.
        // NOTE(review): the ' l-filter-group-alt' push below happens after
        // the table tag is already closed, so the class ends up as text, not
        // as a class attribute — looks like a latent upstream bug; confirm.
        _bulidGroupTableHtml: function (altering, allowDelete)
        {
            var g = this, p = this.options;
            var tableHtmlArr = [];
            tableHtmlArr.push('<table cellpadding="0" cellspacing="10" border="0" class="l-filter-group" style="border-spacing: 10px;border-collapse:separate "> ');
            if (altering)
                tableHtmlArr.push(' l-filter-group-alt');
            tableHtmlArr.push('<thead>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[0]+'">'+p.texts[0]+'</th>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[1]+'">'+p.texts[1]+'</th>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[2]+'">'+p.texts[2]+'</th>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[3]+'">'+p.texts[3]+'</th>');
            tableHtmlArr.push('</thead>');
            if (p.atLeastOne){
                tableHtmlArr.push('<tbody class="at-least-one">');
            }
            else{
                tableHtmlArr.push('<tbody>');
            }
            tableHtmlArr.push('<tr class="l-filter-rowlast"><td class="l-filter-rowlastcell" align="left" colSpan="4">');
            // and/or select kept disabled by the original author:
            /* tableHtmlArr.push('<select class="groupopsel">');
            tableHtmlArr.push('<option value="and">' + p.strings['and'] + '</option>');
            tableHtmlArr.push('<option value="or">' + p.strings['or'] + '</option>');
            tableHtmlArr.push('</select>');*/
            // add group
            if(p.showGroup){
                tableHtmlArr.push('<input type="button" value="' + p.strings['addgroup'] + '" class="addgroup">');
            }
            // add rule
            tableHtmlArr.push('<div class="l-icon-add addrule" style="width:16px ;height: 16px;cursor: pointer;"></div>');
            if(p.showGroup && allowDelete){
                tableHtmlArr.push('<input type="button" value="' + p.strings['deletegroup'] + '" class="deletegroup">');
            }
            tableHtmlArr.push('</td></tr>');
            tableHtmlArr.push('</tbody></table>');
            return tableHtmlArr.join('');
        },
        // Build the HTML of one rule row (field / operator / value / relation).
        _bulidRuleRowHtml: function (fields)
        {
            var g = this, p = this.options;
            fields = fields || p.fields;
            var rowHtmlArr = [];
            var fieldType = fields && fields.length && fields[0].type ? fields[0].type : "string";
            rowHtmlArr.push('<tr fieldtype="' + fieldType + '" class="l-filter-column"><td class="l-filter-column">');
            rowHtmlArr.push('<select class="fieldsel form-control" style="width:180px;">');
            for (var i = 0, l = fields.length; i < l; i++)
            {
                var field = fields[i];
                rowHtmlArr.push('<option value="' + field.name + '"');
                if (i == 0) rowHtmlArr.push(" selected ");
                rowHtmlArr.push('>');
                rowHtmlArr.push(field.display);
                rowHtmlArr.push('</option>');
            }
            rowHtmlArr.push("</select>");
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td class="l-filter-op">');
            rowHtmlArr.push('<select class="opsel form-control" style="width:120px;"> ');
            rowHtmlArr.push(g._bulidOpSelectOptionsHtml(fieldType, fields && fields.length ? fields[0].operator : null));
            rowHtmlArr.push('</select>');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td class="l-filter-value">');
            rowHtmlArr.push('<input type="text" class="valtxt form-control" style="width:180px;" />');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td class="l-filter-gop">');
            rowHtmlArr.push('<select class="groupopsel form-control" style="width:100px;">');
            rowHtmlArr.push('<option value="and">' + p.strings['and'] + '</option>');
            rowHtmlArr.push('<option value="or">' + p.strings['or'] + '</option>');
            rowHtmlArr.push('</select>');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td>');
            rowHtmlArr.push('<div class="l-icon-cross deleterole"></div>');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('</tr>');
            return rowHtmlArr.join('');
        },
    });
})(jQuery);<file_sep>/src/main/java/cn/zllog/services/blog/FundBlogServiceImpl.java
package cn.zllog.services.blog;
import cn.zllog.dao.ICommonDAO;
import cn.zllog.entity.BLogEntity;
import cn.zllog.entity.BbsEntity;
import cn.zllog.entity.StatiEntity;
import cn.zllog.plugin.PageView;
import cn.zllog.services.IFundBlogService;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
@Service("FundBlogService")
/**
 * Blog/BBS service: up/down vote counters, visit statistics, paged blog
 * queries, and a legacy HTTP helper for IP geolocation lookups.
 */
public class FundBlogServiceImpl implements IFundBlogService {
    @Inject
    private ICommonDAO commonDAO;
    /** Increment the "down" counter of a BBS entry and persist it. */
    @Override
    public void doDown(Integer id) {
        BbsEntity bbsEntity = new BbsEntity().findById(id);
        bbsEntity.setDown(bbsEntity.getDown()+1);
        bbsEntity.update();
    }
    /** Increment the "top" (up-vote) counter of a BBS entry and persist it. */
    @Override
    public void doUp(Integer id) {
        BbsEntity bbsEntity = new BbsEntity().findById(id);
        bbsEntity.setTop(bbsEntity.getTop()+1);
        bbsEntity.update();
    }
    /**
     * Count today's visit records.
     * NOTE(review): this loads the entire StatiEntity table and filters in
     * memory — a HQL date predicate would avoid the full scan; confirm the
     * DAO supports parameterized queries before changing.
     */
    @Override
    public Integer findCountDay() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String date = sdf.format(new Date());
        String hql = "from StatiEntity stati";
        List<StatiEntity> list = commonDAO.findByHql(hql);
        Integer count = 0;
        for(StatiEntity stati : list){
            String sdate = sdf.format(stati.getDate());
            if(date.equals(sdate)){
                count++;
            }
        }
        return count;
    }
    /** Total number of visit records (all time). */
    @Override
    public Integer findCountStati() {
        String hql = "from StatiEntity stati";
        Integer count = Integer.valueOf(commonDAO.findByHql("select count(1) as count " + hql).get(0).toString());
        return count;
    }
    /** Persist one visit record (visitor IP plus resolved location name). */
    @Override
    public void saveStati(String ip, String name) {
        StatiEntity statiEntity = new StatiEntity();
        statiEntity.setIp(ip);
        statiEntity.setName(name);
        statiEntity.save();
    }
    /**
     * Resolve the client IP, preferring proxy headers over the socket
     * address; an IPv6 loopback is mapped to the literal "本地" (local).
     */
    @Override
    public String getIp(HttpServletRequest request) {
        String ip = request.getHeader("x-forwarded-for");
        if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) {
            ip = request.getHeader("Proxy-Client-IP");
        }
        if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) {
            ip = request.getHeader("WL-Proxy-Client-IP");
        }
        if (ip == null || ip.length() == 0 || "unknown".equalsIgnoreCase(ip)) {
            ip = request.getRemoteAddr();
        }
        if (ip.equals("0:0:0:0:0:0:0:1")) {
            ip = "本地";
        }
        return ip;
    }
    /** All BBS entries, newest first. */
    @Override
    public List<BbsEntity> findBbs() {
        String orderby =" order by id desc";
        String hql = "from BbsEntity bbs where 1=1" + orderby;
        List<BbsEntity> list = commonDAO.findByHql(hql);
        return list;
    }
    /**
     * Save a new BBS entry, seeding its vote counters with random values
     * (top in [100,999], down in [50,299]).
     */
    @Override
    public void add(BbsEntity bbsEntity) {
        bbsEntity.setTop((int)(Math.random()*900)+100);
        bbsEntity.setDown((int)(Math.random()*250)+50);
        bbsEntity.save();
    }
    /**
     * Paged blog query ordered by the "top" counter.
     * NOTE(review): `condition` and `sort` are concatenated straight into
     * the HQL — confirm the callers only pass trusted, server-built values.
     */
    @Override
    public PageView findByPage(Integer currPage, Integer pageSize, String condition, String sort) {
        PageView pageView = new PageView(pageSize, currPage);
        String orderBy = " order by " + "top" + " " + sort;
        String hql = "from BLogEntity blog where 1=1 " + condition + orderBy;
        Integer count = Integer.valueOf(commonDAO.findByHql("select count(1) as count " + hql).get(0).toString());
        List<BLogEntity> databaseResults = commonDAO.findPageByHql(currPage,pageSize,hql);
        pageView.setQueryResult(count, databaseResults);
        return pageView;
    }
    /** Look up the province/city info for an IP via the Taobao IP service. */
    public String getAddresses(String ip)
    {
        try {
            // Call the Taobao geolocation API.
            String urlStr = "http://ip.taobao.com/service/getIpInfo.php";
            // Fetch the province/city/district info for the IP.
            String returnStr = getResult(urlStr, "ip="+ip, "utf-8");
            if (returnStr != null) {
                // Post-processing of the returned region info was left
                // disabled by the original author:
                // System.out.println("(1) unicode转换成中文前的returnStr : " + returnStr);
                // returnStr = decodeUnicode(returnStr);
                // System.out.println("(2) unicode转换成中文后的returnStr : " + returnStr);
                // String[] temp = returnStr.split(",");
                // if(temp.length<3){
                // return "0";//无效IP,局域网测试
                // }
                return returnStr;
            }
        }catch (Exception e){
            e.printStackTrace();
        }
        return null;
    }
    /**
     * Issue the HTTP request and return the response body as a string, or
     * null on failure.
     * NOTE(review): the reader/output streams are only closed on the success
     * path — an IOException mid-read leaks them; also the method declares
     * GET yet writes a request body, which HttpURLConnection may silently
     * turn into a POST. Behavior kept as-is; confirm before changing.
     */
    private static String getResult(String urlStr, String content, String encoding) {
        URL url = null;
        HttpURLConnection connection = null;
        try {
            url = new URL(urlStr);
            connection = (HttpURLConnection) url.openConnection();// new connection instance
            connection.setConnectTimeout(2000);// connect timeout, milliseconds
            connection.setReadTimeout(2000);// read timeout, milliseconds
            connection.setDoOutput(true);// enable the output stream
            connection.setDoInput(true);// enable the input stream
            connection.setRequestMethod("GET");// request method POST|GET
            connection.setUseCaches(false);// disable caching
            connection.connect();// open the connection
            DataOutputStream out = new DataOutputStream(connection
                    .getOutputStream());// write the request payload
            out.writeBytes(content);// e.g. "ip=x.x.x.x"
            out.flush();// flush
            out.close();// close the output stream
            BufferedReader reader = new BufferedReader(new InputStreamReader(
                    connection.getInputStream(), encoding));// read the response
            StringBuffer buffer = new StringBuffer();
            String line = "";
            while ((line = reader.readLine()) != null) {
                buffer.append(line);
            }
            reader.close();
            return buffer.toString();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (connection != null) {
                connection.disconnect();// release the connection
            }
        }
        return null;
    }
    /**
     * Decode Java-style \\uXXXX escapes (plus \\t \\r \\n \\f) to characters.
     *
     * @param theString the escaped input
     * @return the decoded string
     * @throws IllegalArgumentException on a malformed \\u escape
     */
    public static String decodeUnicode(String theString) {
        char aChar;
        int len = theString.length();
        StringBuffer outBuffer = new StringBuffer(len);
        for (int x = 0; x < len;) {
            aChar = theString.charAt(x++);
            if (aChar == '\\') {
                aChar = theString.charAt(x++);
                if (aChar == 'u') {
                    // Accumulate four hex digits into one code unit.
                    int value = 0;
                    for (int i = 0; i < 4; i++) {
                        aChar = theString.charAt(x++);
                        switch (aChar) {
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                        case '8':
                        case '9':
                            value = (value << 4) + aChar - '0';
                            break;
                        case 'a':
                        case 'b':
                        case 'c':
                        case 'd':
                        case 'e':
                        case 'f':
                            value = (value << 4) + 10 + aChar - 'a';
                            break;
                        case 'A':
                        case 'B':
                        case 'C':
                        case 'D':
                        case 'E':
                        case 'F':
                            value = (value << 4) + 10 + aChar - 'A';
                            break;
                        default:
                            throw new IllegalArgumentException(
                                    "Malformed encoding.");
                        }
                    }
                    outBuffer.append((char) value);
                } else {
                    // Simple single-character escapes.
                    if (aChar == 't') {
                        aChar = '\t';
                    } else if (aChar == 'r') {
                        aChar = '\r';
                    } else if (aChar == 'n') {
                        aChar = '\n';
                    } else if (aChar == 'f') {
                        aChar = '\f';
                    }
                    outBuffer.append(aChar);
                }
            } else {
                outBuffer.append(aChar);
            }
        }
        return outBuffer.toString();
    }
}
<file_sep>/src/main/webapp/js/system/user/list.js
// User list grid instance (assigned on document ready).
var userGrid,
    custom_rules = {}; // custom search conditions accumulated by search()
$(function() {
    // Build the user list grid.
    userGrid = $("#gridArea").ligerGrid({
        columns: [
            { display: 'id', name: 'id', width: 100 },
            { display: '关联人员', name: 'lyPerson.name', width: 200 },
            { display: '账号', name: 'accountName', width: 200 },
            { display: '账号状态', name: 'status', width: 100,
                render: function(rowdata, index, data) {
                    // Map status code to its display label.
                    return {0:'草稿',1:'启用',2:'禁用'}[data];
                }
            },
            { display: '描述', name: 'description', width: 200 },
            { display: '时间', name: 'createTime', width: 200,
                render: function(rowdata, index, data) {
                    return new Date(data).format("yyyy-MM-dd hh:mm:ss");
                }
            }
        ],
        url: rootPath + '/user/findByPage.shtml',
        sortName: 'createTime',
        sortOrder: 'desc'
    });
    // Status dropdown for the quick-search form.
    $("#status_combo").ligerComboBox({
        width : 210,
        selectBoxWidth: 200,
        selectBoxHeight: 300,
        valueFieldID: 'status',
        data: [
            { id:0,text:'草稿'},{ id:1,text:'启用'},{ id:2,text:'禁用'}
        ]
    });
    // Toolbar bindings. Fixed: the original `.click("click", handler)` calls
    // passed the string "click" as jQuery *event data*; `.on('click', ...)`
    // is the intended binding and behaves identically for these handlers.
    $("#addFun").on("click", function() {
        addUser();
    });
    $("#editFun").on("click", function() {
        editUser();
    });
    $("#delFun").on("click", function() {
        delUser();
    });
    $("#enable").on("click", function() {
        enable();
    });
    $("#disable").on("click", function() {
        disable();
    });
    $("#search").on("click", function() {
        search();
    });
    $("#pageloading").hide();
});
/**
 * Collect the quick-search inputs into query rules and run the search.
 */
function search() {
    var accountName = $("#accountName").val();
    var userName = $("#userName").val();
    var status = $("#status").val();
    var rules = [];
    if (accountName) {
        rules.push({ field: 'accountName', op: 'like', value: accountName, type: 'string' });
    }
    if (userName) {
        rules.push({ field: 'userName', op: 'like', value: userName, type: 'string' });
    }
    if (status && status != -1) {
        rules.push({ field: 'status', op: 'equal', value: status, type: 'int' });
    }
    custom_rules.rules = rules;
    searchData();
}
/**
 * Merge all collected query rules (de-duplicated by field/op/value) and
 * hand them to the grid as one AND-group.
 */
function searchData() {
    var seen = {};
    var uniqueRules = [];
    $.each(custom_rules.rules, function (idx, rule) {
        var key = rule.field + "_" + rule.op + "_" + rule.value;
        if (!seen[key]) {
            seen[key] = true;
            uniqueRules.push(rule);
        }
    });
    var groupData = { rules: uniqueRules, op: "and" };
    userGrid.search({ where: JSON2.stringify(groupData) });
}
// Open the "create account" popup, passing the grid for later refresh.
function addUser() {
    var options = {
        title: '新增账号',
        url: rootPath + '/user/addUI.shtml',
        width: 800,
        height: 500,
        data: {
            grid: userGrid
        }
    };
    $.ligerDialog.open(options);
}
// Edit exactly one selected account; enabled accounts may not be edited.
function editUser() {
    var rows = userGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (rows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    if (rows[0].status == 1) {
        layer.msg("只能修改草稿或者禁用的行");
        return;
    }
    var options = {
        title: '编辑账号',
        url: rootPath + '/user/editUI.shtml?id=' + rows[0].id,
        width: 800,
        height: 500,
        data: {
            grid: userGrid // hand the grid to the popup so it can refresh it
        }
    };
    $.ligerDialog.open(options);
}
// Delete selected accounts; only draft rows (status 0) may be deleted.
function delUser() {
    var selected = userGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index, item) {
        // Fixed: the field is `status` (as used by the grid renderer and by
        // enable()/disable()); the previous `item.state` was always
        // undefined, so this check wrongly rejected every selection.
        if (item.status != 0) {
            exist_error = true;
            return false; // break out of $.each once one bad row is found
        }
    });
    if (!exist_error) {
        // Join explicitly for a readable query string (arrays would coerce
        // to the same comma-separated form anyway).
        var ids = $.map(selected, function(item){ return item.id; }).join(',');
        $.ligerDialog.confirm('确定是否删除?', function(yes) {
            if (yes == true) {
                var url = rootPath + '/user/deleteEntity.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    userGrid.reloadAll();
                    layer.msg('删除成功');
                } else {
                    layer.msg('删除失败');
                }
            }
        });
    } else {
        layer.msg("存在状态不合法的单据,不能删除");
    }
}
/**
 * Enable the selected accounts; rows that are already enabled are rejected.
 */
function enable() {
    var rows = userGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待启用的项");
        return;
    }
    var invalid = false;
    for (var i = 0; i < rows.length; i++) {
        if (rows[i].status == 1) {
            invalid = true;
            break;
        }
    }
    if (invalid) {
        layer.msg("存在状态不合法的单据,不能启用");
        return;
    }
    var ids = [];
    for (var j = 0; j < rows.length; j++) {
        ids.push(rows[j].id);
    }
    $.ligerDialog.confirm('确定是否启用?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/user/enable.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            userGrid.reloadAll();
            layer.msg('启用成功');
        } else {
            layer.msg('启用失败');
        }
    });
}
/**
 * Disable the selected accounts; only enabled rows (status 1) qualify.
 */
function disable() {
    var rows = userGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待禁用的项");
        return;
    }
    var invalid = false;
    for (var i = 0; i < rows.length; i++) {
        if (rows[i].status != 1) {
            invalid = true;
            break;
        }
    }
    if (invalid) {
        layer.msg("存在状态不合法的单据,不能禁用");
        return;
    }
    var idList = [];
    for (var j = 0; j < rows.length; j++) {
        idList.push(rows[j].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否禁用?', function (yes) {
        if (yes != true) {
            return;
        }
        var url = rootPath + '/user/disable.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            userGrid.reloadAll();
            layer.msg('禁用成功');
        } else {
            layer.msg('禁用失败');
        }
    });
}
<file_sep>/src/main/webapp/js/system/yaodian/specifyPersonList.js
// Grid listing candidate accounts (assigned on document ready).
var dataGrid;
// This popup's dialog object.
var thisDialog = frameElement.dialog; // current window
// Parameters handed over by the opener page.
var medicineIds = thisDialog.get('data').medicineIds; // ids of the medicines to assign
var userListGrid = thisDialog.get('data').grid; // opener's grid (refreshed after assignment)
// Page bootstrap: build the user grid and wire the search/reset/ok/cancel buttons.
$(function() {
    dataGrid = $("#gridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 100 } ,
            { display: '用户名', name: 'userName', width: 100 },
            { display: '账号', name: 'accountName', width: 100,},
            { display: '所属角色', name: 'roleName', minWidth: 100, },
        ],
        url: rootPath + '/user/findByPage.shtml',
        sortName: 'createTime',
        sortOrder: 'desc',
        isSingleCheck: true, // exactly one checker may be picked
        height:'460'
    });
    $("#pageloading").hide();
    $("#searchButton").click("click", function(event) { // bind the search button
        event.preventDefault();
        searchAccount();
    });
    $("#resetButton").click("click", function(event) { // bind the reset button
        event.preventDefault();
        resetAccount();
    });
    $("#specifyPersonOk").click("click", function() { // bind the confirm button
        specifyPersonOk();
    });
    $("#specifyPersonCancel").click("click", function() { // bind the cancel button
        specifyPersonCancel();
    });
});
// Run a grid search using the current search-form fields.
function searchAccount() {
    dataGrid.search($("#searchForm").serializeJson());
}
// Clear the search form, drop any grid parameters and reload unfiltered.
function resetAccount() {
    var formElement = $("#searchForm")[0];
    formElement.reset();
    dataGrid.clearParm();
    dataGrid.reloadAll();
}
/**
 * Assign the selected user as the checker for the chosen medicines.
 * Requires one selected row; posts to the server after confirmation,
 * then closes this dialog and refreshes the opener's grid.
 */
function specifyPersonOk() {
    var row = dataGrid.getSelectedRow();
    if (row == null) {
        layer.msg("请选中校对人!");
        return;
    }
    $.ligerDialog.confirm('确定是指派?', function (yes) {
        if (yes == true) {
            var result = CommnUtil.ajax(rootPath + '/yaodian/specifyPerson.shtml', {
                medicineIds: medicineIds,
                specifyPersonId: row.id
            }, "json");
            if (result == "success") {
                thisDialog.close();
                userListGrid.reloadAll();
                layer.msg('指派成功');
            } else {
                layer.msg('指派失败');
            }
        }
    });
}
// Close the dialog without assigning anyone.
function specifyPersonCancel(){
    thisDialog.close();
}<file_sep>/src/main/java/cn/zllog/beanEntity/UserRoleRes.java
package cn.zllog.beanEntity;
import javax.persistence.Column;
/**
 * Join entity linking a user, a role and a resource
 * (one row of the user-role-resource assignment table).
 */
public class UserRoleRes implements java.io.Serializable {

    // Explicit serialVersionUID so the serialized form stays stable across
    // recompiles (a Serializable class without one gets a compiler-derived id).
    private static final long serialVersionUID = 1L;

    private Integer userId;
    private Integer roleId;
    private Integer resId;

    // Property accessors

    /** @return id of the user this assignment belongs to */
    @Column(name = "userId", nullable = false)
    public Integer getUserId() {
        return this.userId;
    }

    public void setUserId(Integer userId) {
        this.userId = userId;
    }

    /** @return id of the granted resource */
    @Column(name = "resId", nullable = false)
    public Integer getResId() {
        return this.resId;
    }

    public void setResId(Integer resId) {
        this.resId = resId;
    }

    /** @return id of the role through which the resource is granted */
    @Column(name = "roleId", nullable = false)
    public Integer getRoleId() {
        return this.roleId;
    }

    public void setRoleId(Integer roleId) {
        this.roleId = roleId;
    }
}
<file_sep>/src/main/webapp/ligerui/components/ligerExpandComboBox.js
/**
*
* Author fengpy
* 增加添加初始化数据选中checkbox
*
*
*
*/
// ligerUI ComboBox extension: adds checkbox syncing when the control is
// initialised with pre-selected multi-select data, plus keyboard navigation.
(function ($)
{
    // jQuery plugin entry point.
    $.fn.ligerExpandComboBox = function (options)
    {
        return $.ligerui.run.call(this, "ligerExpandComboBox", arguments);
    };
    // Accessor for an already-initialised control manager.
    $.fn.ligerGetExpandComboBoxManager = function ()
    {
        return $.ligerui.run.call(this, "ligerGetExpandComboBoxManager", arguments);
    };
    // Defaults: inherit everything from the base ComboBox.
    $.ligerDefaults.ExpandComboBox = $.extend(true, $.ligerDefaults.ComboBox,{
    });
    // extension-method namespace (note: "ligerMethos" spelling is the
    // library's own identifier — do not "fix" it)
    $.ligerMethos.ExpandComboBox = $.ligerMethos.ExpandComboBox || {};
    $.ligerui.controls.ExpandComboBox = function (element, options)
    {
        $.ligerui.controls.ExpandComboBox.base.constructor.call(this, element, options);
    };
    $.ligerui.controls.ExpandComboBox.ligerExtend($.ligerui.controls.ComboBox, {
        // Core value setter: resolves display text, syncs the dropdown's
        // selected rows / checkboxes, and fires the optional render callback.
        _setValue: function (value, text)
        {
            var g = this, p = this.options;
            var isInit = false, isTriggerEvent = true;
            // The sentinel text "init" marks an initialisation call.
            if (text == "init")
            {
                text = null;
                isInit = true;
                isTriggerEvent = p.initIsTriggerEvent ? true : false;
            }
            if (p.isTextBoxMode)
            {
                text = value;
            } else
            {
                text = text || g.findTextByValue(value);
            }
            if (p.tree)
            {
                // refresh the tree's checked state (deferred so the tree exists)
                setTimeout(function ()
                {
                    if (p.setTextBySource)
                    {
                        // refresh the tree selection AND update the textbox
                        g.selectValueByTree(value);
                    } else
                    {
                        g.treeSelectInit(value);
                    }
                }, 100);
            }
            else if (!p.isMultiSelect)
            {
                // single-select: highlight the matching row, clear the rest
                g._changeValue(value, text, isTriggerEvent);
                $("tr[value='" + value + "'] td", g.selectBox).addClass("l-selected");
                $("tr[value!='" + value + "'] td", g.selectBox).removeClass("l-selected");
            }
            else
            {
                // multi-select: tick one checkbox per split value
                g._changeValue(value, text, isTriggerEvent);
                if (value != null)
                {
                    var targetdata = value.toString().split(p.split);
                    $("table.l-table-checkbox :checkbox", g.selectBox).each(function () { this.checked = false; });
                    for (var i = 0; i < targetdata.length; i++)
                    {
                        $("table.l-table-checkbox tr[value=" + targetdata[i] + "] :checkbox", g.selectBox).each(function () {
                            this.checked = true;
                            // added: also tick the styled checkbox anchor when
                            // selecting from initial data
                            $("table.l-table-checkbox tr[value=" + targetdata[i] + "] a").addClass("l-checkbox-checked");
                        });
                    }
                }
            }
            if (p.selectBoxRenderUpdate)
            {
                p.selectBoxRenderUpdate.call(g, {
                    selectBox: g.selectBox,
                    value: value,
                    text: text
                });
            }
        }
    });
    // NOTE(review): aliases selectValue — presumably inherited from the
    // ComboBox base; confirm it exists on the prototype chain.
    $.ligerui.controls.ExpandComboBox.prototype.setValue = $.ligerui.controls.ExpandComboBox.prototype.selectValue;
    // set the textbox and hidden-field values directly
    $.ligerui.controls.ExpandComboBox.prototype.setInputValue = $.ligerui.controls.ExpandComboBox.prototype._changeValue;
    //Key Init — global keyboard handling for whichever combobox input has focus
    (function ()
    {
        $(document).unbind('keydown.ligercombobox');
        $(document).bind('keydown.ligercombobox',function (event)
        {
            // Open the dropdown (if hidden) and move focus downwards.
            function down()
            {
                if (!combobox.selectBox.is(":visible"))
                {
                    combobox.selectBox.show();
                }
                combobox.downFocus();
            }
            // Commit the currently highlighted row/grid selection.
            function toSelect()
            {
                if (!curGridSelected)
                {
                    combobox._changeValue(value, curTd.attr("text"), true);
                    combobox.selectBox.hide();
                    combobox.trigger('textBoxKeyEnter', [{
                        element: curTd.get(0)
                    }]);
                }
                else
                {
                    combobox._changeValue(curGridSelected[combobox_op.valueField], curGridSelected[combobox_op.textField], true);
                    combobox.selectBox.hide();
                    combobox.trigger('textBoxKeyEnter', [{
                        rowdata: curGridSelected
                    }]);
                }
            }
            var curInput = $("input:focus");
            if (curInput.length && curInput.attr("data-comboboxid"))
            {
                var combobox = liger.get(curInput.attr("data-comboboxid"));
                if (!combobox) return;
                var combobox_op = combobox.options;
                if (!combobox.get("keySupport")) return;
                if (event.keyCode == 38) //up
                {
                    combobox.upFocus();
                } else if (event.keyCode == 40) //down
                {
                    if (combobox.hasBind('textBoxKeyDown'))
                    {
                        combobox.trigger('textBoxKeyDown', [
                        {
                            callback: function ()
                            {
                                down();
                            }
                        }]);
                    }
                    else
                    {
                        down();
                    }
                }
                else if (event.keyCode == 13) //enter
                {
                    if (!combobox.selectBox.is(":visible")) return;
                    var curGridSelected = null;
                    if (combobox.grid)
                    {
                        curGridSelected = combobox.grid.getSelected();
                    }
                    var curTd = combobox.selectBox.table.find("td.l-over");
                    if (curGridSelected || curTd.length)
                    {
                        var value = curTd.attr("value");
                        if (curGridSelected && curGridSelected.ID) value = curGridSelected.ID;
                        if (combobox.enabledLoadDetail())
                        {
                            // async detail load before committing the selection
                            combobox.loadDetail(value, function (data)
                            {
                                if (!curGridSelected)
                                {
                                    var index = combobox.getRowIndex(value);
                                    if (index == -1) return;
                                    combobox.data = combobox.data || [];
                                    combobox.data[index] = combobox.selected = data;
                                }
                                toSelect();
                            });
                        } else
                        {
                            toSelect();
                        }
                    }
                }
            }
        });
    })();
})(jQuery);<file_sep>/src/main/webapp/js/fund/influence/edit.js
// Handles to this dialog window and the opener's grid (refreshed on save).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's list grid
// Dialog bootstrap: date picker, reset-button wiring and form validation.
$(function() {
    // account-opening date picker
    $("#accountOpeningDate").datetimepicker({
        language: 'zh-CN', // Chinese locale
        format: 'yyyy-mm-dd ', // display format (trailing space kept as-is)
        startView: 'month',
        minView: "month", // limit view depth (original note: only show down to month)
        autoclose: true, // close automatically after picking
        todayBtn: true, // show the "today" button
    });
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        errorClass:'error-msg',
        // Must be defined before validation, otherwise the ajax submit is skipped.
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the save succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        // refresh the opener's list
                        targetGrid.reloadAll();
                        // ask whether to close this dialog
                        $.ligerDialog.confirm('保存成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                            // reload the dialog so a second save starts clean
                            self.location.reload();
                        });
                    } else {
                        $.ligerDialog.error('操作失败! ' + data);
                    }
                }
            });
        },
        // Required + non-whitespace checks for all bank-account fields.
        rules : {
            "bankTypeId" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "accountName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "accountNumber" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "accountOpeningBank" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "bankShortCut" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "salesDepartmentId" : {
                required : true,
                validateNullOrWhiteSpace: true,
            }
        },
        messages : {
            "bankTypeId" : {
                required : "请选择银行类型",
                validateNullOrWhiteSpace: "银行类型不能为空白字符",
            },
            "accountName" : {
                required : "请输入户名",
                validateNullOrWhiteSpace: "户名不能为空白字符",
            },
            "accountNumber" : {
                required : "请输入银行账号",
                validateNullOrWhiteSpace: "银行账号不能为空白字符",
            },
            "accountOpeningBank" : {
                required : "请输入开户银行",
                validateNullOrWhiteSpace: "开户银行不能为空白字符",
            },
            "bankShortCut" : {
                required : "请输入银行简称",
                validateNullOrWhiteSpace: "银行简称不能为空白字符",
            },
            "salesDepartmentId" : {
                required : "请选择业务部门",
                validateNullOrWhiteSpace: "业务部门不能为空白字符",
            }
        },
        // Render each error into its own labelled row under the field.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-8');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        // Remove the error row once the field validates.
        success : function(label) {
            label.parent().remove();
        },
    });
});
<file_sep>/src/main/webapp/js/system/mdm_code_rule/referenceDict.js
/**
 * Referenceable fields per entity, keyed by fully-qualified class name.
 * Each entry is a {value, text} pair consumed by the code-rule editor.
 */
var referenceDict = {};
// supplier fields available for code-rule references
referenceDict["com.crown.entity.MdmSupplier"] = [
    {value: 'name', text: '供应商名称'},
    {value: 'dutyParagraph', text: '税号'},
    {value: 'legalPerson', text: '法人代表'}
];
// customer fields available for code-rule references
referenceDict["com.crown.entity.MdmCustomer"] = [
    {value: 'name', text: '客户名称'},
    {value: 'dutyParagraph', text: '税号'},
    {value: 'legalPerson', text: '法人代表'}
];<file_sep>/src/main/webapp/js/fund/voucher_merge_rule/add.js
// Handles to this dialog window and the opener's grid (refreshed on save).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's list grid
$(function() {
$("#cbxCompany").ligerComboBox({ isShowCheckBox: true, isMultiSelect: true,
data: org_data, valueFieldID: 'orgIds'
});
var vals = [];
$('[name=summaryWay]:checkbox').click(function() {
if ($(this).prop("checked")) {
vals.push($(this).val());
} else {
vals.pop($(this).val());
}
$("#summaryWays").val(vals.join(";"));
});
$("form").validate({
ignore: [], //对隐藏域也可以验证
errorClass:'error-msg',
submitHandler : function(form) {// 必须写在验证前面,否则无法ajax提交
ly.ajaxSubmit(form, {// 验证新增是否成功
type : "post",
dataType : "json",
success : function(data) {
if (data == "success") {
targetGrid.reloadAll();
$.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
if(yes == true){
thisDialog.close();
}
});
} else {
$.ligerDialog.error('添加失败!' + data);
}
}
});
},
rules : {
"orgIds" : {
required : true
},
"summaryWays" : {
required : true
}
},
messages : {
"orgIds" : {
required : "请选择所属公司"
},
"summaryWays" : {
required : "请选择汇总方式"
}
},
success : function(label) {
},
});
$("button[type='reset']").click(function(event){
thisDialog.close();
});
});<file_sep>/src/main/webapp/js/system/mdm_code_rule/list.js
// Code-rule list grid instance (initialised in the ready handler below).
var listGrid;
// Page bootstrap: build the code-rule grid and wire all toolbar buttons.
$(function () {
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            {
                display: '编码对象', name: 'forObject', width: 200, show: true,
                // Render the object name as a link that opens the detail dialog.
                render: function (rowData, index, data) {
                    return '<a href="javascript:showView(' + rowData.id + ');" title="查看明细">' + data + '</a>'
                }
            },
            {
                display: '规则代码', name: 'code', width: 150, show: true
            },
            {display: '状态', name: 'status', width: 100, show: true},
            {display: '汇总', name: 'detailSummary', minWidth: 150, show: true}
        ],
        url: rootPath + '/mdm_code_rule/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
        fixedCellHeight: true,
    });
    $("#pageloading").hide();
    $("#searchButton").click("click", function (event) { // bind the search button
        event.preventDefault();
        search();
    });
    $("#addRule").click("click", function () {
        add();
    });
    $("#editRule").click("click", function () {
        edit();
    });
    $("#deleteRule").click(function () {
        deleteRule();
    });
    $("#enableRule").click(function () {
        ruleStatusChange('enable');
    });
    $("#disableRule").click(function () {
        ruleStatusChange('disable');
    });
});
// Apply the current search-form criteria to the rule grid.
function search() {
    listGrid.search($("#searchForm").serializeJson());
}
// Clear the form, the grid's filter and parameters, then reload everything.
function reset() {
    var formElement = $("#searchForm")[0];
    formElement.reset();
    listGrid.filter.reset();
    listGrid.clearParm();
    listGrid.reloadAll();
}
// Open the "create rule" dialog, handing it the grid so it can refresh.
function add() {
    var dialogOptions = {
        title: '新增',
        url: rootPath + '/mdm_code_rule/addUI.shtml',
        width: 1000,
        height: 640,
        data: {
            grid: listGrid // let the dialog reload this list
        }
    };
    $.ligerDialog.open(dialogOptions);
}
// Open the edit dialog for exactly one selected rule.
function edit() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (rows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: '编辑',
        url: rootPath + '/mdm_code_rule/editUI.shtml?id=' + rows[0].id,
        width: 800,
        height: 640,
        data: {
            grid: listGrid // let the dialog reload this list
        }
    });
}
// Delete all selected rules after confirmation.
function deleteRule() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var idCsv = $.map(rows, function (row) { return row.id; }).join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            var result = CommnUtil.ajax(rootPath + '/mdm_code_rule/deleteEntity.shtml', {
                "ids": idCsv
            }, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败,' + result);
            }
        }
    });
}
// Enable or disable the selected rules; `status` is 'enable' or 'disable'.
function ruleStatusChange(status){
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待操作的项");
        return;
    }
    var idCsv = $.map(rows, function (row) { return row.id; }).join(',');
    var actionName = status == 'disable' ? '禁用' : '启用';
    $.ligerDialog.confirm('是否确定' + actionName + '?', function (yes) {
        if (yes == true) {
            var result = CommnUtil.ajax(rootPath + '/mdm_code_rule/statusChange.shtml', {
                ids: idCsv,
                status: status,
            }, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('操作成功');
            } else {
                layer.msg(result);
            }
        }
    });
}
// Show a read-only detail dialog for the rule with the given id.
function showView(id) {
    var dialogOptions = {
        width: 800,
        height: 600,
        title: '详情',
        url: rootPath + '/mdm_code_rule/viewUI.shtml',
        urlParms: {id: id},
        data: {
            grid: listGrid // pass the list to the dialog
        }
    };
    $.ligerDialog.open(dialogOptions);
}
<file_sep>/src/main/webapp/js/system/button/list.js
// NOTE(review): pageii appears unused in this file — confirm before removing.
var pageii = null;
// Button list grid instance (initialised in the ready handler below).
var roleGrid = null;
// Page bootstrap: build the button grid and wire the toolbar buttons.
$(function() {
    roleGrid = $("#gridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 100 } ,
            { display: '按钮名称', name: 'buttonName', width: 200 },
            { display: '关键字', name: 'buttonKey', width: 200 },
            { display: '预览', name: 'buttonHtml', width: 200 },
            { display: '描述', name: 'description', width: 200,},
        ],
        url: rootPath + '/button/findPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
    });
    $("#pageloading").hide();
    $("#search").click("click", function() { // bind the search button
        searchRole();
    });
    $("#reset").click("click", function() { // bind the reset button
        resetRole();
    });
    $("#addFun").click("click", function() {
        addRole();
    });
    $("#editFun").click("click", function() {
        editRole();
    });
    $("#delFun").click("click", function() {
        delRole();
    });
});
// Run a grid search using the current search-form fields.
function searchRole() {
    roleGrid.search($("#searchForm").serializeJson());
}
// Clear the search form, drop grid parameters and reload unfiltered.
function resetRole() {
    var formElement = $("#searchForm")[0];
    formElement.reset();
    roleGrid.clearParm();
    roleGrid.reloadAll();
}
// Open the "create button" dialog, handing it the grid so it can refresh.
function addRole() {
    var dialogOptions = {
        title: '新增',
        url: rootPath + '/button/addUI.shtml',
        width: 800,
        height: 430,
        data: {
            grid: roleGrid // let the dialog reload this list
        }
    };
    $.ligerDialog.open(dialogOptions);
}
// Open the edit dialog for exactly one selected button.
function editRole() {
    var rows = roleGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (rows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: '编辑',
        url: rootPath + '/button/editUI.shtml?id=' + rows[0].id,
        width: 800,
        height: 430,
        data: {
            grid: roleGrid // let the dialog reload this list
        }
    });
}
// Delete the selected buttons after confirmation.
function delRole() {
    var rows = roleGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var idCsv = $.map(rows, function (row) { return row.id; }).join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            var result = CommnUtil.ajax(rootPath + '/button/deleteEntity.shtml?ids=' + idCsv, {}, "json");
            if (result == "success") {
                roleGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
<file_sep>/src/main/webapp/js/system/mdm_supplier/add.js
// Handles to this dialog window and the opener's grid (refreshed on save).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's list grid
// Dialog bootstrap: wire the reset button and configure supplier-form validation.
$(function() {
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        errorClass:'error-msg',
        // Must be defined before validation, otherwise the ajax submit is skipped.
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "name" : {
                required : true,
                remote : { // server-side uniqueness check
                    type : "POST",
                    url : rootPath + '/mdm_supplier/nameNotExist.shtml',
                    data : {
                        name : function() {
                            return $("#name").val();
                        }
                    }
                },
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "dutyParagraph" : {
                required : true,
                remote : { // server-side uniqueness check
                    type : "POST",
                    url : rootPath + '/mdm_supplier/dutyParagraphNotExist.shtml',
                    data : {
                        dutyParagraph : function() {
                            return $("#dutyParagraph").val();
                        }
                    }
                },
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "legalPerson" : {
                required : true,
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "remark" : {
                maxlength: 255,
            }
        },
        messages : {
            "name" : {
                required : "请输入名称",
                remote : "该名称已经存在",
                validateNullOrWhiteSpace: "名称不能为空白字符",
                maxlength: "名称过长",
            },
            "dutyParagraph" : {
                required : "请输入税号",
                remote : "该税号已经存在",
                validateNullOrWhiteSpace: "税号不能为空白字符",
                maxlength: "税号过长",
            },
            "legalPerson" : {
                required : "请输入法人代表",
                validateNullOrWhiteSpace: "法人代表不能为空白字符",
                maxlength: "法人代表过长",
            },
            "remark" : {
                maxlength: "备注过长",
            }
        },
        // Append the error label beside the field inside its form-group.
        errorPlacement : function(error, element) {
            error.addClass('col-sm-4');
            element.closest('.form-group').append(error);
        },
        success : function(label) {
            label.remove();
        },
    });
});
/**
 * Reset the add form to its pristine state.
 */
function resetForm(){
    $("#form")[0].reset();
}<file_sep>/src/main/webapp/js/system/monitor/list.js
// Monitor list grid instance (initialised in the ready handler below).
var dataGrid;
// Page bootstrap: build the resource-monitor grid and wire search/reset.
$(function() {
    dataGrid = $("#gridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 50 } ,
            { display: 'cpu使用率', name: 'cpuUsage', width: 100 },
            { display: '预设cpu使用率', name: 'setCpuUsage', width: 100,},
            { display: 'Jvm使用率', name: 'jvmUsage', width: 100, },
            { display: '预设Jvm使用率', name: 'setJvmUsage', width: 100, },
            { display: 'Ram使用率', name: 'ramUsage', width: 100, },
            { display: '预设Ram使用率', name: 'setRamUsage', width: 100, },
            { display: '发送的邮件', name: 'email', width: 200, },
            { display: '发送的时间', name: 'operTime', width: 200 ,
                // Format the raw timestamp for display.
                render: function(rowdata,index,data) {
                    return new Date(data).format("yyyy-MM-dd hh:mm:ss");
                }
            },
            { display: '备注', name: 'mark', minWidth: 300, },
        ],
        url: rootPath + '/monitor/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
        fixedCellHeight: false,
    });
    $("#pageloading").hide();
    $("#search").click("click", function() { // bind the search button
        search();
    });
    $("#reset").click("click", function() { // bind the reset button
        reset();
    });
});
// Run a grid search using the current search-form fields.
function search() {
    dataGrid.search($("#searchForm").serializeJson());
}
// Clear the search form, drop grid parameters and reload unfiltered.
function reset(){
    $("#searchForm")[0].reset();
    dataGrid.clearParm();
    dataGrid.reloadAll();
}<file_sep>/src/main/webapp/js/fund/account_period/add.js
// Handles to this dialog window and the opener's grid (refreshed on save).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's list grid
// Dialog bootstrap: year combobox (defaulted to the current year),
// reset-button wiring and form validation with a server-side uniqueness check.
$(function() {
    $("#txtYear").ligerComboBox({
        data: year_data,
        valueFieldID: 'hidYear'
    }).setValue(currentYear);
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        ignore: [], // validate hidden fields too (the year lives in #hidYear)
        errorClass:'error-msg',
        // Must be defined before validation, otherwise the ajax submit is skipped.
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "hidYear" : {
                required : true,
                remote : { // server-side check: period for this year must not exist
                    type : "POST",
                    url : 'isExist.shtml',
                    data : {
                        year : function() {
                            return $("#hidYear").val();
                        }
                    }
                }
            }
        },
        messages : {
            "hidYear": {
                required: "请选择年份",
                remote : "该年份账期已经存在"
            }
        },
        success : function(label) {
        },
    });
});<file_sep>/src/main/webapp/js/fund/borrow_money/audit.js
// Handles to this dialog window and the opener's grid (refreshed on audit).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's list grid
// Validator instance for the audit form (exposed for other handlers).
var form_validator;
// Dialog bootstrap: wire the reset button and configure audit-form validation.
$(function() {
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    form_validator = $("#form").validate({
        errorClass:'error-msg',
        // Must be defined before validation, otherwise the ajax submit is skipped.
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the audit succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        // Disable submit while the success toast shows so the
                        // user cannot double-submit before the dialog closes.
                        $("button[type='submit']").attr("disabled",true);
                        layer.msg('审核成功');
                        setTimeout(function(){
                            $("button[type='submit']").attr("disabled",false);
                            thisDialog.close();
                        },2000);
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "refund_date" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "borrow_money" : {
                required : true,
                number : true,
                validateNullOrWhiteSpace: true,
            },
            "borrow_interest" : {
                required : true,
                number : true,
                validateNullOrWhiteSpace: true,
            },
            "account_opening_bank" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "account_name" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "account_number" : {
                required : true,
                number : true,
                validateNullOrWhiteSpace: true,
            },
            "bank_type_name" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "comment" : {
                maxlength: 200,
            },
            "borrow_cause" : {
                maxlength: 200,
            }
        },
        messages : {
            "refund_date" : {
                required : "预计还款日期不能为空"
            },
            "borrow_money" : {
                required : "申请借款金额不能为空"
            },
            "borrow_interest" : {
                required : "借款利率不能为空"
            },
            "bank_type_name" : {
                required : "收款银行不能为空"
            },
            "account_opening_bank" : {
                required : "开户行不能为空"
            },
            "account_name" : {
                required : "收款账户不能为空"
            },
            "account_number" : {
                required : "收款银行账号不能为空"
            },
            "comment" : {
                // BUG FIX: these two messages were keyed "required" while the
                // corresponding rule is maxlength, so the configured text was
                // never shown (the plugin fell back to its default message).
                maxlength: "备注过长",
            },
            "borrow_cause" : {
                maxlength: "借款原因太长",
            }
        },
        // Render each error into its own labelled row under the field.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-6');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {
            label.remove();
        },
    });
});
/**
 * Reset the audit form, including the hidden applying-org field
 * that form.reset() alone does not clear.
 */
function resetForm(){
    $("#form")[0].reset();
    $("#apply_org_id").val('');
}<file_sep>/src/main/webapp/js/system/orguserrole/add.js
// Handles to this dialog window and the opener's grid (refreshed on save).
var thisDialog = frameElement.dialog; // current window
var targetGrid = thisDialog.get('data').grid; // opener's user list
/*var tree_data;
var role_data;*/
/**
 * Reset the add form to its pristine state.
 */
function resetForm(){
    $("#form")[0].reset();
}
// Dialog bootstrap: configure form validation and pre-populate the
// company / role / user dropdowns. (The large comment block below is the
// older tree/combobox implementation kept for reference.)
$(function() {
    /*
    //加载组织树
    var url = rootPath + '/organization/orgTrees.shtml';
    var data = CommnUtil.ajax(url, null,"json");
    if (data.result>0) {
        tree_data = data.list;
    } else {
        layer.msg("获取组织信息错误,请联系管理员!");
    }
    //加载角色列表
    var url = rootPath + '/role/getAll.shtml';
    var data = CommnUtil.ajax(url, null, "json");
    if (data.length > 0) {
        role_data = data;
    } else {
        layer.msg("获取角色信息错误,请联系管理员!");
    }
    $("#org").ligerComboBox({
        width : '100%',
        selectBoxWidth: '600',
        selectBoxHeight: 300,
        textField: 'orgName',
        valueFieldID:'orgId',
        treeLeafOnly: false,
        tree: {
            data : tree_data,
            dataParmName: 'list',
            ajaxType: 'get',
            idFieldName: 'id',
            parentIDFieldName: 'parentOrgId',
            textFieldName: 'orgName',
            isExpand: 2,
            checkbox: true,
            isLeaf: function(treenodedata){
                return treenodedata.children.length > 0 ? false : true;
            }
        }
    });
    $("#role").ligerComboBox({
        data:role_data,
        width : '100%',
        selectBoxWidth: '600',
        selectBoxHeight: 300,
        textField: 'name',
        valueFieldID:'roleId',
        isMultiSelect:true
    });
    */
    $("form").validate({
        // Must be defined before validation, otherwise the ajax submit is skipped.
        submitHandler : function(form) {
            ly.ajaxSubmit(form, { // check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data.result > 0) {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function (yes) {
                            if (yes == true) {
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data.depict);
                    }
                }
            });
        },
        // Custom error position: render into the shared .l_err element.
        errorPlacement : function(error, element) {
            $(".l_err").show();
            $(".l_err").html(error.html());
        },
        // Hide the shared error element once validation passes.
        success : function(label) {
            $(".l_err").hide();
        }
    });
    // Populate the dropdowns with nothing pre-selected.
    getOrgList("companyId",1,0);
    getRoleList(0);
    getUserList(0);
});
/**
 * Populate the <select> identified by divId with organizations of the
 * given orgType under parentId, preceded by a "please choose" placeholder.
 */
function getOrgList(divId, orgType, parentId){
    var response = CommnUtil.ajax(rootPath + '/organization/orglists.shtml',
        {orgType: orgType, parentId: parentId}, "json");
    if (response.result > 0) {
        var options = ["<option value='-1'>请选择</option>"];
        $.each(response.list, function (i, org) {
            options.push("<option value='" + org.id + "'>" + org.orgName + "</option>");
        });
        $("#" + divId).html(options.join(""));
    } else {
        layer.msg("获取菜单信息错误,请联系管理员!");
    }
}
/**
 * Fill the #roleId select with all enabled roles (state == 1),
 * pre-selecting the one whose id equals roleId.
 */
function getRoleList(roleId){
    var where = {'rules': [{field: 'state', op: 'equal', value: 1, type: 'int'}], 'op': 'and'};
    var roles = CommnUtil.ajax(rootPath + '/role/findByList.shtml',
        {where: JSON2.stringify(where)}, "json");
    if (roles.length > 0) {
        var html = "<option value='-1'>请选择</option>";
        $.each(roles, function (i, role) {
            var selectedAttr = parseInt(roleId, 10) == parseInt(role.id, 10) ? " selected='selected'" : "";
            html += "<option value='" + role.id + "'" + selectedAttr + ">" + role.name + "</option>";
        });
        $("#roleId").html(html);
    } else {
        layer.msg("获取菜单信息错误,请联系管理员!");
    }
}
/**
 * Fill the #userId select with all enabled users (status == 1),
 * pre-selecting the one whose id equals userId.
 */
function getUserList(userId){
    var where = {'rules': [{field: 'status', op: 'equal', value: 1, type: 'int'}], 'op': 'and'};
    var users = CommnUtil.ajax(rootPath + '/user/findByList.shtml',
        {where: JSON2.stringify(where)}, "json");
    if (users.length > 0) {
        var html = "<option value='-1'>请选择</option>";
        $.each(users, function (i, user) {
            var selectedAttr = parseInt(userId, 10) == parseInt(user.id, 10) ? " selected='selected'" : "";
            html += "<option value='" + user.id + "'" + selectedAttr + ">" + user.accountName + "</option>";
        });
        $("#userId").html(html);
    } else {
        layer.msg("获取菜单信息错误,请联系管理员!");
    }
}
/**
 * Fill the #orgId select with every organization plus a top-level
 * placeholder, pre-selecting the one whose id equals orgId.
 */
function getOrgSelect(orgId)
{
    var response = CommnUtil.ajax(rootPath + '/organization/orglists.shtml', null, "json");
    if (response.result > 0) {
        var html = "<option value='0'>------顶级目录------</option>";
        $.each(response.list, function (i, org) {
            var selectedAttr = parseInt(orgId, 10) == parseInt(org.id, 10) ? " selected='selected'" : "";
            html += "<option value='" + org.id + "'" + selectedAttr + ">" + org.orgName + "</option>";
        });
        $("#orgId").html(html);
    } else {
        layer.msg("获取菜单信息错误,请联系管理员!");
    }
}
<file_sep>/src/main/resources/sqlFiles/1.0.3-setButtonType.sql
-- 1.0.3 migration: backfill ly_button.buttonType for the seven seed buttons.
-- NOTE(review): the meaning of type codes 1/2/3 is not visible from this
-- script — confirm against the ly_button seed data before reusing the codes.
UPDATE ly_button set buttonType=1 WHERE id=1;
UPDATE ly_button set buttonType=2 WHERE id=2;
UPDATE ly_button set buttonType=3 WHERE id=3;
UPDATE ly_button set buttonType=3 WHERE id=4;
UPDATE ly_button set buttonType=3 WHERE id=5;
UPDATE ly_button set buttonType=2 WHERE id=6;
UPDATE ly_button set buttonType=2 WHERE id=7;<file_sep>/src/main/webapp/js/fund/cost_ticket_adjust/detail.js
// Operation-log grid instance (initialised in the ready handler below).
var log_grid;
// Column model for the operation-log grid.
var columns = [
    {display: '时间', name: 'create_date', width: 180, show: true},
    {display: '操作人', name: 'user_name', width: 180, show: true},
    {display: '操作内容', name: 'content', width: 100, show: true},
    {display: '操作结果', name: 'result', width: 100, show: true},
    {display: '审批意见', name: 'opinion', width: 180, show: true},
]
// Page bootstrap: load this bill's operation log, wire the auto-calc field
// and the cost-ticket picker. (Removed a duplicated, unused recomputation of
// rules/groupData/id and leftover console.log debug statements.)
$(function() {
    // Filter the log grid to this bill's FundCostTicketAdjust entries.
    var rules = [];
    var id = $("input[name=id]").val();
    rules.push({field: 'bill_id', op: 'equal', value: id, type:'int' });
    rules.push({field: 'clazz', op: 'equal', value: 'FundCostTicketAdjust', type:'string' });
    var groupData = {};
    groupData.rules = rules;
    groupData.op = "and";
    log_grid = $("#log_grid").ligerGrid({
        width: '740',
        height: '575',
        columns: columns,
        sortName: 'create_date',
        sortOrder: 'asc',
        checkbox: false,
        parms : {where: JSON2.stringify(groupData)},
        url : rootPath + '/fund_log/findByPage.shtml'
    });
    // Recompute the difference whenever #b loses focus.
    $("#b").blur(function(){
        aa();
    });
    $("#relate_cost_ticket_adjust").click(clickCostTicket);
    /**
     * Open the cost-ticket detail picker dialog.
     */
    function clickCostTicket(){
        $.ligerDialog.open({
            title: '选择费用票明细',
            url: rootPath + '/fund_cost_ticket_adjust/extract.shtml',
            width: 800,
            height: 760
        });
    }
    /**
     * Auto-calculate #c = #b - #a.
     * NOTE(review): relies on JS numeric coercion of the input strings;
     * non-numeric input yields NaN — confirm inputs are validated upstream.
     */
    function aa(){
        var a = $("#a").val();
        var b = $("#b").val();
        $("#c").val(b-a);
    }
    /**
     * Copy the selected cost-ticket fields into the main form.
     */
    function updateMainForm(data) {
        $("input[name=cost_ticket_type]").val(data.number);
        $("input[name=receipt_type]").val(data.receipt_type);
        $("input[name=jiashui_total_monye]").val(data.jiashui_total_monye);
        $("input[name=tax_amount_manager]").val(data.tax_amount_manager);
        $("input[name=receipt_number_manager]").val(data.receipt_number_manager);
        $("input[name=receipt_details_manager]").val(data.receipt_details_manager);
        $("input[name=created_time_manager]").val(data.created_time_manager);
        $("input[name=sell_name_manager]").val(data.sell_name_manager);
    }
});
<file_sep>/src/main/java/cn/zllog/controller/BaseController.java
package cn.zllog.controller;
import cn.zllog.beanEntity.*;
import cn.zllog.dao.ICommonDAO;
import cn.zllog.plugin.PageView;
import cn.zllog.services.*;
import cn.zllog.utils.*;
import org.apache.commons.lang.StringUtils;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.session.Session;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import java.beans.PropertyEditorSupport;
import java.sql.Time;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
/**
*
* @author crown
* Email:<EMAIL>
* date:2014-2-17
*/
public class BaseController {
// SLF4J logger scoped to this controller hierarchy.
private static final Logger logger = LoggerFactory.getLogger(BaseController.class);
// Prefix used to resolve short entity names to fully-qualified class names.
// NOTE(review): prefix is "com.crown.entity." while this file's package is
// cn.zllog — confirm the entity classes really live under com.crown.
private final String entityPrefix = "com.crown.entity.";
@Inject
private ICommonDAO commonDAO;
// @InitBinder
// public void initBind(WebDataBinder binder){
// binder.registerCustomEditor(Date.class, new CustomDateEditor());
// binder.registerCustomEditor(Time.class, new CustomTimeEditor());
// }
/* @ExceptionHandler
public String handle(Exception exception, HttpServletResponse response){
System.out.println("哈哈");
return null;
}*/
/**
 * Property editor that parses request strings into {@link Date}s.
 * Tries "yyyy-MM-dd HH:mm:ss" first, then falls back to "yyyy-MM-dd";
 * if both fail, the bound value is left null (parse errors are
 * deliberately swallowed as a best-effort conversion).
 */
private class CustomDateEditor extends PropertyEditorSupport {
    @Override
    public void setAsText(String text) throws IllegalArgumentException {
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date date = null;
        try {
            date = format.parse(text);
        } catch (ParseException e) {
            // fall back to a date-only pattern
            format = new SimpleDateFormat("yyyy-MM-dd");
            try {
                date = format.parse(text);
            } catch (ParseException e1) {
                // give up: leave date == null
            }
        }
        setValue(date);
    }
}
// private class CustomTimeEditor extends PropertyEditorSupport {
// @Override
// public void setAsText(String text) throws IllegalArgumentException {
// Time time = null;
// try {
// Date d = BaseUtil.toDate("2018-01-01 "+text,"yyyy-MM-dd HH:mm:ss");
// time = new Time(d.getTime());
// } catch (Exception e) {
//
// }
// setValue(time);
// }
// }
// public PageView pageView = null;
// public PageView getPageView(String pageNow,String pageSize,String orderby) {
// if (Common.isEmpty(pageNow)) {
// pageView = new PageView(1);
// } else {
// pageView = new PageView(Integer.parseInt(pageNow));
// }
// if (Common.isEmpty(pageSize)) {
// pageSize = "10";
// }
// pageView.setPageSize(Integer.parseInt(pageSize));
// pageView.setOrderby(orderby);
// return pageView;
// }
// public <T> T toFormMap(T t,String pageNow,String pageSize,String orderby){
// @SuppressWarnings("unchecked")
// FormMap<String, Object> formMap = (FormMap<String, Object>) t;
// formMap.put("paging", getPageView(pageNow, pageSize,orderby));
// return t;
// }
//
/**
// * 获取返回某一页面的按扭组
// * @return Class<T>
// * @throws Exception
// */
// public List<Map<String,Object>> findByRes(String id){
// // 通过工具类获取当前登录的bean
// LyUser lyUser = Common.findUserSession();
// // user id
// int userId = lyUser.getId();
// List<Map<String,Object>> rse = resourceService.findUserResourcess(userId,Integer.valueOf(id));
// for (Map<String,Object> resFormMap : rse) {
// Object o =resFormMap.get("description");
// if(o!=null&&!Common.isEmpty(o.toString())){
// resFormMap.put("description",Common.stringtohtml(o.toString()));
// }
// }
// return rse;
// }
/**
* 获取返回某一页面的按扭组
* @return Class<T>
* @throws Exception
*/
// public List<Map<String,Object>> findByRes(){
// // 资源ID
// String id = getPara("resId");
// return findByRes(id);
// }
/**
* 获取页面传递的某一个参数值
*/
public String getPara(String key){
HttpServletRequest request = ((ServletRequestAttributes)RequestContextHolder.getRequestAttributes()).getRequest();
return request.getParameter(key);
}
// /**
// * 获取页面传递的参数值
// */
// public SearchTranslator getSearchTranslator(Class clazz,HttpServletRequest request) throws Exception{
// //数据规则解析方法定义
// FilterTranslator whereTranslator =new FilterTranslator();
// //数据权限
// //根据实体注解获取表名
// String tableName= BaseUtil.getTableName(clazz);
// //查询条件
// SearchTranslator searchTranslator=new SearchTranslator();
// //获取前台提交的参数字符串
// HashMap<String, String> parameterMap = RequestJsonUtils.getRequestParameterMap(request);
// if(!parameterMap.isEmpty()) {
// if(parameterMap.get("pageNow") != null) {
// searchTranslator.setPageNow(Integer.valueOf(parameterMap.get("pageNow")));
// }
// if(parameterMap.get("pageSize") != null) {
// searchTranslator.setPageSize(Integer.valueOf(parameterMap.get("pageSize")));
// }
// if(parameterMap.get("column") != null) {
// searchTranslator.setColumn(parameterMap.get("column"));
// }
// if(parameterMap.get("sort") != null) {
// searchTranslator.setSort(parameterMap.get("sort"));
// }
// if(parameterMap.get("where") != null) {
// searchTranslator.setStrWhere(parameterMap.get("where"));
// }
//
// String strWhere = searchTranslator.getStrWhere();
// LyUser lyUser=Common.findUserSession();
//
// if (StringUtils.isNotEmpty(strWhere)) {
// FilterGroup rule = JacksonUtils.strToBean(strWhere, FilterGroup.class);
// if(lyUser.getIsMaster()) {
// whereTranslator.setGroup(rule);
// }else {
// whereTranslator.setGroup(dataPrivilegeService.MergeRuleGroup(tableName, rule));
// }
// } else {
// if(!lyUser.getIsMaster()) {
// whereTranslator.setGroup(dataPrivilegeService.GetRuleGroup(tableName));
// }
// }
// whereTranslator.Translate(true,tableName,false);
// }
// else
// {
// whereTranslator.setCommandText("");
// }
// searchTranslator.setCondition(whereTranslator.getCommandText());
// return searchTranslator;
// }
/**
* 获取页面传递的某一个数组值
* @return Class<T>
* @throws Exception
*/
public String[] getParaValues(String key){
HttpServletRequest request = ((ServletRequestAttributes)RequestContextHolder.getRequestAttributes()).getRequest();
return request.getParameterValues(key);
}
/*
* @ModelAttribute
* 这个注解作用.每执行controllor前都会先执行这个方法
* @author crown
* Email:<EMAIL>
* date:2015-4-05
* @param request
* @throws Exception
* @throws
*/
/*@ModelAttribute
public void init(HttpServletRequest request){
String path = Common.BACKGROUND_PATH;
Object ep = request.getSession().getAttribute("basePath");
if(ep!=null){
if(!path.endsWith(ep.toString())){
Common.BACKGROUND_PATH = "/WEB-INF/jsp/background"+ep;
}
}
}*/
/**
* 获取传递的所有参数,
* 反射实例化对象,再设置属性值
* 通过泛型回传对象.
* @return Class<T>
* @throws Exception
*/
// public <T> T getFormMap(Class<T> clazz){
// HttpServletRequest request = ((ServletRequestAttributes)RequestContextHolder.getRequestAttributes()).getRequest();
// Enumeration<String> en = request.getParameterNames();
// T t = null;
// try {
// t = clazz.newInstance();
// @SuppressWarnings("unchecked")
// FormMap<String, Object> map = (FormMap<String, Object>) t;
// String order = "",sort="";
// while (en.hasMoreElements()) {
// String nms = en.nextElement().toString();
// if(nms.endsWith("[]")){
// String[] as = request.getParameterValues(nms);
// if(as!=null&&as.length!=0&&as.toString()!="[]"){
// String mname = t.getClass().getSimpleName().toUpperCase();
// if(nms.toUpperCase().startsWith(mname)){
// nms=nms.substring(nms.toUpperCase().indexOf(mname)+1);
// map.put( nms,as);
// }
// }
// }else{
// String as = request.getParameter(nms);
// if(!Common.isEmpty(as)){
// String mname = t.getClass().getSimpleName().toUpperCase();
// if(nms.toUpperCase().startsWith(mname)){
// nms=nms.substring(mname.length()+1);
// map.put( nms, as);
// }
// if(nms.toLowerCase().equals("column"))order = as;
// if(nms.toLowerCase().equals("sort"))sort = as;
// }
// }
// }
// if(!Common.isEmpty(order) && !Common.isEmpty(sort))
// map.put("orderby", " order by " + order + " " + sort);
// } catch (InstantiationException e) {
// logger.error("获取传递的所有参数错误",e);
// } catch (IllegalAccessException e) {
// logger.error("获取传递的所有参数错误",e);
// }
// return t;
// }
/**
* 获取传递的所有参数,
* 再设置属性值
* 通过回传Map对象.
* @return Class<T>
* @throws Exception
*/
// public <T> T findHasHMap(Class<T> clazz){
// HttpServletRequest request = ((ServletRequestAttributes)RequestContextHolder.getRequestAttributes()).getRequest();
// Enumeration<String> en = request.getParameterNames();
// T t = null;
// try {
// t = clazz.newInstance();
// @SuppressWarnings("unchecked")
// FormMap<String, Object> map = (FormMap<String, Object>) t;
// while (en.hasMoreElements()) {
// String nms = en.nextElement().toString();
// if(!"_t".equals(nms)){
// if(nms.endsWith("[]")){
// String[] as = request.getParameterValues(nms);
// if(as!=null&&as.length!=0&&as.toString()!="[]"){
// map.put( nms,as);
// }
// }else{
// String as = request.getParameter(nms);
// if(!Common.isEmpty(as)){
// map.put( nms, as);
// }
// }
// }
// }
// } catch (Exception e) {
// logger.error("获取传递的所有参数错误",e);
// }
// return t;
// }
//
// protected FundCustomColumn getCustomColumns(String clazz){
// FundCustomColumn customColumn = fundCustomColumnService.findBy(clazz, UserUtil.getUserInfo().getId());
// return customColumn;
// }
//
// protected List<FundSearchSight> getSearchSight(String clazz){
// List<FundSearchSight> fundSearchSights = fundSearchSightService.findBy(clazz, UserUtil.getUserInfo().getId());
// return fundSearchSights;
// }
//
// protected LyUser findUser(){
// LyUser lyUser = Common.findUserSession();
// return lyUser;
// }
protected Object findUserLoginCompany(){
Session session = SecurityUtils.getSubject().getSession();
return session.getAttribute("loginCompany");
}
// protected Object findUserDepts(){
// Session session = SecurityUtils.getSubject().getSession();
// LyUser lyUser = (LyUser) session.getAttribute("userSession");
// LyOrganization organization = (LyOrganization) session.getAttribute("loginCompany");
// return userService.getCompanyDeptMap(lyUser.getId()).get(organization.getId());
// }
//
// protected Object findLoginCompanyDepts(){
// Session session = SecurityUtils.getSubject().getSession();
// LyOrganization organization = (LyOrganization) session.getAttribute("loginCompany");
// return organizationService.getCompanySalesDepartments(organization.getId());
// }
//
// protected Table getTable(String entityName){
// try {
// return (Table) tableComponent.getTableByEntityNameMaps().get(entityName);
// } catch (Exception e) {
// return null;
// }
// /*try {
// Class _clazz = Class.forName(entityPrefix+entityName);
// return TableUtil.getTable(commonDAO.getEm().getEntityManagerFactory(),_clazz);
// } catch (ClassNotFoundException e) {
// return null;
// }*/
// }
}<file_sep>/src/main/resources/sqlFiles/1.0.5-add-user-menu.sql
-- User management menu migration --
-- Level-1 menu: capture the id of the existing 'system' directory entry --
SELECT @system_id := id
FROM ly_resources
WHERE resKey = 'system' AND type = 0;
-- Level-2 menu: add the person-management menu under 'system' and capture its id --
SET @ly_person_id = 0;
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES ( 0, '人员管理', 'fa-desktop', 0, 30, '人员管理', @system_id, 'person', '/person/list.shtml', '1');
SELECT @ly_person_id := id
FROM ly_resources
WHERE resKey = 'person' AND type = 1;
-- Re-order the level-2 menus (column `level` drives sort order) and rename two of them --
update ly_resources set level = 10 where resKey = 'ly_resources' and type = 1;
update ly_resources set level = 20,description='组织查询',name='组织查询' where resKey = 'organization' and type = 1;
update ly_resources set level = 30 where resKey = 'person' and type = 1;
update ly_resources set level = 40 where resKey = 'user' and type = 1;
update ly_resources set level = 50,description='权限配置',name='权限配置' where resKey = 'orguserrole' and type = 1;
update ly_resources set level = 60 where resKey = 'role' and type = 1;
update ly_resources set level = 70 where resKey = 'button' and type = 1;
update ly_resources set level = 80 where resKey = 'dataprivilege' and type = 1;
-- Button group for the person list page (type = '2' rows are buttons; --
-- `description` stores the rendered HTML fragment) --
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="add" class="btn btn-primary marR10">添加人员</button>', null, 0, 10, '添加人员', @ly_person_id, 'add', '/person/add.shtml', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="edit" class="btn btn-primary marR10">修改</button>', null, 0, 10, '修改', @ly_person_id, 'edit', '/person/edit.shtml', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="delete" class="btn btn-primary marR10">删除</button>', null, 0, 10, '删除', @ly_person_id, 'delete', '', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="enable" class="btn btn-primary marR10">启用</button>', null, 0, 10, '启用', @ly_person_id, 'enable', '', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="disable" class="btn btn-primary marR10">禁用</button>', null, 0, 10, '禁用', @ly_person_id, 'disable', '', '2');
-- Remove the buttons of the menu-management page --
select @menu_id := id from ly_resources where resKey = 'ly_resources';
delete from ly_resources where parentId = @menu_id;
-- Add enable/disable buttons to the account-management page --
select @user_id := id from ly_resources where resKey = 'user';
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="enable" class="btn btn-primary marR10">启用</button>', null, 0, 70, '启用', @user_id, 'enable', '', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="disable" class="btn btn-primary marR10">禁用</button>', null, 0, 80, '禁用', @user_id, 'disable', '', '2');
<file_sep>/src/main/webapp/js/fund/quota/edit.js
// Quota add/edit dialog: wires up the combo box, cancel button and
// validated ajax form submit. Runs inside a ligerDialog iframe.
var thisDialog = frameElement.dialog; // this dialog window
var targetGrid = thisDialog.get('data').grid; // the list grid in the parent page
$(function() {
	// Quota-type combo box; mirrors its value into the hidden #quotaType field.
	$("#quota").ligerComboBox({
		width : '100%',
		selectBoxHeight: 300,
		data:[
			{'id':1,'text':'汇总'},
			{'id':2,'text':'填报'}
		],
		value:$("#quotaType").val(),
		valueField: 'id',
		textField: 'text',
		valueFieldID:'quotaType',
		textFieldID : 'quota',
		resize:false,
	});
	// The reset button doubles as "cancel": close the dialog.
	$("button[type='reset']").click(function(event){
		thisDialog.close();
	});
	$("form").validate({
		errorClass:'error-msg',
		// submitHandler must be declared before the rules, otherwise the ajax submit is skipped
		submitHandler : function(form) {
			ly.ajaxSubmit(form, { // check whether the save succeeded
				type : "post",
				dataType : "json",
				success : function(data) {
					if (data == "success") {
						// refresh the parent list
						targetGrid.reloadAll();
						// ask whether to close the dialog
						$.ligerDialog.confirm('保存成功!是否关闭窗口?', function(yes) {
							if(yes == true){
								thisDialog.close();
							}
							// reload the dialog content for further edits
							self.location.reload();
						});
					} else {
						$.ligerDialog.error('操作失败! ' + data);
					}
				}
			});
		},
		rules : {
			"name" : {
				required : true,
				remote : { // async uniqueness check against the server
					type : "POST",
					url : rootPath + '/fund_quota/nameNotExist.shtml',
					data : {
						name : function() {
							return $("#name").val();
						},
						id : function() {
							return $("#id").val();
						},
					}
				},
				validateNullOrWhiteSpace: true,
				maxlength: 20,
			},
			"quota" : {
				required : true,
				validateNullOrWhiteSpace: true,
			},
		},
		messages : {
			"name" : {
				required : "请输入指标名称",
				// FIX: message previously read "指标名称名称已经存在" (duplicated word)
				remote : "指标名称已经存在",
				validateNullOrWhiteSpace: "名称不能为空白字符",
				maxlength: "指标名称过长",
			},
			"quota" : {
				required : "请选择指标属性",
				validateNullOrWhiteSpace: "请选择指标属性",
			},
		},
		// Render the error label in its own grid row under the field.
		errorPlacement : function(error, element) {
			var error_container = $("<div><label class='col-xs-4'></label></div>");
			error.addClass('col-xs-8');
			error_container.append(error);
			element.closest('.form-group').append(error_container);
		},
		// Remove the error row once the field validates.
		success : function(label) {
			label.parent().remove();
		},
	});
});
<file_sep>/src/main/webapp/ligerui/jquery-validation/jquery.validate.expand.js
/**
* 添加判断是否为空白字符的校验方法
*/
jQuery.validator.addMethod("validateNullOrWhiteSpace", function (input, element) {
if (typeof input === 'undefined' || input == null)
return false;
return !(input.replace(/\s/g, '').length < 1);
}, "不能为空白字符");
jQuery.validator.addMethod("idCard", function(value, element) {
//身份证号码为15位或者18位,15位时全为数字,18位前17位为数字,最后一位是校验位,可能为数字或字符X
var reg = /(^[1-9]\d{5}(18|19|([23]\d))\d{2}((0[1-9])|(10|11|12))(([0-2][1-9])|10|20|30|31)\d{3}[0-9Xx])|([1−9]\d5\d2((0[1−9])|(10|11|12))(([0−2][1−9])|10|20|30|31)\d2[0−9Xx])|([1−9]\d5\d2((0[1−9])|(10|11|12))(([0−2][1−9])|10|20|30|31)\d2[0−9Xx])/;
return reg.test(value);
}, "身份证信息有误");
jQuery.validator.addMethod("english", function(value, element) {
var reg = new RegExp("^[a-zA-Z]+$");
return !value ? true : reg.test(value);
}, "不是全英文字符");<file_sep>/src/main/resources/sqlFiles/1.0.3-alter-ly-log-methods.sql
-- Widen ly_log columns: method signatures and descriptions can exceed the
-- previous column limits, so store them as LONGTEXT; module stays bounded.
ALTER TABLE ly_log MODIFY COLUMN methods LONGTEXT;
ALTER TABLE ly_log MODIFY COLUMN description LONGTEXT;
ALTER TABLE ly_log MODIFY COLUMN module VARCHAR(255);
<file_sep>/src/main/webapp/js/system/dict/form.js
/*
 * Handles submission of the dictionary "add"/"edit" form and validates
 * the fields before submitting.
 *
 * @author XWL 2018-4-8
 * */
// Handle to this dialog window and the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid;
$(function () {
    var dictId = $("#id").val();
    // Empty id means we are creating a new entry; otherwise updating.
    var confirmMsg = "添加";
    if (dictId != "") {
        confirmMsg = "更新";
    }
    $("form").validate({
        // submitHandler must be declared before the rules, otherwise the ajax submit is skipped
        submitHandler: function (form) {
            ly.ajaxSubmit(form, { // check whether the save succeeded
                type: "post",
                dataType: "json",
                success: function (data) {
                    if (data == "success") {
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm(confirmMsg + '成功!', function (yes) {
                            if (yes == true) {
                                thisDialog.close();
                            }
                        });
                        $("#dictForm")[0].reset();
                    } else {
                        layer.alert(confirmMsg + '失败!', 3);
                    }
                }
            });
        },
        rules : {
            "name": {
                required: true
            },
            "code": {
                required: true
            },
            "parentId": {
                required: true
            },
            "orderId": {
                required: true,
                digits: true
            }
        },
        messages : {
            "name": {
                required: "请输入名称"
            },
            "code": {
                required: "请输入代码"
            },
            "parentId": {
                required: "请选择分类"
            },
            "orderId": {
                required: "请输入排序号",
                digits: "排序号必须为整数"
            }
        },
        errorPlacement : function(error, element) { // custom error placement: single shared .l_err box
            $(".l_err").css('display', 'block');
            $(".l_err").html(error.html());
        },
        success : function(label) { // field passed validation: hide the error box
            $(".l_err").css('display', 'none');
        }
    });
    // NOTE(review): .click("click", fn) passes "click" as jQuery eventData,
    // not an event name; it still binds the handler but the argument is redundant.
    $("#btnCancel").click("click", function () {
        thisDialog.close();
    });
});<file_sep>/src/main/webapp/js/fund/crash_pay/detail.js
// Crash-pay detail page: renders the operation-log grid and the
// (read-only) application grid from the serialized JSON in #crash_applies.
var log_grid;
var listGrid;
// FIX: listGrid_data was previously an undeclared implicit global only
// assigned when #crash_applies had a value; reading it while empty threw a
// ReferenceError. Declare it with an empty default.
var listGrid_data = [];
var log_columns = [
	{display: '时间', name: 'create_date', width: 180, show: true},
	{display: '操作人', name: 'user_name', width: 180, show: true},
	{display: '操作内容', name: 'content', width: 180, show: true},
	{display: '操作结果', name: 'result', width: 180, show: true},
	{display: '审批意见', name: 'opinion', width: 180, show: true},
];
var columns = [
	{display: '单据编号', name: 'number', width: 300, show: true,},
	{display: '单据日期', name: 'create_date', width: 300, show: true},
	{display: '单据状态', name: 'state', width: 150, show: true,
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:0,text:'草稿'},{ key:1,text:'待审核'},{ key:2,text:'审核通过'},{ key:3,text:'审核不通过'}],
		render:function(rowdata,index,data){
			return {0 : '草稿', 1 : '待审核',2:'审核通过',3:'审核不通过'}[data]
		}
	},
	{display: '付款状态', name: 'payment_state', width: 150, show: true,
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:0,text:'待付款'},{ key:1,text:'付款中'},{ key:2,text:'已付款'}],
		render:function(rowdata,index,data){
			return {0 : '待付款', 1 : '付款中', 2: '已付款'}[data]
		}
	},
	{display: '申请组织', name: 'apply_org', width: 300, show: true},
	{display: '部门负责人', name: 'dept_charge_user', width: 300, show: true},
	{display: '提现申请金额', name: 'apply_money', width: 300, show: true},
	{display: '审批金额', name: 'approval_money', width: 300, show: true},
	{display: '实付金额', name: 'fact_money', width: 300, show: true},
	{display: '收款户名', name: 'receipt_user', width: 300, show: true},
	{display: '收款银行类别', name: 'receipt_bank_type', width: 300, show: true},
	{display: '收款行名', name: 'receipt_bank_name', width: 300, show: true},
	{display: '收款帐号', name: 'receipt_account', width: 300, show: true},
	{display: '收款行号', name: 'receipt_bank_no', width: 300, show: true},
	{display: '制单人', name: 'create_user', width: 300, show: true},
	{display: '业务类型', name: 'tran_type', width: 300, show: true,
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:1,text:'提现申请单'}],
		render:function(rowdata,index,data){
			return {1 : '提现申请单'}[data]
		}
	},
	{display: '所属公司', name: 'company', width: 300, show: true},
	{display: '备注', name: 'remark', width: 300, show: true},
];
$(function() {
	// Filter the log grid to this bill's FundCrashPay entries.
	var rules = [];
	var groupData = {};
	var id = $("input[name=id]").val();
	rules.push({field: 'bill_id', op: 'equal', value: id, type:'int' });
	rules.push({field: 'clazz', op: 'equal', value: 'FundCrashPay', type:'string' });
	groupData.rules = rules;
	groupData.op = "and";
	log_grid = $("#log_grid").ligerGrid({
		columns: log_columns,
		sortName: 'create_date',
		sortOrder: 'asc',
		checkbox: false,
		parms : {where: JSON2.stringify(groupData)},
		url : rootPath + '/fund_log/findByPage.shtml'
	});
	// The application rows are embedded in the page as JSON.
	if($("#crash_applies").val()){
		listGrid_data = JSON2.parse($("#crash_applies").val());
	}
	listGrid = $("#crash_apply").ligerGrid({
		columns: columns,
		sortName: 'id',
		sortOrder: 'desc',
		checkbox: false,
		data: {
			"records":listGrid_data
		}
	});
});
<file_sep>/src/main/webapp/js/fund/balance_income_expenses/list.js
// Page-level state for the balance income/expenses list.
var listGrid, // grid instance (rebuilt by setBalanceIncomeExpensesType)
	bank_type = 0, // currently selected side-menu type — 0/1 (presumably income vs expense; confirm against the page markup)
	url_prefix = "/fund_balance_income_expenses/"; // default url prefix
var mainTab = frameElement.tab; // tab object of the outer frame
var tab_id = frameElement.id;
var mainData = frameElement.openerData; // data passed in by the opener frame
$(function () {
    $("#layout_bank").ligerLayout({ leftWidth: 200 ,allowLeftCollapse:false,allowRightCollapse:false,topHeight:74});
    // Hover highlight for the left-hand menu links.
    $(".l-link").hover(function () {
        $(this).addClass("l-link-over");
    }, function () {
        $(this).removeClass("l-link-over");
    });
    // Clicking a menu link switches the list type (no-op if already active).
    $(".l-link").bind('click',function(){
        var isActive = $(this).hasClass("l-link-active");
        $(".l-link").removeClass("l-link-active");
        $(this).addClass("l-link-active");
        if(!isActive){
            if($(this).attr("type") == "0"){
                setBalanceIncomeExpensesType(0)
            }
            else{
                setBalanceIncomeExpensesType(1)
            }
        }
    });
    // Show the default (type 0) list on load.
    setBalanceIncomeExpensesType(0);
    $("#search").bind("keydown", function (event) { // search on Enter key
        //event.preventDefault();
        if(event.keyCode == "13") {
            search($(this).val());
        }
    });
    // Toolbar buttons.
    $("#add").click("click", function () {
        add();
    });
    $("#edit").click("click", function () {
        edit();
    });
    $("#delete").click("click", function () {
        del();
    });
    $("#enable").click("click", function () {
        enable();
    });
    $("#disable").click("click", function () {
        disable();
    });
});
/**
 * Apply a fuzzy filter on the creator name and reload the grid.
 * An empty keyword clears the filter.
 * @param value search keyword typed by the user
 */
function search(value) {
    var custom_rules = { rules: [] };
    if (value) {
        custom_rules.rules.push({
            field: 'createUser',
            op: 'like',
            value: value,
            type: 'string'
        });
    }
    listGrid.custom_rules = custom_rules;
    listGrid.searchData();
}
/**
 * Open the "add" dialog for a new balance income/expenses item.
 * The current menu type and the grid handle are passed to the dialog so it
 * can preselect the type and refresh the list after saving.
 */
function add() {
    $.ligerDialog.open({
        title: '新增余额类型收支项',
        url: rootPath + '/fund_balance_income_expenses/add.shtml',
        width: 800,
        height: 800,
        urlParms:{
            'incomeExpensesType' : bank_type
        },
        data: {
            grid: listGrid, // grid handle so the dialog can reload the list
            mainData: mainData
        }
    });
}
/**
 * Open the edit dialog for exactly one selected row.
 * Only rows in draft (status 0) or disabled (status 2) state are editable.
 */
function edit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        if (item.status != 0 && item.status != 2){
            exist_error = true;
            // NOTE(review): a bare `return` only skips to the next $.each
            // iteration; `return false` would break the loop. The result is
            // the same here, just without short-circuiting.
            return;
        }
    });
    if(!exist_error){
        $.ligerDialog.open({
            title: '编辑余额类型收支项',
            url: rootPath + '/fund_balance_income_expenses/edit.shtml?id=' + selected[0].id,
            urlParms:{
                'bankType' : bank_type
            },
            width: 800,
            height: 800,
            data: {
                grid: listGrid // grid handle so the dialog can reload the list
            }
        });
    }
    else{
        layer.msg("只能修改草稿或者禁用的单据");
    }
}
/**
 * Delete the selected rows after confirmation.
 * Only rows in status 2 may be deleted (the message calls them "draft";
 * NOTE(review): elsewhere in this file status 2 renders as "草稿" — confirm
 * the status enum).
 */
function del() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        if (item.status != 2){
            exist_error = true;
            return; // skips to next iteration only; see note in edit()
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否删除?', function (yes) {
            if (yes == true) {
                // NOTE(review): the initial url_prefix literal ends with '/',
                // which would yield a double slash here; in practice
                // setBalanceIncomeExpensesType() resets it without the
                // trailing slash before any delete can happen.
                var url = rootPath + url_prefix +'/delete.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('删除成功');
                } else {
                    layer.msg('删除失败');
                }
            }
        });
    }
    else{
        layer.msg("只能删除草稿状态的单据");
    }
}
/**
 * Enable the selected rows after confirmation.
 * Rows that are already enabled (status 1) are rejected.
 */
function enable() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待启用的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        if (item.status == 1){
            exist_error = true;
            return; // skips to next iteration only; see note in edit()
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        ids = ids.join(',');
        $.ligerDialog.confirm('确定是否启用?', function (yes) {
            if (yes == true) {
                var url = rootPath + url_prefix +'/enable.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('启用成功');
                } else {
                    layer.msg('启用失败');
                }
            }
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能启用");
    }
}
/**
 * Disable the selected rows after confirmation.
 * Only rows that are currently enabled (status 1) may be disabled.
 */
function disable() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待禁用的项");
        return;
    }
    // Reject the whole batch if any row is not in the enabled state.
    var hasInvalidRow = rows.some(function (row) {
        return row.status != 1;
    });
    if (hasInvalidRow) {
        layer.msg("存在状态不合法的单据,不能禁用");
        return;
    }
    var ids = rows.map(function (row) {
        return row.id;
    }).join(',');
    $.ligerDialog.confirm('确定是否禁用?', function (yes) {
        if (yes == true) {
            var url = rootPath + url_prefix + '/disable.shtml?ids=' + ids;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('禁用成功');
            } else {
                layer.msg('禁用失败');
            }
        }
    });
}
/**
 * Tear down the current grid and restore the page-level defaults before a
 * rebuild (called by setBalanceIncomeExpensesType).
 */
function clear(){
    if(listGrid){
        listGrid.clear();
        // FIX: the original `delete listGrid;` was a no-op (var bindings are
        // non-configurable and cannot be deleted) and is a SyntaxError in
        // strict mode; drop the reference by assignment instead.
        listGrid = null;
    }
    // Re-create the grid container that ligerGrid renders into.
    $("#center").append('<div id="gridArea" style="margin: 0; padding: 0"></div>');
    listGrid = {};                                 // grid placeholder until rebuilt
    bank_type = 0;                                 // default menu type
    url_prefix = "/fund_balance_income_expenses";  // default url prefix
}
/**
 * Refresh the page content.
 * @param type 0 rebuilds only the grid in place; any other value asks the
 *             parent frame to reload the whole tab.
 */
function reload(type){
    if (type != 0) {
        // Full refresh: reload this tab via the parent frame.
        mainTab.reload(tab_id);
        return;
    }
    // Partial refresh: rebuild the grid for the current menu selection.
    setBalanceIncomeExpensesType(bank_type);
}
/**
 * Switch the income/expenses category: tears down the old grid (clear()),
 * updates the page-level state, and rebuilds the filter grid for the given
 * type.
 * @param type category selector passed through to the server as
 *             incomeExpensesType (0/1; see the menu links in the ready handler)
 */
function setBalanceIncomeExpensesType(type){
    clear();
    // Update page-level state.
    bank_type = type;
    url_prefix = "/fund_balance_income_expenses";
    // Rebuild the column model and the grid.
    var columns = [
        {display: '编号', name: 'id', width: 300, show: true},
        {display: '所属公司', name: 'lyOrganization', width: 300, show: true,
            render:function(rowdata,index,data){
                return data.orgName;
            }
        },
        {display: '业务类型', name: 'businessObjectType', width: 300, show: true,
            render:function(rowdata,index,data){
                return {1 : '提现申请单', 2: '提现付款单', 3: '收款单', 4: '借款单线上', 5: '还款单', 6: '还款单', 7: '三方冷链扣费单', 8: '水电空调扣费单',
                    9: '送货服务扣费单', 10: '房租费扣费单', 11: '其他扣费单', 12: '电汇手续费', 13: '进项暂扣', 14: '销项转移', 15: '费用票扣费单', 16: '转款单'}[data];
            }
        },
        {display: '收入/支出资金类型', name: 'capitalTypes', width: 300, show: true,
            // capitalTypes is a ';'-separated list of 0/1 flags.
            render:function(rowdata,index,data){
                var data2 = _.split(data,";");
                var result = [];
                _.forIn(data2, function (val) {
                    if(val==0)
                        result.push('收入');
                    else if(val==1)
                        result.push('支出');
                });
                return _.join(result,";");
            }
        },
        {display: '状态', name: 'status', width: 300, show: true,
            render:function(rowdata,index,data){
                return {0 : '禁用', 1: '启用',2:'草稿'}[data];
            }
        },
        {display: '创建人', name: 'createUser', width: 300, show: true},
        {display: '创建日期', name: 'createTime', width: 300, show: true},
        {display: '修改人', name: 'modifiyUser', width: 300, show: true},
        {display: '修改日期', name: 'modifiyTime', width: 300, show: true,},
    ];
    listGrid = $("#gridArea").ligerFilterGrid({
        columns: columns,
        url: rootPath + url_prefix+'/findByPage.shtml?incomeExpensesType=' + type,
        sortName: 'id',
        sortOrder: 'desc',
        originalColumns: columns , // keep the original column model for the column chooser
        // summaryUrl: rootPath + '/common/summary.shtml', //统计url
        // summaryTable: 'fund_company_bank', //统计表名(表名不是java entity)
        // summaryShowRecordCount : true, //是否显示统计条数
        // summary: [ //统计字段
        // /* {text:'合计提现申请金额',field:'apply_money'},
        // {text:'合计审批金额',field:'approval_money'},
        // {text:'合计实付金额',field:'fact_money'}*/
        // ],
        customColumn : { // per-user column configuration
            listUrl : rootPath + '/fund_custom_column/list.shtml', // query url for saved columns
            submitUrl: rootPath + '/fund_custom_column/submit.shtml', // save url
            clazz: 'FundBalanceIncomeAndExpenses', // unique storage key
            id: 'formManage', // DOM id of the column-config trigger
            /* dialogTitle: '表单配置',
            leftId: 'listBox-left', //表单配置左列表id
            leftTitle: '隐藏的列', //表单配置左列表title
            rightId: 'listBox-right', //表单配置右列表id
            rightTitle: '显示的列', //表单配置右列表title*/
            callback : function(){ // rebuild the grid after the column config is saved
                setBalanceIncomeExpensesType(bank_type);
            }
        },
        seniorSearch: { // advanced search (independent of ligerGrid's built-in one)
            id: 'seniorSearchButton', // DOM id of the advanced-search trigger
            searchSight: { // saved search "scenes"
                id: 'search_sight', // DOM id of the scene selector
                listUrl: rootPath + '/fund_search_sight/listJson.shtml',
                submitUrl: rootPath + '/fund_search_sight/addEntity.shtml', // save url
                clazz: 'FundBalanceIncomeAndExpenses', // unique storage key
                callback: function(form){ // rebuild after a scene is saved
                    form.reset();
                    setBalanceIncomeExpensesType(bank_type);
                }
            }
        }
    });
    $("#pageloading").hide();
}
<file_sep>/src/main/webapp/js/system/resources/list.js
// Resource (menu/button) management list page.
var pageii = null; // handle of the last layer popup (legacy add/edit flow)
var grid = null;   // ligerGrid instance, built on ready
$(function() {
	grid = $("#gridArea").ligerGrid({
		columns: [
			{display: '菜单名称', name: 'name', align: 'left', id: 'id1',width: 200,} ,
			{ display: '菜单类型', name: 'type',width: 100,
				// Resource type enum: 0 directory, 1 menu, 2 button, 3 method.
				render : function(rowdata,index,data) {
					if(data=="0"){
						return "目录";
					}else if(data=="1"){
						return "菜单";
					}else if(data=="2"){
						return "按钮";
					}else if(data=="3"){
						return "方法";
					}
				}
			},
			{ display: '唯一KEY', name: 'resKey',width: 200, },
			{ display: 'URL地址', name: 'resUrl',width: 200, },
			{ display: '是否隐藏', name: 'ishide',width: 100,
				render : function(rowindex, index,data) {
					if(data=="0"){
						return "否";
					}else if(data=="1"){
						return "是";
					}
				}
			},
			{ display: '描述', name: 'description', minWidth: 200}
		],
		url: rootPath + '/resources/treelists.shtml',
		usePager : false,
		tree: {
			columnId: 'id1' // render the name column as the tree column
		}
	});
	$("#pageloading").hide();
	// NOTE(review): search() and reset() are not defined in this file —
	// presumably provided by a shared script; verify before relying on them.
	$("#search").click("click", function() {
		search();
	});
	$("#reset").click("click", function() {
		reset();
	});
	$("#addFun").click("click", function() {
		addAccount();
	});
	$("#editFun").click("click", function() {
		editAccount();
	});
	$("#delFun").click("click", function() {
		delAccount();
	});
	$("#permissions").click("click", function() {
		permissions();
	});
});
/**
 * Open the "add resource" dialog; the grid handle is passed in so the
 * dialog can refresh the list after saving.
 */
function addAccount() {
    var dialogOptions = {
        title: '新增',
        url: rootPath + '/resources/addUI.shtml',
        width: 800,
        height: 560,
        data: { grid: grid }
    };
    $.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for exactly one selected resource row.
 */
function editAccount() {
    var selected = grid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        // FIX: title previously read '新增' (copy-pasted from addAccount);
        // this dialog loads editUI.shtml, so it is the edit dialog.
        title: '编辑',
        url: rootPath + '/resources/editUI.shtml?id=' + selected[0].id,
        width: 800,
        height: 560,
        data: {
            grid: grid // grid handle so the dialog can refresh the list
        }
    });
}
/**
 * Delete the selected resource rows after confirmation, then reload the grid.
 */
function delAccount() {
    var selected = grid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待删除的项");
        return;
    }
    var ids = $.map(selected,function(item){return item.id;});
    $.ligerDialog.confirm('确定是否删除?', function(yes) {
        if(yes == true) {
            var url = rootPath + '/resources/deleteEntity.shtml?ids=' + ids.join(',');
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                grid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
/**
 * Legacy edit handler using a layer popup (the ready handler binds
 * editAccount instead; this appears unused from this file).
 * NOTE(review): the raw value of getSelectedCheckbox() is appended as the
 * id query parameter — confirm it yields a single id string here.
 */
function editFun() {
    var cbox = grid.getSelectedCheckbox();
    if (cbox.length > 1 || cbox == "") {
        layer.alert("只能选中一个");
        return;
    }
    pageii = layer.open({
        title : "编辑",
        type : 2,
        area : [ "600px", "80%" ],
        content : rootPath + '/resources/editUI.shtml?id=' + cbox
    });
}
/**
 * Legacy add handler: opens the add page in a layer iframe popup and keeps
 * the popup handle in the page-level `pageii`.
 */
function addFun() {
    var popupOptions = {
        type : 2,
        title : "新增",
        area : [ "600px", "80%" ],
        content : rootPath + '/resources/addUI.shtml'
    };
    pageii = layer.open(popupOptions);
}
/**
 * Legacy delete handler driven by the checkbox selection (the ready handler
 * binds delAccount instead; this appears unused from this file).
 */
function delFun() {
    var cbox = grid.getSelectedCheckbox();
    if (cbox == "") {
        layer.alert("请选择删除项!!");
        return;
    }
    layer.confirm('是否删除?', function(index) {
        var url = rootPath + '/resources/deleteEntity.shtml';
        var s = CommnUtil.ajax(url, {
            ids : cbox.join(",")
        }, "json");
        if (s == "success") {
            layer.msg('删除成功');
            grid.loadData();
        } else {
            layer.msg('删除失败');
        }
    });
}
<file_sep>/src/main/webapp/ligerui/components/ligerFilterGrid.js
/**
*
* Author liubb 2018 [ <EMAIL> ]
*扩展了onFilter事件,用于filter框点击确定时回调,参数为column,包含了column上挂载的filter窗体和filter规则
*如果用户在filter框点击确定后不实现onFilter回调,而是由统一搜索按钮触发,则column的filter规则的内容可以从grid._columns中获取
*本插件依赖CommnUtil 需要先引入
*依赖layer 需要先引入
* 依赖JSON2 需要先引入
*/
(function ($) {
$.fn.ligerFilterGrid = function (options) {
return $.ligerui.run.call(this, "ligerFilterGrid", arguments);
};
$.fn.ligerGetFilterGridManager = function () {
return $.ligerui.run.call(this, "ligerGetFilterGridManager", arguments);
};
$.ligerDefaults.FilterGrid = $.extend(true, $.ligerDefaults.Grid, {
originalColumns: [], //保留原始表单列
summaryUrl: null, //求和统计url
summaryTable: null, //统计表表名
summaryShowRecordCount: true, //是否显示统计条数
summary: [], //统计字段(一般请填写可以求和字段,后台不校验字段合法性)
customColumn: { //表单配置
listUrl: null, //自定义列查询url
submitUrl: null, //自定义列提交url
clazz: null, //自定义列要存储的唯一主键
id: null, //自定义列配置插件id
dialogTitle: '表单配置',
leftId: 'listBox-left', //表单配置左列表id
leftTitle: '隐藏的列', //表单配置左列表title
rightId: 'listBox-right', //表单配置右列表id
rightTitle: '显示的列', //表单配置右列表title
callback: null //自定义列保存成功回调函数
},
seniorSearch: { //高级搜索,与ligerGrid本身的高级搜索互不相干
id: null, //高级搜索插件id
searchSight: { //场景配置
id: null, //场景配置插件id
listUrl: null, //场景列表url
submitUrl: null, //场景提交url
clazz: null, //场景存储的唯一主键
data: [], //local数据
callback: null //场景配置保存成功回调函数
}
}
});
//接口方法扩展
$.ligerMethos.FilterGrid = $.ligerMethos.FilterGrid || {};
$.ligerDefaults.FilterGridString = $.extend(true, {}, $.ligerDefaults.GridString);
$.ligerDefaults.FilterGrid_columns = $.extend(true, {}, $.ligerDefaults.Grid_columns);
$.ligerDefaults.FilterGrid_editor = $.extend(true, {}, $.ligerDefaults.Grid_editor);
$.ligerui.controls.FilterGrid = function (element, options) {
$.ligerui.controls.FilterGrid.base.constructor.call(this, element, options);
};
$.ligerui.controls.FilterGrid.ligerExtend($.ligerui.controls.Grid, {
__getType: function () {
return '$.ligerui.controls.FilterGrid';
},
__idPrev: function () {
return 'filter-grid';
},
_extendMethods: function () {
return $.ligerMethos.FilterGrid;
},
//重写
_render: function () {
var g = this, p = this.options;
g.enable_columns = [];
g.disable_columns = [];
g.custom_rules = [];
g.column_rules = [];
g.senior_rules = [];
g.search_sight_rules = [];
//构建高级查询和场景
var seniorSearch = p.seniorSearch;
if(seniorSearch && seniorSearch.id){
$("#"+seniorSearch.id).unbind("click").bind("click", function (event) {// 高级搜索
event.preventDefault();
g.seniorSearch();
});
var searchSight = seniorSearch.searchSight;
if(searchSight){
//加载场景
var search_sight_select = $("#"+searchSight.id);
//场景html元素存在
if(search_sight_select[0]){
var result = CommnUtil.ajax( rootPath + searchSight.listUrl, {
'clazz': searchSight.clazz,
}, "json");
if(result && result.searchSights){
search_sight_select.html("");
search_sight_select.append('<option value="">全部</option>');
$.each(result.searchSights, function (index, data) {
var option = $("<option value="+data.search_condition+">"+data.name+"</option>");
option.attr("selected",data.is_default == 1 ? true : false);
search_sight_select.append(option);
});
}
else{
search_sight_select.html("");
if(layer){
layer.msg("获取场景信息失败,请联系管理员!");
}
else{
console.error("获取场景信息失败,请联系管理员!")
}
}
if(search_sight_select.val()){
var default_params = {where: search_sight_select.val()}
if(p.parms.length == 0 || !p.parms){
p.parms = default_params;
}
}
//场景下拉事件绑定
$("#"+searchSight.id).bind('change',function(event){
var search_sight_rules = [];
var rules = event.target.value
if(rules){
search_sight_rules = JSON.parse(rules);
}
else{
search_sight_rules = []
}
g.search_sight_rules = search_sight_rules;
g.searchData();
});
$("#"+searchSight.id).next("i").bind('click',function(){
g.search_sight_manage();
})
}
else{
console.error("html元素不存在!")
}
}
}
//构建表单配置组件
var customColumn = p.customColumn;
if (customColumn.id && customColumn.listUrl && customColumn.submitUrl && customColumn.clazz) {
//表单设置
$("#" + customColumn.id).unbind("click").bind('click', function () {
g.formManage();
});
var result = CommnUtil.ajax(customColumn.listUrl, {
'clazz': customColumn.clazz,
}, "json");
var custom_columns = result.custom_columns;
var exist_custom_column = result.exist_custom_column;
var enable_columns = [];
var disable_columns;
if (exist_custom_column) {
if (custom_columns) {
$.each(JSON2.parse(custom_columns), function (index, item) {
var exist = false;
$.each(p.originalColumns, function (index, column) {
if (!exist) {
if (column.name === item.field) {
exist = true;
if (item.width) {
column.width = item.width;
}
enable_columns.push(column);
}
}
else {
return;
}
});
});
disable_columns = p.originalColumns.filter(function (column) {
var exist = false;
$.each(enable_columns, function (index, enable_column) {
if (!exist) {
if (enable_column.name === column.name) exist = true;
}
else {
return;
}
})
return !exist;
});
}
else {
enable_columns = [];
disable_columns = p.columns;
}
}
else {
enable_columns = p.columns;
}
g.enable_columns = enable_columns;
g.disable_columns = disable_columns;
p.columns = enable_columns;
}
$.ligerui.controls.FilterGrid.base._render.call(this);
},
_createHeaderCell: function (column) {
var jcell = $.ligerui.controls.FilterGrid.base._createHeaderCell.call(this, column);
var filterable = column.filterable !== undefined ? column.filterable : true;
var jcell_inner = $(".l-grid-hd-cell-inner", jcell);
if (filterable && !column.frozen) {
jcell_inner.prepend($("<span class='l-grid-hd-cell-filter glyphicon glyphicon-filter'></span>"));
}
return jcell
},
_getSrcElementByEvent: function (e) {
//沿用父类的内部方法
var g = this;
var obj = (e.target || e.srcElement);
var jobjs = $(obj).parents().add(obj);
var fn = function (parm) {
for (var i = 0, l = jobjs.length; i < l; i++) {
if (typeof parm == "string") {
if ($(jobjs[i]).hasClass(parm)) return jobjs[i];
}
else if (typeof parm == "object") {
if (jobjs[i] == parm) return jobjs[i];
}
}
return null;
};
var r = $.ligerui.controls.FilterGrid.base._getSrcElementByEvent.call(this, e);
if (r.gridheader) {
r.hcellfilter = fn("l-grid-hd-cell-filter");//filter
}
return r;
},
// Click handler override. Handles three extra cases on top of the base Grid:
// clicks on a column's filter icon (opens/creates the per-column filter
// dialog), row-checkbox clicks (refresh the summary bar for the selection),
// and the check-all checkbox (same refresh).
_onClick: function (e) {
    var g = this, p = this.options;
    var src = g._getSrcElementByEvent(e);
    // Column filter icon: open the cached dialog or lazily create filter + dialog
    if (src.hcellfilter) {
        if (src.column.winfilter) {
            src.column.winfilter.show();
            return;
        }
        var filter = src.column.filter;
        if (!filter) {
            var filter_id = this.id + "_" + src.column["__id"];
            var filtercontainer = $('<form id="' + filter_id + '_filtercontainer"></form>').width(700).height(300).hide();
            filter = filtercontainer.ligerColumnFilter(
                {
                    fields: getFields([src.column]),
                    headAlign: 'center',
                }
            );
            src.column.filter = filter;
        }
        src.column.winfilter = $.ligerDialog.open({
            id: '',
            title: '',
            width: 800, height: 400,
            target: filtercontainer, isResize: false, top: 50,
            buttons: [
                {
                    text: '确定', onclick: function (item, dialog) {
                        // Mark/unmark the header cell depending on whether this
                        // column has active filter rules ("__domid" may contain
                        // "|", hence the escaping for the jQuery selector)
                        if (src.column.filter.getData().rules && src.column.filter.getData().rules.length) {
                            $("#" + src.column["__domid"].replace(/\|/g, "\\|")).addClass("filtered");
                        }
                        else {
                            $("#" + src.column["__domid"].replace(/\|/g, "\\|")).removeClass("filtered");
                        }
                        // Collect the rules of EVERY column filter into one rule set
                        var data_rules = [];
                        var column_rules = {};
                        $.each(g._columns, function (k, v) {
                            if (v.filter) {
                                var data = v.filter.getData(v.filter.group);
                                if (data.rules && data.rules.length) {
                                    data_rules = data_rules.concat(data.rules)
                                }
                            }
                        });
                        column_rules.rules = data_rules;
                        g.column_rules = column_rules;
                        g.searchData();
                        dialog.hide();
                        g.trigger("filter", [src.column, g]);
                    }
                },
                {
                    text: '取消', onclick: function (item, dialog) {
                        dialog.hide();
                    }
                },
                {
                    text: '重置', onclick: function () {
                        src.column.filter.reset();
                    }
                }
            ]
        });
    }
    // Delegate standard click handling to the base Grid
    $.ligerui.controls.FilterGrid.base._onClick.call(this, e);
    if (src.row && g.enabledCheckbox()) // row selected via checkbox
    {
        // checkbox: refresh the summary bar for the current selection
        var selectRowButtonOnly = p.selectRowButtonOnly ? true : false;
        if ((src.checkbox || !selectRowButtonOnly) && p.selectable != false) {
            var selected = g.getSelectedRows();
            var selectRows = $.map(selected, function (item) {
                return item.id;
            });
            var select_ids = selectRows.join(",");
            g.summingUp(select_ids);
        }
    }
    if (src.checkboxall) {
        var selected = g.getSelectedRows();
        var selectRows = $.map(selected, function (item) {
            return item.id;
        });
        var select_ids = selectRows.join(",");
        g.summingUp(select_ids);
    }
    // Convert the grid's columns into filter field definitions
    function getFields(columns) {
        var fields = [];
        // with multi-row headers, g.columns holds the leaf-level columns
        $(columns).each(function () {
            var isNumber = this.type == "int" || this.type == "number" || this.type == "float";
            var isDate = this.type == "date";
            if (isNumber) this.type = "number";
            if (isDate) this.type = "date";
            fields.push(this);
        });
        return fields;
    }
},
_showData: function (sourceType) {
var g = this, p = this.options;
$.ligerui.controls.FilterGrid.base._showData.call(this, sourceType);
g.summingUp();
},
// Persist the column layout to the server whenever a column is resized, so
// the user's custom widths survive reloads (requires customColumn.submitUrl).
setColumnWidth: function (columnparm, newwidth) {
    var g = this, p = this.options;
    $.ligerui.controls.FilterGrid.base.setColumnWidth.call(this, columnparm, newwidth);
    var customColumn = p.customColumn;
    if (customColumn.submitUrl) {
        // snapshot every current column as {field, width}
        var columns = [];
        $.each(g._columns, function (index, __column) {
            var _column = {};
            _column.field = __column.name;
            _column.width = __column._width;
            columns.push(_column)
        });
        if (customColumn.clazz) {
            if (CommnUtil) {
                // CommnUtil.ajax is used synchronously here — presumably
                // blocking; confirm before reusing elsewhere
                var result = CommnUtil.ajax(customColumn.submitUrl, {
                    'clazz': customColumn.clazz,
                    "columns": JSON2.stringify(columns),
                }, "json");
                if (result == "success") {
                }
                else {
                    console.log("保存自定义列配置失败");
                }
            }
            else {
                console.error("缺少CommnUtil");
            }
        }
    }
},
//新定义方法
summingUp: function (ids) {
var g = this, p = this.options;
if (p.totalRender) return;//如果totalRender存在,则不再渲染
if (p.summary.length > 0) {
var url = p.summaryUrl;
var table = p.summaryTable;
if (url && table) {
var summary_field = $.map(p.summary, function (item) {
return item.field;
});
var params = {
tableName: table,
summary: summary_field.join(',')
}
if (ids) params.ids = ids;
if (CommnUtil) {
var result = CommnUtil.ajax(url, params, "json");
if (result.data) {
$(".l-panel-bar-total", g.element).remove();
var container = $('<div class="l-panel-bar-total"></div>');
if (p.summaryShowRecordCount) {
container.append($('<span class="summary-item">合计条数' + ':' + g.data.rowCount + '</span>'));
}
$.each(p.summary, function (index, field) {
var value = result.data[field.field] == null ? '0' : result.data[field.field];
var item = $('<span class="summary-item">' + field.text + ':' + value + '</span>');
container.append(item);
});
$(".l-panel-bar", g.element).before(container)
}
}
else {
console.error("缺少CommnUtil");
}
}
else {
$(".l-panel-bar-total", g.element).remove();
var container = $('<div class="l-panel-bar-total"></div>');
if (p.summaryShowRecordCount) {
container.append($('<span class="summary-item">合计条数' + ':' + g.data.rowCount + '</span>'));
}
}
}
},
/**
 * Form (custom column) configuration dialog: two searchable list boxes —
 * hidden columns on the left, visible columns on the right — with move
 * buttons between them. "OK" persists the visible set (with widths) via
 * customColumn.submitUrl.
 */
formManage: function () {
    var g = this, p = this.options;
    var customColumn = p.customColumn;
    var content = $('<div class="search-listbox-group"></div>');
    var listBox1 = $('<div></div>').attr("id", customColumn.leftId);
    var listBox2 = $('<div></div>').attr("id", customColumn.rightId);
    content.append(listBox1);
    // left box: currently hidden columns
    listBox1.ligerSearchListBox({
        isShowCheckBox: true,
        isMultiSelect: true,
        width: 200,
        height: 400,
        valueField: 'name',
        textField: 'display',
        searchCls: 'form-control',
        enableCheckAll: true,
        title: customColumn.leftTitle,
        hasBorder: true,
        data: g.disable_columns
    });
    var input_left = $('<input type="button" value="<" />');
    input_left.bind('click', function () {
        moveToLeft();
    });
    var input_right = $('<input type="button" value=">" />');
    input_right.bind('click', function () {
        moveToRight();
    });
    var container = $('<div class="middle"></div>');
    container.append(input_left);
    container.append(input_right);
    content.append(container);
    content.append(listBox2);
    // right box: currently visible columns
    listBox2.ligerSearchListBox({
        isShowCheckBox: true,
        isMultiSelect: true,
        width: 200,
        height: 400,
        valueField: 'name',
        textField: 'display',
        searchCls: 'form-control',
        enableCheckAll: true,
        title: customColumn.rightTitle,
        hasBorder: true,
        data: g.enable_columns
    });
    content.append('<span class="clearfix"></span>');
    var dialog = $.ligerDialog.open({
        title: customColumn.dialogTitle,
        width: 600,
        height: 500,
        content: content,
        buttons: [
            {
                text: '确定',
                cls: 'btn btn-success',
                onclick: function () {
                    // collect {field, width} for every column in the right box,
                    // resolving widths from current -> original -> default
                    var box2 = liger.get(customColumn.rightId);
                    var enabled = [];
                    $.each(box2.data, function (index, column) {
                        var _column = {}
                        _column.field = column.name;
                        var exist = false;
                        $.each(g._columns, function (index, __column) {
                            if (!exist) {
                                if (column.name === __column.name) {
                                    exist = true;
                                    _column.width = __column._width;
                                }
                            }
                            else {
                                return;
                            }
                        });
                        // column just moved from hidden to visible: fall back
                        // to the default configuration for its width
                        if (!exist) {
                            $.each(p.originalColumns, function (index, __column) {
                                if (!exist) {
                                    if (column.name === __column.name) {
                                        exist = true;
                                        _column.width = __column.width;
                                    }
                                }
                                else {
                                    return;
                                }
                            });
                        }
                        // not found in either current or original columns:
                        // use a default width
                        if (!exist) {
                            if (p.columnWidth) {
                                _column.width = p.columnWidth;
                            }
                            else {
                                _column.width = 200;
                            }
                        }
                        enabled.push(_column);
                    });
                    if (CommnUtil) {
                        var result = CommnUtil.ajax(customColumn.submitUrl, {
                            'clazz': customColumn.clazz,
                            "columns": JSON2.stringify(enabled),
                        }, "json");
                        if (result == "success") {
                            if (layer) {
                                dialog.close();
                                layer.msg('保存成功');
                            }
                            else {
                                g.trigger('customColumnSubmitSuccess');
                            }
                            if (customColumn.callback) {
                                customColumn.callback.call(g)
                            }
                        } else {
                            if (layer) {
                                layer.msg('保存失败,' + result);
                            }
                            else {
                                g.trigger('customColumnSubmitFail', result);
                            }
                        }
                    }
                    else {
                        console.error("缺少CommnUtil");
                    }
                }
            },
            {
                text: '取消',
                onclick: function () {
                    dialog.close();
                }
            }
        ]
    });
    /**
     * Move selected items from the visible (right) list to the hidden (left) list.
     */
    function moveToLeft() {
        var box1 = liger.get(customColumn.leftId), box2 = liger.get(customColumn.rightId);
        var selected = box2.getSelectedItems();
        if (!selected || !selected.length) return;
        box2.removeItems(selected);
        box1.addItems(selected);
    }
    /**
     * Move selected items from the hidden (left) list to the visible (right) list.
     */
    function moveToRight() {
        var box1 = liger.get(customColumn.leftId), box2 = liger.get(customColumn.rightId);
        var selected = box1.getSelectedItems();
        if (!selected || !selected.length) return;
        box1.removeItems(selected);
        box2.addItems(selected);
    }
},
/**
 * Advanced search dialog (independent of ligerGrid's built-in one). Builds a
 * union filter over the visible columns; conditions can optionally be saved
 * as a named "scene" when searchSight is configured.
 */
seniorSearch: function () {
    var g = this, p = this.options;
    var searchSight = p.seniorSearch.searchSight;
    // the dialog is created once and re-shown afterwards
    if (g.winfilter) {
        g.winfilter.show();
        return;
    }
    // filter the fields; TODO extract a shared helper for this
    var fields = [];
    $(g.columns).each(function (index, o) {
        if (o.show == true) {
            var isNumber = o.type == "int" || o.type == "number" || o.type == "float";
            var isDate = o.type == "date";
            if (isNumber) o.type = "number";
            if (isDate) o.type = "date";
            fields.push(o);
        }
    });
    var filtercontainer = $('<div id="' + g.id + '_filtercontainer" style="overflow-y: auto;max-height:300px;"></div>');
    g.filter = filtercontainer.ligerUnionFilter({
        fields: fields,
        showGroup: false,
        atLeastOne: true,
        headAlign: 'center',
        widths: ['180px', '120px', '180px'],
        texts: ['字段', '运算符', '值'],
    });
    g.winfilter = $.ligerDialog.open({
        title: '自定义查询',
        width: 800,
        height: 500,
        target: filtercontainer, isResize: false, top: 50,
        buttons: [
            {
                text: '确定', onclick: function (item, dialog) {
                    var senior_rules = g.filter.getData();
                    var form = g.winfilter.dialog.content.find("form");
                    var form_data = form.serializeJson();
                    // "save as scene" checkbox is ticked
                    if (form_data.is_set_sight == 'on') {
                        if (!form_data.sight_name) {
                            // TODO: surface a hint that a scene name is required
                            return;
                        }
                        else if (senior_rules.rules.length == 0) {
                            return;
                        }
                        else {
                            if (searchSight && searchSight.submitUrl && searchSight.clazz) {
                                // submit the conditions as a named scene
                                var data = {};
                                data.name = form_data.sight_name;
                                if (form_data.is_default == 'on') {
                                    data.is_default = 1
                                }
                                data.clazz = searchSight.clazz;
                                data.search_condition = JSON2.stringify(senior_rules);
                                var result = CommnUtil.ajax(searchSight.submitUrl, data, 'json');
                                if (result == 'success') {
                                    if (layer) {
                                        layer.msg('保存场景成功');
                                    }
                                    else {
                                        console.log('保存场景成功');
                                    }
                                    dialog.hide();
                                    g.senior_rules = senior_rules;
                                    g.searchData();
                                    if (searchSight.callback) {
                                        searchSight.callback.call(g,form[0]);
                                    }
                                }
                                else {
                                    if (layer) {
                                        layer.msg('保存场景失败');
                                    }
                                    else {
                                        console.log('保存场景失败');
                                    }
                                }
                            }
                        }
                    }
                    else {
                        // plain search: remember the rules and run the query
                        g.senior_rules = senior_rules;
                        g.searchData();
                        dialog.hide();
                    }
                }
            },
            {
                text: '取消', onclick: function (item, dialog) {
                    dialog.hide();
                }
            },
            {
                text: '重置', onclick: function () {
                    g.filter.reset();
                }
            }
        ]
    });
    // append the "save as scene" form beneath the filter when scenes are enabled
    if (searchSight) {
        var form = '<form style=" padding: 20px 0px;">' +
            '<div class="checkbox col-xs-3">' +
            '<label>' +
            '<input type="checkbox" name="is_set_sight">保存为场景' +
            '</label>' +
            '</div>' +
            '<div class="form-group col-xs-6">' +
            '<input type="text" class="form-control" name="sight_name" placeholder="输入场景名称">' +
            '</div>' +
            '<div class="checkbox col-xs-6">' +
            '<label>' +
            '<input type="checkbox" name="is_default">设为默认' +
            '</label>' +
            '</div>' +
            '</form>';
        g.winfilter.dialog.content.append($('<div role="separator" class="divider"></div>"'));
        g.winfilter.dialog.content.append($(form));
    }
},
//自定义查询方法,把页面所有的查询条件拼接进行查询
searchData: function(){
var g = this, p = this.options;
var hash ={};
var rules = [];
var groupData = {};
$.each(g.custom_rules.rules,function(k,v){
if(!hash[v.field+"_"+v.op+"_"+v.value]){
hash[v.field+"_"+v.op+"_"+v.value] = true
rules.push(v);
}
});
$.each(g.column_rules.rules,function(k,v){
if(!hash[v.field+"_"+v.op+"_"+v.value]){
hash[v.field+"_"+v.op+"_"+v.value] = true
rules.push(v);
}
});
$.each(g.senior_rules.rules,function(k,v){
if(!hash[v.field+"_"+v.op+"_"+v.value]){
hash[v.field+"_"+v.op+"_"+v.value] = true
rules.push(v);
}
});
$.each(g.search_sight_rules.rules,function(k,v){
if(!hash[v.field+"_"+v.op+"_"+v.value]){
hash[v.field+"_"+v.op+"_"+v.value] = true
rules.push(v);
}
});
groupData.rules = rules;
groupData.op = "and";
g.search({where: JSON2.stringify(groupData)});
},
/**
* 场景管理
*/
search_sight_manage: function (){
var dialog = $.ligerDialog.open({
title: '场景管理',
width: 600,
height: 500,
content: '',
buttons:[
{
text:'确定',
cls:'btn btn-success',
onclick:function(){
dialog.close();
}
},
{
text:'取消',
onclick:function(){
dialog.close();
}
}
],
data: {
}
});
},
//清除对象
clear: function ()
{
$(document).unbind("mouseup.grid");
$(document).unbind("click.grid");
$(document).unbind("resize.grid");
$(document).unbind("keydown.grid");
$(document).unbind("keyup.grid");
this.destroy();
},
});
$.ligerui.controls.FilterGrid.prototype.enabledTotal = $.ligerui.controls.FilterGrid.prototype.isTotalSummary;
$.ligerui.controls.FilterGrid.prototype.add = $.ligerui.controls.FilterGrid.prototype.addRow;
$.ligerui.controls.FilterGrid.prototype.update = $.ligerui.controls.FilterGrid.prototype.updateRow;
$.ligerui.controls.FilterGrid.prototype.append = $.ligerui.controls.FilterGrid.prototype.appendRow;
$.ligerui.controls.FilterGrid.prototype.getSelected = $.ligerui.controls.FilterGrid.prototype.getSelectedRow;
$.ligerui.controls.FilterGrid.prototype.getSelecteds = $.ligerui.controls.FilterGrid.prototype.getSelectedRows;
$.ligerui.controls.FilterGrid.prototype.getCheckedRows = $.ligerui.controls.FilterGrid.prototype.getSelectedRows;
$.ligerui.controls.FilterGrid.prototype.getCheckedRowObjs = $.ligerui.controls.FilterGrid.prototype.getSelectedRowObjs;
$.ligerui.controls.FilterGrid.prototype.setOptions = $.ligerui.controls.FilterGrid.prototype.set;
$.ligerui.controls.FilterGrid.prototype.reload = $.ligerui.controls.FilterGrid.prototype.loadData;
$.ligerui.controls.FilterGrid.prototype.refreshSize = $.ligerui.controls.FilterGrid.prototype._onResize;
$.ligerui.controls.FilterGrid.prototype.append = $.ligerui.controls.FilterGrid.prototype.appendRange;
})(jQuery);<file_sep>/src/main/resources/sqlFiles/1.0.12-qzl-upd-borrow-money-menus.sql
-- Update the menu url for loan management (borrow-money module) --
SELECT @fund_loan_manage_id := id FROM ly_resources WHERE resKey='loan_manage' AND type=1;
UPDATE ly_resources SET resUrl = '/fund_borrow_money/list.shtml' WHERE id = @fund_loan_manage_id;
<file_sep>/README.md
# zlblog
志磊博客,不一般的博客!
<file_sep>/src/main/java/cn/zllog/beanEntity/FilterParam.java
package cn.zllog.beanEntity;
public class FilterParam {
private String Name ;
private Object Value ;
public FilterParam(String name, Object value)
{
this.Name = name;
this.Value = value;
}
public String getName() {
return this.Name;
}
public void setName(String Name) {
this.Name = Name;
}
public Object getValue() {
return this.Value;
}
public void setValue(Object value) {
this.Value = value;
}
}
<file_sep>/src/main/webapp/js/fund/quota_scene/allocation.js
// Grab this window's dialog object and shared page state
var thisDialog = frameElement.dialog; // the current dialog window
var orgGrid, userGrid , // grid objects (organisations / users)
tree, // tree object
custom_rules = {}, // custom query conditions
checkedFunction = [], // ids currently checked (survives paging)
primaryCheckedFunction = []; // ids checked when the page loaded
$(function() {
    // fit the centre layout panel beside the 205px side panel
    $(".l-layout-center").width($("#layout1").width()-205);
    // organisation grid for the current scene
    orgGrid = $("#orgGrid").ligerGrid({
        columns: [
            {
                display : "orgIds",
                name : "orgIds",
                hide : true
            },
            {
                display : "id",
                name : "id",
                hide : true
            },{
                display : "组织编码",
                name : "orgCode",
                width: 50
            }, {
                display : "组织名称",
                name : "orgName",
                id:"orgName",
                align: 'left',
                width: 150
            }
        ],
        url: rootPath +'/fund_quota_scene/findBySceneId.shtml?id='+sceneId,
        sortName: 'id',
        sortOrder: 'desc',
        // onFilter: columnFilter,
        rownumbers:false,
        checkbox:false
    });
    // selecting an organisation row (re)builds the user grid for that org
    orgGrid.bind('selectRow', function ( data)
    {
        if(data){
            userGrid = $("#gridArea").ligerGrid({
                columns: [
                    {
                        display : "账户",
                        name : "accountName",
                        id:"accountName",
                        align: 'left',
                        width: 100
                    },{
                        display : "人员名称",
                        name : "name",
                        width: 100
                    }, {
                        display : "关联人员",
                        name : "full",
                        width: 200
                    }, {
                        display : "描述",
                        name : "description",
                        width: 100
                    }
                ],
                url: rootPath +'/fund_quota_scene/findByOrgId.shtml?ids='+data.orgIds+"&sceneId="+sceneId+"&id="+data.id,
                sortName: 'id',
                sortOrder: 'desc',
                // onFilter: columnFilter,
                rownumbers:false,
                // initialise checked rows
                isChecked: f_isChecked,
                // row checkbox event
                onCheckRow: f_onCheckRow,
                // check-all / uncheck-all event
                onCheckAllRow: f_onCheckAllRow,
                onSuccess:function (data, grid) {// after loading, seed checkedFunction
                    // reset the checked and originally-checked id lists
                    checkedFunction.length = 0;
                    primaryCheckedFunction.length = 0;
                    if(data.userId){
                        checkedFunction.push(data.userId);
                        primaryCheckedFunction.push(data.userId);
                    }
                }
            });
        }
    });
    $("#pageloading").hide();
    // NOTE(review): .click("click", handler) passes the string "click" as
    // jQuery event data; the handler still fires — .bind('click', ...) was
    // presumably intended.
    $("#add").click("click", function() {
        add();
    });
});
/**
 * Assign the single checked user to the currently selected organisation for
 * this scene. Validates that an organisation row is selected and exactly one
 * user is checked before posting.
 * FIX: the original dereferenced selected[0].id without checking that an
 * organisation row was selected, throwing a TypeError instead of showing a
 * message. Dead commented-out lines removed.
 */
function add() {
    var selected = orgGrid.getSelectedRows();
    if (!selected || selected.length == 0) {
        layer.msg("请选中组织");
        return;
    }
    if (checkedFunction.length == 0) {
        layer.msg("请选中分配人员");
        return;
    }
    if (checkedFunction.length > 1) {
        layer.msg("只能选中一名人员分配");
        return;
    }
    // post the assignment and report the result
    $.ajax({
        url: rootPath + '/fund_quota_scene/addQuota_UserEntity.shtml',
        type: "post",
        dataType: "json",
        data: {
            id: checkedFunction[0],
            orgId: selected[0].id,
            sceneId: sceneId
        },
        success: function (data) {
            if (data == "success") {
                $.ligerDialog.confirm('保存成功!是否关闭窗口?', function (yes) {
                    if (yes == true) {
                        thisDialog.close();
                    }
                });
            } else {
                $.ligerDialog.error('保存失败!' + data);
            }
        }
    });
}
/** Check-all handler: sync every visible record into/out of the checked set. */
function f_onCheckAllRow(checked)
{
    var op = checked ? addCheckedFunction : removeCheckedFunction;
    for (var rowid in this.records) {
        op(this.records[rowid]['id']);
    }
}
/**
 * Cross-page multi-select support: f_onCheckRow records checked row ids and
 * f_isChecked re-checks them whenever a page (re)loads.
 * Returns the index of id in checkedFunction, or -1 when absent.
 */
function findCheckedFunction(id)
{
    // Loose (==) comparison on purpose: ids may arrive as strings or numbers
    // depending on the data source.
    var total = checkedFunction.length;
    for (var idx = 0; idx < total; idx++) {
        if (checkedFunction[idx] == id) {
            return idx;
        }
    }
    return -1;
}
/** Add id to the checked set unless it is already present. */
function addCheckedFunction(id)
{
    var missing = findCheckedFunction(id) == -1;
    if (missing) {
        checkedFunction.push(id);
    }
}
/** Remove id from the checked set (no-op when absent). */
function removeCheckedFunction(id)
{
    var idx = findCheckedFunction(id);
    if (idx != -1) {
        checkedFunction.splice(idx, 1);
    }
}
/** Initial checked state for a row: checked iff its id is in the checked set. */
function f_isChecked(rowdata)
{
    // FIX: the previous implementation did a substring search on the
    // comma-joined id list (f_getChecked().indexOf(rowdata.id)), so id "1"
    // wrongly matched a list containing "12". Exact membership is used
    // instead (loose == inside findCheckedFunction tolerates string/number ids).
    return findCheckedFunction(rowdata.id) != -1;
}
/** Row checkbox handler: record or forget the row id. */
function f_onCheckRow(checked, data)
{
    (checked ? addCheckedFunction : removeCheckedFunction)(data.id);
}
/** Comma-joined list of the currently checked ids. */
function f_getChecked()
{
    var ids = checkedFunction;
    return ids.join(',');
}
<file_sep>/src/main/webapp/js/system/mdm_supplier/muchEdit.js
// Grab this window's dialog object and the parent page's grid
var thisDialog = frameElement.dialog; // the current dialog window
var targetGrid = thisDialog.get('data').grid; // grid to refresh after a successful batch edit
$(function () {
    // the "reset" button doubles as the dialog close button
    $("button[type='reset']").click(function (event) {
        thisDialog.close();
    });
    $("form").validate({
        errorClass: 'error-msg',
        submitHandler: function (form) {// must precede validation, otherwise the ajax submit never runs
            ly.ajaxSubmit(form, {// check whether the batch edit succeeded
                type: "post",
                dataType: "json",
                success: function (data) {
                    if (data == "success") {
                        targetGrid.reloadAll();
                        resetForm();
                        $.ligerDialog.confirm('批量编辑成功!是否关闭窗口?', function (yes) {
                            if (yes == true) {
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('批量编辑失败!'+data);
                    }
                }
            });
        },
        rules: {},
        messages: {}
    });
});
/**
 * Reset the batch-edit form to its initial values.
 */
function resetForm() {
    document.getElementById("form").reset();
}
<file_sep>/src/main/java/cn/zllog/controller/doIndexController.java
package cn.zllog.controller;
import cn.zllog.dao.CommonDAOImpl;
import cn.zllog.dao.ICommonDAO;
import cn.zllog.entity.BbsEntity;
import cn.zllog.plugin.PageView;
import cn.zllog.services.IFundBlogService;
import eu.bitwalker.useragentutils.Browser;
import eu.bitwalker.useragentutils.UserAgent;
import eu.bitwalker.useragentutils.Version;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Locale;
import java.util.Map;
/**
 * Front-facing blog controller: visit statistics, guest-book (BBS) posting
 * and replies, up/down votes, IP-to-address lookup and paged blog queries.
 *
 * NOTE(review): the lowercase class name and the "doDwon" method name break
 * Java conventions but are part of the bean/handler surface, so they are only
 * flagged here, not renamed.
 */
@Controller
@RequestMapping("/")
public class doIndexController {
    @Inject
    private IFundBlogService fundBlogService; // blog / statistics service
    @Inject
    private ICommonDAO commonDAO; // NOTE(review): unused in this class — confirm before removing

    /** Save a reply; {@code id} is the message being replied to. */
    @RequestMapping("reply")
    @ResponseBody
    public String doReply(Integer id,String bbs,String user,String tell){
        BbsEntity bbsEntity = new BbsEntity();
        bbsEntity.setBbs(bbs);
        bbsEntity.setUser(user);
        bbsEntity.setTell(tell);
        bbsEntity.setReply(id);
        fundBlogService.add(bbsEntity);
        return "success";
    }

    /** Down-vote the given entry ("doDwon" typo kept for compatibility). */
    @RequestMapping("down")
    @ResponseBody
    public String doDwon(@RequestParam Integer id){
        fundBlogService.doDown(id);
        return "success";
    }

    /** Up-vote the given entry. */
    @RequestMapping("up")
    @ResponseBody
    public String doUp(@RequestParam Integer id){
        fundBlogService.doUp(id);
        return "success";
    }

    /**
     * Home page: records a visit (client ip + rough OS name parsed from the
     * User-Agent header) and exposes visit counters and browser info to the view.
     */
    @RequestMapping("index")
    public String doIndexUI(Model model,HttpServletRequest request){
        String str = request.getHeader("user-agent");
        // crude OS extraction: text between the first '(' and the first ';'
        int start = str.indexOf("(",1)+1;
        int end = str.indexOf(";",1);
        String name = "";
        if(start>0&&end>0){
            name = str.substring(start,end);
        }else{
            name = "未知系统";
        }
        String ip = fundBlogService.getIp(request);
        fundBlogService.saveStati(ip,name);
        model.addAttribute("ip",ip);
        model.addAttribute("name",name);
        model.addAttribute("count",fundBlogService.findCountStati());
        model.addAttribute("day",fundBlogService.findCountDay());
        String agent =request.getHeader("User-Agent");
        System.out.println(agent); // NOTE(review): debug output — prefer a logger
        UserAgent userAgent = UserAgent.parseUserAgentString(agent);
        System.out.println(userAgent); // NOTE(review): debug output — prefer a logger
        Browser browser = userAgent.getBrowser();
        Version version = browser.getVersion(request.getHeader("User-Agent"));
        model.addAttribute("browser",browser.getName() + "/" + version.getVersion());
        return "WEB-INF/jsp/log/list";
    }

    /** Resolve a human-readable address for the given ip. */
    @RequestMapping("getAddr")
    @ResponseBody
    public String getAddr(String ip){
        return fundBlogService.getAddresses(ip);
    }

    /**
     * Save a guest-book message, escaping the body first.
     * NOTE(review): the replacement targets below appear entity-decoded by the
     * text extraction (originally likely &nbsp; &lt; &gt; &quot; &frasl; as
     * replacements) — verify against version control before editing this method.
     */
    @RequestMapping("addBbs")
    @ResponseBody
    public String addEntity(@RequestParam String tell,@RequestParam String user,@RequestParam String bbs){
        BbsEntity bbsEntity = new BbsEntity();
        bbsEntity.setTell(tell);
        bbsEntity.setUser(user);
        bbs=bbs.replaceAll(" ", " ");
        bbs=bbs.replaceAll("<", "<");
        bbs=bbs.replaceAll(">", ">");
        bbs=bbs.replaceAll("\"", """);
        bbs=bbs.replaceAll("/", "⁄");
        bbsEntity.setBbs(bbs);
        fundBlogService.add(bbsEntity);
        return "success";
    }

    /** Guest-book page. */
    @RequestMapping("bbs")
    public String doBbsUI(Model model){
        model.addAttribute("bbs",fundBlogService.findBbs());
        return "WEB-INF/jsp/log/bbs";
    }

    /** Paged blog query with optional condition and sort. */
    @RequestMapping("findByPage")
    @ResponseBody
    public PageView findByPage(Integer currPage,Integer pageSize, String condition,String sort) throws Exception{
        PageView pageView = fundBlogService.findByPage(currPage,pageSize,condition,sort);
        pageView.setMessage("ok");
        return pageView;
    }
}
<file_sep>/src/main/webapp/js/fund/code_rule/edit.js
// Grab this window's dialog object and the parent page's grid
var thisDialog = frameElement.dialog; // the current dialog window
var targetGrid = thisDialog.get('data').grid; // grid to refresh after a successful save
$(function() {
    // Initialise: clear both selects and mirror the selected option text into
    // the companion hidden text fields on change
    $("#apply_org_id").bind('change',function(event){
        $("#apply_org").val($(this).find("option:selected").text());
    }).val('');
    $("#bank_type_id").bind('change',function(event){
        $("#bank_type_name").val($(this).find("option:selected").text());
    }).val('');
    // the "reset" button doubles as the dialog close button
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        ignore: [], // validate hidden fields too
        errorClass:'error-msg',
        submitHandler : function(form) {// must precede validation, otherwise the ajax submit never runs
            ly.ajaxSubmit(form, {// check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "ruleName" : {
                required : true,
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "orgIds" : {
                required : true,
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "fixedChar" : {
                required : true,
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            },
            "dateName" : {
                required : true,
                validateNullOrWhiteSpace: true,
                maxlength: 255,
            }
        },
        messages : {
            "ruleName" : {
                required : "请输入规则名称",
                validateNullOrWhiteSpace: "规则名称不能为空白字符",
                maxlength: "名称过长",
            },
            "orgIds" : {
                required : "请输入适用公司",
                validateNullOrWhiteSpace: "适用公司不能为空白字符",
                maxlength: "适用公司过长",
            },
            "fixedChar" : {
                required : "请输入固定字符",
                validateNullOrWhiteSpace: "固定字符不能为空白字符",
                maxlength: "固定字符过长",
            },
            "dateName" : {
                required : "请输入日期字符",
                validateNullOrWhiteSpace: "日期字符不能为空白字符",
                maxlength: "日期过长",
            }
        },
        // place each validation message inside the field's form-group
        errorPlacement : function(error, element) {
            element.closest('.form-group').append(error);
        },
        success : function(label) {
            label.remove();
        },
    });
    // multi-select organisation combo bound to the hidden orgIds field
    $("#check_combo").ligerComboBox({ isShowCheckBox: true, isMultiSelect: true,
        data: org_data, valueFieldID: 'orgIds'
    }).setValue(selectedOrgIds);
    // company-short-name toggle switch
    $('#my-checkbox').bootstrapSwitch('size','mini');
    $('#my-checkbox').bootstrapSwitch('state',switchState);
    $('#my-checkbox').on('switchChange.bootstrapSwitch', function (event, state) {
        if(state){
            $('#companyShortName').val('testShortName');
        }else{
            $('#companyShortName').val('');
        }
    });
    // live preview of the date portion of the code rule
    $("#dateName").bind('change',function(event){
        var test = $(this).find("option:selected").val();
        if(test==1){
            $('#dateNameText').html(new Date().format("yyMMdd"));
        }else if(test==2){
            $('#dateNameText').html(new Date().format("yyyyMMdd"));
        }
    });
    // live preview of the zero-padded serial-number portion
    $("#serialNumberLen").bind('change',function(event){
        var test = $(this).find("option:selected").val();
        $('#serialNumberSim').html(CommnUtil.addPreZero('1',test));
    });
});
/**
 * Reset the "add" dialog form to its initial state.
 * Uses the native DOM reset() so browser default values are restored.
 */
function resetForm(){
$("#form")[0].reset();
}<file_sep>/pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.crown</groupId>
<artifactId>ROOT</artifactId>
<packaging>war</packaging>
<version>1.0.3</version>
<name>MDM Maven Webapp</name>
<url>http://maven.apache.org</url>
<!-- 项目属性 -->
<!-- Project-wide properties: dependency version pins and build settings -->
<properties>
<!-- dependency versions -->
<spring.version>4.2.5.RELEASE</spring.version>
<!-- FIX: was misspelled "shiro.versigion", which left ${shiro.version}
     unresolved in every org.apache.shiro dependency below -->
<shiro.version>1.2.5</shiro.version>
<mysql.version>5.1.30</mysql.version>
<druid.version>1.0.16</druid.version>
<junit.version>4.12</junit.version>
<guava.version>18.0</guava.version>
<log4j.version>1.2.17</log4j.version>
<slf4j.version>1.7.5</slf4j.version>
<poi.version>3.17</poi.version>
<commons-lang3.version>3.1</commons-lang3.version>
<commons-io.version>2.4</commons-io.version>
<commons-codec.version>1.8</commons-codec.version>
<commons-fileupload.version>1.3.1</commons-fileupload.version>
<commons-beanutils.version>1.8.3</commons-beanutils.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jdk.version>1.7</jdk.version>
</properties>
<dependencies>
<!-- Spring -->
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-webmvc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aop</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<!--Apache Shiro所需的jar包 -->
<dependency>
<groupId>org.apache.shiro</groupId>
<artifactId>shiro-core</artifactId>
<version>${shiro.version}</version>
</dependency>
<dependency>
<groupId>org.apache.shiro</groupId>
<artifactId>shiro-ehcache</artifactId>
<version>${shiro.version}</version>
<exclusions>
<exclusion>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.shiro</groupId>
<artifactId>shiro-web</artifactId>
<version>${shiro.version}</version>
</dependency>
<dependency>
<groupId>org.apache.shiro</groupId>
<artifactId>shiro-quartz</artifactId>
<version>${shiro.version}</version>
</dependency>
<dependency>
<groupId>org.apache.shiro</groupId>
<artifactId>shiro-spring</artifactId>
<version>${shiro.version}</version>
</dependency>
<!-- mysql -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc -->
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>6.4.0.jre7</version>
</dependency>
<!-- druid -->
<!-- ################### json解释 ################ -->
<!--<dependency>-->
<!--<groupId>org.codehaus.jackson</groupId>-->
<!--<artifactId>jackson-core-lgpl</artifactId>-->
<!--<version>1.9.6</version>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.codehaus.jackson</groupId>-->
<!--<artifactId>jackson-core-asl</artifactId>-->
<!--<version>1.9.4</version>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.codehaus.jackson</groupId>-->
<!--<artifactId>jackson-mapper-asl</artifactId>-->
<!--<version>1.9.13</version>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.codehaus.jackson</groupId>-->
<!--<artifactId>jackson-mapper-lgpl</artifactId>-->
<!--<version>1.9.6</version>-->
<!--</dependency>-->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.9.4</version>
</dependency>
<!-- ############################################## -->
<!-- javax提供的annotation -->
<dependency>
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
<version>1</version>
</dependency>
<!-- 提供对c标签的支持 -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>jstl</artifactId>
<version>1.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.testng/testng -->
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.10</version>
<scope>test</scope>
</dependency>
<!-- aop代理 -->
<dependency>
<groupId>asm</groupId>
<artifactId>asm</artifactId>
<version>3.3.1</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjrt</artifactId>
<version>1.8.7</version>
</dependency>
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>1.8.7</version>
</dependency>
<!-- poi -->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>${poi.version}</version>
<exclusions>
<exclusion>
<artifactId>stax-api</artifactId>
<groupId>stax</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.version}</version>
</dependency>
<!-- GENERAL UTILS begin <dependency> <groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId> <version>${commons-lang3.version}</version>
</dependency> <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId>
<version>${commons-codec.version}</version> </dependency> -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons-io.version}</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>${commons-fileupload.version}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons-beanutils.version}</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>3.1</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpasyncclient -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpasyncclient</artifactId>
<version>4.1.3</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- servlet api -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>javax.servlet.jsp</groupId>
<artifactId>javax.servlet.jsp-api</artifactId>
<version>2.3.1</version>
<scope>provided</scope>
</dependency>
<!--json-lib -->
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.fusesource</groupId>
<artifactId>sigar</artifactId>
<version>1.6.4</version>
</dependency>
<!-- <dependency> <groupId>javax.servlet</groupId> <artifactId>servlet-api</artifactId>
<version>2.5</version> <scope>provided</scope> </dependency> -->
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<version>1.4.7</version>
</dependency>
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache</artifactId>
<version>2.10.2</version>
</dependency>
<dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy-all</artifactId>
<version>2.4.5</version>
</dependency>
<!-- hibernate jpa -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>5.0.4.Final</version>
<exclusions>
<exclusion>
<artifactId>xml-apis</artifactId>
<groupId>xml-apis</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>5.0.4.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.2.1.Final</version>
</dependency>
<!-- http://mvnrepository.com/artifact/com.alibaba/druid -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.0.20</version>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>javax.el</groupId>
<artifactId>el-api</artifactId>
<version>2.2</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-redis</artifactId>
<version>1.2.0.RELEASE</version>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.5.2</version>
</dependency>
<dependency>
<groupId>com.caucho</groupId>
<artifactId>hessian</artifactId>
<version>4.0.38</version>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
<version>2.0.8</version>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- 对象差异对比 https://github.com/javers/javers -->
<dependency>
<groupId>org.javers</groupId>
<artifactId>javers-core</artifactId>
<version>2.9.2</version>
</dependency>
<!-- liquibase -->
<dependency>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>3.6.0</version>
<exclusions>
<exclusion>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/net.sourceforge.jexcelapi/jxl -->
<dependency>
<groupId>net.sourceforge.jexcelapi</groupId>
<artifactId>jxl</artifactId>
<version>2.6.12</version>
</dependency>
<!-- https://mvnrepository.com/artifact/eu.bitwalker/UserAgentUtils -->
<dependency>
<groupId>eu.bitwalker</groupId>
<artifactId>UserAgentUtils</artifactId>
<version>1.20</version>
</dependency>
</dependencies>
<build>
<finalName>MDM</finalName>
<resources>
<resource>
<directory>src/main/java</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>**/*.*</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>2.3</version>
<configuration>
<warSourceDirectory>${project.basedir}/src/main/webapp</warSourceDirectory>
<failOnMissingWebXml>false</failOnMissingWebXml>
</configuration>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>${jdk.version}</source>
<target>${jdk.version}</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.6</version>
<configuration>
<skip>true</skip>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<version>8.1.16.v20140903</version>
<configuration>
<scanIntervalSeconds>0</scanIntervalSeconds>
<contextXml>${project.basedir}/library/jetty-context.xml</contextXml>
<webApp>
<contextPath>/</contextPath>
</webApp>
<webAppSourceDirectory>${project.basedir}/src/main/webapp
</webAppSourceDirectory>
<connectors>
<connector implementation="org.eclipse.jetty.server.nio.SelectChannelConnector">
<port>80</port>
<maxIdleTime>60000</maxIdleTime>
</connector>
</connectors>
<stopKey>stop</stopKey>
<stopPort>9999</stopPort>
</configuration>
</plugin>
</plugins>
</build>
</project>
<file_sep>/src/main/java/cn/zllog/services/IFundBlogService.java
package cn.zllog.services;
import cn.zllog.entity.BbsEntity;
import cn.zllog.plugin.PageView;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
 * Service facade for the fund blog module: like/dislike voting,
 * visitor statistics, IP helpers and guest-book (BBS) messages.
 */
public interface IFundBlogService {
// Vote the entry with the given id up ("like").
void doUp(Integer id);
// Vote the entry with the given id down ("dislike").
void doDown(Integer id);
// Resolve a human-readable address/location string for the given IP.
String getAddresses(String ip);
// Number of visits recorded today.
Integer findCountDay();
// Total number of visits recorded overall.
Integer findCountStati();
// Record one visit (visitor IP and name).
void saveStati(String ip, String name);
// Extract the caller's IP address from the HTTP request.
String getIp(HttpServletRequest request);
// Paged query with free-form condition and sort expressions.
PageView findByPage(Integer currPage, Integer pageSize, String condition, String sort);
// Persist a guest-book message.
void add(BbsEntity bbsEntity);
// Return all guest-book messages.
List<BbsEntity> findBbs();
}
<file_sep>/src/main/webapp/js/fund/borrow_begin/list.js
// Module-level state for the borrow-document list page.
var listGrid,                          // the ligerFilterGrid instance (set on load)
url="/fund_borrow_begin_down/",        // base url for borrow requests
clazz = 'FundBorrowMoney',             // entity key used by the config plugins
borrowType = 1,                        // borrow type flag (original note: "online")
borrowTitle="借款修改(线下)",           // edit dialog title
user_id = -1,                          // current user id (filled from hidden field)
title="借款新增(线下)";                 // add dialog title
// Grid column definitions for the borrow-document list.
// NOTE(review): export_name appears to reference a pre-formatted string field
// used for export — confirm against the grid export code.
var columns = [
{display: '单据编号', name: 'number', width: 300, show: true,
render: function(rowdata,index,data){
// render the document number as a link opening the detail dialog
return '<a href="#" onclick="detail('+rowdata.id+')">'+data+'</a>';
}
},
{display: '单据日期', name: 'create_date', width: 300, show: true, export_name:'create_date_str',
render: function(rowdata,index,data){
return new Date(data).format("yyyy-MM-dd");
}
},
{display: '单据状态', name: 'document_state', width: 300, show: true, export_name:'documents_state_str',
render: function(rowdata,index,data){
return rowdata.documents_state_str;
}
},
{display: '付款状态', name: 'pay_state', width: 300, show: true, export_name:'pay_state_str',
render: function(rowdata,index,data){
return rowdata.pay_state_str;
}
},
{display: '借款类型', name: 'type', width: 300, show: true, export_name:'type_str',
render: function(rowdata,index,data){
return rowdata.type_str;
}
},
{display: '所属公司', name: 'company', width: 300, show: true},
{display: '申请业务部门', name: 'dept', width: 300, show: true},
{display: '申请借款日期', name: 'borrow_date', width: 300, show: true,export_name:'borrow_date_str',
render: function(rowdata,index,data){
if(data==null){
return "";
}
return new Date(data).format("yyyy-MM-dd");
}
},
{display: '预计还款日期', name: 'refund_date', width: 300, show: true,export_name:'refund_date_str',
render: function(rowdata,index,data){
if(data==null){
return "";
}
return new Date(data).format("yyyy-MM-dd");
}
},
{display: '借款天数', name: 'borrow_day', width: 300, show: true},
{display: '申请借款金额', name: 'borrow_money', width: 300, show: true},
{display: '一审审批金额', name: 'first_examine_money', width: 300, show: true},
{display: '终审审批金额', name: 'last_examine_money', width: 300, show: true},
{display: '剩余未还款金额', name: 'residue_money', width: 150, show: true},
{display: '到期日未还款金额', name: 'expire_residue_money', width: 150, show: true},
{display: '实际划款金额', name: 'make_money', width: 150, show: true},
{display: '借款原因', name: 'borrow_cause', width: 300, show: true},
{display: '借款利率', name: 'borrow_interest', width: 300, show: true},
{display: '申请人', name: 'apply_user', width: 300, show: true},
{display: '制单人', name: 'create_user', width: 300, show: true}
];
var mainTab = frameElement.tab;          // tab object of the hosting frame
var tab_id = frameElement.id;            // id of this tab inside the frame
var mainData = frameElement.openerData;  // data handed over by the opener frame
// Page bootstrap: build the filter grid and wire up the toolbar buttons.
$(function () {
user_id = $("#user_id").val();
listGrid = $("#gridArea").ligerFilterGrid({
columns: columns,
url: rootPath + url + '/findByPage.shtml',
sortName: 'id',
sortOrder: 'desc',
originalColumns: columns, // keep a copy of the original column set
summaryUrl: rootPath + '/common/summary.shtml', // summary endpoint
summaryTable: 'fund_borrow_money', // summary table (DB table name, not java entity)
summaryShowRecordCount : true, // show record count in the summary row
summary: [ // summarised fields
{text:'合计借款金额',field:'borrow_money'}
],
customColumn : { // column-configuration plugin
listUrl : rootPath + '/fund_custom_column/list.shtml', // query saved custom columns
submitUrl: rootPath + '/fund_custom_column/submit.shtml', // save custom columns
clazz: 'FundBorrowMoney', // unique storage key for the custom columns
id: 'formManage', // plugin element id
dialogTitle: '表单配置', // dialog title
leftId: 'listBox-left', // left list id (has a default)
leftTitle: '隐藏的列', // left list title (has a default)
rightId: 'listBox-right', // right list id (has a default)
rightTitle: '显示的列', // right list title (has a default)
callback : function(){ // invoked after the column config is saved
mainTab.reload(tab_id);
}
},
seniorSearch: { // advanced search (independent of ligerGrid's built-in one)
id: 'seniorSearchButton', // advanced-search plugin id
searchSight: { // saved-search ("scene") configuration
id: 'search_sight', // scene plugin id
listUrl: rootPath + '/fund_search_sight/listJson.shtml', // scene list url
submitUrl: rootPath + '/fund_search_sight/addEntity.shtml', // scene save url
clazz: 'FundBorrowMoney', // unique storage key for scenes
callback: function(form){ // invoked after a scene is saved
form.reset();
mainTab.reload(tab_id);
}
}
}
});
$("#pageloading").hide();
$("#search").bind("keydown", function (event) {// run the quick search on Enter
//event.preventDefault();
if(event.keyCode == "13") {
search($(this).val());
}
});
// add button
$("#add").click("click", function () {
add();
});
// edit button
$("#edit").click("click", function () {
edit();
});
// delete button
$("#delete").click("click", function () {
del();
});
});
/**
 * Quick search: filter the grid on the borrow-money column and requery.
 * @param value raw text from the search box; a falsy value clears the filter
 */
function search(value) {
    var filter = { rules: [] };
    if (value) {
        filter.rules.push({
            field: 'borrow_money',
            op: 'equal',
            value: value,
            type: 'float'
        });
    }
    listGrid.custom_rules = filter;
    listGrid.searchData();
}
/**
 * Open the "add borrow document" dialog, handing the grid and the
 * opener's data through so the dialog can refresh the list on save.
 */
function add() {
    var dialogOptions = {
        title: title,
        url: rootPath + "/fund_borrow_begin/add.shtml?borrowType=" + borrowType,
        width: 800,
        height: 650,
        data: {
            grid: listGrid,
            mainData: mainData
        }
    };
    $.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for the single selected row.
 * Exactly one row must be selected; otherwise a hint is shown and
 * nothing happens.
 *
 * NOTE: the original guarded the dialog with a hard-coded
 * `exist_error = false` flag whose else-branch ("存在不支持单据,不能修改")
 * was unreachable; the dead branch has been removed.
 */
function edit() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: borrowTitle,
        url: rootPath + '/fund_borrow_begin/edit.shtml?id=' + selected[0].id,
        width: 800,
        height: 640,
        data: {
            grid: listGrid // pass the list grid to the dialog
        }
    });
}
/**
 * Open a detail dialog for the borrow document with the given id.
 * @param id primary key of the document
 */
function detail(id) {
    var options = {
        title: '详情',
        url: rootPath + '/fund_borrow_begin/detail.shtml?id=' + id,
        urlParms: {
            'borrowType': borrowType
        },
        width: 800,
        height: 640,
        data: {
            grid: listGrid
        }
    };
    $.ligerDialog.open(options);
}
/**
 * Delete all selected rows after user confirmation.
 *
 * NOTE: the original wrapped the body in `if(!exist_error)` with
 * `exist_error` hard-coded to false, so the else-branch
 * ("当前单据不支持此操作") was unreachable dead code; it has been removed.
 */
function del() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    // collect the ids of every selected row into a comma-separated list
    var ids = $.map(selected, function (item) {
        return item.id;
    });
    ids = ids.join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            var lastUrl = rootPath + url + 'delete.shtml?ids=' + ids;
            var result = CommnUtil.ajax(lastUrl, null, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
<file_sep>/src/main/webapp/js/system/dataprivilege/DataPrivilageSysParm.js
// Combobox data-source options used by the data-privilege rule editor.
var ruleDataOptions = {
width: 200,
url: "/role/getAll.shtml",
isMultiSelect: true, split: ','
};
var userDataOptions = {
width: 200,
url: "/user/getAll.shtml"
};
// NOTE(review): the three option sets below still point at demo ".ashx"
// endpoints from the original ligerui sample — confirm they are intended.
var companyDataOptions = {
width: 200,
url: "../handler/select.ashx?view=Employees&idfield=EmployeeID&textfield=Title"
};
var deptDataOptions = {
width: 200,
url: "../handler/select.ashx?view=CF_Department&idfield=DeptID&textfield=DeptName"
};
var OrgIDsDataOptions = {
width: 200,
url: "../handler/select.ashx?view=Suppliers&idfield=SupplierID&textfield=CompanyName"
};
// System parameters that can be chosen as filter fields ({current user} etc.);
// appended to every view's field list by getFields().
var SysParms = [
{ name: '{CurrentUserID}', display: '{当前用户}', type: 'array',
editor: { type: 'combobox', options: userDataOptions,valueField:"id",textField:"userName" }
},
{ name: '{CurrentRoleID}', display: '{当前角色}', type: 'array',
editor: { type: 'combobox', options: ruleDataOptions,valueField:"id",textField:"name" }
},
{ name: '{CurrentOrgIDs}', display: '{当前管理组织}', type: 'int',
editor: { type: '', options: null,valueField:"id",textField:"name" }
},
{ name: '{CurrentUserIDs}', display: '{当前管理用户}', type: 'int',
editor: { type: '', options: null,valueField:"id",textField:"name" }
},
];
/**
 * Build the filterable field list for a database view: the view's own
 * columns (each with its per-field editor, if any) followed by the global
 * system parameters. Unknown views yield only the system parameters.
 */
function getFields(view)
{
    var matched = null;
    for (var idx = 0; idx < DbViews.length; idx++)
    {
        if (DbViews[idx].name == view)
        {
            matched = DbViews[idx];
            break;
        }
    }
    if (!matched) return SysParms;
    var fields = [];
    $(matched.columns).each(function ()
    {
        fields.push({
            name: this.name,
            display: this.display,
            type: this.type,
            editor: getFieldEditor(view, this.name)
        });
    });
    $(SysParms).each(function ()
    {
        fields.push({
            name: this.name,
            display: this.display,
            type: this.type,
            editor: this.editor
        });
    });
    return fields;
}
// Per-view custom editor overrides: view name -> { field name -> editor config }.
var fieldEditors = {
    'Orders': {
        'ShipCity': {
            type: 'combobox',
            options: {
                width: 200,
                url: "../handler/select.ashx?view=Orders&idfield=ShipCity&textfield=ShipCity&distinct=true"
            }
        }
    },
    'Products': {
        'CategoryID': {
            type: 'combobox',
            options: {
                width: 200,
                url: "../handler/select.ashx?view=Categories&idfield=CategoryID&textfield=CategoryName",
                isMultiSelect: true, split: ','
            }
        }
    }
};
/**
 * Look up the custom editor for a view/field pair; null when none is defined.
 */
function getFieldEditor(view, field)
{
    var viewEditors = fieldEditors[view];
    if (viewEditors && viewEditors[field]) {
        return viewEditors[field];
    }
    return null;
}
// Login-history page: grid listing account name, login time and IP.
var grid = null;
$(function ()
{
grid = $("#gridArea").ligerGrid({
columns: [
{display: 'id', name: 'id', hide:'true',width: 100 } ,
{ display: '账号', name: 'accountName' ,width: 200},
{ display: '登入时间', name: 'loginTime',minWidth: 200,
render: function(rowdata,index,data) {
// format the raw login time for display (assumes a Date-parsable value)
return new Date(data).format("yyyy-MM-dd hh:mm:ss");
}},
{ display: '登入IP', name: 'loginIp' ,width: 100}
],
url: rootPath + '/userlogin/findByPage.shtml',
});
$("#pageloading").hide();
});
/**
 * Serialize the search form to JSON and requery the grid with it.
 */
function f_search()
{
    var params = $("#searchForm").serializeJson();
    grid.search(params);
}
<file_sep>/src/main/webapp/ligerui/components/ligerUnionFilter.js
/**
* jQuery ligerUI 1.3.3
*
* http://ligerui.com
*
* Author daomi 2015 [ <EMAIL> ]
*
*/
(function ($)
{
// jQuery plugin entry: create/run a UnionFilter on the matched elements.
$.fn.ligerUnionFilter = function ()
{
return $.ligerui.run.call(this, "ligerUnionFilter", arguments);
};
// Retrieve the manager instance previously attached to the element.
$.fn.ligerGetUnionFilterManager = function ()
{
return $.ligerui.run.call(this, "ligerGetUnionFilterManager", arguments);
};
// Default options: inherit from Filter, add union-specific settings.
$.ligerDefaults.UnionFilter = $.extend(true,{
showGroup : true,                 // render group add/delete buttons
widths:['180px','120px','180px'], // column widths: field / operator / value
texts:['字段','运算符', '值'],     // header captions
headAlign: 'center',              // header text alignment
atLeastOne:true,                  // always keep at least one rule row
},$.ligerDefaults.Filter);
// method-extension namespace for UnionFilter
$.ligerMethos.UnionFilter = $.ligerMethos.UnionFilter || {};
$.ligerDefaults.UnionFilterString = $.extend(true,{
},$.ligerDefaults.FilterString);
// UnionFilter control constructor: delegates to the base Filter constructor.
$.ligerui.controls.UnionFilter = function (element, options)
{
$.ligerui.controls.UnionFilter.base.constructor.call(this, element, options);
};
$.ligerui.controls.UnionFilter.ligerExtend($.ligerui.controls.Filter, {
// Type name used by the ligerui framework for reflection/debugging.
__getType: function ()
{
return '$.ligerui.controls.UnionFilter'
},
// Prefix used when auto-generating element ids for this control.
__idPrev: function ()
{
return 'UnionFilter';
},
// Render the control; with atLeastOne set, seed the first empty rule row.
_render: function ()
{
var g = this, p = this.options;
$.ligerui.controls.UnionFilter.base._render.call(this);
if(p.atLeastOne){
g.addRule($(g.element.lastChild));
}
},
// Append one rule row (field / operator / value) to a group.
// parm [jgroup] jQuery object of the group table; defaults to this.group
addRule: function (jgroup)
{
var g = this, p = this.options;
jgroup = jgroup || g.group;
var lastrow = $(">tbody:first > tr:last", jgroup);
var rulerow = $(g._bulidRuleRowHtml());
lastrow.before(rulerow);
if (p.fields.length)
{
// if the first (default-selected) field defines a custom editor, attach it
g.appendEditor(rulerow, p.fields[0]);
}
// event: the field <select> of this row changed
$("select.fieldsel", rulerow).bind('change', function ()
{
var jopsel = $(this).parent().next().find("select:first");
var fieldName = $(this).val();
if (!fieldName) return;
var field = g.getField(fieldName);
// rebuild the operator list when the field type changed
var fieldType = field.type || "string";
var oldFieldtype = rulerow.attr("fieldtype");
if (fieldType != oldFieldtype)
{
jopsel.html(g._bulidOpSelectOptionsHtml(fieldType,field.operator ));
rulerow.attr("fieldtype", fieldType);
}
// editor of the newly selected field
var editorType = null;
// editor of the previously selected field
var oldEditorType = rulerow.attr("editortype");
if (g.enabledEditor(field)) editorType = field.editor.type;
if (oldEditorType)
{
// tear down the previous custom editor, if any
g.removeEditor(rulerow);
}
if (editorType)
{
// the newly selected field defines a custom editor
g.appendEditor(rulerow, field);
} else
{
rulerow.removeAttr("editortype").removeAttr("editorid");
// fall back to a plain styled text input for the value cell
$("td.l-filter-value:first", rulerow).html('<input type="text" class="valtxt form-control l-text" />');
}
});
return rulerow;
},
// Collect the filter data of a group, dropping rules with an empty value.
getData: function (group)
{
var groupData = $.ligerui.controls.UnionFilter.base.getData.call(this,group);
// keep only rules whose value is actually set
groupData.rules && (groupData.rules = groupData.rules.filter(function(item){
return item.value && item.value != null && item.value != ""
}));
// multiple rules in one group are always combined with AND
if(groupData.rules && groupData.rules.length > 1){
groupData.op = "and";
}
return groupData;
},
// Remove one rule row (delegates to the base Filter implementation).
deleteRule: function (rulerow)
{
var g = this;
$.ligerui.controls.UnionFilter.base.deleteRule.call(this,rulerow);
},
// Attach a custom value editor to a rule row. (Original comment: this
// fixes a bug in the base plugin — each editor instance gets a unique
// field name so multiple editors of the same field don't collide.)
appendEditor: function (rulerow, field)
{
var g = this, p = this.options;
if (g.enabledEditor(field))
{
var container = $("td.l-filter-value:first", rulerow).html("");
var editor = p.editors[field.editor.type];
var editorTag = ++g.editorCounter;
var editParm = {
filter: g
};
editParm.field = $.extend(true, {}, field);
// suffix the name with a per-instance tag to keep editor names unique
editParm.field.name = field.name + "_" + editorTag;
g.editors[editorTag] = editor.create.call(this, container, editParm.field);
rulerow.attr("editortype", field.editor.type).attr("editorid", editorTag);
}
},
//获取一个分组的html
_bulidGroupTableHtml: function (altering, allowDelete)
{
var g = this, p = this.options;
var tableHtmlArr = [];
tableHtmlArr.push('<table cellpadding="0" cellspacing="10" border="0" class="l-filter-group" style="border-spacing: 10px;border-collapse:separate "> ');
if (altering)
tableHtmlArr.push(' l-filter-group-alt');
tableHtmlArr.push('<thead>');
tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[0]+'">'+p.texts[0]+'</th>');
tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[1]+'">'+p.texts[1]+'</th>');
tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[2]+'">'+p.texts[2]+'</th>');
tableHtmlArr.push('</thead>');
if (p.atLeastOne){
tableHtmlArr.push('<tbody class="at-least-one">');
}
else{
tableHtmlArr.push('<tbody>');
}
tableHtmlArr.push('<tr class="l-filter-rowlast"><td class="l-filter-rowlastcell" align="left" colSpan="4">');
//and or
/* tableHtmlArr.push('<select class="groupopsel">');
tableHtmlArr.push('<option value="and">' + p.strings['and'] + '</option>');
tableHtmlArr.push('<option value="or">' + p.strings['or'] + '</option>');
tableHtmlArr.push('</select>');*/
//add group
if(p.showGroup){
tableHtmlArr.push('<input type="button" value="' + p.strings['addgroup'] + '" class="addgroup">');
}
//add rule
tableHtmlArr.push('<div class="l-icon-add addrule" style="width:16px ;height: 16px;cursor: pointer;"></div>');
if(p.showGroup && allowDelete){
tableHtmlArr.push('<input type="button" value="' + p.strings['deletegroup'] + '" class="deletegroup">');
}
tableHtmlArr.push('</td></tr>');
tableHtmlArr.push('</tbody></table>');
return tableHtmlArr.join('');
},
//获取字段值规则的html
_bulidRuleRowHtml: function (fields)
{
var g = this, p = this.options;
fields = fields || p.fields;
var rowHtmlArr = [];
var fieldType = fields && fields.length && fields[0].type ? fields[0].type : "string";
rowHtmlArr.push('<tr fieldtype="' + fieldType + '" class="l-filter-column"><td class="l-filter-column">');
rowHtmlArr.push('<select class="fieldsel form-control" style="width:"' +p.widths[0]+ '>');
for (var i = 0, l = fields.length; i < l; i++)
{
var field = fields[i];
rowHtmlArr.push('<option value="' + field.name + '"');
if (i == 0) rowHtmlArr.push(" selected ");
rowHtmlArr.push('>');
rowHtmlArr.push(field.display);
rowHtmlArr.push('</option>');
}
rowHtmlArr.push("</select>");
rowHtmlArr.push('</td>');
rowHtmlArr.push('<td class="l-filter-op">');
rowHtmlArr.push('<select class="opsel form-control" style="width:"' +p.widths[1]+ '>');
rowHtmlArr.push(g._bulidOpSelectOptionsHtml(fieldType, fields && fields.length ? fields[0].operator : null));
rowHtmlArr.push('</select>');
rowHtmlArr.push('</td>');
rowHtmlArr.push('<td class="l-filter-value">');
rowHtmlArr.push('<input type="text" class="valtxt form-control" style="width:"' +p.widths[2]+ '>');
rowHtmlArr.push('</td>');
rowHtmlArr.push('<td>');
rowHtmlArr.push('<div class="l-icon-cross deleterole"></div>');
rowHtmlArr.push('</td>');
rowHtmlArr.push('</tr>');
return rowHtmlArr.join('');
},
/**
 * Custom methods
 */
// Reset the filter. With atLeastOne, keep the first rule row and clear its
// inputs; otherwise remove every rule row.
// NOTE(review): the selectors are document-global, so reset() touches every
// UnionFilter on the page — confirm single-instance usage.
reset: function(){
var p = this.options;
if(p.atLeastOne){
$("table.l-filter-group tbody tr.l-filter-column").not(":first-child").remove();
$("table.l-filter-group tbody tr.l-filter-column:first-child td input").val("");
}
else{
$("table.l-filter-group tbody tr.l-filter-column").remove();
}
},
});
})(jQuery);<file_sep>/src/main/java/cn/zllog/beanEntity/OrgUser.java
package cn.zllog.beanEntity;
/**
 * Lightweight view bean linking a user to one of his organisations.
 * Carries only the two ids and the organisation's display name.
 */
public class OrgUser implements java.io.Serializable {
    /** Explicit serial version id — the class is Serializable and lacked one. */
    private static final long serialVersionUID = 1L;

    /** Primary key of the user. */
    private Integer userId;
    /** Primary key of the organisation. */
    private Integer orgId;
    /** Display name of the organisation. */
    private String orgName;

    // Property accessors
    public Integer getUserId() {
        return this.userId;
    }

    public void setUserId(Integer userId) {
        this.userId = userId;
    }

    public Integer getOrgId() {
        return this.orgId;
    }

    public void setOrgId(Integer orgId) {
        this.orgId = orgId;
    }

    public String getOrgName() {
        return this.orgName;
    }

    public void setOrgName(String orgName) {
        this.orgName = orgName;
    }
}
<file_sep>/src/main/webapp/ligerui/ligerui-fix.js
/**
* 解决liger一些原生bug
* 扩展liger不满足的业务需求
**/
// Override liger's native run(): adds support for object-typed options and
// centralises plugin instantiation / manager dispatch.
liger.run = function(plugin, args, ext){
if (!plugin) return;
ext = $.extend({
defaultsNamespace: 'ligerDefaults',
methodsNamespace: 'ligerMethods',
controlNamespace: 'controls',
idAttrName: 'ligeruiid',
isStatic: false,
hasElement: true, // whether the control owns an element (helpers like drag/resizable do not)
propertyToElemnt: null // option key under which the element is handed to the control
}, ext || {});
// normalise the plugin name: strip the "ligerGet"/"liger" prefixes
plugin = plugin.replace(/^ligerGet/, '');
plugin = plugin.replace(/^liger/, '');
// static invocation (no jQuery context): build the control from merged defaults
if (this == null || this == window || ext.isStatic)
{
if (!liger.plugins[plugin])
{
liger.plugins[plugin] = {
fn: $[liger.pluginPrev + plugin],
isStatic: true
};
}
return new $.ligerui[ext.controlNamespace][plugin]($.extend({}, $[ext.defaultsNamespace][plugin] || {}, $[ext.defaultsNamespace][plugin + 'String'] || {}, args.length > 0 ? args[0] : {}));
}
if (!liger.plugins[plugin])
{
liger.plugins[plugin] = {
fn: $.fn[liger.pluginPrev + plugin],
isStatic: false
};
}
// "...Manager" calls only fetch the already-attached manager
if (/Manager$/.test(plugin)) return liger.get(this, ext.idAttrName);
this.each(function ()
{
if (this[ext.idAttrName] || $(this).attr(ext.idAttrName))
{
var manager = liger.get(this[ext.idAttrName] || $(this).attr(ext.idAttrName));
if (manager && args.length > 0) manager.set(args[0]);
// already instantiated on this element
return;
}
if (args.length >= 1 && typeof args[0] == 'string') return;
// any non-string first argument triggers instantiation
var options = args.length > 0 ? args[0] : null;
var p = $.extend(true,{}, $[ext.defaultsNamespace][plugin], $[ext.defaultsNamespace][plugin + 'String'], options);
if (ext.propertyToElemnt) p[ext.propertyToElemnt] = this;
if (ext.hasElement)
{
new $.ligerui[ext.controlNamespace][plugin](this, p);
}
else
{
new $.ligerui[ext.controlNamespace][plugin](p);
}
});
if (this.length == 0) return null;
if (args.length == 0) return liger.get(this, ext.idAttrName);
if (typeof args[0] == 'object') return liger.get(this, ext.idAttrName);
// string first argument: "option" getter/setter or a manager method call
if (typeof args[0] == 'string')
{
var manager = liger.get(this, ext.idAttrName);
if (manager == null) return;
if (args[0] == "option")
{
if (args.length == 2)
return manager.get(args[1]); //manager get
else if (args.length >= 3)
return manager.set(args[1], args[2]); //manager set
}
else
{
var method = args[0];
if (!manager[method]) return; // no such method on the manager
var parms = Array.apply(null, args);
parms.shift();
return manager[method].apply(manager, parms); //manager method
}
}
return null;
}
//覆盖combobox的_setValue方法,解决多选初始化复选框按钮未选中bug
// Override ComboBox._setValue to fix a bug where, in multi-select mode, the
// styled checkboxes were not rendered as checked when the value was set
// during initialisation.
liger.methods.ComboBox._setValue = function(value, text){
    var g = this, p = this.options;
    var isInit = false, isTriggerEvent = true;
    // The magic text "init" marks a call coming from initialisation.
    if (text == "init")
    {
        text = null;
        isInit = true;
        isTriggerEvent = p.initIsTriggerEvent ? true : false;
    }
    if (p.isTextBoxMode)
    {
        text = value;
    } else
    {
        text = text || g.findTextByValue(value);
    }
    if (p.tree)
    {
        // Refresh the tree's checked state (deferred so the tree has rendered).
        setTimeout(function ()
        {
            if (p.setTextBySource)
            {
                // Refresh the tree selection and update the textbox from it.
                g.selectValueByTree(value);
            } else
            {
                g.treeSelectInit(value);
            }
        }, 100);
    }
    else if (!p.isMultiSelect)
    {
        // Single-select: highlight the matching row, clear the rest.
        g._changeValue(value, text, isTriggerEvent);
        $("tr[value='" + value + "'] td", g.selectBox).addClass("l-selected");
        $("tr[value!='" + value + "'] td", g.selectBox).removeClass("l-selected");
    }
    else
    {
        g._changeValue(value, text, isTriggerEvent);
        if (value != null)
        {
            // Multi-select: the value is a p.split-joined list of row values.
            var targetdata = value.toString().split(p.split);
            $("table.l-table-checkbox :checkbox", g.selectBox).each(function () { this.checked = false; });
            for (var i = 0; i < targetdata.length; i++)
            {
                $("table.l-table-checkbox tr[value=" + targetdata[i] + "] :checkbox", g.selectBox).each(function () {
                    this.checked = true;
                    // The fix: also mark the styled checkbox anchor as checked for initial data.
                    $("table.l-table-checkbox tr[value=" + targetdata[i] + "] a").addClass("l-checkbox-checked");
                });
            }
        }
    }
    if (p.selectBoxRenderUpdate)
    {
        p.selectBoxRenderUpdate.call(g, {
            selectBox: g.selectBox,
            value: value,
            text: text
        });
    }
}
// --- Page state for the influence-factor list ---------------------------
var listGrid, // the ligerGrid list instance
    url_prefix = "/fund_influence/"; // base url prefix for this module's actions
var mainTab = frameElement.tab; // the enclosing frame's tab object
var tab_id = frameElement.id;
var mainData = frameElement.openerData; // shared data handed over by the opener frame
var current_type = []; // currently selected business-order type: [id, name]
// Grid column definitions for the influence-factor list.
var columns = [
    {display: '编号', name: 'id', width: 300, show: true},
    {display: '影响因素名称', name: 'name', width: 300, show: true},
    {display: '对应基础档案类型', name: 'fundArchive.name', width: 300, show: true},
    {display: '创建人', name: 'createPerson.name', width: 300, show: true},
    {display: '创建日期', name: 'createDate', width: 300, show: true,
        // Format the timestamp as yyyy-MM-dd (Date.prototype.format is a project extension).
        render: function(rowdata,index,data){
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '修改人', name: 'modifyPerson.name', width: 300, show: true},
    {display: '修改日期', name: 'modifyDate', width: 300, show: true,
        render: function(rowdata,index,data){
            return new Date(data).format("yyyy-MM-dd");
        }
    },
];
// Page bootstrap: build the layout, wire the left-hand type links, create
// the list grid filtered by the initially active type, and bind the toolbar.
$(function () {
    $("#layout_bank").ligerLayout({ leftWidth: 200 ,allowLeftCollapse:false,allowRightCollapse:false,topHeight:74});
    $(".l-link").hover(function () {
        $(this).addClass("l-link-over");
    }, function () {
        $(this).removeClass("l-link-over");
    });
    $(".l-link").bind('click',function(){
        var isActive = $(this).hasClass("l-link-active");
        $(".l-link").removeClass("l-link-active");
        $(this).addClass("l-link-active");
        if(!isActive){
            // The "data" attribute carries "id,name" of the business-order type.
            current_type = $(this).attr("data").split(",");
            switchType();
        }
    });
    // Load the first (initially active) type by default.
    var data = $("#left a.l-link-active").attr("data");
    current_type = data.split(",");
    var rules = [];
    if(current_type && current_type.length == 2){
        rules.push({
            field: 'fundVoucherBusinessOrder.id', op: 'equal', value: current_type[0], type:'int'
        })
    }
    var default_rules = {};
    default_rules.rules = rules;
    default_rules.op = 'and';
    listGrid = $("#gridArea").ligerGrid({
        columns: columns,
        url: rootPath + url_prefix+'/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
        parms :{where: JSON2.stringify(default_rules)}
    });
    $("#pageloading").hide();
    // Toolbar buttons. NOTE(review): .click("click", fn) works only because
    // the string is treated as eventData; .click(fn) is the intended form.
    $("#add").click("click", function () { // add
        add();
    });
    $("#edit").click("click", function () { // edit
        edit();
    });
    $("#delete").click("click", function () { // delete
        del();
    });
});
/**
* 新增
*/
/**
 * Open the "add" dialog for a new influence factor, passing the list grid,
 * the shared mainData and the currently selected business-order type so the
 * popup can refresh the list after saving.
 */
function add() {
    $.ligerDialog.open({
        // Fix: the title used to read '新增银行' ("Add Bank"), copy-pasted from
        // the bank module; corrected to match this page's entity.
        title: '新增影响因素',
        url: rootPath + url_prefix + '/add.shtml',
        width: 1000,
        height: 300,
        data: {
            grid: listGrid, // list grid handed to the dialog so it can reload it
            mainData: mainData,
            current_type: current_type
        }
    });
}
/**
* 修改
*/
/**
 * Open the edit dialog for the single selected row. Shows a message unless
 * exactly one row is selected.
 */
function edit() {
    var rows = listGrid.getSelectedRows();
    if (!rows.length) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (rows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    var dialogOptions = {
        title: '编辑',
        url: rootPath + url_prefix + '/edit.shtml?id=' + rows[0].id,
        width: 1000,
        height: 300,
        data: { grid: listGrid } // hand the list grid to the popup
    };
    $.ligerDialog.open(dialogOptions);
}
/**
* 删除
*/
/**
 * Delete the selected rows on the server after user confirmation, then
 * reload the grid.
 */
function del() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    // Collect the ids of every selected row into a comma-joined list.
    var idList = [];
    for (var i = 0; i < rows.length; i++) {
        idList.push(rows[i].id);
    }
    var ids = idList.join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes != true) return;
        var url = rootPath + url_prefix + '/delete.shtml?ids=' + ids;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result == "success") {
            listGrid.reloadAll();
            layer.msg('删除成功');
        } else {
            layer.msg('删除失败');
        }
    });
}
/**
* 左侧切换
*/
/**
 * Re-filter the list grid for the business-order type picked in the left
 * panel (current_type = [id, name]).
 */
function switchType() {
    var rules = [];
    var hasType = current_type && current_type.length == 2;
    if (hasType) {
        rules.push({
            field: 'fundVoucherBusinessOrder.id', op: 'equal', value: current_type[0], type: 'int'
        });
    }
    var default_rules = {};
    default_rules.rules = rules;
    default_rules.op = 'and';
    listGrid.search({ where: JSON2.stringify(default_rules) });
}
<file_sep>/src/main/webapp/ligerui/components/ligerColumnFilter.js
/**
* jQuery ligerUI 1.3.3
*
* http://ligerui.com
*
* Author liubb 2018 [ <EMAIL> ]
*
*/
// ColumnFilter: a single-column filter control derived from the stock liger
// Filter — the field selector is disabled, one rule row is added by default,
// and the operator sets are restricted per data type.
(function ($)
{
    $.fn.ligerColumnFilter = function ()
    {
        return $.ligerui.run.call(this, "ligerColumnFilter", arguments);
    };
    $.fn.ligerGetColumnFilterManager = function ()
    {
        return $.ligerui.run.call(this, "ligerGetColumnFilterManager", arguments);
    };
    // Defaults: widths/captions of the three cells, header alignment, and an
    // onFilter callback; everything else is inherited from Filter.
    $.ligerDefaults.ColumnFilter = $.extend(true,{
        widths: ['180', '120', '180'],
        texts:['字段','运算符', '值'],
        headAlign: 'center',
        onFilter:null,
    },$.ligerDefaults.Filter);
    // Interface method extensions.
    // NOTE(review): "ligerMethos" looks like a typo but presumably mirrors the
    // liger core's spelling — confirm against the library before changing.
    $.ligerMethos.ColumnFilter = $.ligerMethos.ColumnFilter || {};
    // Operator sets per column data type.
    $.ligerDefaults.ColumnFilter.operators['string'] =
    $.ligerDefaults.ColumnFilter.operators['text'] =
    ["equal", "notequal", "startwith", "endwith", "like", "in", "notin"];
    $.ligerDefaults.ColumnFilter.operators['number'] =
    $.ligerDefaults.ColumnFilter.operators['int'] =
    $.ligerDefaults.ColumnFilter.operators['float'] =
    ["equal", "notequal", "greater", "greaterorequal", "less", "lessorequal", "in", "notin"];
    /*$.ligerDefaults.ColumnFilter.operators['date'] = ["in"];
    $.ligerDefaults.ColumnFilter.operators['combobox'] =
    ["equal", "notequal"];*/
    $.ligerDefaults.ColumnFilterString = $.extend(true,{
    },$.ligerDefaults.FilterString);
    // The filter control itself.
    $.ligerui.controls.ColumnFilter = function (element, options)
    {
        $.ligerui.controls.ColumnFilter.base.constructor.call(this, element, options);
    };
    $.ligerui.controls.ColumnFilter.ligerExtend($.ligerui.controls.Filter, {
        __getType: function ()
        {
            return 'ColumnFilter'
        },
        __idPrev: function ()
        {
            return 'ColumnFilter';
        },
        /** overridden methods **/
        _render: function ()
        {
            var g = this, p = this.options;
            $.ligerui.controls.ColumnFilter.base._render.call(this);
            // Add one rule row by default.
            g.addRule($(g.element.lastChild));
        },
        // Add one condition row.
        // parm [jgroup] jQuery object of the group to add into
        addRule: function (jgroup)
        {
            var g = this, p = this.options;
            var rulerow = $.ligerui.controls.ColumnFilter.base.addRule.call(this,jgroup);
            if($("td.l-filter-value:first", rulerow)){
                var input = $("input",$("td.l-filter-value:first", rulerow));
                //if(input) input.addClass("form-control").css({"width": p.widths[2]+'px'});
            }
            return rulerow;
        },
        // Reset all editors and form elements.
        reset: function(){
            var g = this;
            var p = this.options;
            var group = g.group;
            // Reset each row's editor individually — some editors (dates)
            // need their own clear() call and are not plain form inputs.
            $("> tbody > tr", group).each(function (i, row){
                var editorid = $(row).attr("editorid");
                var editortype = $(row).attr("editortype");
                var editor = g.editors[editorid];
                if (editortype == 'date'){
                    var date_editor = liger.get(editor.attr("ligeruiid"));
                    date_editor.clear();
                }
            });
            // Clear the remaining plain form elements.
            g.element.reset()
        },
        // Collect the group's rule data.
        getData: function (group)
        {
            var g = this, p = this.options;
            var groupData = $.ligerui.controls.ColumnFilter.base.getData.call(this,group || g.group);
            // Drop rules whose value is empty — they carry no filter meaning.
            groupData.rules && (groupData.rules = groupData.rules.filter(function(item){
                return item.value && item.value != null && item.value != ""
            }));
            return groupData;
        },
        // Attach an editor input to a rule row.
        appendEditor: function (rulerow, field)
        {
            var g = this, p = this.options;
            if (g.enabledEditor(field))
            {
                var container = $("td.l-filter-value:first", rulerow).html("");
                var editor = p.editors[field.editor.type];
                var editorTag = ++g.editorCounter;
                var editParm = {
                    filter: g
                };
                editParm.field = $.extend(true, {}, field);
                editParm.field.name = field.name + "_" + editorTag;
                g.editors[editorTag] = editor.create.call(this, container, editParm.field); // fixes a bug in the default Filter
                rulerow.attr("editortype", field.editor.type).attr("editorid", editorTag);
            }
        },
        // Build the html of one group table.
        _bulidGroupTableHtml: function (altering)
        {
            var g = this, p = this.options;
            var tableHtmlArr = [];
            tableHtmlArr.push('<table cellpadding="0" cellspacing="10" border="0" class="l-filter-group" style="border-spacing: 10px;border-collapse:separate "> ');
            // NOTE(review): this class is pushed AFTER the '>' that closes the
            // <table> tag, so it ends up as text, not a class; appears inherited
            // from the base Filter — confirm before changing.
            if (altering)
                tableHtmlArr.push(' l-filter-group-alt');
            tableHtmlArr.push('<thead>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[0]+'px;">'+p.texts[0]+'</th>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[1]+'px;">'+p.texts[1]+'</th>');
            tableHtmlArr.push('<th style="text-align:'+p.headAlign+';width:'+p.widths[2]+'px;">'+p.texts[2]+'</th>');
            tableHtmlArr.push('</thead>');
            tableHtmlArr.push('<tbody class="at-least-one">');
            tableHtmlArr.push('<tr class="l-filter-rowlast"><td class="l-filter-rowlastcell" align="left" colSpan="4">');
            // and/or selector — intentionally disabled for the column filter
            /* tableHtmlArr.push('<select class="groupopsel">');
            tableHtmlArr.push('<option value="and">' + p.strings['and'] + '</option>');
            tableHtmlArr.push('<option value="or">' + p.strings['or'] + '</option>');
            tableHtmlArr.push('</select>');*/
            //add group
            //tableHtmlArr.push('<input type="button" value="' + p.strings['addgroup'] + '" class="addgroup">');
            /*if(p.enableAdd){
                //add rule
                tableHtmlArr.push('<div class="l-icon-add addrule" style="width:16px ;height: 16px;cursor: pointer;"></div>');
            }*/
            //tableHtmlArr.push('<input type="button" value="' + p.strings['deletegroup'] + '" class="deletegroup">');
            tableHtmlArr.push('</td></tr>');
            tableHtmlArr.push('</tbody></table>');
            return tableHtmlArr.join('');
        },
        // Build the html of one field/operator/value rule row.
        _bulidRuleRowHtml: function (fields)
        {
            var g = this, p = this.options;
            fields = fields || p.fields;
            var rowHtmlArr = [];
            var fieldType = fields && fields.length && fields[0].type ? fields[0].type : "string";
            rowHtmlArr.push('<tr fieldtype="' + fieldType + '"><td class="l-filter-column">');
            // Field selector is disabled: a column filter targets one column only.
            rowHtmlArr.push('<select disabled class="fieldsel form-control" style="width:'+p.widths[0]+'px;">');
            for (var i = 0, l = fields.length; i < l; i++)
            {
                var field = fields[i];
                rowHtmlArr.push('<option value="' + field.name + '"');
                if (i == 0) rowHtmlArr.push(" selected ");
                rowHtmlArr.push('>');
                rowHtmlArr.push(field.display);
                rowHtmlArr.push('</option>');
            }
            rowHtmlArr.push("</select>");
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td class="l-filter-op">');
            rowHtmlArr.push('<select class="opsel form-control" style="width:'+p.widths[1]+'px;">');
            rowHtmlArr.push(g._bulidOpSelectOptionsHtml(fieldType, fields && fields.length ? fields[0].operator : null));
            rowHtmlArr.push('</select>');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td class="l-filter-value">');
            rowHtmlArr.push('<input type="text" class="valtxt"'); //style="width:'+p.widths[2]+'px;">
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('<td>');
            rowHtmlArr.push('<div class="l-icon-cross deleterole"></div>');
            rowHtmlArr.push('</td>');
            rowHtmlArr.push('</tr>');
            return rowHtmlArr.join('');
        },
    });
})(jQuery);
// --- Page state for the quota report collection page --------------------
var pageii = null;
var grid = null; // the ligerGrid instance showing the collected report
var tree_data = null;
var fundQuotaReport=null;
var columns =null; // initial (pre-search) column layout
var flag=false; // whether the one-off grid-height workaround has run (see setLigerGridWidth)
// Parameters of the last successful search, reused by exportExcel().
var search_organizationId = null;
var search_sceneId = null;
var search_start_time = null;
var search_end_time = null;
// Page bootstrap: build the tree grid, bind the toolbar, load the scene
// combobox and (re)configure the date pickers for the scene's cycle type.
$(function() {
    columns = [{
        display: "id",
        name: "id",
        hide: true
    }, {
        display: "指标编码",
        name: "orgName",
        id: "orgName",
        align: 'left'
    }];
    columns.push({
        display: "指标名称",
        name: "XXX"
    });
    grid = $("#gridArea").ligerGrid({
        columns: columns,
        data: "",
        usePager: false,
        tree: {
            columnId: 'orgName'
        },
        autoCheckChildren: false,
        checkbox: false
    });
    $("#pageloading").hide();
    $("#search").click("click", function () { // bind the search button
        search();
    });
    $("#reset").click("click", function () { // bind the reset button
        reset();
    });
    $("#exportExcel").click("click", function () { // bind the export button
        exportExcel();
    })
    // Load the enabled scenes for the scene combobox.
    var comboBox_scene_data = "";
    var scene_url = rootPath + '/quota_report_collect/findFundQuotaSceneList.shtml';
    var scene_data = CommnUtil.ajax(scene_url, null, "json");
    if (scene_data.records.length > 0) {
        comboBox_scene_data = scene_data.records;
    } else {
        layer.msg("未查到启用的场景,请先设置场景!");
    }
    $("#scene").ligerComboBox({
        width: 200,
        data: comboBox_scene_data,
        initIsTriggerEvent: false,
        textField: 'name',
        valueField: 'id',
        valueFieldID: 'sceneId',
        // When a scene is picked: reload the organisations assigned to it and
        // reconfigure the date pickers for the scene's reporting cycle.
        onSelected: function (value, text) {
            var comboBox_org_data="";
            var org_url = rootPath + '/quota_report_collect/findLyOrganizationListByScene.shtml?sceneId='+value;
            var comboBox_org= CommnUtil.ajax(org_url, null,"json");
            if (comboBox_org.records.length>0) {
                comboBox_org_data = comboBox_org.records;
            } else {
                layer.msg("该场景暂未分配有效的组织,或者当前登录人员没有分配公司,请选择其他场景!");
                return;
            }
            $("#organization").val("");
            $("#organizationId").val("");
            $("#organization").ligerComboBox({data:comboBox_org_data});
            var oneScene_url = rootPath + '/quota_report_collect/findFundQuotaScene.shtml?id='+value;
            var one_data = CommnUtil.ajax(oneScene_url, null,"json");
            $("#date_type").val(one_data.records.cycleType);
            // cycleType: 0=day 1=month 2=quarter 3=year
            $("#select_time").val("");
            $("#start_time").val("");
            $("#end_time").val("");
            if(one_data.records.cycleType==0){
                // Daily cycle: a single month picker (#select_time).
                $('#start_time').datetimepicker('remove');
                $('#end_time').datetimepicker('remove');
                $('#two_time').hide();
                $('#one_time').show();
                $('#start_time').val("");
                $('#end_time').val("");
                $('#select_time').datetimepicker({
                    endDate:new Date(),
                    startView:3,
                    minView:3,
                    maxView:3,
                    language: 'zh-CN',
                    format: 'yyyy-mm',
                    autoclose:true
                });
            }else if(one_data.records.cycleType==1){
                // Monthly cycle: a from/to pair of month pickers; picking a
                // start date constrains the end picker's minimum.
                $('#start_time').datetimepicker('remove');
                $('#end_time').datetimepicker('remove');
                $('#one_time').hide();
                $('#two_time').show();
                $('#select_time').val("");
                $('#start_time').datetimepicker({
                    endDate:new Date(),
                    startView:3,
                    minView:3,
                    maxView:3,
                    language: 'zh-CN',
                    format: 'yyyy-mm',
                    autoclose:true
                }).on('changeDate', function(ev){
                    var startDate = $('#start_time').val();
                    $("#end_time").datetimepicker('setStartDate',startDate);
                });
                $('#end_time').datetimepicker({
                    endDate:new Date(),
                    startView:3,
                    minView:3,
                    maxView:3,
                    language: 'zh-CN',
                    format: 'yyyy-mm',
                    autoclose:true
                });
            }
            else if(one_data.records.cycleType==2){
                // Quarterly cycle. NOTE(review): this branch is currently
                // identical to the monthly (cycleType==1) branch.
                $('#start_time').datetimepicker('remove');
                $('#end_time').datetimepicker('remove');
                $('#one_time').hide();
                $('#two_time').show();
                $('#select_time').val("");
                $('#start_time').datetimepicker({
                    endDate:new Date(),
                    startView:3,
                    minView:3,
                    maxView:3,
                    language: 'zh-CN',
                    format: 'yyyy-mm',
                    autoclose:true
                }).on('changeDate', function(ev){
                    var startDate = $('#start_time').val();
                    $("#end_time").datetimepicker('setStartDate',startDate);
                });
                $('#end_time').datetimepicker({
                    endDate:new Date(),
                    startView:3,
                    minView:3,
                    maxView:3,
                    language: 'zh-CN',
                    format: 'yyyy-mm',
                    autoclose:true
                });
            }
            else if(one_data.records.cycleType==3){
                // Yearly cycle: the same from/to pair, but year-level pickers.
                $('#start_time').datetimepicker('remove');
                $('#end_time').datetimepicker('remove');
                $('#one_time').hide();
                $('#two_time').show();
                $('#select_time').val("");
                $('#start_time').datetimepicker({
                    endDate:new Date(),
                    startView:4,
                    minView:4,
                    maxView:4,
                    language: 'zh-CN',
                    format: 'yyyy',
                    autoclose:true
                }).on('changeDate', function(ev){
                    var startDate = $('#start_time').val();
                    $("#end_time").datetimepicker('setStartDate',startDate);
                });
                $('#end_time').datetimepicker({
                    endDate:new Date(),
                    startView:4,
                    minView:4,
                    maxView:4,
                    language: 'zh-CN',
                    format: 'yyyy',
                    autoclose:true
                });
            }
        }
    });
    // Multi-select organisation combobox (filled once a scene is picked).
    $("#organization").ligerComboBox({
        width : 250,
        selectBoxWidth: 230,
        selectBoxHeight: 300,
        valueField: 'id',
        textField: 'orgName',
        valueFieldID:'organizationId',
        isShowCheckBox: true,
        isMultiSelect: true
    });
});
/**
 * Read the search form (scene, organisations, date range), validate it,
 * then fetch the collected report and rebuild the grid columns and data.
 */
function search(){
    var organization= $("#organization").val();
    var organizationId= $("#organizationId").val();
    var scene= $("#scene").val();
    var sceneId= $("#sceneId").val();
    var start_time= $("#start_time").val();
    var end_time= $("#end_time").val();
    var select_time= $("#select_time").val();
    // The multi-select combobox joins ids with ';' — convert to ','.
    organizationId = organizationId.replace(new RegExp(";","g"),",");
    if(select_time.length!=0){
        start_time=select_time;
        // Single-picker mode: flag the end field with the marker "day".
        end_time="day";
    }
    if(scene.length==0){
        layer.msg("请选择场景!");
        return;
    }
    if(organization.length==0){
        layer.msg("请选择组织!");
        return;
    }
    if(start_time.length==0&&end_time.length==0){
        layer.msg("请选择日期!");
        return;
    }
    var date = {organizationId:organizationId,sceneId:sceneId,start_time:start_time,end_time:end_time};
    // Remember the last search so exportExcel() can reuse it.
    search_organizationId=organizationId;
    search_sceneId=sceneId;
    search_start_time= start_time;
    search_end_time = end_time;
    var url = rootPath + '/quota_report_collect/findFundQuotaCollect.shtml?organizationId='+organizationId+"&sceneId="+sceneId+"&start_time="+start_time+"&end_time="+end_time;
    var data = CommnUtil.ajax(url, null,"json");
    // The server returns both the column layout ("colum") and the rows.
    grid.set({columns:data.colum});
    grid.set({data:data,width:'auto',height:"auto",scroll: true});
    // Scrollbar workaround — temporary fix; revisit the ligerGrid source later.
    if(!flag){
        setTimeout("setLigerGridWidth()",50);
    }
}
/**
 * One-off workaround: grow the rendered grid by 30px so the horizontal
 * scrollbar is not clipped (see the note at the call site in search()).
 */
function setLigerGridWidth() {
    var currentHeight = parseInt($("#gridAreagrid").css("height").replace("px", ""), 10);
    $("#gridAreagrid").css("height", currentHeight + 30);
    flag = true; // only apply the adjustment once
}
/** Clear the search form and restore the grid to its initial empty state. */
function reset() {
    var searchForm = $("#searchForm").get(0);
    searchForm.reset();
    grid.clearParm();
    grid.reloadAll();
    grid.set({ data: {}, columns: columns });
}
/**
 * Export the last searched data range to Excel by navigating to the export
 * URL. Requires that a search has already been performed.
 */
function exportExcel() {
    var searched = search_organizationId != null && search_sceneId != null
            && search_start_time != null && search_end_time != null;
    if (!searched) {
        layer.msg("请先选择导出数据范围!");
        return;
    }
    // Trigger the download via a plain navigation.
    window.location.href = rootPath + '/quota_report_collect/export.shtml?organizationId=' + search_organizationId + "&sceneId=" + search_sceneId + "&start_time=" + search_start_time + "&end_time=" + search_end_time;
}
//获取两个日期中的所有的日期
/**
 * Return every calendar day between start_time and end_time (inclusive) as
 * "yyyy-MM-dd" strings.
 *
 * Fixes an off-by-one month bug: the old code fed the 1-based month straight
 * into the Date constructor (which expects 0-based months) and then printed
 * getMonth() without adding 1 back — the two errors cancelled mid-year but
 * produced wrong values (e.g. "2021-00-15") across December/January.
 */
function getSectionDate(start_time, end_time) {
    var result = [];
    var current = getDate(start_time);
    var last = getDate(end_time);
    while (last.getTime() - current.getTime() >= 0) {
        var year = current.getFullYear();
        var month = current.getMonth() + 1; // Date months are 0-based
        var day = current.getDate();
        result.push(year + "-" + (month < 10 ? "0" + month : month) + "-" + (day < 10 ? "0" + day : day));
        current.setDate(current.getDate() + 1); // step one day
    }
    return result;
}
/**
 * Parse a "yyyy-MM-dd" string into a Date. The month component is converted
 * from the 1-based string value to the 0-based value Date expects.
 */
function getDate(datestr) {
    var parts = datestr.split("-");
    return new Date(parseInt(parts[0], 10), parseInt(parts[1], 10) - 1, parseInt(parts[2], 10));
}
//获得区间内所有月份
/**
 * Return every "yyyy-MM" month between start_time and end_time (inclusive).
 *
 * Fixes two bugs in the old version: the 1-based month from the input was
 * passed to setFullYear() (which expects 0-based), and getMonth() was printed
 * without converting back to 1-based — together these emitted values like
 * "2021-00" whenever the range touched December.
 */
function getMonthBetween(start_time, end_time) {
    var result = [];
    var s = start_time.split("-");
    var e = end_time.split("-");
    var curr = new Date(parseInt(s[0], 10), parseInt(s[1], 10) - 1, 1);
    var max = new Date(parseInt(e[0], 10), parseInt(e[1], 10) - 1, 1);
    while (curr <= max) {
        var month = curr.getMonth() + 1; // back to 1-based for display
        result.push(curr.getFullYear() + "-" + (month < 10 ? "0" + month : month));
        curr.setMonth(curr.getMonth() + 1); // advance one calendar month
    }
    return result;
}
//获得区间内所有年份
/** Return every year (as a number) from start_time to end_time, inclusive. */
function getYearBetween(start_time, end_time) {
    var firstYear = parseInt(start_time);
    var lastYear = parseInt(end_time);
    var years = [];
    for (var y = firstYear; y <= lastYear; y += 1) {
        years.push(y);
    }
    return years;
}
/**
 * Scratch/reference function — intentionally all commented out. Kept as a
 * usage example for the date-range helpers above.
 */
function test (){
    // var date_type = $("#date_type");
    // var fundQuotaReport = data.fundQuotaReport;
    // all days between two dates:
    //var dayOfSection = getSectionDate(start_time,end_time)
    // all year-months between two dates:
    //var monthOfSection = getMonthBetween(start_time,end_time)
    // all years between two dates:
    //var yearOfSection = getYearBetween(start_time,end_time)
}
// --- Page state for the crash-pay add page -------------------------------
var listGrid; // grid of the apply orders attached to this payment
var listGrid_data = []; // current rows of that grid
var mainTab = frameElement.tab; // the enclosing frame's tab object
var init_records; // apply-order rows the page was opened with
// Column definitions shared by the local grid and the picker dialog.
var columns = [
    {display: '单据编号', name: 'number', width: 300, show: true,},
    {display: '单据日期', name: 'create_date_str', width: 300, show: true},
    // Document state: rendered as text, edited via a combobox.
    {display: '单据状态', name: 'state', width: 150, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'草稿'},{ key:1,text:'待审核'},{ key:2,text:'审核通过'},{ key:3,text:'审核不通过'},{ key:4,text:'反审核'}],
        render:function(rowdata,index,data){
            return {0 : '草稿', 1 : '待审核',2:'审核通过',3:'审核不通过',4:'反审核'}[data]
        }
    },
    {display: '付款状态', name: 'payment_state', width: 150, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'待付款'},{ key:1,text:'已付款'}],
        render:function(rowdata,index,data){
            return {0 : '待付款', 1 : '已付款'}[data]
        }
    },
    {display: '申请组织', name: 'apply_org', width: 300, show: true},
    {display: '部门负责人', name: 'dept_charge_user', width: 300, show: true},
    {display: '提现申请金额', name: 'apply_money', width: 300, show: true},
    {display: '审批金额', name: 'approval_money', width: 300, show: true},
    {display: '实付金额', name: 'fact_money', width: 300, show: true},
    {display: '收款户名', name: 'receipt_user', width: 300, show: true},
    {display: '收款银行类别', name: 'receipt_bank_type', width: 300, show: true},
    {display: '收款行名', name: 'receipt_bank_name', width: 300, show: true},
    {display: '收款帐号', name: 'receipt_account', width: 300, show: true},
    {display: '收款行号', name: 'receipt_bank_no', width: 300, show: true},
    {display: '制单人', name: 'create_user', width: 300, show: true},
    {display: '业务类型', name: 'tran_type', width: 300, show: true,
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:1,text:'提现申请单'}],
        render:function(rowdata,index,data){
            return {1 : '提现申请单'}[data]
        }
    },
    {display: '所属公司', name: 'company', width: 300, show: true},
    {display: '备注', name: 'remark', width: 300, show: true},
];
// Page bootstrap: seed the grid with pre-selected applies (if any), build
// the apply grid, and wire up the toolbar buttons and form validation.
$(function() {
    // When opened with pre-selected apply orders, load them into the grid
    // and recompute the header totals.
    if($("#crash_applies").val()){
        listGrid_data = JSON2.parse($("#crash_applies").val());
        init_records = JSON2.parse($("#crash_applies").val());
        updateMainForm();
    }
    listGrid = $("#crash_apply").ligerGrid({
        columns: columns,
        sortName: 'id',
        sortOrder: 'desc',
        data: {
            "records":listGrid_data
        }
    });
    $("#pageloading").hide();
    // add an apply order to the grid
    $("#add_crash_apply").bind("click", function () {
        add_crash_apply();
    });
    // remove an apply order from the grid
    $("#del_crash_apply").bind("click", function () {
        del_crash_apply();
    });
    // submit (state 1 = pending review)
    $("#submit").bind("click",function(){
        $("#state").val(1);
        if($("form").valid()){
            $("form").submit();
        }
    })
    // save as draft (state 0)
    $("#save").bind("click",function(){
        $("#state").val(0);
        if($("form").valid()){
            $("form").submit();
        }
    })
    // pick the paying bank account
    $("#pay_bank").bind("focus",function(){
        pay_bank_click();
    });
    $("form").validate({
        errorClass:'error-msg',
        ignore: null,
        submitHandler : function(form) {// must be declared before the rules, otherwise the ajax submit never fires
            ly.ajaxSubmit(form, {// check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        // Success: clear the grid and the form for the next entry.
                        listGrid_data = [];
                        listGrid.loadData({'records': listGrid_data});
                        resetForm();
                        /*if(mainTab.isTabItemExist("crash_pay_manage")){
                            mainTab.reload("crash_pay_manage");
                        }*/
                        layer.msg('添加成功');
                    } else {
                        // NOTE(review): layer.msg's 2nd argument is an options
                        // object; `data` here was probably meant to be appended
                        // to the message text — confirm the intent.
                        layer.msg('添加失败!',data);
                    }
                }
            });
        },
        rules : {
            "company_id" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "pay_type" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "pay_bank_id" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "apply_ids" : {
                required : true,
                validateNullOrWhiteSpace: true,
            }
        },
        messages : {
            "company_id" : {
                required : "所属公司不能为空",
                validateNullOrWhiteSpace: "所属公司不能为空",
            },
            "pay_type" : {
                required : "付款方式不能为空",
                validateNullOrWhiteSpace: "付款方式不能为空",
            },
            "pay_bank_id" : {
                required : "付款银行不能为空",
                validateNullOrWhiteSpace: "付款银行不能为空",
            },
            "apply_ids" : {
                required : "关联单据不能为空",
                validateNullOrWhiteSpace: "关联单据不能为空",
            }
        },
        // Place the error label next to the field's form-group; hidden fields
        // (e.g. pay_bank_id, apply_ids) fall back to the enclosing panel header.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            if(!element.is(":hidden")){
                element.closest('.form-group').append(error_container);
                error.addClass('col-xs-6');
                error_container.append(error);
            }
            else{
                if(element.closest('.form-group').length){
                    element.closest('.form-group').append(error_container);
                    error.addClass('col-xs-6');
                    error_container.append(error);
                }
                else{
                    var error_container = $("<div class='float-right'></div>");
                    error_container.append(error);
                    element.closest('.panel').find('.panel-heading').find(".clearfix").before(error_container);
                    //error_container.before(element.parent());
                }
            }
        },
        success : function(label) {
            if(label){
                label.parent().remove();
            }
        },
    });
});
/**
* 选择银行
*/
/**
 * Open a dialog to pick the paying bank account. Only enabled (state=1)
 * accounts of the currently selected company are listed; the chosen row is
 * written back into the pay_* fields of the main form.
 */
function pay_bank_click(){
    var content = $('<div></div>');
    var dialogGridContainer = $('<div id="bankList" ></div>');
    content.append(dialogGridContainer);
    // Base filter: enabled accounts belonging to the selected company.
    var groupData = { };
    var rules = [];
    rules.push({"field":"state","op":"equal","value":1,"type":"int"});
    rules.push({"field":"company.id","op":"equal","value":$("input[name=company_id]").val(),"type":"int"});
    groupData.rules = rules;
    groupData.op = "and";
    // Grid shown inside the dialog.
    var grid = dialogGridContainer.ligerFilterGrid({
        columns: [
            {display: '付款账户', name: 'accountName', width: 300, show: true},
            {display: '付款银行', name: 'accountOpeningBank', width: 300, show: true},
            {display: '付款银行账号', name: 'accountNumber', width: 300, show: true},
        ],
        url: rootPath + '/fund_company_bank/findByPage.shtml',
        parms:{
            where: JSON2.stringify(groupData)
        },
        sortName: 'id',
        sortOrder: 'desc',
        height:400,
        isSingleCheck: true,
        // Merge per-column filter rules with the base filter before searching.
        onFilter:function(column,grid){
            var data_rules = [];
            $.each(grid._columns,function(k,v){
                if(v.filter){
                    var data = v.filter.getData(v.filter.group);
                    if (data.rules && data.rules.length){
                        data_rules = data_rules.concat(data.rules)
                    }
                }
            });
            var searchGroup = {};
            var rules = groupData.rules.concat(data_rules);
            searchGroup.op = "and";
            searchGroup.rules = rules;
            grid.search({where: JSON2.stringify(searchGroup)});
        }
    });
    // The dialog itself: on confirm, copy the chosen account into the form.
    var dialog = $.ligerDialog.open({
        title: '选择付款银行',
        content: content,
        width: 1000,
        height: 500,
        buttons:[
            {
                text:'确定',
                cls:'btn btn-success',
                onclick:function(){
                    if (!dialog.options.data.grid){
                        layer.msg('获取数据失败');
                    }
                    else{
                        var selected_rows = dialog.options.data.grid.getSelectedRows();
                        if(selected_rows.length == 0){
                            layer.msg("请至少选择一条数据");
                            return;
                        }
                        else{
                            var record = selected_rows[0];
                            $("input[name=pay_bank]").val(record.accountOpeningBank);
                            $("input[name=pay_bank_id]").val(record.id);
                            $("input[name=pay_account]").val(record.accountName);
                            $("input[name=pay_account_number]").val(record.accountNumber);
                            dialog.close();
                        }
                    }
                }
            },
            {
                text:'取消',
                onclick:function(){
                    dialog.close();
                }
            }
        ],
        data: {
            'grid':grid
        }
    });
}
/**
* 新增行
*/
/**
 * Open a dialog to pick approved (state=2), not-yet-paid (pay_id=-1) apply
 * orders that are not already in the local grid; selected rows are appended
 * to the grid and the header totals are recomputed.
 */
function add_crash_apply(){
    var content = $('<div></div>');
    var dialogGridContainer = $('<div id="dialogGrid" ></div>');
    var grid_columns = $.extend([],columns);
    content.append(dialogGridContainer);
    // Exclude rows already present in the local grid from the query.
    var exclude_ids = $.map(listGrid_data, function (item) {
        return item.id;
    });
    exclude_ids = exclude_ids.join(',');
    var groupData = { };
    var rules = [];
    if(exclude_ids){
        rules.push({"field":"id","op":"notin","value":exclude_ids,"type":"int"});
    }
    rules.push({"field":"state","op":"equal","value":2,"type":"int"});
    rules.push({"field":"pay_id","op":"equal","value":-1,"type":"int"});
    groupData.rules = rules;
    groupData.op = "and";
    // Grid shown inside the dialog.
    var grid = dialogGridContainer.ligerGrid({
        columns: grid_columns,
        url: rootPath + '/fund_crash_apply/findByPage.shtml',
        parms:{
            where: JSON2.stringify(groupData)
        },
        /*onLoaded:function(g){
            g.toggleLoading(false);
            g.addRows(init_records);
        },*/
        sortName: 'id',
        sortOrder: 'desc',
        height:400,
    });
    // The dialog itself: on confirm, append the chosen rows to the local grid.
    var dialog = $.ligerDialog.open({
        title: '选择单据',
        content: content,
        width: 1000,
        height: 500,
        buttons:[
            {
                text:'确定',
                cls:'btn btn-success',
                onclick:function(){
                    if (!dialog.options.data.grid){
                        layer.msg('获取数据失败');
                    }
                    else{
                        var selected_rows = dialog.options.data.grid.getSelectedRows();
                        if(selected_rows.length == 0){
                            layer.msg("请至少选择一条数据");
                            return;
                        }
                        else{
                            listGrid_data = listGrid_data.concat(selected_rows);
                            listGrid.loadData({'records': listGrid_data});
                            updateMainForm();
                            dialog.close();
                        }
                    }
                }
            },
            {
                text:'取消',
                onclick:function(){
                    dialog.close();
                }
            }
        ],
        data: {
            'grid':grid
        }
    });
}
/**
* 更新主表单信息
*/
/**
 * Recompute the header fields from the currently selected apply rows: sums
 * the requested/approved amounts and rebuilds the comma-joined id list.
 *
 * Guards against rows whose amount fields are missing or non-numeric, which
 * previously propagated NaN into the two money inputs.
 */
function updateMainForm() {
    var apply_money = 0;
    var approval_money = 0;
    var apply_ids = [];
    $.each(listGrid_data, function (index, item) {
        // Coerce to number; treat missing/non-numeric amounts as 0.
        apply_money += Number(item.apply_money) || 0;
        approval_money += Number(item.approval_money) || 0;
        apply_ids.push(item.id);
    });
    $("#apply_money").val(apply_money.toFixed(2));
    $("#approval_money").val(approval_money.toFixed(2));
    $("#apply_ids").val(apply_ids.join(","));
}
/**
* 删除行
*/
/**
 * Remove the selected apply rows from the local grid (server data is not
 * touched) after user confirmation, then refresh the header totals.
 */
function del_crash_apply(){
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            listGrid.deleteSelectedRow();
            // ligerDefer: re-read the rows slightly later, once the grid has
            // finished removing them, then recompute the totals.
            (function(){listGrid_data = listGrid.rows;updateMainForm();}).ligerDefer(listGrid, 50);
        }
    });
}
/**
* 重置新增表单
*/
/** Reset every field of the add form back to its initial value. */
function resetForm() {
    var formElement = $("#form").get(0);
    formElement.reset();
}
<file_sep>/src/main/webapp/js/fund/quota_report/add.js
// --- Page state for the quota report add page ----------------------------
var listGrid; // the editable quota grid
var listGrid_data = []; // current grid rows
var mainTab = frameElement.tab; // the enclosing frame's tab object
// Column definitions; only "本期金额" (money) is editable, via a float editor.
var columns = [
    {display: '编码', name: 'sn', width: 300, show: true,},
    {display: '名称', name: 'name', width: 300, show: true,},
    {display: '指标属性', name: 'quotaTypeStr', width: 150, show: true,},
    {display: '本期金额', name: 'money', width: 150, editor: { type: 'float' }},
    {display: '往期累计金额', name: 'allMoney', width: 150, },
    {display: '累计金额隐藏', name: 'allMoneyHide', width: 150,hide:true },
    {display: '备注', name: 'remark', width: 150, }
];
$(function() {
var options = {
url: rootPath + '/common_file_upload/upload.shtml',
autoUpload: true,
};
commonImportInit("#fileupload",options);
//指标table
listGrid = $("#report_table").ligerGrid({
columns:columns,
url: rootPath + '/fund_quota_report/findQuota.shtml',
checkbox:false,
usePager:false,
enabledSort:false,
enabledEdit: true, isScroll: false,
onBeforeEdit: f_onBeforeEdit,
onBeforeSubmitEdit: f_onBeforeSubmitEdit,
onAfterEdit: f_onAfterEdit,
width: '100%',
});
//只允许编辑指标属性为填报的
function f_onBeforeEdit(e)
{
if(e.record.quotaType == 1) return false;
return true;
}
//限制必须为数字
function f_onBeforeSubmitEdit(e)
{
if(e.value === "" || e.value ==null){
return false;
}
if(!isNaN(e.value)){
return true;
}else{
alert("只允许填写数值")
return false;
}
}
//编辑后事件
function f_onAfterEdit(e)
{
var changRecod = e.record;
var parentId = changRecod.parentId;
//计算填报指标的累计金额
// if(changRecod.allMoneyHide){
// listGrid.updateCell("allMoney",parseFloat(changRecod.allMoneyHide) + changRecod.money,changRecod);
// }
var totalMoney = 0;
var parentIndex = -1;
for(var i =0; i<listGrid.data.records.length; i++){
if(listGrid.data.records[i].id == parentId){
parentIndex = i;
}
if(listGrid.data.records[i].parentId == parentId){
if(listGrid.data.records[i].money){
totalMoney += listGrid.data.records[i].money;
}
}
}
if(parentIndex> -1){
listGrid.updateCell("money",totalMoney,listGrid.data.records[parentIndex])
// 计算汇总指标的累计金额
// if(listGrid.data.records[parentIndex].allMoneyHide){
// listGrid.updateCell("allMoney",parseFloat(listGrid.data.records[parentIndex].allMoneyHide) + listGrid.data.records[parentIndex].money,listGrid.data.records[parentIndex]);
// }
}
}
$("#pageloading").hide();
//保存
$("#save").bind("click",function(){
submitTable();
});
function submitTable() {
var reportDate = $("#datetimepicker").val();
var sceneName = $("#txtContactName").val();
var orgId = $("#orgId").val();
var records = listGrid.data.records;
var file_able = $("#file_able").val();
var files = $("#fileValues").val();
if(file_able == "1"){
if(!files){
$.ligerDialog.warn('请上传填报附件')
return;
}
}
if(!reportDate){
$.ligerDialog.warn('请选择填报日期')
return;
}
if(!sceneName){
$.ligerDialog.warn('请选择填报场景')
return;
}
if(!orgId){
$.ligerDialog.warn('请选择填报所属组织')
return;
}
for(var i = 0;i<records.length;i++){
if(!records[i].money){
$.ligerDialog.warn('指标未填写完');
return;
}
}
$.ajax({
url:"/fund_quota_report/addEntity.shtml ",
type:"post",
data: {
reportDate: reportDate,
sceneName: sceneName,
orgId: orgId,
records: JSON2.stringify(records),
files: files
},
success:function (data) {
$.ligerDialog.success('添加填报成功');
// setTimeout("location.reload();",3000)
listGrid.reload();
}
})
}
//填报场景表单 ===begin
function getGridOptions(checkbox) {
var options = {
columns: [
{ display: '填报场景', name: 'name', align: 'left', width: 100, minWidth: 60 },
{ display: '周期', name: 'typeName', minWidth: 140, width: 100 },
], switchPageSizeApplyComboBox: false,
url : rootPath + '/fund_quota_report/findSceneByPage.shtml',
pageSize: 10,
checkbox: checkbox
};
return options;
}
    var condition = { fields: [{ name: 'name', label: '填报场景',width:90,type:'text' }] };
    // Scene picker: combobox backed by a pageable grid of reporting scenes.
    $("#txtContactName").ligerComboBox({
        slide: false,
        selectBoxWidth: 400,
        selectBoxHeight: 240,
        valueField: 'name',
        textField: 'name',
        grid: getGridOptions(false),
        condition: condition,
        // Fold the condition-panel rules into the dropdown grid's query.
        conditionSearchClick: function (e)
        {
            var groupData = {};
            groupData.rules = e.rules;
            e.grid.set('parms', { "where": JSON2.stringify(groupData) });
            e.grid.reload();
        },
        onSelected: function (value,name) {
            var selectData = this.getSelected();
            var sceneName = "";
            if(!selectData){
                $("#company_name").val("");
                $("#cycle_type").val("");
                $("#unitName").val("");
            }else {
                // Toggle the attachment-upload area depending on the scene.
                if(selectData.uploadable){
                    $("#fileDiv").show();
                    $("#file_able").val("1");
                }else{
                    $("#fileDiv").hide();
                    $("#file_able").val("");
                }
                // $("#company_name").val(selectData.lyOrganization.orgName);
                // Fetch the organisations this user may report for in the scene.
                $.ajax({
                    type:"post",
                    url:rootPath + '/fund_quota_report/findOrgBySceneUser.shtml',
                    data:{sceneName:name},
                    dataType:"json",
                    success:function (data) {
                        combobox.setData(data);
                    }
                })
                $("#cycle_type").val(selectData.typeName);
                $("#unitName").val(selectData.unitName);
                if(name){
                    sceneName = name;
                }
            }
            // Re-query the quota grid for the chosen scene.
            listGrid.search({"sceneName": sceneName});
        }
    });
    // ---- reporting-scene picker: end ----
    // ---- organisation dropdown: begin (data filled by the scene picker) ----
    var combobox = $("#company_name").ligerComboBox({
        width : '100%',
        selectBoxHeight: 300,
        valueField: 'id',
        textField: 'orgName',
        valueFieldID:'orgId',
        textFieldID : 'orgName',
        resize:false,
        onSelected: function (value,text) {
            getAllMoney();
        }
    });
    // ---- organisation dropdown: end ----
    // Report-date picker; refresh cumulative amounts whenever it changes.
    $('#datetimepicker').datetimepicker({
        language: 'zh-CN',// Chinese locale
        format: 'yyyy-mm-dd',
        minView:"24",
    }).on('changeDate',function (ev) {
        getAllMoney();
    });
});
/**
 * Refresh the "cumulative amount" (allMoney) column: once date, scene and
 * organisation are all chosen, ask the server for the summed amounts of
 * previous periods and write them into the matching grid rows (matched by
 * quota serial number `sn`); clear the column when nothing is returned.
 */
function getAllMoney() {
    var reportDate = $("#datetimepicker").val();
    var sceneName = $("#txtContactName").val();
    var orgId = $("#orgId").val();
    var records = listGrid.data.records;
    // All three inputs are required before the server query makes sense.
    if(!reportDate || !sceneName || !orgId ){
        return;
    }
    $.ajax({
        type:"post",
        data:{
            date:reportDate,
            sceneName:sceneName,
            orgId:orgId,
        },
        dataType:"json",
        url:rootPath + '/fund_quota_report/getQuotaSumMoney.shtml',
        success:function (data) {
            if(data.length > 0){
                // Match returned sums to grid rows by serial number.
                for(var i = 0; i<data.length;i++){
                    for(var k = 0; k<records.length; k++){
                        if(data[i].sn == records[k].sn){
                            listGrid.updateCell("allMoney",data[i].money,records[k])
                        }
                    }
                }
            }else{
                // No history: blank the whole column.
                for(var k = 0; k<records.length; k++) {
                    listGrid.updateCell("allMoney", "", records[k])
                }
            }
        }
    })
}
/**
 * Reset the report form back to its initial state.
 */
function resetForm(){
    var formElement = $("#form").get(0);
    formElement.reset();
}
<file_sep>/src/main/java/cn/zllog/beanEntity/TableInfo.java
package cn.zllog.beanEntity;
import com.google.common.collect.Lists;
import java.util.List;
/**
 * Metadata describing a database table: its physical name, a human-readable
 * display name and the list of its columns.
 */
public class TableInfo {
    /** Physical table name. */
    private String name;
    /** Human-readable display name. */
    private String display;
    /** Column metadata; never null (initialised to an empty list). */
    private List<TableColumn> columns = Lists.newArrayList();

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * Fix: the {@code display} field had no accessors and was therefore
     * unreachable from outside the class; standard getter/setter added.
     */
    public String getDisplay() {
        return this.display;
    }

    public void setDisplay(String display) {
        this.display = display;
    }

    public List<TableColumn> getColumns() {
        return this.columns;
    }

    public void setColumns(List<TableColumn> columns) {
        this.columns = columns;
    }
}
<file_sep>/src/main/webapp/js/fund/quota_scene/add.js
// Handles to this dialog window and to the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // scene list grid in the opener
var form_validator;
$(function() {
    // Bill-date picker: month-level start view, day precision.
    $("#bill_date").datetimepicker({
        language: 'zh-CN',// Chinese locale
        format: 'yyyy-mm-dd ',// NOTE(review): trailing space in the format string — confirm the server trims it
        startView: 'month',
        minView: "month",// only drill down to month view
        autoclose: true,// close on selection
        todayBtn: true,// show "today" button
    });
    // Bill-time picker: hour precision.
    $("#bill_time").datetimepicker({
        language: 'zh-CN',// Chinese locale
        format: 'HH:mm:ss',// display format
        maxView: "day",// start no higher than day view
        startView: 'day',
        minView: "hour",// only drill down to hour view
        autoclose: true,// close on selection
    });
    // The reset button doubles as "close dialog".
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    // Attachment toggle: the checkbox value mirrors its checked state (1/0).
    $("#uploadable").click(function(event){
        var bn=$(this).is(':checked');
        if(bn){
            $(this).val(1);
        }else{
            $(this).val(0);
        }
    });
    // Organisation picker: load the org tree (types 0,1 below parentId).
    var url = rootPath + '/organization/orgTrees.shtml?orgTypes=0,1&parentId='+parentId;
    var data = CommnUtil.ajax(url, null,"json");
    var orgTreeDate;
    if (data.result>0) {
        orgTreeDate = data.list;
    } else {
        layer.msg("获取组织信息错误,请联系管理员!");
    }
    // Multi-select tree combobox over the organisation hierarchy.
    $("#orgName").ligerComboBox({
        width : 210,
        selectBoxWidth: 200,
        selectBoxHeight: 300,
        // valueField: 'orgId',
        textField: 'orgName',
        valueFieldID:'orgId',
        isMultiSelect:true,
        treeLeafOnly: false,
        tree: {
            data :orgTreeDate,
            dataParmName: 'list',
            ajaxType: 'get',
            idFieldName: 'id',
            parentIDFieldName: 'parentOrgId',
            textFieldName: 'orgName',
            isExpand: 2,
            checkbox: true,
            autoCheckboxEven: false
        },
        onBeforeSelect: function (newvalue)
        {
            // alert('要选择的是' + newvalue);
            // return confirm('onBeforeSelect事件可以阻止选择,是否继续');
        },
        onSelected: function (newvalue)
        {
            // alert('选择的是' + newvalue);
        }
    });
//关联指标选择
var url = rootPath + '/fund_quota_scene/quotaTrees.shtml';
var data = CommnUtil.ajax(url, null,"json");
var quatoTreeDate;
if (data.result>0) {
quatoTreeDate = data.list;
} else {
layer.msg("获取指标信息错误,请联系管理员!");
}
$("#quotaIds").ligerComboBox({
width : 210,
selectBoxWidth: 200,
selectBoxHeight: 300,
// valueField: 'quotaId',
textField: 'name',
valueFieldID:'quotaId',
treeLeafOnly: false,
tree: {
data : quatoTreeDate,
dataParmName: 'list',
ajaxType: 'get',
idFieldName: 'id',
parentIDFieldName: 'parentId',
textFieldName: 'name',
isExpand: 2,
checkbox: true,
autoCheckboxEven: false
}
});
    // Form validation + ajax submit for the "add reporting scene" form.
    form_validator = $("#form").validate({
        errorClass:'error-msg',
        submitHandler : function(form) {// must come before the rules, otherwise the ajax submit will not fire
            ly.ajaxSubmit(form, {// submit and check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "name" : {
                required : true,
                maxlength:20,
                // async server-side uniqueness check
                remote:"/fund_quota_scene/nameNotExist.shtml",
                validateNullOrWhiteSpace: true,
            },
            "cycleType" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "unit" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "quotaIds" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "orgName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "remark" : {
                required : true,
                maxlength:200,
                validateNullOrWhiteSpace: true,
            }
        },
        messages : {
            "name" : {
                required : "场景名称不能为空",
                maxlength:"场景名称最长为20",
                remote:"场景名称重复",
                validateNullOrWhiteSpace: "场景名称不能为空",
            },
            "cycleType" : {
                required : "场景周期不能为空",
                validateNullOrWhiteSpace: "场景周期不能为空",
            },
            "unit" : {
                required : "单位不能为空",
                validateNullOrWhiteSpace: "单位不能为空",
            },
            "quotaIds" : {
                required : "关联指标不能为空",
                validateNullOrWhiteSpace: "关联指标不能为空",
            },
            "orgName" : {
                required : "组织不能为空",
                validateNullOrWhiteSpace: "组织不能为空",
            },
            "remark" : {
                required : "备注不能为空",
                maxlength:"备注长度最大为200",
                validateNullOrWhiteSpace: "备注不能为空",
            }
        },
        // Place error labels in a bootstrap-grid wrapper next to the field.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-6');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        // Remove the wrapper once the field validates.
        success : function(label) {
            label.parent().remove();
        },
    });
});
/**
 * Reset the add form and clear the hidden org/quota id fields populated by
 * the tree comboboxes.
 */
function resetForm(){
    $("#form")[0].reset();
    $("#orgId").val('');
    $("#quotaId").val('');
}<file_sep>/src/main/webapp/js/fund/enum/edit.js
// Handles to this dialog window and to the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // enum list grid in the opener
$(function() {
    // Load the list of companies for the multi-select company picker.
    var org_url = rootPath + '/fund_enum/findLyOrganizationList.shtml';
    var comboBox_org= CommnUtil.ajax(org_url, null,"json");
    if (comboBox_org.records.length>0) {
        // NOTE(review): comboBox_org_data is assigned without `var`, creating
        // an implicit global; also `.records` is assumed present on the
        // response — confirm the endpoint always returns it.
        comboBox_org_data = comboBox_org.records;
    } else {
        layer.msg("没有适合条件的公司!");
        return;
    }
    // Multi-select company combobox, values joined with ','.
    var manager = $("#companyName").ligerExpandComboBox({
        width : 250,
        selectBoxWidth: 230,
        selectBoxHeight: 300,
        valueField: 'id',
        textField: 'orgName',
        valueFieldID:'companyId',
        isShowCheckBox: true,
        isMultiSelect: true,
        data:comboBox_org_data,
        split:','
    });
    // Restore the previously saved selection from the hidden fields.
    manager.setText($("#companyNamehidden").val());
    manager.setValue($("#companyIdhidden").val());
    $("#companyId").val($("#companyIdhidden").val());
    // Account-opening date picker (day precision).
    $("#accountOpeningDate").datetimepicker({
        language: 'zh-CN',// Chinese locale
        format: 'yyyy-mm-dd ',// NOTE(review): trailing space in the format string — confirm the server trims it
        startView: 'month',
        minView: "month",// only drill down to month view
        autoclose: true,// close on selection
        todayBtn: true,// show "today" button
    });
    // The reset button doubles as "close dialog".
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        errorClass:'error-msg',
        submitHandler : function(form) {// must come before the rules, otherwise the ajax submit will not fire
            ly.ajaxSubmit(form, {// submit and check whether saving succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        // Refresh the opener list.
                        targetGrid.reloadAll();
                        // Ask whether to close the dialog.
                        $.ligerDialog.confirm('保存成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                            // Reload this dialog either way.
                            self.location.reload();
                        });
                    } else {
                        $.ligerDialog.error('操作失败! ' + data);
                    }
                }
            });
        },
        rules : {
            "companyName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "methodName" : {
                required : true,
                validateNullOrWhiteSpace: true,
            }
        },
        messages : {
            "companyName" : {
                required : "请选择适用公司",
                validateNullOrWhiteSpace: "适用公司不能为空白字符",
            },
            "methodName" : {
                required : "请输入类型名称",
                validateNullOrWhiteSpace: "类型名称不能为空白字符",
            }
        },
        // Place error labels in a bootstrap-grid wrapper next to the field.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-8');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        // Remove the wrapper once the field validates.
        success : function(label) {
            label.parent().remove();
        },
    });
});
<file_sep>/src/main/webapp/js/system/mdm_interface_info/list.js
// Grid handle for the interface-info list.
var mdmInterfaceInfoGrid;
$(function() {
    mdmInterfaceInfoGrid = $("#gridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 100 } ,
            { display: '业务名', name: 'serviceName', width: 200 },
            { display: '方法名', name: 'methodName', width: 200,},
            { display: '关键字', name: 'name', minWidth: 200, },
            { display: '状态', name: 'status', width: 100,
                render: function(rowdata,index,data){
                    // status flag: 1 renders as disabled, anything else as enabled
                    return data == 1 ? '禁用' : '启用';
                }
            }
        ],
        url: rootPath + '/mdm_interface_info/findByPage.shtml',
        /* sortName: 'createTime',
        sortOrder: 'desc',*/
    });
    /* //搜索表单应用ligerui样式
    $("#formsearch").ligerForm({
        fields:[
            {display:"账号",name:"accountName",newline:false,type:"text",cssClass:"field"},
            {display:"用户名",name:"userName",newline:false,type:"text",cssClass:"field"},
            {display:"账号状态",name:"locked",newline:false,type:"text",cssClass:"field"},
            {display:"描述",name:"description",newline:true,type:"text",cssClass:"field"},
        ],
        toJSON: JSON2.stringify
    });*/
    $("#pageloading").hide();
    // Toolbar wiring: enable/disable the selected rows.
    $("#enable").click("click", function() {
        enable();
    });
    $("#disable").click("click", function() {
        disable();
    });
});
/**
 * Enable the selected interface rows.
 */
function enable() {
    _changeSelectedStatus('enable', '启用');
}

/**
 * Disable the selected interface rows.
 */
function disable() {
    _changeSelectedStatus('disable', '禁用');
}

/**
 * Shared implementation for enable()/disable(): the two originals differed
 * only in the `action` query parameter and the message verb, so the
 * duplicated logic is factored out here.
 * @param action server-side action keyword ('enable' | 'disable')
 * @param verb   verb used in the prompt/result messages
 */
function _changeSelectedStatus(action, verb) {
    var selected = mdmInterfaceInfoGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待" + verb + "的项");
        return;
    }
    var ids = $.map(selected, function (item) { return item.id; });
    $.ligerDialog.confirm('确定是否' + verb + '?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/mdm_interface_info/changeStatus.shtml?ids=' + ids.join(',') + '&action=' + action;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                mdmInterfaceInfoGrid.reloadAll();
                layer.msg(verb + '成功');
            } else {
                layer.msg(verb + '失败');
            }
        }
    });
}
<file_sep>/src/main/webapp/js/fund/approval_flow_config/add.js
// Handles to this dialog window and to the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // config list grid in the opener
/**
 * Build the HTML fragment for one approval node (level `level`): a node-name
 * input, an approval-type select (role/person) and a role-or-account picker
 * input that renderRoleOrPerson() later upgrades to a ligerComboBox.
 *
 * Fix: the node-name <input> carried two name attributes
 * (name="node.nodeName-N" name="fixedChar") — invalid HTML left over from a
 * copy-paste — and a placeholder text copied from another form. The stray
 * attribute was removed and the placeholder corrected.
 */
function getApprovalNode(level) {
    var approval_node="<div style=\"margin-left: 50px; color: #00a6ce; font-size: larger; font-weight: bolder\">审批节点"+level+"</div>\n" +
        " <div class=\"row\" style=\"background: #adbece\">\n" +
        "\n" +
        " <div class=\"form-group col-xs-3\">\n" +
        " <label class=\"control-label\" style=\"color: chocolate\">节点名称</label>\n" +
        " <input class=\"form-control\" type=\"text\" name=\"node.nodeName-"+level+"\" placeholder=\"请输入节点名称\" />\n" +
        " </div>\n" +
        " <div class=\"form-group col-xs-3\">\n" +
        " <label class=\"control-label\" style=\"color: chocolate\">审批方式</label>\n" +
        " <select class=\"form-control\" name=\"node.approvalType-"+level+"\" id=\"node.approvalType-"+level+"\">\n" +
        " <option value=\"-1\">=请选择=</option>\n" +
        " <option value=\"1\">角色</option>\n" +
        " <option value=\"2\">人员</option>\n" +
        " </select>\n" +
        " </div>\n" +
        " <div class=\"form-group col-xs-3\">\n" +
        " <label class=\"control-label\" style=\"color: chocolate\">角色或账户</label>\n" +
        " <input class=\"form-control\" type=\"text\" id=\"check_combo-"+level+"\"/>\n" +
        " </div>\n" +
        " </div>\n";
    return approval_node;
}
/**
 * Upgrade the "role or account" input of an approval node into a multi-select
 * ligerComboBox, populated with either all roles or all persons depending on
 * the chosen approval type (1 = role, 2 = person); any other value (the
 * "please choose" entry) does nothing.
 *
 * Fix: the two original branches were byte-for-byte identical except for the
 * request URL, so the duplicated fetch/render logic was merged.
 *
 * @param domObj the <select name="node.approvalType-N"> element that changed
 */
function renderRoleOrPerson(domObj) {
    var idsp = $(domObj).attr("id").split('-');
    var type = $(domObj).val();
    var url;
    if (type == 1) {
        url = rootPath + '/role/getAll.shtml';
    } else if (type == 2) {
        url = rootPath + '/person/findByList.shtml';
    } else {
        return; // "please choose": nothing to render
    }
    var optionData = [];
    ly.ajax({
        async : false, // keep synchronous: the combobox below needs the data
        type : "POST",
        data : "",
        url : url,
        dataType : 'json',
        success : function(json) {
            for (var i in json) {
                optionData.push({id: json[i].id, text: json[i].name});
            }
        }
    });
    $("#check_combo-" + idsp[1]).removeClass('form-control');
    $("#check_combo-" + idsp[1]).ligerComboBox({
        isShowCheckBox: true, isMultiSelect: true,
        data: optionData, valueFieldID: 'fundApprovalFlowConfigNodesList-' + idsp[1]
    });
}
/**
 * Render `approvalLevel` approval-node fragments into #approval_nodes and
 * re-attach the change handler that upgrades each node's picker.
 */
function render_approvaltype(approvalLevel) {
    var fragments = [];
    for (var level = 1; level <= approvalLevel; level++) {
        fragments.push(getApprovalNode(level));
    }
    $('#approval_nodes').html(fragments.join(""));
    // The nodes were just re-created, so the handlers must be re-bound.
    $("select[name^='node.approvalType']").each(function () {
        $(this).change(function () {
            renderRoleOrPerson(this);
        });
    });
}
$(function() {
    // The reset button doubles as "close dialog".
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    // Re-render the approval nodes whenever the level count changes.
    $("#approvalLevel").change(function () {
        render_approvaltype($(this).val());
    });
    // Initial render: the page starts with one approval node.
    render_approvaltype(1);
    $("form").validate({
        ignore: [], // also validate hidden fields
        errorClass:'error-msg',
        submitHandler : function(form) {// must come before the rules, otherwise the ajax submit will not fire
            ly.ajaxSubmit(form, {// submit and check whether the add succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
        },
        messages : {
        },
        errorPlacement : function(error, element) {
            //error.addClass('col-sm-4');
            element.closest('.form-group').append(error);
        },
        success : function(label) {
            label.remove();
        },
    });
});
/**
 * Reset the add form back to its initial state.
 */
function resetForm(){
    $("#form")[0].reset();
}<file_sep>/src/main/webapp/js/fund/cost_ticket_adjust/edit.js
// Handles to this dialog window and to the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // ticket list grid in the opener
$(function() {
    // Open the cost-ticket picker when the ticket-number field is clicked.
    $("#cost_ticket_number").click(clickCostTicket);
    // When the operating department changes, mirror its display text and
    // load the related document defaults from the server.
    $("#operation_org_id").bind('change',function(event){
        $("#operation_org").val($(this).find("option:selected").text());
        var org_id = event.target.value;
        // var url = rootPath + '/fund_cost_ticket/find_org_bank.shtml?org_id=' + org_id;
        // NOTE(review): the line below concatenates rootPath directly with the
        // org id — the commented-out line above looks like the intended URL;
        // confirm which endpoint this should hit.
        var url = rootPath + org_id;
        var result = CommnUtil.ajax(url, {}, "json");
        if (result.data) {
            var data = result.data;
            $("input[name=number]").val(data.number);
            $("input[name=operator_name]").val(data.operator_name);
        }
        else{
            $("input[name=number]").val('');
            $("input[name=operator_name]").val('');
        }
    });
    // The reset button doubles as "close dialog".
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    $("form").validate({
        errorClass:'error-msg',
        submitHandler : function(form) {// must come before the rules, otherwise the ajax submit will not fire
            ly.ajaxSubmit(form, {// submit and check whether saving succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        // Refresh the opener list.
                        targetGrid.reloadAll();
                        // Ask whether to close the dialog.
                        $.ligerDialog.confirm('保存成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                            // Reload this dialog either way.
                            self.location.reload();
                        });
                    } else {
                        $.ligerDialog.error('操作失败! ' + data);
                    }
                }
            });
        },
        rules : {
            "operation_org_id" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "number" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
        },
        messages : {
            "operation_org_id" : {
                required : "申请部门不能为空",
                validateNullOrWhiteSpace: "申请部门不能为空",
            },
            "number" : {
                required : "单据编号不能为空",
                validateNullOrWhiteSpace: "单据编号不能为空",
            },
        },
        // Place error labels in a bootstrap-grid wrapper next to the field.
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-6');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {
            label.remove();
        },
    });
});
/**
 * Open the cost-ticket detail picker dialog.
 */
function clickCostTicket(){
    var dialogOptions = {
        title: '选择费用票明细',
        url: rootPath + '/fund_cost_ticket_adjust/extract.shtml',
        width: 800,
        height: 760
    };
    $.ligerDialog.open(dialogOptions);
}
// Open the cost-ticket detail dialog modally, attached to this dialog.
function borrowDocument(){
    // NOTE(review): nulls out the ligerDrag plugin before opening —
    // presumably to prevent the dialog from being dragged; confirm intent.
    $.fn.ligerDrag = null;
    $.ligerDialog.open({
        title: '费用票明细',
        url: rootPath + "/fund_cost_ticket_adjust/extract.shtml",
        modal: true,
        isResize: false,
        width: 750,
        height: 580,
        opener: thisDialog
    });
}
<file_sep>/src/main/webapp/js/fund/repay_initial/extract.js
// State for the borrow-order extraction dialog.
var listGrid, // list/grid object
    custom_rules = {}, // user-entered filter conditions
    user_id, // current user id
    borrowType=1, // default: offline borrowing
    clazz = 'FundBorrowMoney', // default entity class
    default_params = {}; // default query conditions
// Column definitions for the borrow-order grid; date columns are rendered
// from timestamps, state columns from numeric codes.
var columns = [
    {display: '单据编号', name: 'number', width: 300, show: true,
        render: function(rowdata,index,data){
            return data;
        }
    },
    {display: '单据日期', name: 'create_date', width: 300, show: true, export_name:'create_date_str',
        render: function(rowdata,index,data){
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '单据状态', name: 'document_state', width: 300, show: true, export_name:'documents_state_str',
        render: function(rowdata,index,data){
            return rowdata.documents_state_str;
        }
    },
    {display: '所属公司', name: 'company', width: 300, show: true},
    {display: '申请业务部门', name: 'dept', width: 300, show: true},
    {display: '申请借款日期', name: 'borrow_date', width: 300, show: true,export_name:'borrow_date_str',
        render: function(rowdata,index,data){
            if(data==null){
                return "";
            }
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '预计还款日期', name: 'refund_date', width: 300, show: true,export_name:'refund_date_str',
        render: function(rowdata,index,data){
            if(data==null){
                return "";
            }
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '借款天数', name: 'borrow_day', width: 300, show: true},
    {display: '申请借款金额', name: 'borrow_money', width: 300, show: true},
    {display: '一审审批金额', name: 'first_examine_money', width: 300, show: true},
    {display: '终审审批金额', name: 'last_examine_money', width: 300, show: true},
    {display: '终审审批时间', name: 'last_examine_date', width: 300, show: true,export_name:'last_examine_date_str',
        render: function(rowdata,index,data){
            if(data==null){
                return "";
            }
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '剩余未还款金额', name: 'residue_money', width: 150, show: true},
    {display: '到期日未还款金额', name: 'expire_residue_money', width: 150, show: true},
    {display: '实际划款金额', name: 'make_money', width: 150, show: true},
    {display: '借款原因', name: 'borrow_cause', width: 300, show: true},
    {display: '借款利率', name: 'borrow_interest', width: 300, show: true},
    {display: '申请人', name: 'apply_user', width: 300, show: true},
    {display: '制单人', name: 'create_user', width: 300, show: true},
    {display: '凭证状态', name: 'voucher_state', width: 150, show: true, export_name:'voucher_state_str',
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'未生成'},{ key:1,text:'已生成'},{ key:2,text:'已冲销'}],
        render:function(rowdata,index,data){
            // 0 = not generated, 1 = generated, 2 = written off
            return {0 : '未生成', 1 : '已生成', 2:'已冲销'}[data]
        }
    },
    {display: '传输状态', name: 'transfer_state', width: 150, show: true, export_name:'transfer_state_str',
        editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
        data:[{ key:0,text:'未传输'},{ key:1,text:'已传输'}],
        render:function(rowdata,index,data){
            // 0 = not transferred, 1 = transferred
            return {0 : '未传输', 1 : '已传输'}[data]
        }
    },
    {display: '备注', name: 'comment', width: 300, show: true}
];
var mainTab = frameElement.tab;// tab object of the host frame
var tab_id = frameElement.id;
var mainData = frameElement.openerData; // opener's data object
var thisDialog = frameElement.dialog; // current dialog window
$(function () {
    // Wire the dialog toolbar buttons, then load the grid.
    $("#log").on("click","#search",clickSeach)
        .on("click","#clear",clickClear)
        .on("click","#reset",clickReset)
        .on("click","#submit",clickSubmit);
    borrowInit();
});
/**
 * Push the selected borrow order's fields back into the opener (parent
 * window) form.
 */
function showData(data){
    $("#refund_money",window.parent.document).val(data.residue_money);
    $("#borrow_number",window.parent.document).val(data.number);
    $("#borrow_type",window.parent.document).val(borrowType);
    $("#borrow_id",window.parent.document).val(data.id);
    $("#company",window.parent.document).val(data.company);
    $("#company_id",window.parent.document).val(data.company_id);
    $("#dept",window.parent.document).val(data.dept);
    $("#dept_id",window.parent.document).val(data.dept_id);
    $("#borrow_date",window.parent.document).val(data.borrow_date);
    $("#expect_refund_date",window.parent.document).val(data.refund_date);
    $("#borrow_day",window.parent.document).val(data.borrow_day);
    $("#borrow_money",window.parent.document).val(data.borrow_money);
    $("#last_examine_money",window.parent.document).val(data.last_examine_money);
    $("#make_money",window.parent.document).val(data.make_money);
    $("#residue_money",window.parent.document).val(data.residue_money);
    $("#expire_residue_money",window.parent.document).val(data.expire_residue_money);
}
/**
 * Confirm the chosen borrow order: push the first selected row back into
 * the opener form and close this dialog. Warns when nothing is selected.
 */
function clickSubmit(){
    var rows = listGrid.getSelectedRows();
    if (!rows.length) {
        layer.msg("请先选择借款单");
        return;
    }
    var chosen = rows[0];
    showData(chosen);
    thisDialog.close();
}
/**
 * Close the dialog without selecting anything.
 */
function clickReset(){
    thisDialog.close();
}
/**
 * Clear both search inputs (order number and amount) in one pass.
 */
function clickClear(){
    $("#number, #money").val('');
}
/**
 * Collect the search inputs into filter rules and run the query.
 * (Name kept as-is, including the typo — it is referenced by the click
 * binding at the top of this file.)
 */
function clickSeach(){
    var rules = [];
    var numberVal = $("#number").val();
    var moneyVal = $("#money").val();
    if (numberVal) {
        rules.push({ field: 'number', op: 'like', value: numberVal, type: 'string' });
    }
    if (moneyVal) {
        rules.push({ field: 'last_examine_money', op: 'equal', value: moneyVal, type: 'float' });
    }
    custom_rules.rules = rules;
    searchData();
}
/**
 * Merge all collected filter rules, drop duplicates (same field/op/value)
 * and re-query the grid with the combined `where` clause.
 */
function searchData(){
    var seen = {};
    var uniqueRules = [];
    $.each(custom_rules.rules, function (k, rule) {
        var key = rule.field + "_" + rule.op + "_" + rule.value;
        if (!seen[key]) {
            seen[key] = true;
            uniqueRules.push(rule);
        }
    });
    var groupData = { rules: uniqueRules, op: "and" };
    listGrid.search({ where: JSON2.stringify(groupData) });
}
/**
 * Initialise the borrow-order grid, filtered by the (offline) borrow type.
 */
function borrowInit(){
    listGrid = $("#log_grid").ligerFilterGrid({
        columns: columns,
        url: rootPath + '/fund_repay_initial/findBorrow.shtml?borrowType='+borrowType,
        parms: default_params,
        height: 370,
        sortName: 'id',
        sortOrder: 'desc',
        isSingleCheck: true
    });
}
<file_sep>/src/main/resources/sqlFiles/1.0.17-liubb-init-voucher-business-order-data.sql
-- Seed data: the ordered list of business order types used by voucher
-- generation; `sort` fixes the display/processing order.
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('提现付款单',1);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('借款付款单',2);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('回款单',3);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('收款单',4);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('借款单线上',5);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('还款单',6);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('仓储费扣费单',7);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('三方冷链扣费单',8);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('水电空调扣费单',9);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('送货服务扣费单',10);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('房租费扣费单',11);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('物业费扣费单',12);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('其它扣费单',13);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('费用票扣费单',14);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('转款单',15);
INSERT INTO fund_voucher_business_order (name,sort) VALUES ('余额调整单',16);
<file_sep>/src/main/webapp/js/system/mdm_data_dictionary/list.js
// Grid handle for the data-dictionary list.
var listGrid;
$(function () {
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            {
                display: '类别', name: 'type', width: 150, show: true,
                render: function (item)
                {
                    // Map the raw type key to its display label.
                    if (item.type == 'supplier') return '供应商';
                    else if (item.type == 'medicine') return '药品';
                    return '';
                }
            },
            {
                display: '字典显示名称', name: 'name', width: 200, show: true,
                render: function (item)
                {
                    // Truncate long names to 15 chars with a full-text tooltip.
                    return item.name.length <16?item.name:'<span title="'+item.name+'">'+item.name.substring(0,15)+'..</span>';
                }
            },
            {
                display: '字典值', name: 'value', width: 200, show: true
            },
            {display: '状态', name: 'status', width: 150, show: true}
        ],
        url: rootPath + '/mdm_data_dictionary/findByPage.shtml',
        sortName: 'id',
        sortOrder: 'desc',
    });
    $("#pageloading").hide();
    // Toolbar wiring.
    $("#searchButton").click("click", function (event) {// search button
        event.preventDefault();
        search();
    });
    $("#addDict").click("click", function () {
        add();
    });
    $("#editDict").click("click", function () {
        edit();
    });
    $("#delButton").click("click", function () {
        del();
    });
    $("#importButton").click("click", function () {
        importList();
    });
    $("#exportButton").click("click", function () {
        exportList();
    });
});
/**
 * Run the grid query using the serialized search-form fields as parameters.
 */
function search() {
    listGrid.search($("#searchForm").serializeJson());
}
// Clear the search form, column filters and extra query params, then reload.
function reset() {
    $("#searchForm")[0].reset();
    listGrid.filter.reset();
    listGrid.clearParm();
    listGrid.reloadAll();
}
/**
 * Open the "add dictionary entry" dialog; the grid handle is passed so the
 * dialog can refresh the list after saving.
 */
function add() {
    var dialogConfig = {
        title: '新增',
        url: rootPath + '/mdm_data_dictionary/addUI.shtml',
        width: 800,
        height: 340,
        data: { grid: listGrid }
    };
    $.ligerDialog.open(dialogConfig);
}
/**
 * Open the edit dialog for the selected row; exactly one row must be
 * selected, otherwise a warning is shown.
 */
function edit() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待编辑的项");
        return;
    }
    if (rows.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    $.ligerDialog.open({
        title: '编辑',
        url: rootPath + '/mdm_data_dictionary/editUI.shtml?id=' + rows[0].id,
        width: 800,
        height: 340,
        data: { grid: listGrid }
    });
}
/**
 * Delete the selected rows after a confirmation prompt; refreshes the grid
 * on success.
 */
function del() {
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请选中待删除的项");
        return;
    }
    var idList = $.map(rows, function (row) { return row.id; }).join(',');
    $.ligerDialog.confirm('确定是否删除?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/mdm_data_dictionary/deleteEntity.shtml?ids=' + idList;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
// Import
/**
 * Open the import dialog; the grid handle lets the dialog refresh the list.
 */
function importList() {
    var dialogConfig = {
        width: 600,
        height: 400,
        title: '导入',
        url: rootPath + '/mdm_data_dictionary/importUI.shtml',
        data: { grid: listGrid }
    };
    $.ligerDialog.open(dialogConfig);
}
// Export: hand the current grid state to the server-side exporter.
function exportList() {
    listGrid.exportData('/mdm_data_dictionary/export.shtml')
}<file_sep>/src/main/webapp/js/system/organization/edit.js
// Handles to this dialog window and to the grid in the parent page.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // organisation list grid in the opener
$(function() {
    // Force the shortcut field to upper case as the user types.
    $('input[name=shortcut]').bind('keyup',function(){
        $(this).val( $(this).val().toUpperCase());
    });
    var org_id = $("#id").val();
    // Default filter for the person-in-charge lookup: active persons
    // (status == 1) belonging to this organisation.
    var default_prams = {'rules':[],'op':'and'};
    default_prams.rules.push({
        field: 'status', op: 'equal', value: 1, type:'int'
    });
    default_prams.rules.push({
        field: 'organizationId', op: 'equal', value: org_id, type:'int'
    })
    var combobox = $("#inChargePerson").ligerComboBox({
        width : '100%',
        selectBoxHeight: 300,
        valueField: 'id',
        textField: 'fullOrganization',
        valueFieldID:'inChargePersonId',
        textFieldID : 'inChargePerson',
        url: rootPath+'/person/findByList.shtml',
        autocomplete:true,
        autocompleteAllowEmpty:true,
        resize:false,
        initValue: $("#inChargePersonId").val(),
        parms:{
            'where':JSON2.stringify(default_prams)
        },
        // Rebuild the filter on every request so the autocomplete keyword is
        // folded into the `where` clause instead of being sent as `key`.
        ajaxBeforeSend:function(a,b){
            if(combobox){
                var parms = combobox.get('parms');
                var value = parms['key'];
                var groupData = {};
                var rules = [];
                if(value){
                    rules.push({
                        field: 'fullOrganization', op: 'like', value: value, type:'string'
                    })
                }
                rules.push({
                    field: 'status', op: 'equal', value: 1, type:'int'
                })
                rules.push({
                    field: 'organizationId', op: 'equal', value: org_id, type:'int'
                })
                groupData.rules = rules;
                groupData.op = "and";
                b.data = 'where='+JSON2.stringify(groupData)
                delete parms['key'];
            }
        },
        // After the first load, resolve initValue to its display text.
        ajaxComplete:function(){
            if(combobox){
                if(combobox.options.initValue){
                    console.log("here"); // NOTE(review): leftover debug log
                    var value = combobox.options.initValue;
                    var text = combobox.findTextByValue(value);
                    combobox.options.initValue = null;
                    combobox._changeValue(value,text,false);
                }
            }
        },
        onClear : function(){
            combobox.reload();
        }
    });
    // Form validation + ajax submit for the organisation edit form.
    $("form").validate({
        submitHandler : function(form) {// must come before the rules, otherwise the ajax submit will not fire
            ly.ajaxSubmit(form,{// submit and check whether the update succeeded
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('更新成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                        $("#form")[0].reset();
                    } else {
                        layer.alert('更新失败!', 3);
                    }
                }
            });
        },
        rules : {
            "shortcut" : {
                remote : { // async server-side uniqueness check
                    type : "POST",
                    url : rootPath + '/organization/isExist.shtml',
                    data : {
                        'shortcut' : function() {return $("input[name=shortcut]").val();},
                        'id' : $("#id").val()
                    }
                },
                maxlength : 6,
                english : true
            },
        },
        messages : {
            "shortcut" : {
                remote : "组织简称已存在",
                maxlength : "最大长度6位",
                english : "输入内容必须全部英文"
            },
        },
        errorClass:'error-msg',
        errorPlacement : function(error, element) {// custom error label placement
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-8');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {// field passed validation: remove its error wrapper
            label.parent().remove();
        }
    });
});
/*function but(v){
if(v.value==2){
showBut();
}else{
$("#divbut").css("display","none");
}
}
function toBut(b){
$("#description").val($("#"+b.id).html());
}
function showBut(){
$("#divbut").css("display","block");
var url = rootPath + '/resources/findByButtom.shtml';
var data = CommnUtil.ajax(url, null,"json");
if (data != null) {
var bb = $("#but");
bb.html('');
for ( var i = 0; i < data.length; i++) {
bb.append("<span onclick=\"toBut(this)\" id=\"span_"+data[i].id+"\">"+ data[i].buttom+"</span>");
}
} else {
layer.msg("获取按扭列表失败!");
}
}*/
/*function getOrgSelect(orgId)
{
var url = rootPath + '/organization/orglists.shtml';
var data = CommnUtil.ajax(url, null,"json");
if (data.result>0) {
var h = "<option value='0'>------顶级目录------</option>";
for ( var i = 0; i < data.list.length; i++) {
if (parseInt(orgId, 10) == parseInt(data.list[i].id, 10)) {
h += "<option value='" + data.list[i].id + "' selected='selected'>" + data.list[i].orgName + "</option>";
}
else
{
h += "<option value='" + data.list[i].id + "'>" + data.list[i].orgName + "</option>";
}
}
$("#parentOrgId").html(h);
} else {
layer.msg("获取菜单信息错误,请联系管理员!");
}
}*/
/*function getOrgTypeSelect(orgType) {
var data = [
{name: '集团', id: '0'},
{name: '公司', id: '1'},
{name: '职能部门', id: '2'},
{name: '业务部门', id: '3'},
{name: '岗位', id: '4'},
]
var h="";
for (var i = 0; i < data.length; i++) {
if (parseInt(orgType, 10) == parseInt(data[i].id, 10)) {
h += "<option value='" + data[i].id + "' selected='selected'>" + data[i].name + "</option>";
}
else {
h += "<option value='" + data[i].id + "'>" + data[i].name + "</option>";
}
}
$("#orgType").html(h);
}*/
<file_sep>/src/main/resources/sqlFiles/1.0.9-liubb-add-quota-menus.sql
-- Update the quota-management menu: register an "export" button resource --
-- Look up the id of the first-level (type = 1) menu entry keyed 'quota_manage'.
SELECT @fund_quota_manage_id := id FROM ly_resources WHERE resKey = 'quota_manage' AND type = 1;
-- Add an "export" button (type = 2) under that menu; `description` holds the
-- button's HTML markup, which the UI renders verbatim.
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="export" class="btn btn-primary marR10">导出</button>', null, 0, 60, '导出', @fund_quota_manage_id, 'export', '', '2');
var mainTab = frameElement.tab; // tab object of the surrounding frame
$(function() {
	// Set up the two-pane layout; re-fit the PDF pane whenever the layout
	// is resized or the left pane is collapsed/expanded.
	$("#detailLayout").ligerLayout({leftWidth:$('#detailLayout').width()/2, centerBottomHeight:100,
		onEndResize:function(param,e){
			resetPdfAreaSize();
		},
		onLeftToggle:function (isCollapse) {
			resetPdfAreaSize();
		}
	});
	// Initial fit of the PDF pane's width and height.
	resetPdfAreaSize();
	// Keep the PDF pane fitted when the browser window is resized.
	$(window).resize(function () {
		resetPdfAreaSize(true);
	});
	// Let each textarea grow to the height of its content.
	$.each($('textarea'),function(index,item){
		item.style.height = item.scrollHeight + 'px';
	})
	// Toolbar bindings: draft-save, complete, cancel.
	$("#tempSaveButton").click(function() {
		save('temp');
	});
	$("#checkCompleteButton").click(function() {
		save('complete');
	});
	$("#cancelButton").click(function() {
		cancel();
	});
	// Property-group bindings: add at head, delete group, add below group.
	$("#addPropAtHeadButton").click(function() {
		addPropAtHeadButton($(this));
	});
	$(".delete-this-prop").click(function(){
		deleteThisProp($(this));
	});
	$(".add-prop-at-blow").click(function(){
		addPropAtBlow($(this));
	});
});
/**
 * Resize the PDF pane to fill the layout, leaving a 50px margin.
 * @param windowResize unused; kept for call-site compatibility.
 */
function resetPdfAreaSize(windowResize){
	var layout = $('#detailLayout');
	var pdfPane = $("#pdfDetail");
	pdfPane.height(layout.height() - 50);
	pdfPane.width(layout.width() - $('#mainCenterArea').width() - 50);
}
/**
 * Insert a blank property group right after the head-area marker.
 * @param btn the clicked button (unused)
 */
function addPropAtHeadButton(btn){
	var blankGroup = getPropTemplate();
	$('#headAreaBegin').after(blankGroup);
}
/**
 * Remove the property group containing the clicked button,
 * refusing when it is the last remaining group.
 */
function deleteThisProp(btn){
	var groupCount = $(".medicine-prop-group").length;
	if (groupCount === 1) {
		$.ligerDialog.warn('至少保留一个属性!');
		return;
	}
	btn.closest(".medicine-prop-group").remove();
}
/**
 * Append a blank property group directly below the group
 * containing the clicked button.
 */
function addPropAtBlow(btn){
	var currentGroup = btn.closest(".medicine-prop-group");
	currentGroup.after(getPropTemplate());
}
/**
 * Build a blank property-group template: clone the first group,
 * blank its fields, reset textarea heights, and rebind its buttons.
 * @returns a detached jQuery element ready to be inserted
 */
function getPropTemplate(){
	var template = $(".medicine-prop-group").first().clone();
	// Blank every input/textarea value carried over by the clone.
	template.find('input,textarea').each(function(){
		$(this).val('');
	});
	// Let the cloned textareas size themselves again.
	template.find('textarea').height('auto');
	// clone() does not copy event handlers, so rebind the group's buttons.
	template.find(".delete-this-prop").click(function(){
		deleteThisProp($(this));
	});
	template.find(".add-prop-at-blow").click(function(){
		addPropAtBlow($(this));
	});
	return template;
}
/**
 * Save the medicine properties currently on the form.
 * See https://www.cnblogs.com/liuwt365/p/7750888.html for the ajax pattern used.
 * @type 'temp' = save as draft, 'complete' = checking finished
 */
function save(type){
	var propList = [];
	var formItemArray = $('form').serializeArray();
	// NOTE(review): assumes every property group serializes exactly 10 inputs,
	// in the fixed field order consumed by generateProp — confirm against the
	// form markup before adding/removing form fields.
	for(var index=0; index<formItemArray.length; index = index + 10){
		propList.push(generateProp(index,formItemArray));
	}
	ly.ajax({
		// headers must be set, otherwise the request errors out
		headers: {
			'Accept': 'application/json',
			'Content-Type': 'application/json'
		},
		type : "post",
		dataType : "json",
		url: rootPath + '/yaodian/editEntity.shtml?medicineId=' + medicine.id + "&medicineCode=" + medicine.code + "&type="+type,
		data: JSON.stringify(propList),
		success : function(data) {
			if (data && data.success == true) {
				// For a draft save, just refresh the status fields without reloading.
				if(type == 'temp'){
					layer.msg("暂存成功!");
					$('#checkStatusName').text('校对中');
					$('#checkPersonName').text(data.checkPerson.userName);
					$('#checkTime').text(data.checkTime);
				}else {
					$.ligerDialog.confirm('保存成功!是否显示下一条未校对的条目?', function (yes) {
						if (yes == true) {
							if(data.next == null){
								$.ligerDialog.tip({ title: '提示', content: '无下一条数据!' });
								$('button').attr('disabled','disabled');
							}else {
								showNext(data.next.id, data.next.cnFormatName, data.next.code);
							}
						} else {
							// Stay on the current record: reload its tab.
							showNext(medicine.id, medicine.cnFormatName, medicine.code);
						}
					});
				}
			} else {
				$.ligerDialog.error('保存失败!');
			}
		}
	});
}
/**
 * Close this check-detail tab in the parent frame.
 */
function cancel(){
	mainTab.removeTabItem("yaoDianCheckDetail");
}
/**
* 从属性数组中解析出药品属性对象
* @param startIndex
* @param sourceArray
*/
function generateProp(startIndex,sourceArray){
var index = startIndex;
var propObject = {};
propObject.id = sourceArray[index++].value;
propObject.medicineCode = sourceArray[index++].value;
propObject.propNumber = sourceArray[index++].value;
propObject.cnName = sourceArray[index++].value;
propObject.cnFormatName = sourceArray[index++].value;
propObject.pinYin = sourceArray[index++].value;
propObject.enName = sourceArray[index++].value;
propObject.type = sourceArray[index++].value;
propObject.propName = sourceArray[index++].value;
propObject.propValue = sourceArray[index++].value;
return propObject;
}
/**
 * Open (or reopen) the check-detail tab for the given medicine.
 * An existing tab with the same id is removed first so the content
 * is reloaded for the new record.
 */
function showNext(id,name,code){
	var tabId = "yaoDianCheckDetail";
	if (mainTab.isTabItemExist(tabId)) {
		mainTab.removeTabItem(tabId);
	}
	mainTab.addTabItem({
		tabid: tabId,
		text: '校对-' + name,
		url: rootPath + '/yaodian/medicineCheck.shtml?id=' + id + '&code=' + code,
		callback: function () {
			// post-open hook for injecting extra content into the tab
		}
	});
}
-- Seed the base archive categories (fund_archive), ordered by `sort`.
-- NOTE(review): '付款方式' is inserted twice (sort 1 and sort 3) — one of the two
-- is presumably meant to be a different category; confirm before running.
INSERT INTO fund_archive (name,sort) VALUES ('付款方式',1);
INSERT INTO fund_archive (name,sort) VALUES ('回款方式',2);
INSERT INTO fund_archive (name,sort) VALUES ('付款方式',3);
INSERT INTO fund_archive (name,sort) VALUES ('费用类型',4);
INSERT INTO fund_archive (name,sort) VALUES ('银行类型',5);
INSERT INTO fund_archive (name,sort) VALUES ('组织',6);
INSERT INTO fund_archive (name,sort) VALUES ('人员',7);
INSERT INTO fund_archive (name,sort) VALUES ('账号',8);
INSERT INTO fund_archive (name,sort) VALUES ('银行',9);
-- Point the first-level 'influence' menu at the fund_influence list page,
-- then register its add / edit / delete buttons (type = 2) under it.
SELECT @influence_id := id FROM ly_resources WHERE resKey = 'influence' AND type = 1;
update ly_resources set resUrl = '/fund_influence/list.shtml' WHERE resKey = 'influence' AND type = 1;
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="add" class="btn btn-primary marR10">新增</button>', null, 0, 10, '新增', @influence_id, 'add', '/fund_influence/add.shtml', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="edit" class="btn btn-primary marR10">修改</button>', null, 0, 20, '修改', @influence_id, 'edit', '/fund_influence/edit.shtml', '2');
INSERT INTO ly_resources (buttonId, description, icon, ishide, level, name, parentId, resKey, resUrl, type) VALUES (0, '<button type="button" id="delete" class="btn btn-primary marR10">删除</button>', null, 0, 30, '删除', @influence_id, 'delete', '', '2');
-- fix: restore the correct markup/name of the voucher-transfer-config "add" button --
update ly_resources set description = '<button type="button" id="add" class="btn btn-primary marR10">新增</button>' , name = '新增' where resUrl = '/fund_voucher_transfer_config/add.shtml' and resKey = 'add' and type = 2;
<file_sep>/src/main/webapp/js/fund/borrow_money/add.js
// Handles to this dialog window and to the grid that opened it.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // caller's list grid (reloaded after save)
var form_validator; // jQuery-validate instance, assigned on DOM ready
$(function() {
	// When a department is chosen, pull its bank account and person-in-charge
	// from the server and fill the related fields. The trailing .val('')
	// clears the select on page load.
	$("#dept_id").bind('change',function(event){
		$("#dept").val($(this).find("option:selected").text());
		var org_id = event.target.value;
		var url = rootPath + '/fund_borrow_down/fundOrgBank.shtml?org_id='+org_id;
		var result = CommnUtil.ajax(url, {}, "json");
		if (result.bank) {
			var data = result.bank;
			$("input[name=account_name]").val(data.accountName);
			$("input[name=bank_type_name]").val(data.bankTypeName);
			$("input[name=account_opening_bank]").val(data.accountOpeningBank);
			$("input[name=account_number]").val(data.accountNumber);
		}
		else{
			// No bank on file for this org: blank the bank fields.
			$("input[name=account_name]").val('');
			$("input[name=bank_type_name]").val('');
			$("input[name=account_opening_bank]").val('');
			$("input[name=account_number]").val('');
		}
		if(result.inChargePerson){
			var inChargePerson = result.inChargePerson;
			$("#apply_user").val(inChargePerson.name);
			$("#apply_user_id").val(inChargePerson.id);
		}
		else{
			$("#apply_user").val('');
			$("#apply_user_id").val('');
		}
	}).val('');
	$("#refund_date").datetimepicker({
		language: 'zh-CN', // Chinese UI
		format: 'yyyy-mm-dd', // fixed: a trailing space in the format produced values like "2020-01-01 "
		startView: 'month',
		minView: "month", // pick down to day only
		autoclose: true, // close on select
		todayBtn: true, // show the "today" button
	});
	// The reset button doubles as cancel: close the dialog.
	$("button[type='reset']").click(function(event){
		thisDialog.close();
	});
	form_validator = $("#form").validate({
		errorClass:'error-msg',
		// Must be declared before the rules, otherwise the form is not submitted via ajax.
		submitHandler : function(form) {
			ly.ajaxSubmit(form, {// check whether the save succeeded
				type : "post",
				dataType : "json",
				success : function(data) {
					if (data == "success") {
						resetForm();
						targetGrid.reloadAll();
						$.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
							if(yes == true){
								thisDialog.close();
							}
						});
					} else {
						$.ligerDialog.error('添加失败!' + data);
					}
				}
			});
		},
		rules : {
			"refund_date" : {
				required : true,
				validateNullOrWhiteSpace: true,
			},
			"borrow_money" : {
				required : true,
				number : true,
				validateNullOrWhiteSpace: true,
			},
			"borrow_interest" : {
				required : true,
				number : true,
				validateNullOrWhiteSpace: true,
			},
			"account_opening_bank" : {
				required : true,
				validateNullOrWhiteSpace: true,
			},
			"account_name" : {
				required : true,
				validateNullOrWhiteSpace: true,
			},
			"account_number" : {
				required : true,
				number : true,
				validateNullOrWhiteSpace: true,
			},
			"bank_type_name" : {
				required : true,
				validateNullOrWhiteSpace: true,
			},
			"comment" : {
				maxlength: 200,
			},
			"borrow_cause" : {
				maxlength: 200,
			}
		},
		messages : {
			"refund_date" : {
				required : "预计还款日期不能为空"
			},
			"borrow_money" : {
				required : "申请借款金额不能为空"
			},
			"borrow_interest" : {
				required : "借款利率不能为空"
			},
			"bank_type_name" : {
				required : "收款银行不能为空"
			},
			"account_opening_bank" : {
				required : "开户行不能为空"
			},
			"account_name" : {
				required : "收款账户不能为空"
			},
			"account_number" : {
				required : "收款银行账号不能为空"
			},
			// fixed: these two rules are maxlength, but the messages were keyed
			// "required", so the custom text never showed for over-long input.
			"comment" : {
				maxlength: "备注过长",
			},
			"borrow_cause" : {
				maxlength: "借款原因太长",
			}
		},
		errorPlacement : function(error, element) {
			var error_container = $("<div><label class='col-xs-4'></label></div>");
			error.addClass('col-xs-6');
			error_container.append(error);
			element.closest('.form-group').append(error_container);
		},
		success : function(label) {
			label.remove();
		},
	});
});
/**
 * Reset the create form back to its pristine state,
 * also clearing the hidden org id that reset() does not touch.
 */
function resetForm(){
	var formElement = $("#form")[0];
	formElement.reset();
	$("#apply_org_id").val('');
}
package cn.zllog.beanEntity;
public class Column implements Comparable{
private String tableName; //表名
private String columnName; //列名
private String entityName; //实体名
private String fieldName; //字段名
private String entityType; //字段
private String display;
private String entityTypeName;
private String associateArchive; //关联基础档案
private Integer position; //表头(1)或表体(2) (在这里设置这个参数并不灵活)
public Column(){
}
public Column(String tableName, String columnName, String entityName, String fieldName, String entityType, String display,String entityTypeName,String associateArchive) {
this.tableName = tableName;
this.columnName = columnName;
this.entityName = entityName;
this.fieldName = fieldName;
this.entityType = entityType;
this.entityTypeName = entityTypeName;
this.display = display;
this.associateArchive = associateArchive;
}
public String getTableName() {
return tableName;
}
public void setTableName(String tableName) {
this.tableName = tableName;
}
public String getColumnName() {
return columnName;
}
public void setColumnName(String columnName) {
this.columnName = columnName;
}
public String getEntityName() {
return entityName;
}
public void setEntityName(String entityName) {
this.entityName = entityName;
}
public String getFieldName() {
return fieldName;
}
public void setFieldName(String fieldName) {
this.fieldName = fieldName;
}
public String getEntityType() {
return entityType;
}
public void setEntityType(String entityType) {
this.entityType = entityType;
}
public String getDisplay() {
return display;
}
public void setDisplay(String display) {
this.display = display;
}
public Integer getPosition() {
return position;
}
public void setPosition(Integer position) {
this.position = position;
}
public String getEntityTypeName() {
return entityTypeName;
}
public void setEntityTypeName(String entityTypeName) {
this.entityTypeName = entityTypeName;
}
public String getAssociateArchive() {
return associateArchive;
}
public void setAssociateArchive(String associateArchive) {
this.associateArchive = associateArchive;
}
@Override
public int compareTo(Object o) {
Column column = (Column)o;
if (this.getPosition() > column.getPosition()) {
return 1;
} else if (this.getPosition() == column.getPosition()) {
return this.getFieldName().compareTo(column.getFieldName());
} else {
return -1;
}
}
}
<file_sep>/src/main/webapp/js/fund/quota_report/detail.js
var listGrid; // grid object
var listGrid_data = []; // default data for the list
var mainTab = frameElement.tab; // tab object of the surrounding frame
// Column definitions for the quota-report detail grid.
var columns = [
	{display: '编码', name: 'fundQuota.sn', show: true,},
	{display: '名称', name: 'fundQuota.name', show: true,},
	{display: '指标属性', name: 'fundQuota.quotaTypeStr', show: true,},
	{display: '本期金额', name: 'money', editor: { type: 'float' }}
];
$(function() {
	// Initialise the quota-report detail grid for the report id embedded in the page.
	var reportId = $("#report_id").val();
	var gridOptions = {
		columns: columns,
		url: rootPath + '/fund_quota_report/findReportDetail.shtml',
		checkbox: false,
		usePager: false,
		enabledSort: false,
		parms: { id: reportId },
		width: '100%'
	};
	listGrid = $("#report_table").ligerGrid(gridOptions);
});
<file_sep>/src/main/webapp/js/system/demo/list.js
var pageii = null;
var roleGrid = null;
var roleGrid1 = null;
$(function() {
	// Demo grid #1: filterable grid fed with inline sample data.
	roleGrid = $("#grid1").ligerFilterGrid({
		columns: [
			{display: 'id', name: 'id', width: 100,filterable:false } ,
			{ display: '角色名', name: 'name', width: 200,type:'string'},
			{ display: '状态', name: 'state', width: 200,type:'int',editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}}, data:[
			{ key:0,text:'启用'},{ key:1,text:'禁用'}],
			render: function(rowdata,index,data){
				return data == 1 ? '禁用' : '启用';
			}},
			// NOTE(review): 'roleKey' is declared type 'date' and '描述' type 'number'
			// although the sample records hold plain strings — presumably demo data
			// exercising the editors; confirm before copying this config elsewhere.
			{ display: 'roleKey', name: 'roleKey', minWidth: 200,type:'date',editor: {type:'date'} },
			{ display: '描述', name: 'description', width: 200,type:'number',editor: {type:'number',options:{type: 'int', }} },
		],
		data:{
			records:[
				{id:1,name:'张三',state:0,roleKey:'rolekey',description:'张三'},
				{id:2,name:'李四',state:0,roleKey:'rolekey1',description:'李四'},
				{id:3,name:'王五',state:0,roleKey:'rolekey2',description:'王五'}
			]
		},
		sortName: 'name',
		sortOrder: 'desc'
	});
	// Demo grid #2: master-data grid variant with per-column filter operators.
	roleGrid1 = $("#grid2").ligerMdmGrid({
		columns: [
			{display: 'id', name: 'id', width: 100,filterable:false } ,
			{ display: '角色名', name: 'name', width: 200,type:'string',operator:"equal,notequal" ,editor: {type:'string'}},
			{ display: '状态', name: 'state', width: 200,type:'string',operator:"startwith,endwith",editor: {type:'date',options: null },
			render: function(rowdata,index,data){
				return data == 1 ? '禁用' : '启用';
			}},
			{ display: 'roleKey', name: 'roleKey', minWidth: 200,type:'string',editor: {type:'string'} },
			{ display: '描述', name: 'description', width: 200,editor: {type:'string'}},
		],
		data:{
			records:[
				{id:1,name:'张三',state:0,roleKey:'rolekey',description:'张三'},
				{id:2,name:'李四',state:0,roleKey:'rolekey1',description:'李四'},
				{id:3,name:'王五',state:0,roleKey:'rolekey2',description:'王五'}
			]
		},
		sortName: 'id',
		sortOrder: 'desc',
	});
	$("#pageloading").hide();
	// Toolbar bindings.
	$("#search").click("click", function() {
		searchDemo();
	});
	$("#reset").click("click", function() {
		resetDemo();
	});
	$("#addDemo").click("click", function() {
		addDemo();
	});
	$("#editDemo").click("click", function() {
		editDemo();
	});
	$("#delDemo").click("click", function() {
		delDemo();
	});
	$("#permissions").click("click", function() {
		permissions();
	});
	// Sample data for the search list boxes below.
	var data = [
		{ text: '张三', id: '1' },
		{ text: '李四', id: '2' },
		{ text: '赵武2', id: '3' },
		{ text: '赵武3', id: '4' },
		{ text: '赵武4', id: '5' },
		{ text: '赵武5', id: '6' },
		{ text: '赵武6', id: '7' },
		{ text: '赵武7', id: '8' },
		{ text: '赵武8', id: '9' },
		{ text: '赵武9', id: '10' },
		{ text: '赵武10', id: '11' },
	];
	$("#listbox1, #listbox2" ).ligerSearchListBox({
		isShowCheckBox: true,
		isMultiSelect: true,
		height: 200,
		searchCls: 'form-control',
		enableCheckAll:true,
		title:'标题',
		hasBorder:true,
		data:data
	});
	// listbox3 starts empty; items are moved into it via moveToRight().
	$("#listbox3" ).ligerSearchListBox({
		isShowCheckBox: true,
		isMultiSelect: true,
		height: 200,
		searchCls: 'form-control',
		enableCheckAll:true,
		title:'标题',
		hasBorder:true,
	});
	// Data can also be supplied after creation:
	//liger.get("listbox1").setData(data);
});
/**
 * Run the demo search using the criteria form's serialized values.
 */
function searchDemo(){
	var criteria = $("#searchForm").serializeJson();
	roleGrid.search(criteria);
}
/**
 * Clear the search form, drop any grid parameters, then refetch everything.
 */
function resetDemo(){
	var criteriaForm = $("#searchForm")[0];
	criteriaForm.reset();
	roleGrid.clearParm();
	roleGrid.reloadAll();
}
/**
 * Open the "create" dialog; the grid is handed over so the
 * dialog can refresh it after a successful save.
 */
function addDemo() {
	var dialogOptions = {
		title: '新增',
		url: rootPath + '/demo/addUI.shtml',
		width: 800,
		height: 430,
		data: { grid: roleGrid }
	};
	$.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for exactly one selected row.
 */
function editDemo() {
	var rows = roleGrid.getSelectedRows();
	if (!rows.length) {
		layer.msg("请选中待编辑的项");
		return;
	}
	if (rows.length > 1) {
		layer.msg("只能选中一项进行编辑");
		return;
	}
	$.ligerDialog.open({
		title: '编辑',
		url: rootPath + '/demo/editUI.shtml?id=' + rows[0].id,
		width: 800,
		height: 430,
		data: { grid: roleGrid }
	});
}
/**
 * Delete every selected row after user confirmation.
 */
function delDemo() {
	var rows = roleGrid.getSelectedRows();
	if (rows.length === 0) {
		layer.msg("请选中待删除的项");
		return;
	}
	var idList = $.map(rows, function(row){ return row.id; });
	$.ligerDialog.confirm('确定是否删除?', function(ok) {
		if (ok !== true) {
			return;
		}
		var url = rootPath + '/demo/deleteEntity.shtml?ids=' + idList.join(',');
		if (CommnUtil.ajax(url, {}, "json") === "success") {
			roleGrid.reloadAll();
			layer.msg('删除成功');
		} else {
			layer.msg('删除失败');
		}
	});
}
/**
 * Open the permission-assignment dialog for exactly one selected role.
 */
function permissions() {
	var rows = roleGrid.getSelectedRows();
	if (!rows.length) {
		layer.msg("请至少选中一项");
		return;
	}
	if (rows.length > 1) {
		layer.msg("只能选中一项进行配置");
		return;
	}
	$.ligerDialog.open({
		title: '分配权限',
		url: rootPath + '/demo/permissions.shtml?roleId=' + rows[0].id,
		width: 700,
		height: 540,
		data: { grid: roleGrid }
	});
}
/**
 * Move the items selected in listbox3 back into listbox2.
 * No-op when nothing is selected.
 */
function moveToLeft()
{
	var middleBox = liger.get("listbox2");
	var rightBox = liger.get("listbox3");
	var picked = rightBox.getSelectedItems();
	if (!picked || picked.length === 0) {
		return;
	}
	rightBox.removeItems(picked);
	middleBox.addItems(picked);
}
/**
 * Move the items selected in listbox2 into listbox3.
 * No-op when nothing is selected.
 */
function moveToRight()
{
	var box2 = liger.get("listbox2"), box3 = liger.get("listbox3");
	var selecteds = box2.getSelectedItems();
	if (!selecteds || !selecteds.length) return;
	// (leftover debug console.log removed)
	box2.removeItems(selecteds);
	box3.addItems(selecteds);
}
<file_sep>/src/main/webapp/js/fund/received_payments_initial/list.js
var mainTab = frameElement.tab; // tab object of the surrounding frame
var tab_id = frameElement.id;
var mainData = frameElement.openerData; // mainData object of the surrounding frame
// Column definitions for the initial received-payments grid.
// export_name points at the pre-formatted string field used when exporting.
var columns = [
	{display: '单据编号', name: 'number', width: 300, show: true,
		// Render the bill number as a link that opens the detail dialog.
		render: function(rowdata,index,data){
			return '<a href="#" onclick="detail('+rowdata.id+')">'+data+'</a>';
		}
	},
	{display: '交易日期', name: 'bill_date', width: 100, show: true, export_name:'bill_date_str',
		render: function(rowdata,index,data){
			return new Date(data).format("yyyy-MM-dd");
		}
	},
	{display: '交易时间', name: 'bill_time', width: 100, show: true, export_name:'bill_time_str',
		render: function(rowdata, index, data){
			return data;
		}
	},
	{display: '对方账户名', name: 'opposite_account', width: 100, show: true},
	{display: '回款金额', name: 'money', width: 100, show: true},
	{display: '回款客商', name: 'org_name', width: 200, show: true},
	{display: '已认款金额', name: 'recognize_money', width: 100, show: true},
	{display: '回款方式', name: 'type', width: 100, show: true,export_name:'type_str',
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:1,text:'现金回款'},{ key:2,text:'银行转账'},{ key:3,text:'票据收款'}],
		render:function(rowdata,index,data){
			return {1 : '现金回款', 2 : '银行转账',3:'票据收款'}[data]
		}
	},
	{display: '对方账号', name: 'opposite_account_number', width: 200, show: true},
	{display: '对方银行', name: 'opposite_bank', width: 200, show: true},
	{display: '收款账号', name: 'receive_account_number', width: 100, show: true},
	{display: '收款银行', name: 'receive_bank', width: 200, show: true},
	{display: '收款账户名', name: 'receive_account', width: 200, show: true},
	{display: '回款单状态', name: 'state', width: 100, show: true,export_name:'state_str',
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:0,text:'待允许认款'},{ key:1,text:'待认款'},{ key:2,text:'认款中'},{ key:3,text:'认款完成'},{ key:4,text:'认款失败'}],
		render:function(rowdata,index,data){
			return {0: '待允许认款', 1 : '待认款', 2:'认款中', 3:'认款完成', 4:'认款失败'}[data]
		}
	},
	{display: '生成方式', name: 'generate_type', width: 150, show: true, export_name:'generate_type_str',
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:1,text:'手动新增'},{ key:2,text:'导入'}],
		render:function(rowdata,index,data){
			return {1 : '手动新增',2:'导入'}[data]
		}
	},
	{display: '制单人', name: 'create_user', width: 100, show: true},
	{display: '摘要', name: 'remark', width: 200, show: true},
	{display: '交易流水号', name: 'bill_number', width: 100, show: true},
	{display: '制单时间', name: 'create_date', width: 200, show: true,export_name:'create_date_str'},
	{display: '凭证状态', name: 'voucher_state', width: 150, show: true, export_name:'voucher_state_str',
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:0,text:'未生成'},{ key:1,text:'已生成'},{ key:2,text:'已冲销'}],
		render:function(rowdata,index,data){
			return {0 : '未生成', 1 : '已生成', 2:'已冲销'}[data]
		}
	},
	{display: '传输状态', name: 'transfer_state', width: 150, show: true, export_name:'transfer_state_str',
		editor: {type:'combobox',valueField: 'key' ,textField:'text',options:{selectBoxWidth:180}},
		data:[{ key:0,text:'未传输'},{ key:1,text:'已传输'}],
		render:function(rowdata,index,data){
			return {0 : '未传输', 1 : '已传输'}[data]
		}
	},
	{display: '所属公司', name: 'company', width: 200, show: true}
];
var listGrid; // grid object
$(function () {
	listGrid = $("#gridArea").ligerFilterGrid({
		columns: columns,
		url: rootPath + '/fund/received_payments_initial/findByPage.shtml',
		sortName: 'id',
		sortOrder: 'desc',
		originalColumns: columns, // keep the unmodified column set
		summaryUrl: rootPath + '/common/summary.shtml', // summary endpoint
		summaryTable: 'fund_receive_payment', // summary table (DB table name, not the java entity)
		summaryShowRecordCount : true, // also show the record count
		summary: [ // summarised fields
			{text:'合计回款金额',field:'money'},
			{text:'合计已认款金额',field:'recognize_money'}
		],
		customColumn : { // column (form) configuration dialog
			listUrl : rootPath + '/fund_custom_column/list.shtml', // query url for custom columns
			submitUrl: rootPath + '/fund_custom_column/submit.shtml', // submit url for custom columns
			clazz: 'FundReceivePayment', // unique key the custom columns are stored under
			id: 'formManage', // plugin element id
			dialogTitle: '表单配置', // dialog title
			leftId: 'listBox-left', // left list id (has a default; optional)
			leftTitle: '隐藏的列', // left list title (has a default; optional)
			rightId: 'listBox-right', // right list id (has a default; optional)
			rightTitle: '显示的列', // right list title (has a default; optional)
			callback : function(){ // fired after the configuration is saved
				mainTab.reload(tab_id);
			}
		},
		seniorSearch: { // advanced search, independent of ligerGrid's built-in one
			id: 'seniorSearchButton', // advanced-search plugin id
			searchSight: { // saved-search ("sight") configuration
				id: 'search_sight', // plugin id
				listUrl: rootPath + '/fund_search_sight/listJson.shtml', // sight list url
				submitUrl: rootPath + '/fund_search_sight/addEntity.shtml', // sight submit url
				clazz: 'FundReceivePayment', // unique key the sights are stored under
				callback: function(form){ // fired after a sight is saved
					form.reset();
					mainTab.reload(tab_id);
				}
			}
		}
	});
	$("#pageloading").hide();
	// Enter in the search box triggers a money-equals search.
	$("#search").bind("keydown", function (event) {
		if(event.keyCode == "13") {
			search($(this).val());
		}
	});
	// add
	$("#add").click("click", function () {
		add();
	});
	// edit
	$("#edit").click("click", function () {
		edit();
	});
	// delete
	$("#delete").click("click", function () {
		del();
	});
});
/**
 * Open the "create initial received payment" dialog.
 * The grid and the frame's mainData are handed to the dialog
 * so it can refresh the list after a successful save.
 */
function add() {
	var dialogOptions = {
		title: '新增回款期初数据',
		url: rootPath + '/fund/received_payments_initial/add.shtml',
		width: 800,
		height: 640,
		data: {
			grid: listGrid,
			mainData: mainData
		}
	};
	$.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for exactly one selected bill.
 * Only bills in state 0 (待允许认款) or 3 (认款完成) may be edited.
 */
function edit() {
	var rows = listGrid.getSelectedRows();
	if (rows.length === 0) {
		layer.msg("请选中待编辑的项");
		return;
	}
	if (rows.length > 1) {
		layer.msg("只能选中一项进行编辑");
		return;
	}
	var invalidState = false;
	$.each(rows, function(i, row){
		if (row.state != 0 && row.state != 3) {
			invalidState = true;
		}
	});
	if (invalidState) {
		layer.msg("单据状态不合法,不能修改");
		return;
	}
	$.ligerDialog.open({
		title: '编辑回款期初数据',
		url: rootPath + '/fund/received_payments_initial/edit.shtml?id=' + rows[0].id,
		width: 800,
		height: 640,
		data: { grid: listGrid }
	});
}
/**
 * Open the read-only detail dialog for one bill.
 * @param id bill id (supplied by the link rendered in the number column)
 */
function detail(id){
	var detailUrl = rootPath + '/fund/received_payments_initial/detail.shtml?id=' + id;
	$.ligerDialog.open({
		title: '详情',
		url: detailUrl,
		width: 800,
		height: 640,
		data: { grid: listGrid }
	});
}
/**
 * Delete the selected bills after confirmation.
 * Only manually created bills (generate_type 1) still in state 0
 * (待允许认款) may be deleted; any other selection aborts the action.
 */
function del() {
	var selected = listGrid.getSelectedRows();
	if (selected.length == 0) {
		layer.msg("请选中待删除的项");
		return;
	}
	var exist_error = false;
	$.each(selected, function(index,item){
		if (item.state != 0 || item.generate_type != 1){
			exist_error = true;
			return false; // fixed: bare `return` only skips to the next item; false stops $.each
		}
	});
	if (exist_error) {
		layer.msg("存在状态不合法的单据,不能删除");
		return;
	}
	var ids = $.map(selected, function (item) {
		return item.id;
	});
	ids = ids.join(',');
	$.ligerDialog.confirm('确定是否删除?', function (yes) {
		if (yes == true) {
			var url = rootPath + '/fund/received_payments_initial/delete.shtml?ids=' + ids;
			var result = CommnUtil.ajax(url, {}, "json");
			if (result == "success") {
				listGrid.reloadAll();
				layer.msg('删除成功');
			} else {
				layer.msg('删除失败');
			}
		}
	});
}
/**
 * Quick search: filter the grid on exact money match.
 * An empty value clears the filter.
 * @param value the amount typed into the search box
 */
function search(value) {
	var filter = { rules: [] };
	if (value) {
		filter.rules.push({
			field: 'money', op: 'equal', value: value, type: 'float'
		});
	}
	listGrid.custom_rules = filter;
	listGrid.searchData();
}
<file_sep>/src/main/webapp/js/system/role/list.js
var pageii = null;
var roleGrid = null;
$(function() {
	roleGrid = $("#gridArea").ligerGrid({
		columns: [
			{display: 'id', name: 'id', width: 100 } ,
			{ display: '角色名', name: 'name', width: 200 },
			// NOTE(review): here state 1 renders as '启用' (enabled), which is the
			// opposite of the demo grid's mapping — confirm which matches the DB.
			{ display: '状态', name: 'state', width: 200,render: function(rowdata,index,data){
				return data == 1 ? '启用' : '禁用';
			}},
			{ display: 'roleKey', name: 'roleKey', minWidth: 200, },
			{
				display : "角色分类",
				name : "roleType",
				// Map the numeric role type to its display name.
				render: function(rowdata,index,data) {
					var roleTypeName="";
					switch (rowdata.roleType){
						case 0:roleTypeName="超级管理员";break;
						case 1:roleTypeName="基础角色";break;
						case 2:roleTypeName="分级管理员";break;
					}
					return roleTypeName;
				}
			},
			{ display: '描述', name: 'description', width: 200,},
		],
		onSelectRow :onSelectRow, // blocks selection of the super-admin role
		url: rootPath + '/role/findByPage.shtml',
		sortName: 'id',
		sortOrder: 'desc',
	});
	$("#pageloading").hide();
	// Toolbar bindings.
	$("#search").click("click", function() {
		searchRole();
	});
	$("#reset").click("click", function() {
		resetRole();
	});
	$("#addFun").click("click", function() {
		addRole();
	});
	$("#editFun").click("click", function() {
		editRole();
	});
	$("#delFun").click("click", function() {
		delRole();
	});
	$("#permissionsFun").click("click", function() {
		permissions();
	});
	$("#enableFun").click("click", function() {
		enableFun();
	});
	$("#disableFun").click("click", function() {
		disableFun();
	});
});
/**
 * Selection guard: the super-admin role (roleType 0) may never be
 * selected, so it cannot be edited or deleted from this list.
 */
function onSelectRow(rowdata, rowindex, rowobj) {
	if (rowdata.roleType != 0) {
		return;
	}
	roleGrid.unselect(rowobj);
	alert("超管角色不许修改");
}
/**
 * Run the role search via the shared search helper,
 * using the criteria form's current values.
 */
function searchRole(){
	var criteriaForm = $("#searchForm");
	LG.searchClick(criteriaForm, roleGrid);
}
/**
 * Clear the search form, drop any grid parameters, then refetch everything.
 */
function resetRole(){
	var criteriaForm = $("#searchForm")[0];
	criteriaForm.reset();
	roleGrid.clearParm();
	roleGrid.reloadAll();
}
/**
 * Open the "create role" dialog; the grid is handed over so
 * the dialog can refresh it after a successful save.
 */
function addRole() {
	var dialogOptions = {
		title: '新增',
		url: rootPath + '/role/addUI.shtml',
		width: 800,
		height: 430,
		data: { grid: roleGrid }
	};
	$.ligerDialog.open(dialogOptions);
}
/**
 * Open the edit dialog for exactly one selected role.
 */
function editRole() {
	var rows = roleGrid.getSelectedRows();
	if (!rows.length) {
		layer.msg("请选中待编辑的项");
		return;
	}
	if (rows.length > 1) {
		layer.msg("只能选中一项进行编辑");
		return;
	}
	$.ligerDialog.open({
		title: '编辑',
		url: rootPath + '/role/editUI.shtml?id=' + rows[0].id,
		width: 800,
		height: 430,
		data: { grid: roleGrid }
	});
}
/**
 * Delete every selected role after confirmation.
 */
function delRole() {
	var rows = roleGrid.getSelectedRows();
	if (rows.length === 0) {
		layer.msg("请选中待删除的项");
		return;
	}
	var idList = $.map(rows, function(row){ return row.id; });
	$.ligerDialog.confirm('确定是否删除?', function(ok) {
		if (ok !== true) {
			return;
		}
		var url = rootPath + '/role/deleteEntity.shtml?ids=' + idList.join(',');
		if (CommnUtil.ajax(url, {}, "json") === "success") {
			roleGrid.reloadAll();
			layer.msg('删除成功');
		} else {
			layer.msg('删除失败');
		}
	});
}
/**
 * Enable every selected role after confirmation.
 */
function enableFun() {
	var rows = roleGrid.getSelectedRows();
	if (rows.length === 0) {
		layer.msg("请选中待启用的项");
		return;
	}
	var idList = $.map(rows, function(row){ return row.id; });
	$.ligerDialog.confirm('确定是否启用?', function(ok) {
		if (ok !== true) {
			return;
		}
		var url = rootPath + '/role/enableEntity.shtml?ids=' + idList.join(',');
		if (CommnUtil.ajax(url, {}, "json") === "success") {
			roleGrid.reloadAll();
			layer.msg('启用成功');
		} else {
			layer.msg('启用失败');
		}
	});
}
/**
 * Disable every selected role after confirmation.
 */
function disableFun() {
	var rows = roleGrid.getSelectedRows();
	if (rows.length === 0) {
		layer.msg("请选中待禁用的项");
		return;
	}
	var idList = $.map(rows, function(row){ return row.id; });
	$.ligerDialog.confirm('确定是否禁用?', function(ok) {
		if (ok !== true) {
			return;
		}
		var url = rootPath + '/role/disableEntity.shtml?ids=' + idList.join(',');
		if (CommnUtil.ajax(url, {}, "json") === "success") {
			roleGrid.reloadAll();
			layer.msg('禁用成功');
		} else {
			layer.msg('禁用失败');
		}
	});
}
/**
 * Open the permission-assignment dialog for exactly one selected role.
 */
function permissions() {
	var rows = roleGrid.getSelectedRows();
	if (!rows.length) {
		layer.msg("请至少选中一项");
		return;
	}
	if (rows.length > 1) {
		layer.msg("只能选中一项进行配置");
		return;
	}
	$.ligerDialog.open({
		title: '分配权限',
		url: rootPath + '/role/permissions.shtml?roleId=' + rows[0].id,
		width: 700,
		height: 540,
		data: { grid: roleGrid }
	});
}
<file_sep>/src/main/webapp/js/fund/cost_ticket_adjust/add.js
// Handles to this dialog window and to the grid that opened it.
var thisDialog = frameElement.dialog; // current dialog window
var targetGrid = thisDialog.get('data').grid; // caller's list grid (reloaded after save)
var form_validator; // jQuery-validate instance, assigned on DOM ready
//Page bootstrap: wire up form controls, build the document-picker dialog grid
//and install the jQuery-validate rules for the add form.
$(function() {
    //Open the cost-ticket picker dialog
    $("#relate_cost_ticket_adjust").click(clickCostTicket);
    //Mirror the selected organisation's display text into the #apply_org field
    $("#apply_org_id").bind('change',function(event){
        $("#apply_org").val($(this).find("option:selected").text());
    });
    //Recompute the derived amount when the price-and-tax total (#b) loses focus
    $("#b").blur(function(){
        console.log(11)
        aa();
    });
    //Also recompute when Enter is pressed inside the total-amount input
    $("input[name='jiashui_total_monye']").bind("keydown", function (event) {
        //event.preventDefault();
        if(event.keyCode == "13") {
            aa();
        }
    });
    //Column model for the document-picker grid created below
    var columns = [
        {display: '单据编号', name: 'number', width: 300, show: true},
        {display: '费用票类型', name: 'cost_ticket_type_manager', width: 300, show: true},
        {display: '价税合计金额', name: 'jiashui_total_monye', width: 300, show: true},
        {display: '业务部门', name: 'operation_org', width: 300, show: true},
        {display: '发票类型', name: 'receipt_type', width: 300, show: true},
        {display: '发票号码', name: 'receipt_number', width: 300, show: true},
        {display: '开票日期', name: 'created_time', width: 300, show: true, export_name:'created_time_str',
            render: function(rowdata,index,data){
                return new Date(data).format("yyyy-MM-dd");
            }
        },
        {display: '税额', name: 'tax_amount', width: 300, show: true},
        {display: '发票明细', name: 'receipt_details', width: 300, show: true},
        {display: '销售方名称', name: 'sell_name', width: 300, show: true},
        {display: '操作人', name: 'create_user', width: 300, show: true},
        {display: '提交时间', name: 'submit_time', width: 300, show: true},
        {display: '单据状态', name: 'state', width: 300, show: true},
    ];
    //Open a dialog listing submitted documents (state == 1) and copy the
    //chosen row into the main form
    $("#relate_receive_payment").click(function(){
        var grid = null;
        var dialogGridContainer = $('<div id="dialogGrid" ></div>');
        var grid_columns = $.extend([],columns);
        //Pre-set filter: only submitted documents are selectable
        var groupData = { };
        var rules = [];
        rules.push({"field":"state","op":"equal","value":1,"type":"int"});
        groupData.rules = rules;
        groupData.op = "and";
        //Create the dialog first; its grid is initialised right after
        var dialog = $.ligerDialog.open({
            title: '选择单据',
            content: dialogGridContainer,
            width: 800,
            height: 500,
            opener: thisDialog,
            buttons:[
                {
                    text:'确定',
                    cls:'btn btn-success',
                    onclick:function(){
                        if (!grid){
                            layer.msg('获取数据失败');
                        }
                        else{
                            var selected_rows = grid.getSelectedRows();
                            if(selected_rows.length == 0){
                                layer.msg("请至少选择一条数据");
                                return;
                            }
                            else{
                                updateMainForm(selected_rows[0]);
                                dialog.close();
                            }
                        }
                    }
                },
                {
                    text:'取消',
                    onclick:function(){
                        dialog.close();
                    }
                }
            ],
            /*data: {
                'grid':grid
            }*/
        });
        //Initialise the grid inside the dialog (after the container exists in the DOM)
        grid = $("#dialogGrid").ligerGrid({
            columns: grid_columns,
            url: rootPath + '/fund_cost_ticket_adjust/findByPage.shtml',
            parms:{
                where: JSON2.stringify(groupData)
            },
            sortName: 'id',
            sortOrder: 'desc',
            height:400,
        });
    });
    //The reset button simply closes this dialog
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    //jQuery-validate setup; the submitHandler must precede the rules so the
    //form is posted via ajax rather than a full page submit
    form_validator = $("#form").validate({
        errorClass:'error-msg',
        submitHandler : function(form) {
            ly.ajaxSubmit(form, {
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "apply_org_id" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "dept_charge_user" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "receipt_user" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "apply_money" : {
                required : true,
                number: true
            },
            "remark" : {
                maxlength: 255,
            }
        },
        messages : {
            "apply_org_id" : {
                required : "申请部门不能为空",
                validateNullOrWhiteSpace: "申请部门不能为空",
            },
            "dept_charge_user" : {
                required : "部门负责人不能为空",
                validateNullOrWhiteSpace: "部门负责人不能为空",
            },
            "receipt_user" : {
                required : "收款户名不能为空",
                validateNullOrWhiteSpace: "收款户名不能为空",
            },
            "apply_money" : {
                required : "申请提现金额不能为空",
                number: "申请提现金额只能输入数字"
            },
            "remark" : {
                maxlength: "备注过长",
            }
        },
        //Put each validation message in its own row under the field's form-group
        errorPlacement : function(error, element) {
            var error_container = $("<div><label class='col-xs-4'></label></div>");
            error.addClass('col-xs-6');
            error_container.append(error);
            element.closest('.form-group').append(error_container);
        },
        success : function(label) {
            label.remove();
        },
    });
});
/**
 * Auto-calculate the net amount field (#c) as the price-and-tax total (#b)
 * minus the tax amount (#a). Triggered on blur of #b and on Enter inside
 * the total-amount input (see the bindings in the ready handler).
 *
 * Cleanup: removed leftover console.log debug output and made the numeric
 * conversion explicit (Number('') === 0, matching the original implicit
 * string-subtraction coercion for empty fields).
 */
function aa(){
    var tax = Number($("#a").val());    // tax amount
    var total = Number($("#b").val());  // price-and-tax total
    $("#c").val(total - tax);           // derived net amount
}
/**
 * Handler for the "choose cost ticket" button: opens the cost-ticket
 * detail picker page in a liger dialog.
 */
function clickCostTicket(){
    var options = {};
    options.title = '选择费用票明细';
    options.url = rootPath + '/fund_cost_ticket_adjust/extract.shtml';
    options.width = 800;
    options.height = 760;
    $.ligerDialog.open(options);
}
/**
 * Clear the add form back to its pristine state, including the
 * organisation id field which a plain form reset() does not restore.
 */
function resetForm(){
    var form = $("#form")[0];
    form.reset();
    $("#apply_org_id").val('');
}
/**
 * Copy the chosen document record into the main form fields.
 * Note: the "cost_ticket_type" input deliberately receives the document
 * number, matching the original field mapping.
 */
function updateMainForm(data){
    var fieldMap = {
        cost_ticket_type: data.number,
        receipt_type: data.receipt_type,
        jiashui_total_monye: data.jiashui_total_monye,
        tax_amount_manager: data.tax_amount_manager,
        receipt_number_manager: data.receipt_number_manager,
        receipt_details_manager: data.receipt_details_manager,
        created_time_manager: data.created_time_manager,
        sell_name_manager: data.sell_name_manager
    };
    $.each(fieldMap, function(fieldName, value){
        $("input[name=" + fieldName + "]").val(value);
    });
}
/**
 * Open the cost-ticket detail page in a fixed-size modal dialog.
 * Dragging is disabled by nulling the ligerDrag plugin first.
 */
function borrowDocument(){
    $.fn.ligerDrag = null;
    var pageUrl = rootPath + "/fund_cost_ticket_adjust/extract.shtml";
    var settings = {
        title: '费用票明细',
        url: pageUrl,
        modal: true,
        isResize: false,
        width: 750,
        height: 580,
        opener: thisDialog
    };
    $.ligerDialog.open(settings);
}
<file_sep>/src/main/webapp/js/fund/cost_ticket_adjust/extract.js
var listGrid, //picker grid instance
    custom_rules = {}, //accumulated custom query rules
    clazz = 'FundCostTicketManager', //default entity class
    default_params = {}; //default query parameters
//Column model of the cost-ticket picker grid
var columns = [
    {display: '单据编号', name: 'number', width: 300, show: true,
        render: function(rowdata,index,data){
            return data;
        }
    },
    {display: '费用票类型', name: 'cost_ticket_type', width: 300, show: true},
    {display: '价税合计金额', name: 'jiashui_total_monye', width: 300, show: true},
    {display: '业务部门', name: 'operation_org', width: 300, show: true},
    {display: '发票类型', name: 'receipt_type', width: 300, show: true},
    {display: '发票号码', name: 'receipt_number', width: 300, show: true},
    {display: '开票日期', name: 'created_time', width: 300, show: true, export_name:'created_time_str',
        render: function(rowdata,index,data){
            return new Date(data).format("yyyy-MM-dd");
        }
    },
    {display: '税额', name: 'tax_amount', width: 300, show: true},
    {display: '发票明细', name: 'receipt_details', width: 300, show: true},
    {display: '销售方名称', name: 'sell_name', width: 300, show: true},
    {display: '操作人', name: 'create_user', width: 300, show: true},
    {display: '提交时间', name: 'submit_time', width: 300, show: true},
    {display: '单据状态', name: 'state', width: 300, show: true},
];
//Handles exposed by the enclosing frame/tab infrastructure
var mainTab = frameElement.tab;          //owning tab container
var tab_id = frameElement.id;            //id of this tab
var mainData = frameElement.openerData;  //data handed over by the opener
var thisDialog = frameElement.dialog;    //this dialog window
$(function () {
    //Delegate toolbar clicks inside #log to their handlers
    var handlers = {
        "#search": clickSeach,
        "#clear": clickClear,
        "#reset": clickReset,
        "#submit": clickSubmit
    };
    $.each(handlers, function(selector, fn){
        $("#log").on("click", selector, fn);
    });
    borrowInit();
});
/**
 * Push the selected cost-ticket record back into the opener (parent) form.
 * In the parent page: #a = tax amount, #b = price-and-tax total,
 * #c = derived net amount.
 *
 * Fix: #c was computed as a-b (tax minus total, i.e. a negative number);
 * the companion add.js helper aa() computes total minus tax (b-a), so the
 * same formula is used here for consistency. Leftover console.log and
 * commented-out selector experiments were removed.
 */
function showData(data){
    var parentDoc = window.parent.document;
    $("input[name='number']", parentDoc).val(data.number);
    $("input[name='cost_ticket_type']", parentDoc).val(data.cost_ticket_type);
    $("#b", parentDoc).val(data.jiashui_total_monye);
    $("#a", parentDoc).val(data.tax_amount);
    var tax = Number($("#a", parentDoc).val());
    var total = Number($("#b", parentDoc).val());
    $("#c", parentDoc).val(total - tax);
    $("input[name='operation_org']", parentDoc).val(data.operation_org);
    $("input[name='receipt_type']", parentDoc).val(data.receipt_type);
    $("input[name='receipt_number']", parentDoc).val(data.receipt_number);
    $("input[name='created_time']", parentDoc).val(data.created_time);
    $("input[name='receipt_details']", parentDoc).val(data.receipt_details);
    $("input[name='sell_name']", parentDoc).val(data.sell_name);
    $("input[name='create_user']", parentDoc).val(data.create_user);
    $("input[name='submit_time']", parentDoc).val(data.submit_time);
    $("input[name='state']", parentDoc).val(data.state);
}
/**
 * Confirm button: push the single selected cost ticket back to the
 * opener form and close this dialog.
 */
function clickSubmit(){
    var rows = listGrid.getSelectedRows();
    if (rows.length == 0) {
        layer.msg("请先选择费用票");
        return;
    }
    showData(rows[0]);
    thisDialog.close();
}
/**
 * Cancel button: dismiss this picker dialog without selecting anything.
 */
function clickReset(){
    thisDialog.close();
}
/**
 * Reset both search inputs (document number and amount) to empty.
 */
function clickClear(){
    $("#number, #money").val('');
}
/**
 * Search button: translate the filter inputs into query rules and run the
 * grid search. Blank inputs contribute no rule.
 */
function clickSeach(){
    var conditions = [];
    var numberTerm = $("#number").val();
    var moneyTerm = $("#money").val();
    if(numberTerm){
        conditions.push({field: 'number', op: 'like', value: numberTerm, type:'string'});
    }
    if(moneyTerm){
        conditions.push({field: 'jiashui_total_monye', op: 'equal', value: moneyTerm, type:'float'});
    }
    custom_rules.rules = conditions;
    searchData();
}
/**
 * Collect every active query rule, drop duplicates (same field/op/value)
 * and ask the grid to reload with the combined AND filter.
 */
function searchData(){
    var seen = {};
    var unique = [];
    $.each(custom_rules.rules, function(idx, rule){
        var key = rule.field + "_" + rule.op + "_" + rule.value;
        if(!seen[key]){
            seen[key] = true;
            unique.push(rule);
        }
    });
    var groupData = {rules: unique, op: "and"};
    listGrid.search({where: JSON2.stringify(groupData)});
}
/**
 * Build the cost-ticket list grid shown inside this picker dialog.
 */
function borrowInit(){
    var gridOptions = {
        columns: columns,
        url: rootPath + '/fund_cost_ticket_adjust/findCostTicket.shtml',
        parms: default_params,
        height: 370,
        sortName: 'id',
        sortOrder: 'desc',
        isSingleCheck: true
    };
    listGrid = $("#log_grid").ligerFilterGrid(gridOptions);
}
/**
 * Reset the picker state back to its defaults.
 *
 * NOTE(review): `cost_ticket_manager_id` is not defined anywhere in this
 * file, so calling this function as written throws a ReferenceError;
 * borrowType / borrowTitle / title are also created as implicit globals
 * via the comma-chained assignment below. Confirm against the page that
 * includes this script before relying on it.
 */
function clearBorrow(){
    if(listGrid){
        delete listGrid.winfilter;
        delete listGrid.filter;
    }
    listGrid = {}, //grid handle
    default_params = {}, //default query parameters
    clazz = 'FundCostTicketManager', //default entity class
    borrowType = cost_ticket_manager_id,
    borrowTitle="借款修改(线上)", //default edit title
    title="借款新增(线上)"; //default title
}
//Placeholder behaviour for the quick-search box: hide the hint label on
//focus and restore it on blur when the input is left empty.
$(function() {
    var input = $('#searchKey');
    var hint = $('#lbSearch');
    input.focus(function() {
        hint.text('');
    });
    input.blur(function() {
        var trimmed = $.trim($(this).val());
        if (trimmed == '') {
            hint.text('搜神马?');
        }
    });
})
<file_sep>/src/main/webapp/js/system/person/list.js
var listGrid, //person grid instance
    grid_data, //raw records as loaded from the server (used by the client-side filter)
    filter_grid_data, //client-side filtered records
    tree, //organisation tree instance
    custom_rules = {}, //accumulated custom query rules
    tree_select_ids = []; //ids of the selected tree node and all of its descendants
//Page bootstrap: size the centre layout, load the organisation tree on the
//left, build the person grid and bind the toolbar buttons.
$(function() {
    //Reserve 205px for the tree panel on the left
    $(".l-layout-center").width($("#layout1").width()-205);
    /*$("#code,#name,#idNumber,#orgId").bind("keydown", function (event) {
        if(event.keyCode == "13") {
            search();
        }
    });*/
    //Load and render the organisation tree
    var url = rootPath + '/organization/orgTrees.shtml';
    var data = CommnUtil.ajax(url, null,"json");
    if (data.result>0) {
        var list=data.list;
        getTree(list);
    } else {
        layer.msg("获取菜单信息错误,请联系管理员!");
    }
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            { display: '编码', name: 'code', width: 200 },
            { display: '姓名', name: 'name', width: 200,},
            { display: '性别', name: 'sex', width: 200,
                render:function(rowdata,index,data){
                    return data ? '男' : '女';
                }
            },
            { display: '身份证号', name: 'idNumber', width: 200,},
            { display: '归属组织', name: 'organization', width: 200,},
            { display: '来源', name: 'source', width: 200,
                render:function(rowdata,index,data){
                    return { 1 :'本地新增',2:'MDM同步'}[data]
                }
            },
            { display: '状态', name: 'status', width: 200,
                render:function(rowdata,index,data){
                    return {0 : '草稿', 1 : '启用',2:'禁用'}[data]
                }
            },
            { display: '创建人', name: 'creatorName', width: 200,},
            { display: '创建日期', name: 'gmtCreate', width: 200,
                render: function(rowdata,index,data){
                    return new Date(data).format("yyyy-MM-dd");
                }
            },
        ],
        url: rootPath + '/person/findByPage.shtml',
        sortOrder: 'desc',
        onLoaded:function(g){
            g.toggleLoading(false);
            //Keep the raw records for the client-side search() filter
            grid_data = g.data.records;
        }
    });
    $("#pageloading").hide();
    //Toolbar bindings. NOTE(review): .click("click", fn) passes the string
    //"click" as event data rather than as the event name; it still binds a
    //click handler, just with unintended eventData.
    $("#search").bind("click", function (event) {
        search();
    });
    $("#reset").click("click", function() {
        reset();
    });
    $("#add").click("click", function() {
        addPerson();
    });
    $("#edit").click("click", function() {
        editPerson();
    });
    $("#delete").click("click", function() {
        delPerson();
    });
    //Enable the selected rows
    $("#enable").click("click", function () {
        enable();
    });
    //Disable the selected rows
    $("#disable").click("click", function () {
        disable();
    });
});
/**
 * Client-side filter of the already-loaded grid rows by code / name /
 * ID number. Empty search fields match everything.
 *
 * Robustness fix: the original called .indexOf directly on item.code /
 * item.name / item.idNumber and threw a TypeError for rows where a field
 * is null or undefined; such rows are now simply treated as non-matching.
 */
function search() {
    var code = $("#code").val();
    var name = $("#name").val();
    var idNumber = $("#idNumber").val();
    //true when the (possibly missing) field value contains the term;
    //an empty term always matches
    var contains = function(value, term) {
        if (!term) return true;
        return String(value == null ? '' : value).indexOf(term) > -1;
    };
    filter_grid_data = grid_data.filter(function(item){
        return contains(item.code, code)
            && contains(item.name, name)
            && contains(item.idNumber, idNumber);
    });
    listGrid.loadData({'records':filter_grid_data});
}
/**
 * Build the server-side query from the tree selection (#orgId) plus any
 * rules accumulated in custom_rules, de-duplicate them, switch the grid
 * back to server data and reload. The quick-search form is reset after.
 */
function searchData(){
    var rules = [];
    var orgId = $("#orgId").val();
    if(orgId){
        rules.push({field: 'organizationId', op: 'in', value: orgId, type:'string'});
    }
    custom_rules.rules = rules;
    var seen = {};
    var unique = [];
    $.each(custom_rules.rules, function(idx, rule){
        var key = rule.field + "_" + rule.op + "_" + rule.value;
        if(!seen[key]){
            seen[key] = true;
            unique.push(rule);
        }
    });
    var groupData = {rules: unique, op: "and"};
    //Return the grid to server-side mode after any client-side filtering
    listGrid.options.dataType = 'server';
    listGrid.search({where: JSON2.stringify(groupData)});
    $("#searchForm")[0].reset();
}
/**
 * Render the organisation tree in the left panel.
 * @param data flat list of org nodes (id / parentOrgId / orgName)
 */
function getTree(data) {
    var treeOptions = {
        data: data,
        idFieldName: 'id',
        parentIDFieldName: 'parentOrgId',
        textFieldName: 'orgName',
        onSelect: onSelect, //reload the grid when a node is picked
        isExpand: 2,
        checkbox: false,
    };
    tree = $("#tree1").ligerTree(treeOptions);
}
/**
 * Tree selection handler: collect the chosen node plus all descendants
 * into #orgId (comma separated) and refresh the person grid.
 */
function onSelect(note)
{
    tree_select_ids = [];
    trace(note.data);
    var idCsv = tree_select_ids.join(",");
    $("#orgId").val(idCsv);
    searchData();
}
/**
 * Depth-first walk that appends the id of every descendant of `node`
 * (children first) and finally node's own id to the shared
 * tree_select_ids accumulator.
 */
function trace(node){
    var children = node.children || [];
    children.forEach(function (child) {
        trace(child);
    });
    tree_select_ids.push(node.id)
}
/**
 * Clear the quick-search form and re-run the client-side filter so the
 * grid shows every loaded row again.
 */
function reset(){
    var form = $("#searchForm")[0];
    form.reset();
    search();
}
/**
 * Open the "add person" dialog, pre-scoped to the currently selected
 * organisation; the dialog receives the grid so it can reload it on save.
 */
function addPerson() {
    var dialogUrl = rootPath + '/person/add.shtml?orgId=' + $("#orgId").val();
    $.ligerDialog.open({
        title: '新增人员',
        url: dialogUrl,
        width: 800,
        height: 500,
        data: {
            grid: listGrid
        }
    });
}
/**
 * Open the edit dialog for exactly one selected person. Rows that are
 * enabled (status == 1) cannot be edited; only draft or disabled rows.
 */
function editPerson() {
    var selected = listGrid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    var row = selected[0];
    if(row.status == 1){
        layer.msg("只能修改草稿或者禁用的行");
        return;
    }
    $.ligerDialog.open({
        title: '编辑人员',
        url: rootPath + '/person/edit.shtml?id=' + row.id,
        width: 800,
        height: 500,
        data: {
            grid: listGrid
        }
    });
}
/**
 * Delete the selected person rows after confirmation. Only draft rows
 * (status == 0) may be deleted.
 *
 * Bug fix: the original tested item.state, but person rows carry `status`
 * (see the grid column model and enable()/disable() in this file); state
 * was always undefined, so every selection was rejected and nothing could
 * ever be deleted. Also returns false from $.each to stop iterating once
 * an invalid row is found.
 */
function delPerson() {
    var selected = listGrid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待删除的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        if (item.status != 0){
            exist_error = true;
            return false; //stop: one invalid row invalidates the whole selection
        }
    });
    if(!exist_error){
        var ids = $.map(selected,function(item){return item.id;});
        $.ligerDialog.confirm('确定是否删除?', function(yes) {
            if(yes == true) {
                var url = rootPath + '/person/deleteEntity.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('删除成功');
                } else {
                    layer.msg('删除失败');
                }
            }
        });
    }
    else{
        layer.msg("只能删除草稿状态的数据");
    }
}
/**
 * Enable the selected person rows after confirmation. A row that is
 * already enabled (status == 1) invalidates the whole selection.
 */
function enable() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待启用的项");
        return;
    }
    var invalid = false;
    $.each(selected, function(index, row){
        if (row.status == 1){
            invalid = true;
            return;
        }
    });
    if(invalid){
        layer.msg("存在状态不合法的单据,不能启用");
        return;
    }
    var ids = $.map(selected, function (row) {
        return row.id;
    });
    $.ligerDialog.confirm('确定是否启用?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/person/enable.shtml?ids=' + ids;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('启用成功');
            } else {
                layer.msg('启用失败');
            }
        }
    });
}
/**
 * Disable the selected person rows after confirmation. Only currently
 * enabled rows (status == 1) may be disabled.
 */
function disable() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待禁用的项");
        return;
    }
    var invalid = false;
    $.each(selected, function(index, row){
        if (row.status != 1){
            invalid = true;
            return;
        }
    });
    if(invalid){
        layer.msg("存在状态不合法的单据,不能禁用");
        return;
    }
    var ids = $.map(selected, function (row) {
        return row.id;
    }).join(',');
    $.ligerDialog.confirm('确定是否禁用?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/person/disable.shtml?ids=' + ids;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('禁用成功');
            } else {
                layer.msg('禁用失败');
            }
        }
    });
}
<file_sep>/src/main/webapp/js/fund/auto_approval/add.js
//Handles to this dialog window and to the opener page's grid
var thisDialog = frameElement.dialog; //current dialog window
var targetGrid = thisDialog.get('data').grid; //opener's list grid, reloaded after a successful save
//Dialog bootstrap: close button, custom select validator, ajax form
//submission, and the billType -> approvalNodeOrder -> approvalPersonId
//cascading selects.
$(function() {
    //The reset button simply closes this dialog
    $("button[type='reset']").click(function(event){
        thisDialog.close();
    });
    //Custom rule: a select left on value -1 ("please choose") is invalid
    jQuery.validator.addMethod("checkSelected", function(value, element, param) {
        if(value!=-1){
            return true;
        } else {
            return false;
        }
    });
    $("form").validate({
        ignore: [], //validate hidden fields too
        errorClass:'error-msg',
        //Must precede the rules, otherwise the form is not posted via ajax
        submitHandler : function(form) {
            ly.ajaxSubmit(form, {
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        resetForm();
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('添加成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!' + data);
                    }
                }
            });
        },
        rules : {
            "lyOrganization" : {
                required : true,
                validateNullOrWhiteSpace: true,
            },
            "billType" : {
                required : true,
                checkSelected: true,
            },
            "approvalNodeOrder" : {
                required : true,
                checkSelected: true,
            },
            "approvalPersonId" : {
                required : true,
                checkSelected: true,
            },
            "approvalDely" : {
                required : true,
                number: true,
            }
        },
        messages : {
            "lyOrganization" : {
                required : "所属公司不能为空",
                validateNullOrWhiteSpace: "所属公司不能为空",
            },
            "billType" : {
                required : "必须选择一个审批单据",
                checkSelected: "必须选择一个审批单据",
            },
            "approvalNodeOrder" : {
                required : "必须选择一个审批节点",
                checkSelected: "必须选择一个审批节点",
            },
            "approvalPersonId" : {
                required : "必须选择一个审核人员账号",
                checkSelected: "必须选择一个审核人员账号",
            },
            "approvalDely" : {
                required : "必须填写一个审核延长时间",
                number: "只能输入数字",
            },
        },
        errorPlacement : function(error, element) {
            //error.addClass('col-sm-4');
            element.closest('.form-group').append(error);
        },
        success : function(label) {
            label.remove();
        },
    });
    //When the bill type changes, (re)load the approval-node options
    $('#billType').change(function () {
        if($(this).val()==-1){
            $('#approvalNodeOrder').empty();
            $('#approvalNode').val('-1');
        }else {
            ly.ajax({
                async: false, //keep synchronous: later code relies on this response
                type: "POST",
                data: "",
                url: rootPath + '/fund_approval_flow/getApprovalFlowNodes.shtml?approvalFlowId=' + $(this).val(),
                dataType: 'json',
                success: function (json) {
                    if(json.length>0) {
                        var init_option = $("<option>").val('-1').text('=请选择=');
                        $('#approvalNodeOrder').append(init_option);
                        for (var i in json) {
                            var option = $("<option>");
                            option.val(json[i].orderNum);
                            option.text(json[i].nodeName);
                            $('#approvalNodeOrder').append(option);
                            //Remember the flow-config id for the person lookup below
                            //(overwritten each iteration; the last node's id wins)
                            $('#approvalNode').val(json[i].fundApprovalFlowConfig.id);
                        }
                    }else{
                        $('#approvalNodeOrder').empty();
                        $('#approvalNode').val('-1');
                    }
                }
            });
        }
    });
    //When the node changes, (re)load the approver options for that node
    $('#approvalNodeOrder').change(function () {
        if($(this).val()!=-1){
            $('#approvalPersonId').empty();
            ly.ajax({
                async: false, //keep synchronous: later code relies on this response
                type: "POST",
                data: "",
                url: rootPath + '/fund_approval_flow/getPersonByNode.shtml?approvalFlowId=' + $('#approvalNode').val() + "&orderNum=" + $(this).val(),
                dataType: 'json',
                success: function (json) {
                    if(json.length>0) {
                        var init_option = $("<option>").val('-1').text('=请选择=');
                        $('#approvalPersonId').append(init_option);
                        for (var i in json) {
                            var option = $("<option>");
                            option.val(json[i].id);
                            option.text(json[i].name);
                            $('#approvalPersonId').append(option);
                        }
                    }else{
                        $('#approvalPersonId').empty();
                        $('#approvalPerson').val('');
                    }
                }
            });
        }
    });
    //Mirror the chosen approver's display name into the #approvalPerson field
    $('#approvalPersonId').change(function () {
        if($(this).val()!=-1){
            $('#approvalPerson').val($("#approvalPersonId").find("option:selected").text());
        }else{
            $('#approvalPerson').val('')
        }
    });
});
/**
* 重置新增表单
*/
function resetForm(){
$("#form")[0].reset();
}<file_sep>/src/main/webapp/js/system/user/edit.js
//Handles to this dialog window and to the opener page's grid
var thisDialog = frameElement.dialog; //current dialog window
var targetGrid = thisDialog.get('data').grid; //opener's user grid, reloaded after a successful update
//Wire the "replace person" button and the jQuery-validate based ajax submit.
$(function() {
    $("#repeat").bind("click", function() {
        repeat();
    });
    $("form").validate({
        //Must precede the rules, otherwise the form is not posted via ajax;
        //ajaxSubmit also carries any file upload in the form
        submitHandler : function(form) {
            ly.ajaxSubmit(form, {
                type : "post",
                dataType : "json",
                success : function(data) {
                    if (data == "success") {
                        targetGrid.reloadAll();
                        $.ligerDialog.confirm('更新成功!是否关闭窗口?', function(yes) {
                            if(yes == true){
                                thisDialog.close();
                            }
                        });
                    } else {
                        $.ligerDialog.error('添加失败!');
                    }
                }
            });
        },
        //Show validation messages in the shared .l_err banner
        errorPlacement : function(error, element) {
            $(".l_err").show();
            $(".l_err").html(error.html());
        },
        //Hide the banner once validation passes
        success : function(label) {
            $(".l_err").hide();
        }
    });
});
/**
 * Replace the bound person: ask the server for the replacement of the
 * current personId, fill the form with the result and disable the button
 * so the action can only run once. Shows a message when no replacement
 * person exists.
 */
function repeat(){
    var personId = $('#personId').val();
    var url = rootPath + '/user/repeatPerson.shtml?personId=' + personId;
    var data = CommnUtil.ajax(url, null, "json");
    if (!data) {
        layer.msg("系统无相关人员不能替换!");
        return;
    }
    var person = JSON2.parse(data);
    $('#person').val(person.fullOrganization);
    $('#personId').val(person.id);
    $('#repeat').attr("disabled","disabled");
    layer.msg("替换成功!");
}
/*function bindCompany(id){
var url = rootPath + '/organization/companylists.shtml';
var data = CommnUtil.ajax(url, null,"json");
if (data.result>0) {
var list=data.list;
var h = "";
for ( var i = 0; i < list.length; i++) {
debugger;
if(parseInt(id,10)==parseInt(list[i].id,10)){
h+="<option value='" + list[i].id + "' selected='selected'>"
+ list[i].orgName + "</option>";
}else{
h+="<option value='" + list[i].id + "'>"+ list[i].orgName + "</option>";
}
}
$("#orgId").html(h);
} else {
layer.msg("获取菜单信息错误,请联系管理员!");
}
}*/
<file_sep>/src/main/webapp/js/fund/quota_report/list.js
var listGrid, //quota-report grid instance
    grid_data, //raw records as loaded from the server
    filter_grid_data, //client-side filtered records
    tree, //tree instance (not referenced elsewhere in this file's visible code)
    custom_rules = {}, //accumulated custom query rules
    tree_select_ids = []; //selected tree ids (not referenced elsewhere in this file's visible code)
var mainTab = frameElement.tab; //owning tab container of this frame
//Page bootstrap: size the layout, build the quota-report grid and bind
//the toolbar buttons.
$(function() {
    $(".l-layout-center").width($("#layout1").width()-205);
    listGrid = $("#gridArea").ligerGrid({
        columns: [
            { display: '组织', name: 'lyOrganization.orgName', width: 250 },
            { display: '场景名称', name: 'fundQuotaScene.name', width: 200,
                //Scene name doubles as a link to the read-only detail dialog
                render:function (rowdata,index,data) {
                    return '<a href="javascript:showView(' + rowdata.id + ');" title="查看明细">' + data + '</a>'
                }
            },
            { display: '周期', name: 'fundQuotaScene.cycleType', width: 150,
                render:function(rowdata,index,data){
                    if(rowdata.fundQuotaScene.cycleType ==0 ){
                        return "日";
                    }
                    if(rowdata.fundQuotaScene.cycleType ==1 ){
                        return "月";
                    }
                    if(rowdata.fundQuotaScene.cycleType ==2 ){
                        return "季";
                    }
                    if(rowdata.fundQuotaScene.cycleType ==3 ){
                        return "年";
                    }
                }
            },
            { display: '状态', name: 'state', width: 150,
                render:function(rowdata,index,data){
                    if(rowdata.state ==0 ){
                        return "禁用";
                    }
                    if(rowdata.state ==1 ){
                        return "启用";
                    }
                    if(rowdata.state ==2 ){
                        return "草稿";
                    }
                }
            },
            { display: '填报时间', name: 'reportDate', width: 250,
            },
            { display: '填报人', name: 'lyPerson.name', width: 200 },
        ],
        url: rootPath + '/fund_quota_report/findByPage.shtml',
        sortOrder: 'desc',
        onLoaded:function(g){
            g.toggleLoading(false);
            grid_data = g.data.records;
        }
    });
    $("#pageloading").hide();
    //Toolbar bindings
    $("#search").bind("click", function (event) {
        search();
    });
    $("#reset").click("click", function() {
        reset();
    });
    $("#add").click("click", function() {
        add();
    });
    $("#edit").click("click", function() {
        editReport();
    });
    $("#delete").click("click", function() {
        delReport();
    });
    //Enable the selected reports
    $("#enable").click("click", function () {
        enable();
    });
    //Disable the selected reports
    $("#disable").click("click", function () {
        disable();
    });
});
/**
 * Search button: build server-side rules from the scene-name / cycle /
 * state inputs (blank inputs contribute nothing) and reload the grid.
 */
function search() {
    var sceneName = $("#sceneName").val();
    var cycleType = $("#cycleType").val();
    var state = $("#state").val();
    var rules = [];
    if(sceneName){
        rules.push({field: 'fundQuotaScene.name', op: 'like', value: sceneName, type:'string'});
    }
    if(cycleType){
        rules.push({field: 'fundQuotaScene.cycleType', op: 'equal', value: cycleType, type:'int'});
    }
    if(state){
        rules.push({field: 'state', op: 'equal', value: state, type:'int'});
    }
    var groupData = {rules: rules};
    listGrid.search({where: JSON2.stringify(groupData)});
}
/**
 * Clear the search form and reload the grid with no filter.
 */
function reset(){
    var form = $("#searchForm")[0];
    form.reset();
    search();
}
/**
 * Open (or re-activate and reload) the "指标填报" tab hosting the add page.
 */
function add() {
    var tab_id = "quota_report_add";
    var url = rootPath + '/fund_quota_report/add.shtml';
    if(!mainTab.isTabItemExist(tab_id)){
        mainTab.addTabItem({
            tabid: tab_id,
            text: '指标填报',
            url: url,
            callback: function () {
                //hook for injecting extra content into the freshly added tab
            }
        });
        return;
    }
    mainTab.setTabItemSrc(tab_id, url);
    mainTab.selectTabItem(tab_id);
    mainTab.reload(tab_id);
}
/**
 * Open the edit dialog for exactly one selected quota report. Only draft
 * rows (state == 2) are editable.
 *
 * Fix: the dialog title said '编辑人员' ("edit person"), copy-pasted from
 * person/list.js; this page edits quota reports, so the title now says so.
 */
function editReport() {
    var selected = listGrid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待编辑的项");
        return;
    }
    if (selected.length > 1) {
        layer.msg("只能选中一项进行编辑");
        return;
    }
    if(selected[0].state != 2){
        layer.msg("只能修改草稿的行");
        return;
    }
    $.ligerDialog.open({
        title: '编辑指标填报',
        url: rootPath + '/fund_quota_report/edit.shtml?id=' + selected[0].id,
        width: 1200,
        height: 900,
        data: {
            grid: listGrid //hand the grid to the dialog so it can reload it on save
        }
    });
}
/**
 * Delete the selected quota reports after confirmation; only draft rows
 * (state == 2) may be deleted.
 */
function delReport() {
    var selected = listGrid.getSelectedRows();
    if(selected.length == 0){
        layer.msg("请选中待删除的项");
        return;
    }
    var invalid = false;
    $.each(selected, function(index, row){
        if (row.state != 2){
            invalid = true;
            return;
        }
    });
    if(invalid){
        layer.msg("存在状态不合法的单据,不能删除");
        return;
    }
    var ids = $.map(selected, function(row){ return row.id; });
    $.ligerDialog.confirm('确定是否删除?', function(yes) {
        if(yes == true) {
            var url = rootPath + '/fund_quota_report/deleteEntity.shtml?ids=' + ids;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('删除成功');
            } else {
                layer.msg('删除失败');
            }
        }
    });
}
/**
 * Enable the selected quota reports after confirmation; rows that are
 * already enabled (state == 1) invalidate the selection.
 *
 * Consistency fix: the original reloaded the grid and showed the raw
 * server response in a success dialog even when the call failed; the
 * result is now checked the same way as in disable() and the other
 * actions in this file.
 */
function enable() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待启用的项");
        return;
    }
    var exist_error = false;
    $.each(selected, function(index,item){
        if (item.state == 1){
            exist_error = true;
            return;
        }
    });
    if(!exist_error){
        var ids = $.map(selected, function (item) {
            return item.id;
        });
        $.ligerDialog.confirm('确定是否启用?', function (yes) {
            if (yes == true) {
                var url = rootPath + '/fund_quota_report/enable.shtml?ids=' + ids;
                var result = CommnUtil.ajax(url, {}, "json");
                if (result == "success") {
                    listGrid.reloadAll();
                    layer.msg('启用成功');
                } else {
                    layer.msg('启用失败');
                }
            }
        });
    }
    else{
        layer.msg("存在状态不合法的单据,不能启用");
    }
}
/**
 * Disable the selected quota reports after confirmation; only enabled
 * rows (state == 1) may be disabled.
 */
function disable() {
    var selected = listGrid.getSelectedRows();
    if (selected.length == 0) {
        layer.msg("请选中待禁用的项");
        return;
    }
    var invalid = false;
    $.each(selected, function(index, row){
        if (row.state != 1){
            invalid = true;
            return;
        }
    });
    if(invalid){
        layer.msg("存在状态不合法的单据,不能禁用");
        return;
    }
    var ids = $.map(selected, function (row) {
        return row.id;
    }).join(',');
    $.ligerDialog.confirm('确定是否禁用?', function (yes) {
        if (yes == true) {
            var url = rootPath + '/fund_quota_report/disable.shtml?ids=' + ids;
            var result = CommnUtil.ajax(url, {}, "json");
            if (result == "success") {
                listGrid.reloadAll();
                layer.msg('禁用成功');
            } else {
                layer.msg('禁用失败');
            }
        }
    });
}
//查看明细
function showView(id) {
$.ligerDialog.open({
width: 1200,
height: 900,
title: '详情',
url: rootPath + '/fund_quota_report/viewUI.shtml',
urlParms: {id: id},
data: {
grid: listGrid //将用户列表传给弹窗
}
});
}<file_sep>/src/main/resources/sqlFiles/1.0.15-wx-drop-unique-index.sql
-- Business change: drop the unique indexes on methodName, because the enum
-- values may legitimately repeat across different companies. --
alter table fund_fare_method drop index idx_methodName ;
alter table fund_fare_ticket_method drop index idx_methodName ;
alter table fund_payment_method drop index idx_methodName ;
alter table fund_received_method drop index idx_methodName ;
alter table fund_voucher_method_setting drop index idx_methodName ;
-- Recreate ordinary (non-unique) indexes under the same name idx_methodName
-- so the unique ones are not rebuilt on the next application restart. --
CREATE INDEX idx_methodName ON fund_fare_method(methodName(255));
CREATE INDEX idx_methodName ON fund_fare_ticket_method(methodName(255));
CREATE INDEX idx_methodName ON fund_payment_method(methodName(255));
CREATE INDEX idx_methodName ON fund_received_method(methodName(255));
CREATE INDEX idx_methodName ON fund_voucher_method_setting(methodName(255));<file_sep>/src/main/webapp/js/system/mdm_supplier/view.js
//Grid listing the change history of one supplier record.
var listGrid;
$(function () {
    listGrid = $("#historyGridArea").ligerGrid({
        columns: [
            {display: 'id', name: 'id', width: 100},
            {
                //Name links to the read-only page of that historical version
                display: '名称', name: 'name', width: 200, render: function (rowdata, index, data) {
                    var alink = '<a href="' + getShowHistoryVersionURL(rowdata.id)
                        + '" target="_blank" >' + data + '</a>';
                    return alink;
                }
            },
            {
                display: '版本', name: 'version', width: 100, render: function (rowdata, index, data) {
                    var alink = '<a href="' + getShowHistoryVersionURL(rowdata.id)
                        + '" target="_blank" style="text-decoration: underline;" title="当前版本: ' + data + '">' + data + '</a>';
                    //The grid is sorted by version desc, so row 0 is the current version
                    if (index == 0) {
                        return alink + '<span style="color:red">(当前版本)</span>';
                    } else {
                        return alink;
                    }
                }
            },
            {
                display: '变更时间', name: 'gmtModified', minWidth: 150, render: function (rowdata, index, data) {
                    return new Date(data).format("yyyy-MM-dd hh:mm:ss");
                }
            },
            {display: '变更人', name: 'operatorName', minWidth: 150}
        ],
        url: rootPath + '/mdm_supplier/findHistoryByPage.shtml',
        urlParms: {mdmSupplierId: mdmSupplier.id},
        sortName: 'version',
        sortOrder: 'desc',
        checkbox: false,
        rownumbers: false,
        height: '420',
        detail: {
            height: 'auto',
            //Expandable row detail: fetch the rendered detail fragment
            onShowDetail: function (record, container, callback) {
                var url = rootPath + '/mdm_supplier/historyDetailUI.shtml';
                var result = CommnUtil.ajax(url, {historyId: record.id});
                //Fix: the CSS property was misspelled 'maring', which silently
                //did nothing; 'margin' gives the fragment its intended spacing.
                $(container).append($(result).css('margin', 20));
            }
        }
    });
});
/**
* 查看历史版本详情
* @param historyId
*/
function getShowHistoryVersionURL(historyId) {
return rootPath + '/mdm_supplier/historyViewUI.shtml?id=' + historyId;
} | dfa7ff6b45e9aac4f1ede5fe56379022d619fe6e | [
"SQL",
"JavaScript",
"Markdown",
"Maven POM",
"Java"
] | 89 | JavaScript | xiaopangzhi795/zlblog | c0e10cb5af95ff15458adb0d999ecb8d4e27bb53 | c34f3d057ba76adacb553e7edc482f262bc532f5 |
refs/heads/master | <file_sep>def to_opn(input)
stack = Array.new
ad = Array.new
arr = Array.new
priority = Hash["(" => 0, "+" => 1, "-" => 1, "*" => 2, "/" => 2, "^" => 3]
operators = %w(+ - / * ^)
input.delete(" ")
while input.length != 0
case input
when /^\s*([^\+\-\*\/\(\)\^\s]+)\s*(.*)/
ad.push($1.to_f)
input = $2
when /^\s*([\+\-\*\/\(\)\^])\s*(.*)/
ad.push($1.to_s)
input = $2
end
end
ad.each do |i|
arr << i if i.is_a?(Float)
stack << i if i == "("
if i == ")"
while stack.last != "("
arr.push(stack.pop)
end
stack.pop
end
if operators.include? i
#stack << i if stack.empty?
while operators.include?(stack.last) && priority[stack.last] >= priority[i]
arr.push(stack.pop)
end
stack.push(i)
end
end
while el = stack.pop
arr.push(el)
end
puts arr
end
# Evaluate an expression in reverse Polish notation (as produced by
# to_opn): operands are pushed onto a stack and each operator consumes the
# two topmost values (converted with to_f). The resulting stack is printed,
# mirroring the original behaviour; the method itself returns nil.
def calculation(arr)
  ops = {
    '+' => ->(l, r) { l + r },
    '-' => ->(l, r) { l - r },
    '*' => ->(l, r) { l * r },
    '/' => ->(l, r) { l / r },
    '^' => ->(l, r) { l**r }
  }
  result_stack = []
  arr.each do |token|
    op = ops[token.to_s]
    if op
      right = result_stack.pop.to_f
      left = result_stack.pop.to_f
      result_stack.push(op.call(left, right))
    else
      result_stack.push(token)
    end
  end
  puts result_stack
end
| ef4e89c79a348eb667d5811e367636302732410e | [
"Ruby"
] | 1 | Ruby | DS101/calc | 205d09d502eccc9b33b0416d35f0a6a42bbb3d6b | 1e665473adb14839304a94292fb212555899d5af |
refs/heads/master | <repo_name>perkygva/EDA_Project2<file_sep>/plot1.R
#Exploratory Data Analysis - Project 2
#Plot 1
#Packages
library(dplyr)
#Download, unzip files then read tables
zipfile = "EPA.zip"
if (!file.exists("EPA.zip")){
download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
dest = zipfile)
if(!file.exists("Source_Classification_Code.rds")) {
unzip("EPA.zip")
}
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")
Annuals = aggregate(Emissions~year,NEI, sum)
barplot(height=Annuals$Emissions, names.arg = Annuals$year, space=NULL, main = "Emission per Year",
col = "lightblue", xlab="Year", ylab = "ton", axes = T, axisnames = T, axis.lty = 1)
dev.copy(png, file="plot1.png", height=480, width=480)
dev.off()<file_sep>/plot4.R
#Exploratory Data Analysis - Project 2
#Plot 4
#Packages
library(dplyr)
library(gridExtra)
library(ggplot2)
#Download, unzip files then read tables
zipfile = "EPA.zip"
if (!file.exists("EPA.zip")){
download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
dest = zipfile)
if(!file.exists("Source_Classification_Code.rds")) {
unzip("EPA.zip")
}
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")
coal_filter = SCC[grep("[Cc]oal", SCC$Short.Name, value=F),1]
NEIcoal = NEI[NEI$SCC %in% coal_filter, ]
AnnualsbyCoal = aggregate(Emissions~year+type, NEIcoal, sum)
ggplot(AnnualsbyCoal, aes(x = as.factor(year), y=Emissions, fill = type)) + geom_bar(stat="identity") + labs(title = "Coal Emissions by type", x = "", y = "Emmissions")
dev.copy(png, file="plot4.png", height=480, width=480)
dev.off()
<file_sep>/plot2.R
#Exploratory Data Analysis - Project 2
#Plot 2
#Packages
library(dplyr)
#Download, unzip files then read tables
zipfile = "EPA.zip"
if (!file.exists("EPA.zip")){
download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
dest = zipfile)
if(!file.exists("Source_Classification_Code.rds")) {
unzip("EPA.zip")
}
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")
Baltimore = subset(NEI, fips == "24510")
unique(Baltimore$fips)
# Annual total PM2.5 emissions for Baltimore City, MD (fips == "24510")
Baltimore_annual = aggregate(Emissions~year,Baltimore, sum)
# Fix: the original barplot used `Annuals` (the nationwide totals from
# plot1.R, never defined in this script); plot the Baltimore aggregate
# computed above instead.
barplot(height=Baltimore_annual$Emissions, names.arg = Baltimore_annual$year, space=NULL, main = "Emission per Year in Baltimore, MA",
        col = "orange", xlab="Year", ylab = "ton", axes = T, axisnames = T, axis.lty = 1)
dev.copy(png, file="plot2.png", height=480, width=480)
dev.off()
<file_sep>/plot6.R
#Exploratory Data Analysis - Project 2
#Plot 6: motor-vehicle PM2.5 emissions, Baltimore City vs Los Angeles County
#Packages
library(dplyr)
library(gridExtra)
library(ggplot2)

#Download and unzip the data, then read the tables.
#Bug fix: unzip() was nested inside the download branch, so an already
#downloaded but never-extracted archive was silently skipped.
zipfile = "EPA.zip"
if (!file.exists(zipfile)) {
  download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
                dest = zipfile)
}
if (!file.exists("Source_Classification_Code.rds")) {
  unzip(zipfile)
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")

#Filter and Aggregation
#Bug fix: `&` binds tighter than `|` in R, so the original condition was
#  24510 | (06037 & ON-ROAD), i.e. Baltimore rows were NOT restricted to
#ON-ROAD sources. Parenthesize the fips test to apply ON-ROAD to both cities.
cities = NEI[(NEI$fips == "24510" | NEI$fips == "06037") & NEI$type == "ON-ROAD", ]
auto_filter= SCC[grep("[Vv]ehicle", SCC$Short.Name, value=F),1]
cities_auto_filter = cities[cities$SCC %in% auto_filter, ]
AN_CMV= aggregate(Emissions~year+fips, cities_auto_filter, sum)
FIPS = as.factor(AN_CMV$fips)
levels(FIPS) = list(BA = "24510", LA = "06037")
labels = c("24510" = "Baltimore", "06037"="Los Angeles")

#Plot and create png; print() is required for non-interactive execution.
p6 = ggplot(AN_CMV, aes(x = as.factor(year), y=Emissions, fill = FIPS)) +
  geom_bar(stat="identity", position = "dodge") +
  facet_grid(.~fips, labeller = labeller(fips = labels)) + geom_text(aes(label=round(Emissions, 2), vjust = 0))
print(p6)
dev.copy(png, file="plot6.png", height=480, width=480)
dev.off()
<file_sep>/plot3.R
#Exploratory Data Analysis - Project 2
#Plot 3: Baltimore City emissions per year broken down by source type
#Packages
library(dplyr)
library(gridExtra)
library(ggplot2)

#Download and unzip the data, then read the tables.
#Bug fix: unzip() was nested inside the download branch, so an already
#downloaded but never-extracted archive was silently skipped.
zipfile = "EPA.zip"
if (!file.exists(zipfile)) {
  download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
                dest = zipfile)
}
if (!file.exists("Source_Classification_Code.rds")) {
  unzip(zipfile)
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")

#Subset to Baltimore City and total emissions per year and type.
Baltimore = subset(NEI, fips == "24510")
unique(Baltimore$fips)  #sanity check: should print only "24510"
AnnualsbyType = aggregate(Emissions~year+type, Baltimore, sum)

#Two views of the same data: stacked-by-type bars and per-type facets.
#grid.arrange() draws the plots itself, so no explicit print() is needed.
g = ggplot(AnnualsbyType, aes(x = as.factor(year), Emissions, fill = type)) + geom_bar(position="dodge", stat="identity")
plot3.1 = g + labs(title = "Baltimore: Emissions by Type", x = "", y = "Emissions", legend.title = "Emission Type") + guides(fill = guide_legend(title="Emission Type"))
plot3.2 = g + facet_grid(.~type) + labs(title = "Baltimore: Emissions by Type ", x = "", y = "Emissions", legend.title = "Emission Type") + guides(fill = guide_legend(title="Emission Type"))
grid.arrange(plot3.1, plot3.2)
dev.copy(png, file="plot3.png", height=480, width=480)
dev.off()
<file_sep>/plot5.R
#Exploratory Data Analysis - Project 2
#Plot 5: on-road motor-vehicle PM2.5 emissions per year in Baltimore City
#Packages
library(dplyr)
library(gridExtra)
library(ggplot2)

#Download and unzip the data, then read the tables.
#Bug fix: unzip() was nested inside the download branch, so an already
#downloaded but never-extracted archive was silently skipped.
zipfile = "EPA.zip"
if (!file.exists(zipfile)) {
  download.file("https://d396qusza40orc.cloudfront.net/exdata%2Fdata%2FNEI_data.zip",
                dest = zipfile)
}
if (!file.exists("Source_Classification_Code.rds")) {
  unzip(zipfile)
}
SCC = readRDS("Source_Classification_Code.rds")
NEI = readRDS("summarySCC_PM25.rds")

#Filter and Aggregation: Baltimore ON-ROAD rows whose source mentions "vehicle".
Baltimore = NEI[NEI$fips == "24510" & NEI$type == "ON-ROAD", ]
auto_filter= SCC[grep("[Vv]ehicle", SCC$Short.Name, value=F),1]
BALMV = Baltimore[Baltimore$SCC %in% auto_filter, ]
AN_BALMV = aggregate(Emissions~year+type, BALMV, sum)

#Plot and create png; print() is required for non-interactive execution.
p5 = ggplot(AN_BALMV, aes(x = as.factor(year), y=Emissions, fill = type)) + geom_bar(stat="identity") + labs(title = "Total Motor Vehicle Emissions", x = "", y = "Emmissions") +
  geom_text(aes(label=round(Emissions, 2), vjust=3))
print(p5)
dev.copy(png, file="plot5.png", height=480, width=480)
dev.off()
| 0342f2024f4c1918589ac6f5add971c3a63cf0c3 | [
"R"
] | 6 | R | perkygva/EDA_Project2 | 1d3b75d88c2c35423fecfe7d32653dece938c7f1 | 05c6193677a705d879f06244ef83df4c8ceb1867 |
refs/heads/master | <file_sep>import './App.css';
import React, { useEffect, useState } from "react";
import { BrowserRouter as Router, Switch, Route, Link } from 'react-router-dom';
import {Button,Collapse,Row,Col} from 'antd'
import Dashboard from "./dashboard";
function Auth () {
const [email, setEmail] = useState('');
const [password, setPassword] = useState('');
const [retypePassword, setRetypePassword] = useState('');
const onSignIn = async () => {
};
const onSignUp = async () => {
};
return (
<div>
<form onSubmit={onSignIn}>
<Row type={'flex'} align={'center'} className={'mt-5'}>
<Col span={24}>
<input type="email" value={email} onChange={(ev) => setEmail(ev.currentTarget.value)} required className={'border w-full rounded'} placeholder={'Email address'}/>
</Col>
<Col span={24} className={'mt-5'}>
<input type="password" value={password} onChange={(ev) => setPassword(ev.currentTarget.value)} required className={'border w-full rounded'} placeholder={'<PASSWORD>'}/>
</Col>
<Col span={24} className={'mt-5'}>
<Button htmlType={'submit'} className={'border-0 bg-edorble-yellow-500 hover:bg-edorble-yellow-600 hover:text-black w-full rounded font-bold'}>Submit</Button>
</Col>
</Row>
</form>
<form onSubmit={onSignUp}>
<Row type={'flex'} align={'center'} className={'mt-5'}>
<Col span={24}>
<input type="email" value={email} onChange={(ev) => setEmail(ev.currentTarget.value)} required className={'border w-full rounded'} placeholder={'Email address'}/>
</Col>
<Col span={24} className={'mt-5'}>
<input type="password" value={password} onChange={(ev) => setPassword(ev.currentTarget.value)} required className={'border w-full rounded'} placeholder={'Password'}/>
</Col>
<Col span={24} className={'mt-5'}>
<input type="password" value={retypePassword} onChange={(ev) => setRetypePassword(ev.currentTarget.value)} required className={'border w-full rounded'} placeholder={'Retype Password'}/>
{(password != retypePassword) && <small className={'text-red-500 font-bold'}>Passwords don't match</small>}
</Col>
<Col span={24} className={'mt-5'}>
<Button htmlType={'submit'} disabled={password != retypePassword} className={'border-0 bg-edorble-yellow-500 hover:bg-edorble-yellow-600 hover:text-black w-full rounded font-bold'}>Submit</Button>
{/*<Button loading={loading} disabled={password != retypePassword} type="primary" htmlType={'submit'} className={'border-0 w-full rounded font-bold'}>Submit</Button>*/}
</Col>
</Row>
</form>
</div>
)
}
// Root component: wires up client-side routing between the auth screen ("/")
// and the dashboard ("/dashboard").
function App() {
  return (
    <Router>
      <div>
        {/* A <Switch> looks through its children <Route>s and
            renders the first one that matches the current URL.
            "/" must therefore stay last, or (matching as a prefix)
            it would shadow "/dashboard". */}
        <Switch>
          <Route path="/dashboard">
            <Dashboard />
          </Route>
          <Route path="/">
            <Auth />
          </Route>
        </Switch>
      </div>
    </Router>
  );
}
export default App; | 1bef0017846650b2ebb63838a9f0e76c77ed2410 | [
"JavaScript"
] | 1 | JavaScript | apnicholls/recipebookapril-web | 67e83058e79f526e1de74e11f1c66694345e135b | 7d23612857bd0494f03ea62edc2a95055b525496 |
refs/heads/master | <repo_name>Aadil123/Customer-code<file_sep>/Customer.java
package in.ac.kletech.customer;
/**
 * Base type for all store customers. Holds the common identity fields plus
 * the running bill; subclasses decide how the discount is applied.
 */
abstract class Customer {
	int iCustomerID;
	String sCustomername;
	String sPhoneNum;
	float fBillAmount;

	/** Initialises the shared customer fields. */
	public Customer(int id, String name, String phone, float bill) {
		this.iCustomerID = id;
		this.sCustomername = name;
		this.sPhoneNum = phone;
		this.fBillAmount = bill;
	}

	/** Default implementation: subclasses are expected to override this. */
	void printCustomerInfo()
	{
		System.out.println("Illegal operation performed");
	}

	/** One-line summary of the customer and the current bill. */
	public String toString()
	{
		return String.format("Name: %s ID: %d Ph no: %s Bill: Rs.%s",
				sCustomername, iCustomerID, sPhoneNum, fBillAmount);
	}

	/** Applies the subclass-specific discount to {@link #fBillAmount}. */
	abstract void computeBillAmt();
}
/**
 * Customer without a membership card: receives a flat 50% discount.
 */
class RegularCustomer extends Customer{
	// Discount amount (in rupees) recomputed by computeBillAmt().
	double discount;

	public RegularCustomer(int iCustomerID, String sCustomername, String sPhoneNum, float fBillAmount,double discount) {
		super(iCustomerID, sCustomername, sPhoneNum, fBillAmount);
		this.discount=discount;
	}

	/** Records a 50% discount and reduces the bill accordingly. */
	void computeBillAmt() {
		this.discount=0.5f*this.fBillAmount;
		this.fBillAmount=(float) (this.fBillAmount-(this.discount));
	}

	/**
	 * Prints the customer summary.
	 * Bug fix: the previous version called this.toString() and discarded the
	 * result, so nothing was ever printed.
	 */
	void printCustomerInfo()
	{
		System.out.println(this.toString());
	}
}
/**
 * Member-card customer: the discount rate depends on the card type
 * ("Gold" = 10%, "Gold Premium" = 15%).
 */
class PriviligedCustomer extends Customer{
	String memberCardtype;

	public PriviligedCustomer(int iCustomerID, String sCustomername, String sPhoneNum, float fBillAmount, String memberCardType ) {
		super(iCustomerID, sCustomername, sPhoneNum, fBillAmount);
		this.memberCardtype=memberCardType;
	}

	/**
	 * Applies the card-type discount.
	 * Bug fix: strings were compared with == (reference identity, which only
	 * works for interned literals); use equals() for value comparison.
	 */
	void computeBillAmt() {
		if("Gold".equals(this.memberCardtype))
			this.fBillAmount=this.fBillAmount-(0.1f*this.fBillAmount);
		else if("Gold Premium".equals(this.memberCardtype))
			this.fBillAmount=this.fBillAmount-(0.15f*this.fBillAmount);
	}

	/**
	 * Prints the customer summary.
	 * Bug fix: the previous version called this.toString() and discarded the
	 * result, so nothing was ever printed.
	 */
	void printCustomerInfo()
	{
		System.out.println(this.toString());
	}
}<file_sep>/CustomerDemo.java
package in.ac.kletech.customer;
/**
 * Demo driver: builds a mixed array of customers, then computes and prints
 * each bill, exercising runtime polymorphism over the Customer hierarchy.
 */
public class CustomerDemo {
	public static void main(String[] args) {
		Customer[] c=new Customer[5];

		// Three regular customers and one privileged (Gold) customer.
		RegularCustomer reg1=new RegularCustomer(12, "abs", "123", 1241,0.5);
		RegularCustomer reg2=new RegularCustomer(13, "qwe", "1124", 2525, 0.5);
		RegularCustomer reg3=new RegularCustomer(14, "sdf", "1341", 2545, 0.5);
		PriviligedCustomer pri1=new PriviligedCustomer(15, "ef", "234", 3463, "Gold");
		c[0]=reg1;
		c[1]=reg2;
		c[2]=reg3;
		c[3]=pri1;

		// Process the first four slots (slot 4 is still empty here).
		for(int i=0;i<(c.length-1);i++)
		{
			c[i].computeBillAmt();
			c[i].printCustomerInfo();
		}

		// Reuse reg1 in the last slot: its bill gets discounted a second time.
		c[4]=reg1;
		c[4].computeBillAmt();
		c[4].printCustomerInfo();
	}
}
| 466d2f526546b2c7e8414b63507e61d867714dec | [
"Java"
] | 2 | Java | Aadil123/Customer-code | 8004e504dde357a2083edd060d9e93abed390986 | 241d9257588f0057b38ea0cab61463b36dcc501a |
refs/heads/master | <repo_name>SanguRe/Work<file_sep>/WorkDateRus/MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using WorkDateRus.Entities;
using WorkDateRus.Viewmodels;
namespace WorkDateRus
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
    /// <summary>
    /// Code-behind for the main window: loads the schedule from the WorkDays
    /// database and shows the next five days of shifts per worker in DGMain.
    /// </summary>
    public partial class MainWindow : Window
    {
        // Entity Framework context for the WorkDays database.
        private WorkDaysEntities db = new WorkDaysEntities();
        public MainWindow()
        {
            InitializeComponent();
            UpdateNew();
        }
        /// <summary>
        /// Groups all schedule rows by worker, keeps only upcoming entries,
        /// and relabels the five day columns starting from today.
        /// NOTE(review): ItemsSource is assigned before `users` is populated and
        /// List&lt;T&gt; raises no change notifications - this relies on the grid
        /// binding only after this method completes; confirm.
        /// </summary>
        private void UpdateNew()
        {
            List<Sehedule> sehedules = db.Sehedule.ToList();
            List<UserWorkList> users = new List<UserWorkList>();
            DGMain.ItemsSource = users;
            foreach (var item in sehedules)
            {
                // One UserWorkList per worker; append to it if already present.
                if (users.FirstOrDefault(p => p.UserId == item.WorkerId) != null)
                {
                    users.FirstOrDefault(p => p.UserId == item.WorkerId).sehedules.Add(item);
                }
                else
                {
                    UserWorkList user = new UserWorkList();
                    user.UserId = item.WorkerId;
                    user.Name = item.Worker.FirstName;
                    user.sehedules = new List<Sehedule>();
                    user.sehedules.Add(item);
                    users.Add(user);
                }
            }
            // Keep only future shifts (strictly after today), in chronological order.
            foreach (var user in users)
            {
                user.sehedules = user.sehedules.Where(p => p.StartDate > DateTime.Now.Date).OrderBy(p => p.StartDate).ToList();
            }
            // Columns 1..5 display today and the following four days.
            for (int i = 0; i <= 4; i++)
            {
                DGMain.Columns[i + 1].Header = DateTime.Now.Date.AddDays(i);
            }
        }
        /// <summary>
        /// Alternative grid population (flat 5-day schedule list with generated
        /// columns). Not invoked from the code visible here - presumably an
        /// earlier approach superseded by UpdateNew(); confirm before removing.
        /// </summary>
        private void Update()
        {
            List<Sehedule> sehe = db.Sehedule.ToList().Where(i => i.StartDate >= DateTime.Now && i.EndDate <= DateTime.Now.AddDays(5)).ToList();
            DGMain.ItemsSource = sehe;
            DGMain.Columns.Add(new DataGridTextColumn()
            {
                Header = "Name",
                Binding = new Binding("Worker.FirstName")
            });
            for (int i = 0; i <= 4; i++)
            {
                var templateColumn = new DataGridCheckBoxColumn()
                {
                    //Header = DataTime
                };
                DGMain.Columns.Add(templateColumn);
            }
        }
        // Row view-model: one worker plus their upcoming schedule entries.
        class UserWorkList
        {
            public int UserId { get; set; }
            public string Name { get; set; }
            public List<Sehedule> sehedules { get; set; }
        }
    }
}
<file_sep>/WorkDateRus/Classes/SheduleTime.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using WorkDateRus.Entities;
namespace WorkDateRus.Entities
{
public partial class Sehedule
{
public string StartEnd {
get
{
return StartDate.ToString("HH:mm") + " - " + EndDate.ToString("HH:mm");
}
}
}
}
| 186e9c9af6e811566fe5685561b225f1c6dd045b | [
"C#"
] | 2 | C# | SanguRe/Work | 86bf8a2b454ad87c01eadecc6d53012fad930fa1 | af2afdecd16b64c7e021517bad70dd710543eb29 |
refs/heads/master | <repo_name>AlexSysChi/spring-boot<file_sep>/src/main/resources/db/migration/V2__alter_PERSON_table_clear.sql
-- V2: clear out all existing PERSON rows before reseeding them in V3.
delete from PERSON;
<file_sep>/src/main/resources/db/migration/V3__alter_PERSON_table_with_content.sql
-- V3: seed the PERSON table with five fixed rows (pre-generated UUID keys).
INSERT INTO PERSON (id, name)
VALUES ('15c74793-e807-400b-a8f2-43c7553f2fa5', 'NAME_1'),
       ('075cb72e-03ad-4074-a128-c08c9553e4c9', 'NAME_2'),
       ('0f6431ec-f7db-45b9-af00-51c15b3fa8d2', 'NAME_3'),
       ('b53962ee-906c-47c9-9c02-d717ad1360ee', 'NAME_4'),
       ('e22e208e-94ff-4405-b371-4cdafd444680', 'NAME_5')
;<file_sep>/src/main/java/com/sidenis/springbootapp/dao/MockPersonRepositoryImpl.java
package com.sidenis.springbootapp.dao;
import com.sidenis.springbootapp.model.Person;
import org.springframework.stereotype.Repository;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository("mockAccessService")
public class MockPersonRepositoryImpl implements PersonRepository {

    /** In-memory backing store shared by every instance of this mock. */
    private static List<Person> storedPersons = new ArrayList<>();

    /** Stores a copy of {@code person} under the given id; always reports one row affected. */
    @Override
    public int addPersonWithId(UUID id, Person person) {
        storedPersons.add(new Person(id, person.getName()));
        return 1;
    }

    /** Returns the live backing list of all stored persons. */
    @Override
    public List<Person> getAllPersons() {
        return storedPersons;
    }

    /** Finds the first stored person whose id matches, if any. */
    @Override
    public Optional<Person> selectPersonById(UUID id) {
        return storedPersons.stream()
                .filter(candidate -> candidate.getId().equals(id))
                .findFirst();
    }

    /** Removes the person with the given id; returns the number of rows affected (0 or 1). */
    @Override
    public int deletePersonById(UUID id) {
        return selectPersonById(id)
                .map(found -> {
                    storedPersons.remove(found);
                    return 1;
                })
                .orElse(0);
    }

    /** Replaces the person with the given id by a new record carrying the updated name. */
    @Override
    public int updatePersonById(UUID id, Person personUpdates) {
        return selectPersonById(id)
                .map(existing -> {
                    storedPersons.set(storedPersons.indexOf(existing),
                            new Person(id, personUpdates.getName()));
                    return 1;
                })
                .orElse(0);
    }
}
| 93322fdf5aaa82ad9091aa690dadd408cbf95471 | [
"Java",
"SQL"
] | 3 | SQL | AlexSysChi/spring-boot | c32118c799133a2b66060588039da2a20352524b | c756039cd65119851e701ce55ef4ee317f10b5ca |
refs/heads/master | <repo_name>windsurfer7563/data-science-journey-2017<file_sep>/README.md
Данный репозиторий содержит примеры решений задач, проводимых в рамках конкурса [SDSJ](https://sdsj.ru/), а также вспомогательные файлы, позволяющие упростить решение и его отладку перед отправкой на [платформу](https://contest.sdsj.ru).
Структура репозитория:
1. [Мое решение для задачи А](taskA/)
2. [Бейзлайн решения для задачи B и вспомогательные файлы, упрощающие первые шаги в решении задачи](taskB/)
<file_sep>/taskA/make_sentence_model.py
import json
import semantics as sem
import re
from nltk import sent_tokenize
import gensim
from multiprocessing import Pool
import tqdm
word_with_dots = {'тыс','г','кг', 'м','А','Б','В','Г','Д','Е','Ж','З','И','Й','К','Л','М','Н','О','П','Р','С','Т','У','Ф','Х','Ц','Ч','Ш','Э','Ю','Я'}
accentuations = {'Á':'А', 'á':'а', 'Ó':'О', 'ó':'о', 'É':'Е', 'é':'е', 'ý':'у', 'и́':'и','ы́':'ы', 'э́':'э', 'ю́':'ю', 'я́':'я'}
def uniq_words(text):
    """Return the set of unique word tokens (runs of \\w+) found in *text*."""
    # Raw string avoids the invalid-escape-sequence DeprecationWarning for "\w".
    return set(re.findall(r"\w+", text))
def read_data_model(file_name: str) -> dict:
    """Load and return the JSON data model stored in *file_name* (UTF-8)."""
    # Bug fix: the file handle was previously opened but never closed.
    with open(file_name, mode='r', encoding='utf-8') as file:
        return json.load(file)
def write_data_model(file_name: str, data_model: dict):
    """Serialize *data_model* as compact, non-ASCII-escaped JSON to *file_name*."""
    # Bug fix: the file handle was previously opened but never closed, so the
    # buffered output could be lost and the descriptor leaked.
    with open(file_name, mode='w', encoding='utf-8') as file:
        json.dump(data_model, file, separators=(',', ':'), ensure_ascii=False)
def make_bags(texts: list) -> list:
    """Build one canonical word bag per input text.

    Each bag is sem.canonize_words() applied to the unique words of the
    corresponding text; the result preserves the input order.
    """
    return [sem.canonize_words(uniq_words(txt)) for txt in texts]
def read_paragraphs(data) -> tuple:
    """Tokenize every paragraph and question in *data* into sentences and word bags.

    *data* is expected to be a pandas DataFrame with (at least) the columns
    'paragraph_id', 'paragraph', 'question_id' and 'question'.
    Returns (paragraphs, questions): two dicts keyed by id, each value a dict
    with the raw 'sentences' and their canonized word 'bags'.
    """
    paragraphs={}
    questions = {}
    print(data.shape)
    for idx, row in tqdm.tqdm(data.iterrows(), total=data.shape[0]):
        p_id = row["paragraph_id"]
        # Each paragraph is processed only once even if it appears in many rows.
        if p_id not in paragraphs:
            text = row['paragraph']
            # Protect known abbreviations ("тыс.", single-letter initials, ...)
            # from being treated as sentence boundaries by the tokenizer.
            for word in word_with_dots:
                text = text.replace(word + ". ", word + "_ ")
            # Strip accent marks so word forms match the unaccented vocabulary.
            for k,v in accentuations.items():
                text = text.replace(k, v)
            sentences = sent_tokenize(text)
            bags = make_bags(sentences)
            paragraphs[p_id] = {'sentences' : sentences,
                                'bags': bags,
                                }
        q_id = row["question_id"]
        if q_id not in questions:
            text = row['question']
            #for word in word_with_dots:
            #    text = text.replace(word + ". ", word + " ")
            for k,v in accentuations.items():
                text = text.replace(k, v)
            # A question is short enough to be treated as a single "sentence".
            sentences_q = [text]
            bags_q = make_bags(sentences_q)
            questions[q_id] = {'sentences' : sentences_q,
                               'bags': bags_q,
                               }
    return paragraphs, questions
def read_paragraphs_multi(data, workers=None):
    """Run read_paragraphs over the chunks of *data* in a process pool.

    *data* must be an iterable of DataFrame chunks accepted by
    read_paragraphs; *workers* is the pool size (None = cpu count).
    Returns the list of per-chunk (paragraphs, questions) results.
    """
    pool = Pool(workers)  # don't shadow the `workers` argument
    results = pool.map_async(read_paragraphs, data)
    # Bug fix: Pool.join() raises ValueError unless the pool has been closed
    # (or terminated) first; the close() call was commented out.
    pool.close()
    pool.join()
    return results.get()
def custom_w2v_model(paragraphs, questions):
    """Train a Word2Vec model over the word bags of all paragraphs and questions.

    Both arguments are dicts whose values carry a 'bags' list (see
    read_paragraphs). Returns the trained gensim Word2Vec model.
    """
    sentences = []
    for p in paragraphs.values():
        for bag in p['bags']:
            sentences.append(bag)
    # Bug fix: the original inner loop shadowed `q` and appended the stale
    # variable `b` left over from the paragraph loop, so question bags were
    # never actually added to the training corpus.
    for q in questions.values():
        for bag in q['bags']:
            sentences.append(bag)
    return gensim.models.Word2Vec(sentences, size=100, window=5, min_count=5, workers=4, hs=1, negative=0)
<file_sep>/taskB/dockers/sberbank/sdsj-python/Dockerfile
# Base image: Kaggle's all-in-one Python data-science stack
# (ships numpy, pandas, scikit-learn, nltk, gensim, ...).
FROM kaggle/python

# Extra packages used by the solution:
#   pymorphy2 - Russian morphological analyzer; tqdm - progress bars.
RUN pip install pymorphy2 tqdm
<file_sep>/taskB/Readme.md
Описание структуры:
В папке [dockers](dockers/) лежат примеры Dockerfile, которые используются на сервере.
В папке [simple-baseline](simple-baseline/) лежит пример бейзлайна на основе простой эвристики ([Подробная инструкция](simple-baseline/Readme.md)).
Файл [squad.py](squad.py) содержит код для рассчета метрики для одной пары параграфа вопроса.
## Описание формата решения
Каждое решение должно быть оформлено в виде zip-архива, в корне которого должен быть metadata.json.
Пример:
`
{
"image": "sberbank/sdsj-python",
"entrypoint": "python3 predict.py"
}
`
В нем есть два обязательных поля `entrypoint` - команда для запуска внутри docker контейнера. `image` - образ докера, который будет использоваться для запуска контейнера. Указывать можно любые образы доступные на docker hub или те, которые лежат в папке [dockers](dockers/). Остальные файлы в архиве доступны на использование в процессе исполнения вашего решения.
## Ограничения
Каждому решению отводится ограниченное количество ресурсов.
Оперативной памяти доступно 8гб.
Размер распакованного архива не должен превышать 1 ГБ; такое же ограничение действует и на размер упакованного архива.
Доступно 2 cpu ядра.
Размер файла с предсказаниями не должен превышать 52 мегабайта (решение, в котором в качестве ответа подставляется полный параграф).
Ограничение на время исполнения равно 20 минутам (включая чтение своих моделей, данных и запись предсказания).
## Как прогонять решения без отправки в систему:
1. Разделить данные на трейн/валидейт: можно это сделать скриптом `python3 split_train.py path_to_train.csv`. В результате, в этой же папке, где находится скрипт, создадутся два файла: `train_without_validate.csv`, `validate.csv`. Теперь обучаться будет на первом файле, а оценивать по второму.
2. Создаете новое решение. Снова обучаете модель, но уже с новыми признаками и сохраняете сопуствующие данные (модель предсказания и IDF-веса слов).
3. Чтобы прогнать решение можно запустить следующий скрипт: `python3 check_solution.py -t docker --submission_folder simple-baseline --data_file validate.csv`, данные скрипт запустит ваше решение наподобие того, как это работает на платформе: запустит ваше решение в докере(требуется установленный и запущенный докер, и ряд библиотек для питона(см. requirements.txt)) на файле `validate.csv`, решением данный скрипт считает все что находится в папке `simple-baseline` и будет запущен `predict.py` от туда.
Другие варианты запуска:
`python3 check_solution.py -t simple --submission_folder simple-baseline --data_file validate.csv` запустит ваше решение без докера.
`python3 check_solution.py -t simple --submission_file output_simple.zip --data_file validate.csv` запустит ваше решение без докера, решение будет взято из файла `output_simple.zip` (аналогичная опция работает и для запуска на докере).
По окончанию применения скрипта будет выведена строчка вида: `{'f1': 0.3361121166011774}`
## Для тех кто привык работать с `Jupyter Notebook`:
Чтобы каждый раз не копировать код из ноутбуков в `py`-файлы можно воспользоваться библиотекой dill, она в отличие от pickle позволяет сохранять не только данные и объекты, но и функции/классы/ламбды. Тем самым вы можете написать простой predict.py, который
1. импортирует все необходимые стандартные библиотеки, которые вы собираетесь использовать
2. загружает ваш код, который вы предварительно сериализовали: `code = dill.load(...)`
3. делает код доступным: `for obj_name in code: globals()[obj_name] = code[obj_name]`
4. далее следует обычный код загрузки тестовых данных, построению признаков и применению моделей, который будет меняться намного реже
URGENT: для работы вышеописанного решения рекомендуется использовать одинаковые версии Python, в противном случае могут быть проблемы с данным решением.
<file_sep>/taskA/Readme.md
В файле baseline.ipynb представлен пример кода для построения предсказаний для задачи A на языке python3.
| 2ae4603734156f1c5a85077271d112331b359fa9 | [
"Markdown",
"Python",
"Dockerfile"
] | 5 | Markdown | windsurfer7563/data-science-journey-2017 | 1e9f411fda9a13c99f194b71ca0cf4455aa02557 | edb2f721ddcaebaada21ef054cca82a8d3e66562 |
refs/heads/master | <repo_name>necrommunity/python-replay-parser<file_sep>/README.md
[](https://996.icu)
# Replay Parser
A replay parser for Crypt of the Necrodancer: AMPLIFIED, which will store all replays into a database for viewing later.
## Building Config File
Copy the `config_template.ini` as `config.ini`. Update any of the defaults if your replay folder doesn't exist there. If you aren't sure, remove the `REPLAY_FOLDER` value, so it reads as `REPLAY_FOLDER=`. The script will ask you to find the folder and then save it to the config file.
> This will not run if it doesn't find `config.ini` and I am too lazy to make a default :)
## TODO
1. ~~Make script loop indefinitely to continue to pull in new replay files~~
2. Add older replay parsing, since it only parses the Amplified full release correctly
3. Add in co-op replays, but probably not.
4. Probably a ton of cleanup at some point
5. ~~Since Chrome is bad you can't run the html files locally, so you'll have to start a webserver some how. Easiest way is to have python installed~~
1. ~~Python 2.7 -- No idea~~
2. ~~Python 3.7.3 -- `cd python-replay-parser; python -m http.server`~~
3. ~~Once started, open `http://localhost:8000/view_runs.html`~~
## Replay File Information
For ease of viewing, created a new line for each "\n" present in the replay data. Example is a run that ended in 1-3 on Bard
```
94 <- Replay version
-7 <- Mode type (referenced as "t_replay->startingLevel" in code)
1 <- Starting zone
0 <- Starting gold
0 <- Has broadsword
57219 <- Run time in milliseconds
3 <- Total songs played
v Start of first floor information
1492725043 <- First floor seed
1 <- Number of players
480 <- Camera width in pixels assumedly
270 <- Camera height in pixels assumedly
47 <- Keys pressed
9|46|2:2,4:1,6:1,8:1,10:2,12:1,14:1,16:1,18:1,20:1,22:2,24:2,26:2,28:2,30:2,32:2,34:2,36:2,38:2,40:2,42:2,44:1,46:1,48:2,50:2,52:2,54:2,56:2,58:2,60:2,62:2,64:2,66:2,68:1,70:1,72:1,74:2,76:2,78:1,80:2,82:2,84:3,86:2,88:2,90:1,92:1, <- Successful character moves, first # is character, 2nd is # of successful moves, rest is the moves and the beat they occured on
2|11,15, <- Unsuccessful character moves (missed beats etc), first # is unsuccessful moves, followed by the beats they occured on
25 <- No idea
1,0,3,1,3,2,2,0,1,1,0,0,2,1,2,0,0,0,1,0,0,1,1,0,0, <- No idea, probably bat RNG
0 <- No idea
v Start of second floor information
1976763447
1
480
270
51
9|50|2:2,4:2,6:3,8:3,10:2,12:2,14:2,16:2,18:1,20:1,22:2,24:3,26:3,28:2,30:1,32:1,34:0,36:3,38:3,40:2,42:2,44:2,46:3,48:3,50:3,52:3,54:3,56:3,58:3,60:3,62:3,64:1,66:1,68:0,70:1,72:3,74:2,76:3,78:2,80:3,82:3,84:3,86:3,88:3,90:3,92:3,94:2,96:3,98:0,100:3,
0|
11
0,0,1,1,2,1,1,0,0,1,1,
0
v Start of third floor information
224999924
1
480
270
33
9|33|2:2,4:2,6:3,8:2,10:2,12:2,14:2,16:3,18:3,20:3,22:1,24:0,26:1,28:0,30:0,32:1,34:1,36:1,38:1,40:1,42:1,44:1,46:1,48:2,50:1,52:2,54:2,56:0,58:0,60:2,62:9,64:9,66:9,
0|
46
0,2,3,0,0,1,2,3,0,2,0,1,3,1,0,0,0,1,0,2,1,0,3,3,3,3,3,1,3,3,0,0,2,2,0,1,1,1,0,3,0,3,0,2,0,0,
0
```
<file_sep>/main.py
#!py -3
import math
import os
import sqlite3
import sys
import time
from configparser import ConfigParser
try:
from tk import filedialog
except:
from tkinter import filedialog
import hashlib
from dateutil import parser
from datetime import datetime
import json
import threading
import http.server
import socketserver
import webbrowser
PORT = 8080
Handler = http.server.SimpleHTTPRequestHandler
CONFIG = "config.ini"
class ParsedReplay:
    """Container for everything extracted from one parsed replay run.

    Fields prefixed with ``f_`` hold human-readable ("formatted") versions of
    their raw counterparts; ``to_json`` maps the useful subset to camelCase
    keys for the web UI.
    """

    def __init__(self):
        # File / replay metadata.
        self.version = 0
        self.amplified = True
        self.amplified_full = True
        self.folder = ""
        self.file = ""
        self.f_hash = 0
        self.imported_date = ""
        # When and how the run was played.
        self.run_date = ""
        self.f_run_date = ""
        self.run_type = 0
        self.f_run_type = ""
        self.seed = 0
        self.players = 0
        self.char1 = 0
        self.f_char1 = ""
        self.char2 = 0
        self.f_char2 = ""
        # Outcome of the run.
        self.songs = 0
        self.end_zone = -1
        self.f_end_zone = ""
        self.run_time = 0
        self.f_run_time = ""
        self.key_presses = 0
        self.score = 0
        self.killed_by = 0
        self.f_killed_by = ""
        self.win = False
        # Parser bookkeeping for replays that could not be fully decoded.
        self.bugged = False
        self.bugged_reason = ""

    def __str__(self):
        """One-line human-readable summary, handy while debugging."""
        return (f"Date: {self.f_run_date}, Seed: {self.seed}, "
                f"Char: {self.f_char1}, Type: {self.f_run_type}, "
                f"EndZone: {self.f_end_zone}, RunTime: {self.f_run_time}, "
                f"KeyPresses: {self.key_presses}")

    def to_json(self):
        """Return the JSON-serializable dict representation of this replay."""
        return {
            'version': self.version,
            'amplified': self.amplified,
            'amplifiedFull': self.amplified_full,
            'file': self.file,
            'fHash': self.f_hash,
            'runDate': self.run_date,
            'fRunDate': self.f_run_date,
            'runType': self.run_type,
            'fRunType': self.f_run_type,
            'char1': self.char1,
            'fChar1': self.f_char1,
            'seed': self.seed,
            'runTime': self.run_time,
            'fRunTime': self.f_run_time,
            'songs': self.songs,
            'endZone': self.f_end_zone,
            'keyPresses': self.key_presses,
            'score': self.score,
            'killedBy': self.f_killed_by,
            'win': self.win,
            'bugged': self.bugged,
            'buggedReason': self.bugged_reason,
            'importDate': self.imported_date
        }
def start_server():
    """Serve the current directory over HTTP on PORT, blocking forever."""
    server = socketserver.TCPServer(("", PORT), Handler)
    # Announce the port before entering the (never-returning) serve loop.
    print("HTTP Server Started", PORT)
    server.serve_forever()
def setup_database(db):
    """Open (creating on first use) the SQLite database at path *db*.

    On a brand-new database this creates the run/tag/bugged/run_tag tables
    and seeds the default tags. Returns an open sqlite3 connection; on any
    error it prints the message and exits the process.
    """
    try:
        conn = sqlite3.connect(db)
        # One row per run flagged as bugged, with a human-readable reason.
        bugged = """
        CREATE TABLE IF NOT EXISTS bugged (
            id INTEGER PRIMARY KEY ASC ON CONFLICT ABORT AUTOINCREMENT NOT NULL ON CONFLICT ABORT UNIQUE ON CONFLICT ABORT,
            run_id INTEGER REFERENCES run (id),
            bugged BOOLEAN,
            bugged_reason TEXT,
            bugged_data TEXT
        );
        """
        # Main table: one row per parsed replay run.
        # NOTE(review): the column is named f_char_2 here while get_replays
        # only ever selects f_char1 - confirm the intended spelling.
        run = """
        CREATE TABLE IF NOT EXISTS run (
            id INTEGER PRIMARY KEY ASC ON CONFLICT ABORT AUTOINCREMENT NOT NULL ON CONFLICT ABORT UNIQUE ON CONFLICT ABORT,
            version INTEGER,
            amplified BOOLEAN,
            amplified_full BOOLEAN,
            folder TEXT,
            file TEXT,
            f_hash INTEGER,
            run_date INTEGER,
            f_run_date INTEGER,
            run_type INTEGER,
            f_run_type TEXT,
            seed INTEGER,
            songs INTEGER,
            end_zone TEXT,
            run_time INTEGER,
            f_run_time TEXT,
            players INTEGER,
            char1 INTEGER,
            f_char1 TEXT,
            char2 INTEGER,
            f_char_2 TEXT,
            win BOOLEAN,
            killed_by INTEGER,
            f_killed_by TEXT,
            key_presses INTEGER,
            score INTEGER,
            imported_date INTEGER
        );
        """
        # Many-to-many join table between runs and tags.
        run_tag = """
        CREATE TABLE IF NOT EXISTS run_tag (
            id INTEGER PRIMARY KEY ASC ON CONFLICT ABORT AUTOINCREMENT NOT NULL ON CONFLICT ABORT UNIQUE ON CONFLICT ABORT,
            run_id INTEGER REFERENCES run (id),
            tag_id INTEGER REFERENCES tag (id)
        );
        """
        # Tag catalogue with display colors for the web view.
        tag = """
        CREATE TABLE IF NOT EXISTS tag (
            id INTEGER PRIMARY KEY ASC ON CONFLICT ABORT AUTOINCREMENT NOT NULL ON CONFLICT ABORT UNIQUE ON CONFLICT ABORT,
            name TEXT UNIQUE ON CONFLICT ABORT,
            color TEXT,
            color_hex TEXT
        );
        """
        # Default tags seeded only when the database is brand new.
        tag_data = [
            ("Win", "Green", "#3CB371"),
            ("Death By Enemy", "Red", "#FA8072"),
            ("Death By Curse", "Grey", "#708090")
        ]
        c = conn.cursor()
        c.execute("SELECT name FROM sqlite_master WHERE type='table'")
        test = c.fetchall()
        # No tables at all means a brand-new database file: build the schema.
        if len(test) < 1:
            print("Creating new DB")
            c = conn.cursor()
            c.execute(run)
            c.execute(tag)
            c.execute(bugged)
            c.execute(run_tag)
            c.executemany(
                'INSERT INTO tag (name, color, color_hex) values (?, ?, ?)', tag_data)
            conn.commit()
        return conn
    except Exception as e:
        # Any failure here is fatal: the rest of the program needs the DB.
        print("Error: {}".format(e))
        sys.exit()
def setup_replay_folder(r_folder, config):
    """Return the replay folder path, asking the user for one if needed.

    If *r_folder* exists it is returned as-is; otherwise a tk directory
    dialog is shown and the chosen path is persisted into the CONFIG file.
    NOTE(review): cancelling the dialog yields an empty string which is
    still written to the config, and the except branch (and the success
    path when the path exists but the dialog fails) can fall through with
    an implicit None return - confirm callers handle both.
    """
    if not os.path.exists(r_folder):
        try:
            print("Getting replay folder")
            folder = filedialog.askdirectory()
            # Persist the choice so the dialog isn't shown on the next run.
            config.set('DEFAULT', 'REPLAY_FOLDER', folder)
            with open(CONFIG, 'w') as cfg:
                config.write(cfg)
            return folder
        except Exception as e:
            print("Could not open folder: {}".format(e))
    else:
        return r_folder
def get_run_hashes(db):
    """Return every stored replay hash so already-imported files can be skipped."""
    cursor = db.cursor()
    cursor.execute("SELECT r.f_hash FROM run r")
    # Each fetched row is a 1-tuple; unwrap to a flat list of hashes.
    return [row[0] for row in cursor.fetchall()]
def get_tags(db):
    """Return all tag rows from the database, keyed by tag id."""
    cursor = db.cursor()
    cursor.execute("SELECT t.id, t.name, t.color, t.color_hex FROM tag t")
    # Map id -> full (id, name, color, color_hex) row.
    return {row[0]: row for row in cursor.fetchall()}
def get_replays(db):
    """Load every stored run (left-joined with its bugged flag) from the DB.

    Returns a dict mapping each replay's file hash to a fully populated
    ParsedReplay, ordered by the query (newest run first at insertion time).
    NOTE(review): the single try wraps the query AND the whole mapping loop,
    so one bad row aborts the remaining rows; and if execute() itself fails,
    `run` is unbound inside the except handler - confirm intended.
    """
    replays = {}
    c = db.cursor()
    try:
        c.execute("""
        SELECT
            r.version,
            r.amplified,
            r.amplified_full,
            r.folder,
            r.file,
            r.f_hash,
            r.run_date,
            r.f_run_date,
            r.run_type,
            r.f_run_type,
            r.seed,
            r.songs,
            r.end_zone,
            r.run_time,
            r.f_run_time,
            r.players,
            r.char1,
            r.f_char1,
            r.win,
            r.killed_by,
            r.f_killed_by,
            r.key_presses,
            r.score,
            b.bugged,
            b.bugged_reason,
            r.imported_date
        FROM
            run r
        LEFT JOIN bugged b
            ON b.run_id = r.id
        ORDER BY
            r.run_date DESC
        ;""")
        # Rebuild a ParsedReplay per row; indexes match the SELECT order above.
        for run in c.fetchall():
            p_replay = ParsedReplay()
            p_replay.version = run[0]
            p_replay.amplified = bool(run[1])
            p_replay.amplified_full = bool(run[2])
            p_replay.folder = run[3]
            p_replay.file = run[4]
            p_replay.f_hash = run[5]
            p_replay.run_date = run[6]
            p_replay.f_run_date = run[7]
            p_replay.run_type = run[8]
            p_replay.f_run_type = run[9]
            p_replay.seed = run[10]
            p_replay.songs = run[11]
            # end_zone is stored as "zone-floor"; split it back into a dict.
            ez = run[12].split("-")
            p_replay.end_zone = {'zone': ez[0], 'floor': ez[1]}
            p_replay.f_end_zone = run[12]
            p_replay.run_time = run[13]
            p_replay.f_run_time = run[14]
            p_replay.players = run[15]
            p_replay.char1 = run[16]
            p_replay.f_char1 = run[17]
            p_replay.win = bool(run[18])
            p_replay.killed_by = run[19]
            p_replay.f_killed_by = run[20]
            p_replay.key_presses = run[21]
            p_replay.score = run[22]
            # bugged columns come from the LEFT JOIN and may be NULL.
            p_replay.bugged = bool(run[23])
            p_replay.bugged_reason = run[24]
            p_replay.imported_date = run[25]
            replays[p_replay.f_hash] = p_replay
    except Exception as e:
        print("Couldn't populate replay from db \'{}\': {}".format(run[5], e))
    return replays
def get_files(replays):
    """Return the list of file names in the *replays* directory.

    On failure this now returns an empty list (the previous broad except
    fell through with an implicit None, which crashed iterating callers)
    and only catches OSError, the exception os.listdir actually raises.
    """
    try:
        return os.listdir(replays)
    except OSError as e:
        print("Could not get replay files: {}".format(e))
        return []
def get_char_name(c):
    """Map a character id to its display name ("Unknown" if unrecognized)."""
    names = {
        0: "Cadence",
        1: "Melody",
        2: "Aria",
        3: "Dorian",   # Dad
        4: "Eli",      # Best
        5: "Monk",     # Bad
        6: "Dove",
        7: "Coda",
        8: "Bolt",
        9: "Bard",
        10: "Nocturna",
        11: "Diamond",
        12: "Mary",
        13: "Tempo",
    }
    return names.get(c, "Unknown")
def get_type_name(t):
    """Map a (possibly string) run-type code to its display name.

    Positive codes are in-game modes; negative codes are the custom mode
    flags used by the replay format. Unrecognized codes yield "Unknown".
    """
    # Replay files store the code as text, so normalize to int first.
    t = int(t)
    type_names = {
        1: "Zone 1",
        2: "Zone 2",
        3: "Zone 3",
        4: "Zone 4",
        5: "Zone 5",
        6: "All-Zones",
        7: "Daily",
        8: "Seeded All-Zones",
        -7: "All-Zones",
        -8: "Dance Pad",
        -9: "Daily",
        -10: "Seeded All-Zones",
        -50: "Story Mode",
        -52: "No Return",
        -53: "Seeded No Return",
        -55: "Hard Mode",
        -56: "Seeded Hard Mode",
        -59: "Phasing",
        -60: "Randomizer",
        -61: "Mystery",
        -62: "Seeded Phasing",
        -63: "Seeded Randomizer",
        -64: "Seeded Mystery",
    }
    return type_names.get(t, "Unknown")
def get_end_zone(songs, char, t, replay):
    """Derive the zone/floor the replay ended on and store it on ``replay``.

    Args:
        songs: Number of songs (floors) played during the run.
        char: Numeric character id (see get_char_name()).
        t: Numeric run-type code (see get_type_name()).
        replay: ParsedReplay being populated; mutated in place.

    Returns:
        The same ``replay`` object, with ``end_zone``/``f_end_zone`` set,
        or flagged bugged when the song count is implausible.
    """
    # Only the Amplified full release layout (5 zones, 4 floors each,
    # Dove 3 floors) is handled.
    if not replay.amplified_full:
        print("Too lazy to code non-amplified full release")
        replay.bugged = True
        replay.bugged_reason = "Too lazy to code for non-amplified full release"
        return replay
    zones = 5
    if char in [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13]:
        # Single-zone runs (1 <= t < 5) start in zone t; otherwise derive
        # the zone from the song count (4 floors per zone).
        zone = t if t < 5 and t > 0 else math.floor(((songs - 1) / 4) + 1)
        floor = ((songs - 1) % 4) + 1
        # Aria (char 2) plays the zones in reverse order (5 -> 1).
        # NOTE(review): the `t >= zones+1 or t < 5` guard presumably keeps a
        # plain Zone-5 run type from being mirrored -- confirm.
        if char == 2 and (t >= zones+1 or t < 5):
            zone = (zones + 1) - zone
        if zone > zones:
            # zone == zones+1 with floor <= 2 is tolerated (presumably the
            # post-final-boss floors); anything beyond that means the song
            # count cannot be trusted.
            if zone > zones + 1 or floor > 2:
                replay.bugged, replay.bugged_reason = True, "Number of songs is bugged: {}".format(
                    songs)
            zone = zones
            floor = 5
        elif zone < 1:  # Aria
            # Mirrored (Aria) runs that underflow zone 1 are likewise bugged.
            replay.bugged, replay.bugged_reason = True, "Number of songs is bugged: {}".format(
                songs)
            zone = 1
            floor = 4
        replay.end_zone = {'zone': zone, 'floor': floor}
        replay.f_end_zone = "{}-{}".format(zone, floor)
    elif char in [6]:  # Dove
        # Dove's zones are only 3 floors deep.
        zone = t if t < zones + \
            1 and t > 0 else math.floor(((songs - 1)/3) + 1)
        floor = ((songs - 1) % 3) + 1
        replay.end_zone = {'zone': zone, 'floor': floor}
        replay.f_end_zone = "{}-{}".format(zone, floor)
    return(replay)
def get_time_from_replay(ms_time):
    """Format a run duration in milliseconds as "HH:MM:SS.cc".

    The fractional part is centiseconds (two digits), matching the in-game
    end-screen display; durations of 24 hours or more wrap around, as in
    the original implementation.

    Args:
        ms_time: Run length in milliseconds (int). Negative values are
            treated as a zero-length run.

    Returns:
        str: The formatted time string.
    """
    if ms_time < 0:
        # Was "00:00:00.000" (three fraction digits) while the normal path
        # below emits two -- made consistent.
        return "00:00:00.00"
    # Pure integer arithmetic avoids the float rounding errors of the
    # previous `int(((ms_time / 1000) % 1) * 100)` approach, which could be
    # off by one centisecond for some inputs.
    centis = (ms_time % 1000) // 10
    total_seconds = ms_time // 1000
    seconds = total_seconds % 60
    minutes = (total_seconds // 60) % 60
    hours = (total_seconds // 3600) % 24
    return "{:02}:{:02}:{:02}.{:02}".format(hours, minutes, seconds, centis)
def get_key_presses(songs, data, replay):
    """Return the total number of key presses recorded over a run.

    Reads one counter per song from the raw replay fields; the counter for
    song ``s`` (1-based) apparently lives at index ``s * 11``. The
    ``replay`` argument is accepted for signature compatibility but is not
    used here. A negative song count yields 0.
    """
    if songs < 0:
        return 0
    return sum(int(data[(song + 1) * 11]) for song in range(songs))
def save_run(run, db):
    """Insert one parsed replay as a row of the ``run`` table and commit.

    Args:
        run: ParsedReplay whose attributes mirror the run table columns;
            note the ``end_zone`` column stores the formatted string
            ``run.f_end_zone``, not the dict.
        db: Open database connection (sqlite3-style cursor API).

    Errors are caught and logged so one bad row does not stop the import
    loop. The bugged/tag/run_tag bookkeeping is stubbed out below.
    """
    try:
        # run_id = -1
        # bugged_id = -1
        # tag_id = -1
        # runtag_id = -1
        c = db.cursor()
        # 24 columns -> 24 "?" placeholders in VALUES below.
        run_sql = """
        INSERT INTO run
        (
            version,
            amplified,
            amplified_full,
            folder,
            file,
            f_hash,
            run_date,
            f_run_date,
            run_type,
            f_run_type,
            seed,
            songs,
            end_zone,
            run_time,
            f_run_time,
            players,
            char1,
            f_char1,
            win,
            killed_by,
            f_killed_by,
            key_presses,
            score,
            imported_date
        )
        VALUES
        (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
        ;
        """
        run_data = [
            (
                run.version,
                True if run.amplified else False,
                True if run.amplified_full else False,
                run.folder,
                run.file,
                run.f_hash,
                run.run_date,
                run.f_run_date,
                run.run_type,
                run.f_run_type,
                run.seed,
                run.songs,
                run.f_end_zone,
                run.run_time,
                run.f_run_time,
                run.players,
                run.char1,
                run.f_char1,
                run.win,
                run.killed_by,
                run.f_killed_by,
                run.key_presses,
                run.score,
                run.imported_date
            )
        ]
        c.executemany(run_sql, run_data)
        # Save the id of the last inserted row as the run_id
        # run_id = c.lastrowid
        # If the run was bugged, make a note of it
        # if run.bugged:
        # c.execute("hehe insert bugged stuff")
        # bugged_id = c.lastrowid
        # Insert the tag information
        # c.execute("hehe do the tag stuff")
        # tag_id = c.lastrowid
        # if run_id > 0 and tag_id > 0:
        # c.execute("hehe add the runtag stuffs")
        # runtag_id = c.lastrowid
        db.commit()
    except Exception as e:
        print("Couldn't insert run: {}, {}/{}\n{}".format(run.f_hash,
                                                          run.folder, run.file, e))
def save_to_json(replays, json_file):
    """Write all parsed replays to ``json_file`` as ``{"data": [...]}``.

    Serialises each replay via its ``to_json()`` method and writes the
    whole document with ``json.dump`` instead of hand-assembling JSON
    text, which previously risked emitting malformed output if the
    comma/bracket bookkeeping ever drifted.

    Args:
        replays: Mapping of run hash -> parsed replay object (insertion
            order is preserved in the output array).
        json_file: Destination path; overwritten on every call.
    """
    try:
        payload = {"data": [replay.to_json() for replay in replays.values()]}
        # Opening with 'w' truncates, so the old delete-then-recreate
        # dance is no longer needed.
        with open(json_file, 'w') as f:
            json.dump(payload, f)
    except Exception as e:
        print("Couldn't save to json: {}".format(e))
def calculate_seed(zone_1_seed, amplified):
    """Compute the displayed run seed from the raw zone-1 floor seed.

    Algorithm credit: reverse-engineered by Grimy / AlexisYJ.
    Returns None (after logging) for non-amplified replays.
    """
    # seed.add(0x40005e47).times(0xd6ee52a).mod(0x7fffffff).mod(0x713cee3f); # Stolen from Alexis :D
    if not amplified:
        print("Not calculating this seed: {}".format(zone_1_seed))
        return None
    mixed = (zone_1_seed + 0x40005E47) * 0xD6EE52A
    return (mixed % 0x7FFFFFFF) % 0x713CEE3F
def parse_files(r_folder, r_files, all_replays, hashes, tags, db):
    """Parse every new replay file, persist it, and collect the results.

    File names encode version/date/run-type fields separated by "_"; the
    file body holds one field per "\\n"-separated entry.

    Args:
        r_folder: Folder containing the replay files.
        r_files: Iterable of file names to consider.
        all_replays: Dict of hash -> ParsedReplay; updated in place.
        hashes: Hashes of already-imported runs (skip list); appended to.
        tags: Tag data (currently unused here).
        db: Open database connection passed through to save_run().

    Returns:
        The updated ``all_replays`` dict.
    """
    for r_f in r_files:
        try:
            p_file = ParsedReplay()
            # print("Parsing: \"{}/{}\"".format(r_folder, r_f))
            # File-name layout (underscore-separated): index 0 = version,
            # 3..8 = date/time parts, 9 = run type.
            split_name = r_f.split(".")[0].split("_")
            with open("{}/{}".format(r_folder, r_f)) as r:
                data = r.read()
                # NOTE(review): fields are split on the literal two-character
                # sequence backslash-n, not on newlines -- the replay format
                # apparently stores "\n" as text. Confirm against a sample file.
                split_data = data.split("\\n")
                version = int(split_name[0])
                amp = True if version > 75 else False  # Amplified DLC (early access)
                amp_full = True if version > 84 else False  # Amplified full release
                dt = parser.parse("{} {}".format(
                    "/".join(split_name[3:6:]), ":".join(split_name[6:9])))
                f_dt = "{}/{}/{} {}:{}".format(dt.year,
                                               dt.month, dt.day, dt.hour, dt.minute)
                t = int(split_name[9])
                coop = True if int(split_data[8]) > 1 else False
                char1 = int(split_data[12].split("|")[0])
                players = int(split_data[8])
                seed = int(split_data[7])
                songs = int(int(split_data[6]))
                run_time = int(split_data[5])
                # Unique per folder/file/character, used as the de-dup key.
                run_hash = hashlib.md5(
                    "{}/{} {}".format(r_folder, r_f, char1).encode()).hexdigest()
                # Win detection: the expected song count per character plus a
                # non-empty field just past the last song block (indices
                # 248/226/171 -- presumably only populated on completion).
                if char1 in [0, 10]:
                    win = True if songs == 22 and len(
                        split_data[248]) > 0 else False
                elif char1 not in [0, 6, 10]:
                    win = True if songs == 20 and len(
                        split_data[226]) > 0 else False
                elif char1 == 6:
                    win = True if songs == 15 and len(
                        split_data[171]) > 0 else False
                # Only new, single-player runs are imported.
                if not coop and run_hash not in hashes:
                    p_file.version = version
                    p_file.amplified = amp
                    p_file.amplified_full = amp_full
                    p_file.folder = r_folder
                    p_file.file = r_f
                    p_file.f_hash = run_hash
                    p_file.run_date = int(dt.timestamp())
                    p_file.f_run_date = f_dt
                    p_file.run_type = t
                    p_file.f_run_type = get_type_name(t)
                    p_file.char1 = char1
                    p_file.f_char1 = get_char_name(char1)
                    p_file.players = players
                    p_file.seed = calculate_seed(seed, amp)
                    p_file.songs = songs
                    p_file.run_time = run_time
                    p_file.f_run_time = get_time_from_replay(run_time)
                    p_file.win = win
                    p_file = get_end_zone(songs, char1, t, p_file)
                    p_file.key_presses = get_key_presses(songs, split_data, p_file)
                    p_file.imported_date = int(datetime.now().timestamp())
                    # print(p_file.__dict__)
                    # print(p_file)
                    save_run(p_file, db)
                    all_replays[p_file.f_hash] = p_file
                    hashes.append(run_hash)
                # else:
                # print("Too lazy to code in co-op runs")
        except Exception as e:
            # Any malformed file is logged and skipped; the loop continues.
            print("Couldn't parse file: {} -> {}".format(r_f, e))
    return all_replays
def main():
    """Run the replay watcher: serve the viewer UI and poll for new replays.

    Pretty much everything was figured out by Grimy and/or AlexisYJ.
    Anything that looks complicated was them. Probably the simple stuff
    too :)  (This note used to sit as a stray no-op string mid-function.)
    """
    # Start a local web server on PORT in a background daemon thread.
    try:
        # daemon=True replaces the deprecated t.setDaemon(True).
        t = threading.Thread(target=start_server, daemon=True)
        t.start()
    except KeyboardInterrupt:
        # NOTE(review): the old handler called Thread._stop(), which no
        # longer exists in Python 3; a daemon thread dies with the process
        # anyway, so there is nothing to do here.
        pass
    webbrowser.open("http://localhost:{}/view_runs.html".format(PORT))
    # Grab the config data
    config = ConfigParser()
    config.read(CONFIG)
    dbfile = config.get('DEFAULT', 'DATABASE_FILE')
    replay_folder = config.get('DEFAULT', 'REPLAY_FOLDER')
    json_file = config.get('DEFAULT', 'JSON_FILE')
    # Setup the db connection
    db = setup_database(dbfile)
    # Hashes of runs already imported, so they are not parsed twice.
    run_hashes = get_run_hashes(db)
    tags = get_tags(db)
    replays = get_replays(db)
    # Setup the replay folder/files
    replay_folder = setup_replay_folder(replay_folder, config)
    # Poll the replay folder forever, re-exporting the JSON each pass.
    counter = 0
    while True:
        replay_files = get_files(replay_folder)
        # Parse the replay files
        replays = parse_files(replay_folder, replay_files,
                              replays, run_hashes, tags, db)
        save_to_json(replays, json_file)
        counter += 1
        print("Looking for new replays: {}".format(counter))
        time.sleep(30)
# Script entry point; main() loops forever, so sys.exit is only reached
# if main() ever returns (e.g. after an unhandled break).
if __name__ == "__main__":
    sys.exit(main())
<file_sep>/config_template.ini
[DEFAULT]
database_file = database.sqlite
replay_folder = C:/Program Files (x86)/Steam/steamapps/common/Crypt of the NecroDancer/replays
json_file = data.json
<file_sep>/view_stats.js
// view_stats.js: load data.json and render per-character win statistics
// (wins, average/fastest times, key presses) into #tableStats.
// NOTE(review): the accumulator variables below (totalWins, charTime, ...)
// are assigned without var/let and therefore become implicit globals;
// charFastest is declared but never used.
$(function () {
    console.log("loaded");
    $.ajaxSetup({
        scriptCharset: "utf-8",
        contentType: "application/json; charset=utf-8"
    });
    var jqxhr = $.getJSON("data.json", {}, function () {
    })
        .done(function (data) {
            // do a bunch of stuff here
            totalWins = 0;
            totalRuns = 0;
            totalTime = 0;
            totalWinTime = 0;
            charWins = {}
            charDeaths = {}
            charTime = {}
            charFastest = {}
            headers = [
                "Date",
                "Character",
                "Seed",
                "Ending Zone",
                "Run Time",
                "Win",
                "Key Presses",
                "Songs",
                "File"
            ];
            totalRuns = data["data"].length
            // First pass: bucket every run by character, tracking win/death
            // counts plus per-character time and key-press samples.
            $.each(data["data"], function (i, item) {
                totalTime += item.runTime;
                if (item.win === true) {
                    if ($.inArray(item.fChar1, $.map(charWins, function (element, index) { return index })) >= 0) {
                        charWins[item.fChar1] += 1;
                    } else {
                        charWins[item.fChar1] = 1;
                    }
                    totalWins += 1;
                    totalWinTime += item.runTime;
                    if ($.inArray(item.fChar1, $.map(charTime, function (element, index) { return index })) >= 0) {
                        charTime[item.fChar1]["times"].push(item.runTime);
                        charTime[item.fChar1]["steps"].push(item.keyPresses);
                        if (item.runTime < charTime[item.fChar1]["fastest"]) {
                            charTime[item.fChar1]["fastest"] = item.runTime;
                        }
                        if (item.keyPresses < charTime[item.fChar1]["least"]) {
                            charTime[item.fChar1]["least"] = item.keyPresses
                        }
                    } else {
                        charTime[item.fChar1] = {};
                        charTime[item.fChar1]["avgTime"] = 0;
                        charTime[item.fChar1]["avgSteps"] = 0;
                        charTime[item.fChar1]["fastest"] = item.runTime;
                        charTime[item.fChar1]["least"] = item.keyPresses;
                        charTime[item.fChar1]["steps"] = [];
                        charTime[item.fChar1]["steps"].push(item.keyPresses);
                        charTime[item.fChar1]["times"] = [];
                        charTime[item.fChar1]["times"].push(item.runTime);
                    }
                } else {
                    if ($.inArray(item.fChar1, $.map(charDeaths, function (element, index) { return index })) >= 0) {
                        charDeaths[item.fChar1] += 1;
                    } else {
                        charDeaths[item.fChar1] = 1;
                    }
                }
            });
            console.log(totalRuns, totalWins, totalRuns - totalWins, totalTime, totalWinTime, charWins, charDeaths, charTime);
            // Second pass: compute the per-character averages.
            $.each(charTime, function (i, char) {
                cTime = 0;
                cSteps = 0;
                cLen = char["times"].length
                cLenS = char["steps"].length
                $.each(char["times"], function (j, time) {
                    cTime += time
                });
                $.each(char["steps"], function(j, steps) {
                    cSteps += steps;
                });
                charTime[i]["avgTime"] = cTime / cLen
                charTime[i]["avgSteps"] = cSteps / cLenS
            });
            console.log(charTime);
            // Build the stats table as an HTML string.
            // NOTE(review): "table-responsibe-sm" looks like a typo for
            // "table-responsive-sm" -- left untouched here.
            output = `<table class="table table-striped table-bordered table-sm table-responsibe-sm table-hover">`;
            output += `<thead>`;
            output += `<tr>`;
            output += `<th scope="col">Char</th>`;
            output += `<th scope="col">Total Wins</th>`
            output += `<th scope="col">Avg Win Time</th>`;
            output += `<th scope="col">Fastest Win Time</th>`;
            output += `<th scope="col">Avg Win Key Presses</th>`;
            output += `<th scope="col">Least Win Key Presses</th>`;
            output += `</tr>`;
            output += `</thead>`;
            output += `<tbody>`;
            $.each(charTime, function(i, char) {
                output += `<tr>`;
                output += `<td>` + i + `</td>`;
                output += `<td>` + char["times"].length + `</td>`;
                output += `<td>` + formatTime(char["avgTime"]) + `</td>`;
                output += `<td>` + formatTime(char["fastest"]) + `</td>`;
                output += `<td>` + Math.trunc(char["avgSteps"]) + `</td>`;
                output += `<td>` + Math.trunc(char["least"]) + `</td>`;
                output += `</tr>`;
            });
            output += `</tbody>`;
            output += `</table>`;
            console.log(output);
            $("#tableStats").html(output);
        });
});
/**
 * Format a duration in milliseconds as "HH:MM:SS.cc" (centisecond
 * precision), mirroring the Python get_time_from_replay() output.
 * Durations of 24 hours or more wrap around.
 * @param {number} t - Duration in milliseconds.
 * @returns {string} The formatted time string.
 */
function formatTime(t) {
    // `var` declarations fix the previous accidental implicit globals
    // (assignments without declaration, which also throw in strict mode).
    var millis = Math.trunc(((t / 1000) % 1) * 100);
    var seconds = Math.floor((t / 1000) % 60);
    var minutes = Math.floor((t / (1000 * 60)) % 60);
    var hours = Math.floor((t / (1000 * 60 * 60)) % 24);
    return pad(hours, 2) + ":" + pad(minutes, 2) + ":" + pad(seconds, 2) + "." + pad(millis, 2);
}
// Left-pad the string form of `n` with `z` (default "0") up to `width`
// characters; values already >= width characters are returned unchanged.
function pad(n, width, z) {
    z = z || '0';
    n = n + '';
    // Array-join trick: (width - n.length + 1) slots produce width - n.length fill chars.
    return n.length >= width ? n : new Array(width - n.length + 1).join(z) + n;
}<file_sep>/DATABASE.md
# Database Schema
## **run**
Each row represents a single replay file
> id: int, primary key, autoincrements
> > The unique ID for each run in the database
> version: int
> > The version of the replay file
> amplified: bool
> > Designates if this has the Amplified DLC in Early Access
> amplified_full: bool
> > Designates if this has the Amplified DLC after full release
> folder: text
> > The folder path to the replay
> file: text
> > The file name of the replay
> f_hash: int
> > A unique hash based off of folder, file and character of the run
> run_date: int
> > The date of when the run happened
> f_run_date: text
> > The run date in a pretty format for display
> run_type: int
> > The type of run that the replay represents
> f_run_type: text
> > The type of run in a pretty format for display
> run_time: int
> > The length of the run in milliseconds
> f_run_time: text
> > The length of the run formatted as the end screen output
> seed: int
> > The integer representation of the seed
> songs: int
> > How many songs were played throughout the run
> players: int
> > How many characters were played in the run, irrelevant because I did not code in co-op
> char1: int
> > The character for player 1
> f_char1: text
> > The character for player 1, formatted for display
> char2: int
> > The character for player 2
> f_char2: text
> > The character for player 2, formatted for display
> win: bool
> > Designates if the run was completed or not
> killed_by: int
> > What you were killed by, not currently implemented
> f_killed_by: text
> > What you were killed by, formatted for display
> key_presses: int
> > The number of keys pressed throughout a run
> score: int
> > The score the run achieved
> imported_date: int
> > The date the run was imported
## **bugged**
Each row represents data for a bugged run
> id: int, primary key, autoincremented
> > The unique ID of each bugged run
> run_id: int, references run(id)
> > A reference to the run's id in the run table
> bugged_reason: text
> > The reason the run was bugged
> bugged_data: text
> > The raw data behind the bugged run
## **tag**
Each row represents a specific tag -- not currently implemented
> id: int, primary key, autoincremented
> > The unique ID of each tag
> name: text
> > The name of each tag
> color: text
> > The color of each tag
## **run_tag**
Each row represents the tag of a run in the run table -- not currently implemented
> id: int, primary key, autoincremented
> > The unique ID of each run's tag
> run_id: int, references run(id)
> > The run's ID from the run table
> tag_id: int, references tag(id)
> > The tag's ID from the tag table<file_sep>/Pipfile
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
pylint = "*"
autopep8 = "*"
[packages]
python-dateutil = "==2.8.0"
python_version = ">=3.7.3"
pylint = "==2.3.1"
tkinter = "*"
tk = "*"
thread = "*"
[requires]
python_version = "3.7.3"
<file_sep>/view_runs.js
// view_runs.js: load data.json and render every run as a DataTables row,
// plus total win/run counters.
// NOTE(review): the counters/headers/output variables below are implicit
// globals (no var/let), and data["meta"] does not exist in the exported
// JSON -- $.each over undefined is a silent no-op in jQuery, so the extra
// header loop simply adds nothing. Confirm whether "meta" was planned.
$(function(){
    console.log("loaded");
    var jqxhr = $.getJSON("data.json", function(){
    })
    .done(function(data){
        // do a bunch of stuff here
        //data = data
        totalWins = 0;
        totalRuns = 0;
        headers = [
            "Date",
            "Character",
            "Seed",
            "Ending Zone",
            "Run Time",
            "Win",
            "Key Presses",
            "Songs",
            "File"
        ];
        output = "<table id=\"output_table\" class=\"table table-striped table-bordered table-sm table-responsibe-sm table-hover\">";
        $.each(data["meta"], function(i, item){
            headers.push(item);
        })
        output = output + "<thead class=\"thead-dark\">";
        output = output + "<tr>";
        $.each(headers, function(k, v){
            output = output + "<th scope=\"col\">" + v + "</th>";
        })
        output = output + "</tr>";
        output = output + "</thead>";
        output = output + "<tbody>";
        // One table row per run; runDate is a unix timestamp (seconds).
        $.each(data["data"], function(i, item){
            output = output + "<tr class=\"\">";
            output = output + "<td scope=\"row\" class=\"td\">" + moment(item.runDate*1000).format("YYYY/MM/DD HH:mm:ss") + "</td>";
            output = output + "<td class=\"td\">" + item.fChar1 + "</td>";
            output = output + "<td class=\"td\">" + item.seed + "</td>";
            output = output + "<td class=\"td\">" + item.endZone + "</td>";
            output = output + "<td class=\"td\">" + item.fRunTime+ "</td>";
            if (item.win == true) {
                output = output + "<td class=\"td bg-success\"> Yes </td>";
            } else {
                output = output + "<td class=\"td bg-danger\"> No </td>";
            }
            output = output + "<td class=\"td\">" + item.keyPresses + "</td>";
            output = output + "<td class=\"td\">" + item.songs + "</td>";
            output = output + "<td class=\"td\">" + item.file + "</td>";
            output = output + "</tr>";
            totalRuns += 1;
            if (item.win === true) totalWins += 1;
        });
        output = output + "</tbody>";
        output = output + "<tfoot>";
        output = output + "</tfoot>";
        output = output + "</table>";
        $('#table').html(output);
        $('#output_table').DataTable({
            paging: false,
            order: [0, "desc"]
        });
        $('#wins').html("Total Wins: " + totalWins);
        $('#runs').html("Total Runs: " + totalRuns);
    })
});<file_sep>/requirements.txt
pylint==2.3.1
# requires Python > 3.7.3 (a bare "python_version" line is not valid requirements syntax)
python-dateutil==2.8.0
| 7cabc64a5c92269ee5d84b5c56bfa7e811ff4786 | [
"Markdown",
"TOML",
"JavaScript",
"INI",
"Python",
"Text"
] | 8 | Markdown | necrommunity/python-replay-parser | 519e2763ca887ac8d8520488e2de50420c31b7df | 52fd12a78d52ebaab4085f99a3748e2e7fdcb6b4 |
refs/heads/master | <repo_name>UncleScrooge2012/Webpro<file_sep>/app/js/main.js
$(function(){
	// Initialise both Swiper carousels. The two instances previously shared
	// the duplicated name `mySwiper` (the second `var` re-declaration
	// silently replaced the first slider's handle); distinct names keep
	// both instances reachable.
	var mainSwiper = new Swiper ('.swiper-container', {
		// Optional parameters
		loop: true,
		// If we need pagination
		pagination: {
			el: '.swiper-pagination',
			type: 'bullets',
		},
		// Navigation arrows
	})
	var peopleSwiper = new Swiper ('.people__swiper', {
		// Optional parameters
		loop: true,
		// If we need pagination
		pagination: {
			el: '.swiper-pagination',
			type: 'bullets',
		},
		// Navigation arrows
	})
});
"JavaScript"
] | 1 | JavaScript | UncleScrooge2012/Webpro | c7ea7d6705cc5a93f016c035f3f31bf94b72a668 | 0561ebb61c19af0e5cc10a44098f0d3b31802a25 |
refs/heads/master | <file_sep>let isFetching = false;
let isEndOfData = false;
buildLoadMoreHandler( document.querySelector( '.wp-block-newspack-blocks-homepage-articles' ) );
function buildLoadMoreHandler( blockWrapperEl ) {
const btnEl = blockWrapperEl.querySelector( '[data-next]' );
if ( ! btnEl ) {
return;
}
const postsContainerEl = blockWrapperEl.querySelector( '[data-posts]' );
btnEl.addEventListener( 'click', function( e ) {
if ( isFetching || isEndOfData ) {
return false;
}
isFetching = true;
blockWrapperEl.classList.remove( 'is-error' );
blockWrapperEl.classList.add( 'is-loading' );
AMP.getState( 'newspackHomepagePosts.exclude_ids' ).then( function( exclude_ids ) {
const requestURL = new URL( btnEl.getAttribute( 'data-next' ) );
requestURL.searchParams.set( 'exclude_ids', JSON.parse( exclude_ids ).join( ',' ) );
apiFetchWithRetry( { url: requestURL.toString(), onSuccess, onError }, 3 );
} );
function onSuccess( data ) {
AMP.getState( 'newspackHomepagePosts.exclude_ids' ).then( function( exclude_ids ) {
AMP.setState( {
newspackHomepagePosts: { exclude_ids: JSON.parse( exclude_ids ).concat( data.ids ) },
} );
} );
if ( isPostsDataValid( data ) ) {
data.items.forEach( item => {
const tempDIV = document.createElement( 'div' );
tempDIV.innerHTML = item.html.trim();
postsContainerEl.appendChild( tempDIV.childNodes[ 0 ] );
} );
if ( data.next ) {
btnEl.setAttribute( 'data-next', data.next );
}
if ( ! data.items.length || ! data.next ) {
isEndOfData = true;
blockWrapperEl.classList.remove( 'has-more-button' );
}
isFetching = false;
blockWrapperEl.classList.remove( 'is-loading' );
}
}
function onError() {
isFetching = false;
blockWrapperEl.classList.remove( 'is-loading' );
blockWrapperEl.classList.add( 'is-error' );
}
} );
}
/**
 * GET `options.url`; call options.onSuccess with the parsed JSON body on a
 * 2xx response, otherwise call options.onError and retry with n - 1.
 *
 * NOTE(review): when n reaches 0 the handler bails out before inspecting
 * the response, so the final request's outcome -- even a success -- is
 * discarded; onError also fires once per failed attempt rather than only
 * after the last retry. Confirm both behaviours before changing them.
 *
 * @param {Object} options { url, onSuccess, onError }.
 * @param {number} n       Remaining attempts.
 */
function apiFetchWithRetry( options, n ) {
	const xhr = new XMLHttpRequest();
	xhr.onreadystatechange = () => {
		// Only act once the request has completed (readyState 4).
		if ( xhr.readyState !== 4 || n === 0 ) {
			return;
		}
		if ( xhr.status >= 200 && xhr.status < 300 ) {
			const data = JSON.parse( xhr.responseText );
			options.onSuccess( data );
			return;
		}
		options.onError();
		apiFetchWithRetry( options, n - 1 );
	};
	xhr.open( 'GET', options.url );
	xhr.send();
}
/**
 * Validate the shape of a load-more response payload: it must carry
 * `items` (a non-empty array whose first entry has a string `html`)
 * and a `next` property.
 *
 * @param {Object} data Parsed response body.
 * @returns {boolean} Whether the payload is safe to render.
 */
function isPostsDataValid( data ) {
	const owns = ( obj, prop ) => Object.prototype.hasOwnProperty.call( obj, prop );
	if ( ! data || ! owns( data, 'items' ) || ! owns( data, 'next' ) ) {
		return false;
	}
	const items = data.items;
	if ( ! Array.isArray( items ) || items.length === 0 ) {
		return false;
	}
	return owns( items[ 0 ], 'html' ) && typeof items[ 0 ].html === 'string';
}
/**
 * Own-property check that works even when `obj` shadows hasOwnProperty.
 *
 * @param {Object} obj  Object to inspect.
 * @param {string} prop Property name.
 * @returns {boolean} True when `prop` is an own property of `obj`.
 */
function hasOwnProp( obj, prop ) {
	const owns = Object.prototype.hasOwnProperty;
	return owns.call( obj, prop );
}
<file_sep>/**
* External dependencies
*/
import { isEqual, isUndefined, pick, pickBy } from 'lodash';
/**
* Based global WP.com blog_public option, checks whether current blog is
* private or not.
*
* @return {boolean} a private WP.com blog flag
*/
export const isBlogPrivate = () =>
	// NOTE(review): when `window.wpcomGutenberg` is absent this returns that
	// falsy value (e.g. undefined) rather than `false` -- fine in boolean
	// contexts, but confirm before comparing strictly against false.
	typeof window === 'object' &&
	window.wpcomGutenberg &&
	Number( window.wpcomGutenberg.blogPublic ) === -1;
/**
* Block attributes which influence posts query
*/
const POST_QUERY_ATTRIBUTES = [
	'postsToShow',
	'authors',
	'categories',
	'tags',
	'specificPosts',
	// When specificMode is truthy, `specificPosts` overrides the other
	// filter attributes (see queryCriteriaFromAttributes below).
	'specificMode',
	'tagExclusions',
];
/**
* Does the props change necessitate a reflow?
* A reflow should happen if:
* 1. Query-changing attributes of a block change
* 2. The top-level blocks order changes. A Homepage Articles
* block might be nested somewhere.
*
* @param {Object} prevProps Edit component props
* @param {Object} props Edit component props
*/
export const shouldReflow = ( prevProps, props ) =>
	// Reflow when any query-affecting attribute changed, or when the
	// top-level block order changed (a block may be nested anywhere).
	! isEqual(
		pick( prevProps.attributes, POST_QUERY_ATTRIBUTES ),
		pick( props.attributes, POST_QUERY_ATTRIBUTES )
	) || ! isEqual( prevProps.topBlocksClientIdsInOrder, props.topBlocksClientIdsInOrder );
/**
* Builds query criteria from given attributes.
*
* @param {Object} attributes block attributes
* @return {Object} criteria
*/
export const queryCriteriaFromAttributes = attributes => {
	const {
		postsToShow,
		authors,
		categories,
		tags,
		specificPosts,
		specificMode,
		tagExclusions,
	} = pick( attributes, POST_QUERY_ATTRIBUTES );
	const cleanPosts = sanitizePostList( specificPosts );
	// "Specific mode" pins an explicit, ordered list of post IDs and
	// ignores the author/category/tag filters entirely.
	const isSpecificPostModeActive = specificMode && cleanPosts && cleanPosts.length;
	const criteria = pickBy(
		isSpecificPostModeActive
			? {
				include: cleanPosts,
				orderby: 'include',
				per_page: specificPosts.length,
			}
			: {
				per_page: postsToShow,
				categories,
				author: authors,
				tags,
				tags_exclude: tagExclusions,
			},
		// Drop undefined values so they don't end up in the request query.
		value => ! isUndefined( value )
	);
	criteria.suppress_password_protected_posts = true;
	return criteria;
};
export const sanitizePostList = postList => {
	// Coerce every entry to an integer and keep only valid (positive) IDs;
	// non-numeric entries become NaN and are dropped by the `> 0` check.
	const ids = [];
	for ( const rawId of postList ) {
		const id = parseInt( rawId );
		if ( id > 0 ) {
			ids.push( id );
		}
	}
	return ids;
};
export const getBlockQueries = ( blocks, blockName ) =>
	blocks.flatMap( block => {
		const homepageArticleBlocks = [];
		if ( block.name === blockName ) {
			const postsQuery = queryCriteriaFromAttributes( block.attributes );
			homepageArticleBlocks.push( { postsQuery, clientId: block.clientId } );
		}
		// Recurse depth-first so nested Homepage Articles blocks are found too.
		return homepageArticleBlocks.concat( getBlockQueries( block.innerBlocks, blockName ) );
	} );
export const getEditorBlocksIds = blocks =>
	// Depth-first list of every client id: each block contributes its own
	// id followed by the ids of all of its inner blocks.
	blocks.flatMap( block => [
		block.clientId,
		...getEditorBlocksIds( block.innerBlocks ),
	] );
| 6221aff6d4d0a0dfc94b5ecdf9f7e5dca493ff2d | [
"JavaScript"
] | 2 | JavaScript | fahimxyz/newspack-blocks | 3c55a69d23d17c2cfb5e26385ffd27b0e31ba168 | 23db97b0d9837864daa1d0eaad9ef1393d8b7d94 |
refs/heads/main | <file_sep># todos-list-backend | 130cd9e5be89594e30c1a27edd7dbd6a97db5731 | [
"Markdown"
] | 1 | Markdown | Yordy2001/todos-list-backend | 45e1058dd4fd86ac973e27b2399f322fa624a9f9 | aa176533f3f7380169300f441c3f99b9c76cb82a |
refs/heads/master | <repo_name>mmuyakwa/bash-scripts<file_sep>/installers/install_pritunl.sh
#!/usr/bin/env bash
# This script installs the Pritunl VPN server (OpenVPN-based) together with
# its MongoDB backend. (The header previously said "openvpn-Access-Server".)
# This Script is intended for systems WITHOUT a GUI
# Must be root or SUDO-User to run script successfully.
# Author: Michael Muyakwa, 2019-12-25
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Add the MongoDB 4.0 and Pritunl apt repositories (Debian stretch).
$SUDO tee /etc/apt/sources.list.d/mongodb-org-4.0.list << EOF
deb http://repo.mongodb.org/apt/debian stretch/mongodb-org/4.0 main
EOF
$SUDO tee /etc/apt/sources.list.d/pritunl.list << EOF
deb http://repo.pritunl.com/stable/apt stretch main
EOF
# dirmngr is needed for apt-key's keyserver access; -y keeps the script
# non-interactive (previously this prompt could stall unattended installs).
$SUDO apt-get install -y dirmngr
$SUDO apt-key adv --keyserver hkp://keyserver.ubuntu.com --recv 9DA31620334BD75D9DCB49F368818C72E52529D4
$SUDO apt-key adv --keyserver hkp://keyserver.ubuntu.com --recv 7568D9BB55FF9E5287D586017AE645C0CF8E292A
$SUDO apt-get update
$SUDO apt-get --assume-yes install pritunl mongodb-server
$SUDO systemctl start mongodb pritunl
$SUDO systemctl enable mongodb pritunl
echo "Now run 'pritunl setup-key'"
echo "Set your Password: '$SUDO pritunl default-password'"
echo "Then open http://localhost"
echo "Login with:"
echo "user:pritunl"
<file_sep>/installers/install_defaults-Non_GUI.sh
#!/usr/bin/env bash
# This script installs my usual suspects on Debian-based Systems.
# This Script is intended for systems WITHOUT a GUI
# Must be root or SUDO-User to run script successfully.
# Author: Michael Muyakwa, 2018-01-19
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install my usual suspects
$SUDO apt-get install mc screen build-essential clamav software-properties-common locales-all curl git gdebi-core bc rename awscli apt-transport-https -y
# Set locale (German, UTF-8)
$SUDO locale-gen de_DE.UTF-8
$SUDO update-locale LANG=de_DE.UTF-8
# Set Timezone to Berlin
$SUDO cp /usr/share/zoneinfo/Europe/Berlin /etc/localtime
# Run the following code to set your Keyboard-Language
# 'sudo dpkg-reconfigure keyboard-configuration'
<file_sep>/installers/docker/portainer/install_agent.sh
#!/usr/bin/env bash
# Deploy the Portainer *agent* container. It listens on port 9001 and is
# meant to be registered as an endpoint from a separate Portainer server.
## TODO add https://docs.docker.com/install/linux/linux-postinstall/ if SUDO-User
#sudo groupadd docker
#sudo usermod -aG docker $USER
docker run -d -p 9001:9001 --name portainer_agent --restart=always -v /var/run/docker.sock:/var/run/docker.sock -v /var/lib/docker/volumes:/var/lib/docker/volumes portainer/agent
# Fixed message: this script starts the agent on 9001 -- the old text
# claimed the Portainer UI was available on port 9000.
echo 'The Portainer agent is now listening on port 9001'
<file_sep>/README.md
# bash-scripts
My collection of **Shell-Scripts** I use on a regular basis.
[](https://github.com/mmuyakwa/bash-scripts/blob/master/LICENSE) [](https://encrypted.google.com/search?q=steffen+held) [](https://www.likando.de)
## extern_ip.sh
### Public-IP
`extern_ip.sh` is a simple small script which shows the current public IP of your host. (IP will be fetched from [http://checkip.dyndns.org/](http://checkip.dyndns.org/) )
user@client:~/scripts/bash$ sh extern_ip.sh
    203.0.113.11
#### Usage
sh extern_ip.sh # Simply prints your public IP on the console.
## eZServerMonitor.sh
## firstrun.sh
## generate_key.sh
## git_it.sh
The initial run of this script checks if a Git-Repo already exists.
If not, it runs
git init
The Repo will be created and it'll **remind** you to set the remote Repo.
echo "!---!";
echo "Set remote repository";
echo "e.g.:";
    echo "git remote add origin git@github.com:mmuyakwa/bash-scripts.git";
echo "!---!";
At this stage the scripts **exits** the `initial run`.
If you run the script another time it will check for changes and stages these and commits them.
git status -s
git add -A
git commit -m "Committed at: $now (automatic via script)"
If you run this script with an string, that string ("`Your Message`") will be used for the commit-message.
git_it.sh "Your Message"
which will then
git commit -m "Your Message - Committed at: $now"
The last step is to check if a remote Repo is set and push the repo to remote.
git push -u origin master
#### Flowchart

#### Usage
sh git_it.sh # Initializes the Git-Repository on first run.
# Stages, Committs and Pushes if remote Repo is set.
sh git_it.sh "Your Message" # Adds "Your Message" to the Commit-Message.
#### img
##### git_it-flow.png
## installers
### bash_profile
#### bash_aliases
#### install_aliases.sh
#### pfetch.sh
### compliance
#### install_inspec.sh
### docker
#### install_ctop.sh
#### install_docker-compose.sh
#### jenkins
##### docker-compose.yml
##### install_jenkins_debian.sh
#### nextcloud
##### docker-compose.yml
##### install_nextcloud.sh
#### portainer
##### install_agent.sh
##### install_portainer.sh
##### update_portainer.sh
### install_ansible.sh
### install_composer.sh
### Install Composer
`install_composer.sh` is a script I whipped up to quickly install **Composer** on development machines (mostly on VM's).
### install_defaults-GUI.sh
### install_defaults-Non_GUI.sh
### install_docker-server.sh
### install_dotnet-core-2.0.sh
### install_etcher.sh
### install_filebot.sh
### install_gofish.sh
### install_java.sh
### install_jellyfin-mediaserver.sh
### install_jitsi.sh
### install_LEMP.sh
### install_libreoffice-de.sh
### install_lynis.sh
### install_mariadb.sh
### install_monitorix.sh
### install_nvm.sh
### install_openvpn-server.sh
### install_pritunl.sh
### install_webmin.sh
### install_webrtc-server.sh
### install_yarn.sh
### kubernetes
#### aliases
#### init_kubernetes.sh
#### install_devspace.sh
#### install_kubernetes.sh
### mariadb-installieren.txt
### plex
#### install-tautulli.sh
#### install-update-plex.sh
### powerline
#### bashrc
#### font-patcher.py
#### install_powerline.sh
#### powerline-shell.json
### raspberry_pi
#### install_defaults_RPi.sh
## intern_ip.sh
## LICENSE
## make_workdir.sh
### Workbench-Script
Before starting a new project, I usually generate a Folder with the current date, where I put my project folders into.
e.g.: ~/Dokumente/Workbench/2018/01/2018-01-04/
There I generate my project folders for the day.
Only if a project pens our, will I move it to my **main** `projects folder`.
I generate the foldername with the current date via:
WorkDirPath=~/Dokumente/Workbench/$(date +"%Y")/$(date +"%m")/$(date +"%Y-%m-%d")
And create the folder via **mkdir** with the parameter `-p` which will show no errors if a folder already exists, while creating that path.
#### Usage
sh make_workdir.sh # Simply creates the folder with the path of the current date.
## prowl.sh
## README.md
## sshd
### configure-sshd-google-totp.sh
### configure-sshd-minimum.sh
### configure-sshd-standard.sh
## tiling_wm_apps
### install_default_minimal_apps.sh
### install_terminoligy_console.sh
## Todos
### RootKitHunter_installieren.txt
## update.sh
### Debian Update-Script
This script is intended for developer-machines.
**I would not recommend using this on a productive system.**
This script first checks if you are **root**.
If you are `not` **root**, all `apt-get`-commands will run with `sudo`, assuming you are in the **sudo-group**.
SUDO=''
if [ $(id -u) -ne 0 ]; then
SUDO='sudo'
echo "Your not root."
fi
The following `apt-get`-commands will be issued:
$SUDO apt-get update -y
$SUDO apt-get upgrade -y
$SUDO apt-get dist-upgrade -y
$SUDO apt-get autoremove -y
$SUDO apt-get autoclean -y
The script will not ask for further permission, because everything is automatically approved.
#### Usage
sh update.sh # Updates the system and approves every change (Must be root or a SUDO-User).
## vultr
### info.txt
### update-rdp-firewall.sh
MIT License
<file_sep>/installers/kubernetes/install_kubernetes.sh
#!/usr/bin/env bash
#title: install_kubernetes.sh
#description: This script installs Kubernetes (kubelet, kubeadm, kubectl).
# Must be root or SUDO-User to run script successfully.
#author: Michael Muyakwa
#created: 2020-08-01
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_kubernetes.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update && $SUDO apt-get install -y apt-transport-https curl
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | $SUDO apt-key add -
# Write (not append) the repo file so re-running the script does not pile
# up duplicate "deb" lines in kubernetes.list (previously used `tee -a`).
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | $SUDO tee /etc/apt/sources.list.d/kubernetes.list
$SUDO apt-get update
$SUDO apt-get install -y kubelet kubeadm kubectl
# Pin the packages so a routine apt upgrade cannot pull the node out of
# version skew with the cluster (as recommended by the kubeadm docs).
$SUDO apt-mark hold kubelet kubeadm kubectl
# Install the latest stable kubectl binary.
curl -LO https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl
chmod +x ./kubectl
$SUDO mv ./kubectl /usr/local/bin/kubectl
$SUDO systemctl daemon-reload
$SUDO systemctl restart kubelet
$SUDO systemctl enable kubelet && $SUDO systemctl start kubelet<file_sep>/installers/install_webrtc-server.sh
#!/usr/bin/env bash
# This script installs "Spreed WebRTC"-Server (Video WebChat) on Debian-based Systems.
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2019-04-07
# License: MIT
# https://github.com/strukturag/spreed-webrtc
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
git clone https://github.com/strukturag/spreed-webrtc.git
sleep 2
cd spreed-webrtc || exit
#sudo snap install go --classic
# Build dependencies: the server is written in Go, the assets need node.
$SUDO apt install nodejs golang-go autoconf automake -y
sh autogen.sh
./configure
make
make get
make assets
make binary
cp server.conf.in server.conf
# Edit server.conf "listen = 127.0.0.1:8080" to "listen = 0.0.0.0:8080" to allow external access.
sed -i -e 's/127.0.0.1/0.0.0.0/g' server.conf
# BUGFIX: print the URL BEFORE starting the server. ./spreed-webrtc-server
# runs in the foreground, so a message echoed after it would only appear
# once the server exits.
echo "Spreed WebRTC is now running on http://hostname:8080"
./spreed-webrtc-server
<file_sep>/all-update.sh
#!/usr/bin/env bash
#title: all-update.sh
#description: This script keeps my (debian) docker-systems up to date.
#author: <NAME>
#created: 2022-11-26
#updated: 2022-11-26
#version: 1.1
#license: MIT
#usage: ./all-update.sh
#==============================================================================
# Save current directory.
# BUGFIX: the old "set PWD = pwd" was csh syntax (in bash it only set the
# positional parameters), and "cd \"$PWD\"" at the end was a no-op because
# $PWD always holds the *current* directory. Capture the start directory
# in a separate variable instead.
START_DIR="$PWD"
# Goto Script-Folder
cd "$HOME/scripts/bash-scripts" || exit
# Check if "git" is installed
if [ -x "$(command -v git)" ]; then
    echo "Pull GIT"
    # Update the "git".
    git pull origin master
fi
# Update System
sh update.sh
# Check if "docker" is installed
if [ -x "$(command -v docker)" ]; then
    echo "Update docker"
    # Check if Portainer is installed (grep -c counts matching containers)
    if [ "$(docker ps -a | grep -c portainer)" -gt 0 ]; then
        # Update Portainer-Container
        cd "installers/docker/portainer" || exit
        sh update_portainer.sh
    else
        echo "Portainer seems not to be installed."
    fi
fi
# Return to where we started.
cd "$START_DIR"<file_sep>/installers/install_java.sh
#!/usr/bin/env bash
# This script installs Java-JDK (headless) on the system.
# Author: <NAME>, 2019-10-30
# License: MIT
#
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update -y
# BUGFIX: added -y so the install is as non-interactive as the update above;
# without it the script stalls on the apt confirmation prompt.
$SUDO apt-get install default-jdk-headless -y
<file_sep>/extern_ip.sh
#!/usr/bin/env bash
#title: extern_ip.sh
#description: Show public IP
#author: <NAME>
#created: 2018-01-04
#updated: N/A
#version: 1.0
#license: MIT
#usage: ./extern_ip.sh
#==============================================================================
# Fetch the check page quietly to stdout, then extract the first dotted-quad
# IPv4 address (\< and \> anchor on word boundaries) from it.
page="$(wget http://checkip.dyndns.org/ -q -O -)"
printf '%s\n' "$page" |
grep -Eo '\<[[:digit:]]{1,3}(\.[[:digit:]]{1,3}){3}\>'
<file_sep>/installers/install_mariadb.sh
#!/usr/bin/env bash
#title: install_mariadb.sh
#description: This script installs MariaDB.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-06-12
#updated: N/A
#version: 0.5
#license: MIT
#usage: ./install_mariadb.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install the server package non-interactively.
$SUDO apt-get install mariadb-server -y
# Interactive hardening wizard (root password, remove anonymous users, ...).
$SUDO mysql_secure_installation
# Print follow-up instructions: how to grant the root account full privileges.
echo ''
echo 'REMEMBER!'
echo ''
echo 'Login via "mysql -u root -p"'
echo ''
echo 'Execute the following Command:'
echo ''
echo "GRANT ALL PRIVILEGES on *.* to 'root'@'localhost' IDENTIFIED BY 'YOUR_PASSWORD';"
echo 'FLUSH PRIVILEGES;'
<file_sep>/prowl.sh
#!/usr/bin/env bash
# Send a push notification through the Prowl API (https://api.prowlapp.com).
# The API key must be exported as PROWL_APIKEY before calling this script.

# Print the one-line usage hint and abort with a non-zero status.
usage()
{
echo "
Usage: prowl.sh (-vr) [-s Subject] [-a Application] (-p Priority {-2 => 2}) message
Try 'prowl.sh -h' for more information."
exit 1
}
# Print the full option description and abort.
help()
{
echo "
Usage: prowl.sh (-vr) [-s Subject] [-a Application] (-p Priority {-2 => 2}) message
Options:
-s SUBJECT (Required)
The subject line of the message that is being sent
-a APPLICATION (Required)
The application the message is coming from
-p {-2 => 2}
The priority of the message.
-h
Shows this help text"
exit 1
}
#set the API key from the environment variable
# ([ -n ] instead of the double-negative [ ! -z ])
if [ -n "$PROWL_APIKEY" ]; then
API_KEY=$PROWL_APIKEY
else
echo "Prowl API Key not set as an environment variable. Add \"export PROWL_APIKEY={key}\" to your .bash_profile or .profile"
exit 1
fi
#Set defaults
PRIORITY=0
# process options
while getopts s:a:p:vrh o
do case "$o" in
s) SUBJECT=$OPTARG;;
a) APPLICATION=$OPTARG;;
p) PRIORITY=$OPTARG;;
h) help;;
[?]) usage;;
esac
done
# shift the option values out
shift $((OPTIND - 1))
#use everything but the options as the message to send
MESSAGE=$*
#Ensure subject is supplied as it's required
if [ -z "$SUBJECT" ]; then
echo "Subject is required. Use \"-s\" to set it."
usage
exit 1
fi
#Ensure app is supplied as it's required
if [ -z "$APPLICATION" ]; then
echo "Application is required. Use \"-a\" to set it."
usage
exit 1
fi
# BUGFIX: "cannoy" -> "cannot" in both range-check messages below.
if [ "$PRIORITY" -lt "-2" ]; then
echo "Priority cannot be lower than -2 (Very Low)"
usage
exit 1
fi
if [ "$PRIORITY" -gt "2" ]; then
echo "Priority cannot be higher than 2 (Emergency)"
usage
exit 1
fi
#Ensure that a message was provided after argument parsing
if [ -z "$MESSAGE" ]; then
echo "No message was provided to send."
usage
exit 1
fi
# Send off the message to prowl
call=$(curl -s -d "apikey=$API_KEY&priority=$PRIORITY&application=$APPLICATION&event=$SUBJECT&description=$MESSAGE" https://api.prowlapp.com/publicapi/add)
<file_sep>/installers/bash_profile/bash_aliases
# Aliases for day-to-day shell work; sourced from ~/.bashrc as ~/.bash_aliases.
alias sshr='ssh-keygen -R' #hostname
alias update='sh ~/scripts/bash-scripts/update.sh'
alias workdir='bash ~/scripts/bash-scripts/make_workdir.sh'
alias gitit='sh ~/scripts/bash-scripts/git_it.sh'
alias gc='git clone'
alias yt="youtube-dl -f 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/mp4' -l"
alias sshh="ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" #user@remote_machine
alias dotnew="dotnet new console -o" #AppName
alias dotlin="dotnet publish -c Release -r ubuntu.16.10-x64"
alias dotwin="dotnet publish -c Release -r win10-x64"
alias sshsetkey="ssh-copy-id -i ~/.ssh/id_rsa.pub -o StrictHostKeyChecking=no" #user@remote_machine
alias scpc="scp -o StrictHostKeyChecking=no" #file computername:path
# NOTE(review): because of the double quotes, $HOSTNAME and the $(ifconfig ...)
# substitution are expanded once, when this file is sourced — not each time the
# alias is used. Presumably the IP should be current at call time; confirm
# before switching to single quotes.
alias prowl="sh ~/scripts/bash-scripts/prowl.sh -s $HOSTNAME -a Bash $(ifconfig | grep -Eo 'inet (Adresse:|address:|addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1')"
<file_sep>/installers/install_monitorix.sh
#!/usr/bin/env bash
# This script installs Monitorix. (http://www.monitorix.org)
# Author: <NAME>, 2018-01-21
# License: MIT
#
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update
# BUGFIX: added -y; without it the dependency install stalls on a prompt.
$SUDO apt-get install rrdtool perl libwww-perl libmailtools-perl libmime-lite-perl librrds-perl libdbi-perl libxml-simple-perl libhttp-server-simple-perl libconfig-general-perl libio-socket-ssl-perl -y
wget http://www.monitorix.org/monitorix_3.10.0-izzy1_all.deb
$SUDO dpkg -i monitorix*.deb
# Pull in any dependencies dpkg could not resolve on its own.
$SUDO apt-get -f install -y
rm monitorix*.deb
# BUGFIX: restarting a system service needs root as well.
$SUDO service monitorix restart
echo "Monitorix is now available under: http://localhost:8080/monitorix/"
<file_sep>/installers/powerline/install_powerline.sh
#!/usr/bin/env bash
#title: install_powerline.sh
#description: This script installs Powerline on the system.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-02-12
#updated: 2020-09-21
#version: 1.9
#license: MIT
#usage: sh install_powerline.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
# Install dependencies (powerline and pip [python]) and needed Fonts.
$SUDO apt-get install powerline fonts-powerline python3-pip -y
# Install "powerline-shell"
$SUDO pip3 install powerline-shell
# Use my config for Powerline in current profile
cp powerline-shell.json ~/.powerline-shell.json
# Add powerline-shell to bashrc
cat bashrc >> ~/.bashrc
# Add powerline-shell to bashrc for all users.
# BUGFIX: "$SUDO cat bashrc >> /etc/bash.bashrc" performed the *redirection*
# as the invoking user, so it failed for non-root; "sudo tee -a" does the
# append with elevated rights.
cat bashrc | $SUDO tee -a /etc/bash.bashrc > /dev/null
<file_sep>/installers/install_etcher.sh
#!/usr/bin/env bash
# Install balenaEtcher (image flasher) from its official apt repository.
# https://github.com/balena-io/etcher#debian-and-ubuntu-based-package-repository-gnulinux-x86x64
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Register the Etcher repository and fetch its signing key from the keyserver.
echo "deb https://deb.etcher.io stable etcher" | $SUDO tee /etc/apt/sources.list.d/balena-etcher.list
$SUDO apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 379CE192D401AB61
$SUDO apt-get update
$SUDO apt-get install balena-etcher-electron -y
<file_sep>/tiling_wm_apps/install_default_minimal_apps.sh
#!/usr/bin/env bash
#title: install_default_minimal_apps.sh
#description: This script installs a minimal set of default desktop apps
# (the qutebrowser browser and the PCManFM file manager).
# (The previous description mentioning Powerline was copied from
# install_powerline.sh and did not match this script.)
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2020-09-21
#updated: -
#version: 0.8
#license: MIT
#usage: sh install_default_minimal_apps.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install the apps non-interactively.
$SUDO apt-get install qutebrowser pcmanfm -y
<file_sep>/generate_key.sh
#!/usr/bin/env bash
#title: generate_key.sh
#description: Generate a key
#author: <NAME>
#created: 2020-09-19
#updated: N/A
#version: 1.0
#license: MIT
#usage: ./generate_key.sh
#==============================================================================
# Emit one 64-character alphanumeric key built from kernel randomness:
# reduce /dev/urandom to [a-zA-Z0-9], wrap at 64 characters, keep one line.
tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 64 | head -n 1
<file_sep>/installers/powerline/bashrc
# Powerline Section-START
# Rebuild the prompt after every command: powerline-shell receives the exit
# status of the previous command ($?) so it can color its status segment.
function _update_ps1() {
    PS1=$(powerline-shell $?)
}
# Hook into PROMPT_COMMAND only on capable terminals (not the bare linux
# console) and only if the hook is not already registered.
if [[ $TERM != linux && ! $PROMPT_COMMAND =~ _update_ps1 ]]; then
    PROMPT_COMMAND="_update_ps1; $PROMPT_COMMAND"
fi
# Powerline Section-END
<file_sep>/git_it.sh
#!/usr/bin/env bash
#title: git_it.sh
#description: This is my tiny GIT-commit-script.
#author: <NAME>
#created: 2018-01-04
#updated: 2018-01-19
#version: 1.2
#license: MIT
#usage: ./git_it.sh
#==============================================================================
# Check if ".git"-folder exists
directory="$PWD/.git"
if [ ! -d "${directory}" ]
then
    # When ".git"-folder does !NOT! exists
    echo "Initializing local Git-Repo.";
    # Initialize the local Git-Repository
    git init
    git config --global push.default simple
    # Downloading Standard-Files for local Git-Repository
    # (-nc: never overwrite files that already exist)
    wget -nc https://raw.githubusercontent.com/mmuyakwa/bash-scripts/master/.gitignore
    wget -nc https://raw.githubusercontent.com/mmuyakwa/bash-scripts/master/LICENSE
    wget -nc https://raw.githubusercontent.com/mmuyakwa/bash-scripts/master/.editorconfig
    wget -nc https://raw.githubusercontent.com/mmuyakwa/bash-scripts/master/.gitattributes
    echo "Remember to edit README.md!";
    wget -nc https://raw.githubusercontent.com/mmuyakwa/bash-scripts/master/README.md
    # Remind me to set the remote Repo
    echo "!---!";
    echo "Set remote repository";
    echo "e.g.:";
    echo "git remote add origin <EMAIL>:mmuyakwa/bash-scripts.git";
    echo "Check your .gitignore!";
    echo "!---!";
else
    # When ".git"-folder !DOES! exists
    # Show me changes
    echo "Git-Status:";
    git status -s
    # Stage those changes
    echo "Adding changes to Staging-Area.";
    git add -A
    # Generate Date for Commit-Text
    now=$(date +"%Y-%m-%d %H:%M Uhr")
    # Commit changes (If a string was added, use the added string.)
    echo "Committing changes to local Repo.";
    if [ -z "$1" ]; then
        #zero length argument
        git commit -m "Committed at: $now (automatic via script) [ci skip]"
    else
        #non-zero length
        git commit -m "$1 - Committed at: $now"
    fi
    #git remote add origin <EMAIL>:mmuyakwa/bash-scripts.git
    # Check if a remote Repo is set.
    if grep -q "url = " ${directory}/config
    then
        # Remote Repo ist set. Push changes to remote Repo.
        echo "Pushing to remote Git-Repo.";
        git push -u origin master
    else
        # Remind me to set the remote Repo
        echo "!---!";
        echo "Set remote repository";
        echo "e.g.:";
        echo "git remote add origin <EMAIL>:mmuyakwa/bash-scripts.git";
        echo "For http/s repos you can use 'git config credential.helper store'"
        echo "!---!";
    fi
fi
# Setting Version-Tags: see= https://git-scm.com/book/en/v2/Git-Basics-Tagging
# Set Version via: git tag -a v1.4 -m "my version 1.4"
# Publish tags: git push origin --tags
<file_sep>/installers/install_docker-server.sh
#!/usr/bin/env bash
#title: install_docker-server.sh
#description: This script installs Docker on a Server.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-12-13
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_docker-server.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update
$SUDO apt-get install apt-transport-https ca-certificates curl gnupg2 software-properties-common python-pip -y
# Register Docker's official apt repository and its signing key.
$SUDO curl -fsSL https://download.docker.com/linux/debian/gpg | $SUDO apt-key add -
$SUDO add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"
$SUDO apt-get update
$SUDO apt-get install docker-ce -y
$SUDO systemctl enable docker
# Add user to "docker"-group
[ "$(id -u)" -ne 0 ] && { sudo usermod -aG docker "$USER"; echo "You need to logout and back in to access docker."; }
#$SUDO pip install docker-compose
# Install a pinned docker-compose binary (1.26.2) for this platform.
$SUDO curl -L "https://github.com/docker/compose/releases/download/1.26.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
$SUDO chmod +x /usr/local/bin/docker-compose
docker-compose --version
# Create folder for my docker-compose files.
if [ ! -d "$HOME/scripts/Docker" ]; then
    mkdir -p "$HOME/scripts/Docker"
fi
<file_sep>/installers/install_defaults-GUI.sh
#!/usr/bin/env bash
# This script installs my usual suspects on Debian-based Systems.
# This Script is intended for systems WITH a GUI
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2018-01-19
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Set Timezone to Berlin
$SUDO cp /usr/share/zoneinfo/Europe/Berlin /etc/localtime
# Install my usual suspects
# NOTE(review): gksudo was dropped from newer Debian/Ubuntu releases and
# --force-yes (below) is deprecated; confirm the target release before
# relying on this script unchanged.
$SUDO apt-get install filezilla remmina mc screen build-essential software-properties-common gdebi bc redshift-gtk retext locales-all curl git libavcodec-extra gksudo libavcodec-extra p7zip-full p7zip-rar ffmpeg rename awscli apt-transport-https clamav -y
$SUDO add-apt-repository ppa:maarten-baert/simplescreenrecorder -y
$SUDO add-apt-repository ppa:obsproject/obs-studio -y
$SUDO apt-get update
$SUDO apt-get install simplescreenrecorder -y
$SUDO apt-get install obs-studio -y
# Download Google-Chrome DEB.
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
# First install attempt; a non-zero status means dependencies are missing.
$SUDO dpkg -i google-chrome-stable_current_amd64.deb > /dev/null 2>&1
if [ $? -gt 0 ]; then
    # Download missing dependencies for Google-Chrome.
    $SUDO apt-get -f --force-yes --yes install > /dev/null 2>&1
fi
# Retry now that dependencies are present, then clean up the .deb on success.
$SUDO dpkg -i google-chrome-stable_current_amd64.deb && rm google-chrome-stable_current_amd64.deb
<file_sep>/installers/plex/install-tautulli.sh
#!/usr/bin/env bash
# This script installs Tautulli for Plex Media Server (PMS) on Debian-based Systems.
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2019-03-10
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Tautulli will be installed to /opt/Tautulli.
$SUDO apt-get install git-core -y
# Install prerequisites:
$SUDO apt-get install python python-setuptools tzdata -y
cd /opt || exit
$SUDO git clone https://github.com/Tautulli/Tautulli.git
# Run the service under a dedicated, unprivileged system user.
$SUDO addgroup tautulli && $SUDO adduser --system --no-create-home tautulli --ingroup tautulli
$SUDO chown tautulli:tautulli -R /opt/Tautulli
cd Tautulli || exit
# python Tautulli.py to start Tautulli
# Tautulli will be loaded in your browser or listening on http://localhost:8181
# To run Tautulli in the background on startup, refer to Install as a daemon
$SUDO cp /opt/Tautulli/init-scripts/init.systemd /lib/systemd/system/tautulli.service
$SUDO systemctl daemon-reload
$SUDO systemctl enable tautulli.service
# Start now with the following command:
$SUDO systemctl start tautulli.service
echo "Tautulli will be listening on http://localhost:8181"<file_sep>/installers/install_nvm.sh
#!/usr/bin/env bash
#title: install_nvm.sh
#description: This script installs "NVM" (Node Version Manager). See: https://github.com/creationix/nvm
#author: <NAME>
#created: 2018-01-23
#updated: N/A
#version: 0.7
#license: MIT
#usage: ./install_nvm.sh
#==============================================================================
wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.33.8/install.sh | bash
# BUGFIX: the installer only edits ~/.bashrc, so "nvm" is not yet a command
# in THIS shell. Source nvm.sh before using it, otherwise the following
# "nvm install" fails with "command not found".
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
nvm install --lts
echo "Node.js version: \"$(node -v)\""
echo "NPM version: \"$(npm -v)\""
<file_sep>/installers/kubernetes/install_devspace.sh
#!/usr/bin/env bash
#title: install_devspace.sh
#description: This script installs DevSpace.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2021-01-27
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_devspace.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Scrape the latest-release page for the linux-amd64 asset URL, download it
# as "devspace" and make it executable.
curl -s -L "https://github.com/loft-sh/devspace/releases/latest" | sed -nE 's!.*"([^"]*devspace-linux-amd64)".*!https://github.com\1!p' | xargs -n 1 curl -L -o devspace && chmod +x devspace;
$SUDO install devspace /usr/local/bin;
echo "Information on how to use:"
echo "https://devspace.sh/cli/docs/getting-started/initialize-project"
<file_sep>/installers/install_openvpn-server.sh
#!/usr/bin/env bash
# This script installs openvpn-Access-Server.
# This Script is intended for systems WITHOUT a GUI
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2019-12-25
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
# See: https://openvpn.net/vpn-software-packages/#modal-items
$SUDO apt update && $SUDO apt -y install ca-certificates wget net-tools
wget -qO - https://as-repository.openvpn.net/as-repo-public.gpg | $SUDO apt-key add -
# BUGFIX: "$SUDO echo ... > file" performed the redirection as the calling
# user, which fails without root; "sudo tee" writes the repo file with
# elevated rights instead.
echo "deb http://as-repository.openvpn.net/as/debian stretch main" | $SUDO tee /etc/apt/sources.list.d/openvpn-as-repo.list > /dev/null
# BUGFIX: the second apt call was missing $SUDO.
$SUDO apt update && $SUDO apt -y install openvpn-as
# BUGFIX: "No run" -> "Now run" in the user-facing hint.
echo "Now run '$SUDO passwd openvpn'"
<file_sep>/installers/bash_profile/install_aliases.sh
#!/usr/bin/env bash
#title: install_aliases.sh
#description: This script installs my bash_aliases.
#author: <NAME>
#created: 2018-05-28
#updated:
#version: 0.5
#license: MIT
#usage: sh install_aliases.sh
#==============================================================================
# Copy the alias collection next to this script into the user's home so an
# interactive bash (which sources ~/.bash_aliases) picks it up.
cp bash_aliases ~/.bash_aliases
<file_sep>/installers/docker/nextcloud/install_nextcloud.sh
#!/usr/bin/env bash
# Bring up the Nextcloud docker-compose stack.
cd /root/ || exit
# BUGFIX: -p, so a re-run does not abort because the directory already exists.
mkdir -p nextcloud
cd "$HOME/scripts/bash-scripts/docker/nextcloud" || exit
docker-compose up -d
# BUGFIX: "passowrds" -> "passwords" in the user-facing message.
echo 'Get the MySQL-passwords from the "docker-compose.yml".'
<file_sep>/installers/install_composer.sh
#!/usr/bin/env bash
#title: install_composer.sh
#description: This script installs composer ( https://getcomposer.org/download/ ).
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-01-05
#updated: N/A
#version: 0.8
#license: MIT
#usage: ./install_composer.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install PHP-CLI in v7.0 (php 7.0)
$SUDO apt-get install php-cli -y
# Dependencies for Laravel
$SUDO apt-get install php-zip php-gd php-mbstring php-xml php-mysql php-pear php-token-stream php-json curl php-curl openssl -y
# Script from https://getcomposer.org/doc/faqs/how-to-install-composer-programmatically.md
# Verify the installer against the published SHA-384 signature before running it.
EXPECTED_SIGNATURE=$(wget -q -O - https://composer.github.io/installer.sig)
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
ACTUAL_SIGNATURE=$(php -r "echo hash_file('SHA384', 'composer-setup.php');")
if [ "$EXPECTED_SIGNATURE" != "$ACTUAL_SIGNATURE" ]
then
    >&2 echo 'ERROR: Invalid installer signature'
    rm composer-setup.php
    exit 1
fi
php composer-setup.php --quiet
RESULT=$?
rm composer-setup.php
# Set composer global
$SUDO mv composer.phar /usr/local/bin/composer
# Install Laravel globally (a PHP MVC-Framework)
composer global require "laravel/installer"
# Add Composer's global bin directory to $PATH for future shells.
echo 'export PATH="$PATH:$HOME/.config/composer/vendor/bin"' >> ~/.bashrc
# install Node.js
curl -sL https://deb.nodesource.com/setup_8.x | $SUDO -E bash -
$SUDO apt-get install -y nodejs
# Exit with the status of the composer installer itself.
exit $RESULT
<file_sep>/installers/install_jellyfin-mediaserver.sh
#!/usr/bin/env bash
# This script installs the Jellyfin media server on Debian-based systems.
# CONSISTENCY: added the shebang and the root/SUDO handling that every other
# script in this collection uses, and -y flags so it runs unattended.
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
#Install HTTPS transport for APT if you haven't already:
$SUDO apt install apt-transport-https -y
#Import the GPG signing key (signed by the Jellyfin Team):
wget -O - https://repo.jellyfin.org/debian/jellyfin_team.gpg.key | $SUDO apt-key add -
#Add a repository configuration at /etc/apt/sources.list.d/jellyfin.list:
echo "deb [arch=$( dpkg --print-architecture )] https://repo.jellyfin.org/debian $( lsb_release -c -s ) main" | $SUDO tee /etc/apt/sources.list.d/jellyfin.list
#NOTE: Supported releases are: stretch and buster.
#Update APT repositories:
$SUDO apt update
#Install Jellyfin:
$SUDO apt install jellyfin -y
#Manage the Jellyfin system service with your tool of choice:
$SUDO service jellyfin status
$SUDO systemctl restart jellyfin
# Print the web-UI URL ($HOSTNAME expands directly; no need for $(echo ...)).
echo http://$HOSTNAME:8096
<file_sep>/installers/security/install_crowdsec.sh
#!/usr/bin/env bash
# This script installs CrowdSEC.
# This Script is intended for systems WITHOUT a GUI
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2022-04-21
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Download repository
curl -s https://packagecloud.io/install/repositories/crowdsec/crowdsec/script.deb.sh | $SUDO bash
# Install CrowdSEC
$SUDO apt install crowdsec -y
# Install the bouncer (enforces CrowdSec decisions via iptables)
$SUDO apt install crowdsec-firewall-bouncer-iptables -y
# Update the information for CrowdSEC
$SUDO cscli hub update
$SUDO cscli hub upgrade
# Show triggered events
$SUDO cscli decisions list
<file_sep>/installers/install_libreoffice-de.sh
#!/usr/bin/env bash
#title: install_libreoffice-de.sh
#description: This script installs LibreOffice in German on a Desktop.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-12-14
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_libreoffice-de.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update
# Calc + Writer plus the German help and localisation packages.
$SUDO apt-get install libreoffice-calc libreoffice-writer libreoffice-help-de libreoffice-l10n-de -y
<file_sep>/installers/install_filebot.sh
#!/bin/sh -xu
# Install FileBot (media renamer) from its official apt repository.
# -x traces every command, -u treats unset variables as errors.
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install Java with Desktop-client
#sudo apt-get install default-jdk -y
# Install Java on headless-client
$SUDO apt-get install default-jdk-headless -y
# 1. Import signing keys
curl -fsSL "https://raw.githubusercontent.com/filebot/plugins/master/gpg/maintainer.pub" | $SUDO apt-key add "-"
# 2. Add deb repository to sources.list
echo "deb [arch=amd64] https://get.filebot.net/deb/ stable main" | $SUDO tee /etc/apt/sources.list.d/filebot.list
# 3. Update package index
$SUDO apt-get update
# 4. Install FileBot and its dependencies
$SUDO apt-get install apt-transport-https -y
$SUDO apt-get install filebot #--install-suggests
$SUDO apt install libmediainfo0v5 -y # Prevent: Unable to load amd64 (64-bit) native library libmediainfo.so
#####NEWS!
# It's easier to install the legacy-Version, which is free of charge.
# AMD64 https://downloads.sourceforge.net/project/filebot/filebot/FileBot_4.7.9/filebot_4.7.9_amd64.deb
# i386 https://downloads.sourceforge.net/project/filebot/filebot/FileBot_4.7.9/filebot_4.7.9_i386.deb
# Raspberry Pi - Download "filebot_4.7.9_armhf.deb" from http://r-massive.cellarseer.com/filebot-4-7-9-legacy-build-archive/
# Set your Password for OpenSubtitle.org
# filebot -script fn:configure
<file_sep>/firstrun.sh
#!/usr/bin/env bash
# First-run bootstrap: localises the system, clones this script collection
# and runs the base install/update scripts.
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
#if [ -f /etc/redhat-release ]; then
# yum update
#fi
#if [ -f /etc/lsb-release ]; then
# apt-get update
#fi
$SUDO apt-get update -y
$SUDO apt-get install git -y
# Set locale
$SUDO locale-gen de_DE.UTF-8
$SUDO update-locale LANG=de_DE.UTF-8
# Set Timezone to Berlin
$SUDO cp /usr/share/zoneinfo/Europe/Berlin /etc/localtime
# cd with no argument changes to $HOME.
cd || exit
if [ ! -d "scripts" ]; then
    mkdir scripts
fi
cd scripts || exit
# Clone the repo on first run, otherwise just pull the latest changes.
if [ ! -d "bash-scripts" ]; then
    git clone https://github.com/mmuyakwa/bash-scripts.git
    cd bash-scripts || exit
else
    cd bash-scripts || exit
    git pull origin master
fi
sh update.sh
sh installers/install_defaults-Non_GUI.sh
sh sshd/configure-sshd-minimum.sh
cd installers/powerline/ || exit
sh install_powerline.sh
# Uninstall exim4 - port 25
$SUDO apt purge exim4-base exim4-config rpcbind -y
# Install "has" - https://github.com/kdabir/has
cd /tmp/ || exit
git clone https://github.com/kdabir/has.git && cd has && $SUDO make install
<file_sep>/installers/docker/install_docker-compose.sh
#!/usr/bin/env bash
#title: install_docker-compose.sh
#description: This script installs "docker-compose" https://github.com/docker/compose/
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2020-10-08
#updated: N/A
#version: 0.2
#license: MIT
#usage: ./install_docker-compose.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "You are not root."; echo "Running commands with SUDO."; }
DESTINATION='/usr/local/bin/docker-compose'
# Remove any previously installed variants first.
$SUDO apt-get remove docker-compose -y
# BUGFIX: added -y; a bare "pip uninstall" asks for confirmation and hangs
# this otherwise non-interactive script.
$SUDO pip uninstall docker-compose -y
if [ -f "$DESTINATION" ]; then
    $SUDO rm "$DESTINATION"
fi
# jq is needed to parse the GitHub release metadata below.
$SUDO apt-get install jq -y
# Ask the GitHub API for the name of the latest release (e.g. "1.27.4").
compose_version="$(curl --silent https://api.github.com/repos/docker/compose/releases/latest | jq .name -r)"
$SUDO curl -L https://github.com/docker/compose/releases/download/"$compose_version"/docker-compose-"$(uname -s)"-"$(uname -m)" -o "$DESTINATION"
# 755 already includes the execute bits; the former extra "chmod +x" was
# redundant and has been dropped.
$SUDO chmod 755 "$DESTINATION"
docker-compose --version
<file_sep>/installers/install_gofish.sh
#!/usr/bin/env bash
#title: install_gofish.sh
#description: This script installs GoFish.
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2019-06-02
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_gofish.sh
#==============================================================================
# Apparently no SUDO needed.
# root is always user_id 0
##SUDO=''
##[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# curl -fsSL https://raw.githubusercontent.com/fishworks/gofish/master/scripts/install.sh | $SUDO bash
# Run the official installer script.
curl -fsSL https://raw.githubusercontent.com/fishworks/gofish/master/scripts/install.sh | bash
# Initialize
gofish init
# Once you have GoFish up and running, you can upgrade at any time using
gofish install gofish
gofish upgrade gofish
<file_sep>/eZServerMonitor.sh
#!/usr/bin/env bash
# Re-exec under bash if invoked with another shell: the script relies on
# bashisms (arrays, [[ ]], indirect ${!var} expansion).
if [ ! "$BASH_VERSION" ] ; then
    exec /bin/bash "$0" "$@"
fi
# **************************************************** #
#                                                      #
#               eZ Server Monitor `sh                  #
#                                                      #
#               ***************************            #
#                                                      #
# @name     eZ Server Monitor `sh                      #
# @author   ShevAbam                                   #
# @website  ezservermonitor.com                        #
# @created  30 nov. 2017                               #
# @version  2.3                                        #
#                                                      #
# **************************************************** #
# My addition to this script
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# ************************************************************ #
#                     * [ CONFIG ] *                           #
# ************************************************************ #
# Disk usage - Show or hide virtual mountpoints (tmpfs)
DISK_SHOW_TMPFS=false
# Service who returns WAN IP
GET_WAN_IP="https://www.ezservermonitor.com/myip"
# Hosts to ping
PING_HOSTS=("google.com" "facebook.com" "yahoo.com")
# Services port number to check
# syntax :
# SERVICES_NAME[port_number]="label"
# SERVICES_HOST[port_number]="localhost"
SERVICES_NAME[21]="FTP Server"
SERVICES_HOST[21]="localhost"
SERVICES_NAME[22]="SSH"
SERVICES_HOST[22]="localhost"
SERVICES_NAME[80]="Web Server"
SERVICES_HOST[80]="localhost"
SERVICES_NAME[3306]="Database"
SERVICES_HOST[3306]="localhost"
# Temperatures blocks (true for enable)
TEMP_ENABLED=true
# Text color : RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE
# (holds the NAME of a color variable; resolved later via ${!THEME_TEXT})
THEME_TEXT=GREEN
# Title color : WHITE_ON_GREY, WHITE_ON_RED, WHITE_ON_GREEN, WHITE_ON_BLUE, WHITE_ON_MAGENTA, WHITE_ON_CYAN, BLACK_ON_YELLOW
THEME_TITLE=WHITE_ON_GREY
# ********************************************************** #
#                      * [ VARS ] *                          #
# ********************************************************** #
# Constants -- DON'T TOUCH !!
ESM_NAME="eZ Server Monitor \`sh"
ESM_VERSION="2.3"
ESM_AUTHOR="ShevAbam"
ESM_CREATED="30 nov. 2017"
ESM_URL="https://www.ezservermonitor.com"
# Colors (ANSI SGR escape sequences, used with "echo -e")
NC="\e[0m"
RED="\e[31;40m"
GREEN="\e[32;40m"
YELLOW="\e[33;40m"
BLUE="\e[34;40m"
MAGENTA="\e[35;40m"
CYAN="\e[36;40m"
WHITE="\e[37;40m"
# Styles
BOLD="\e[1m"
RESET="\e[0m"
WHITE_ON_GREY="\e[100;97m"
WHITE_ON_RED="\e[41;37m"
WHITE_ON_GREEN="\e[42;37m"
WHITE_ON_BLUE="\e[104;37m"
WHITE_ON_MAGENTA="\e[45;37m"
WHITE_ON_CYAN="\e[46;37m"
BLACK_ON_YELLOW="\e[103;30m"
# *************************************************************** #
#                      * [ FUNCTIONS ] *                          #
# *************************************************************** #
# Print $1 as a section title using the configured title theme.
# ${!THEME_TITLE} is bash indirect expansion: THEME_TITLE holds the *name*
# of a style variable (e.g. WHITE_ON_GREY), whose value is the escape code.
function makeTitle()
{
    echo -e "${BOLD}${!THEME_TITLE} $1 ${RESET}"
}
# Function : system
# Print the "System" block: OS/distro, kernel, uptime, last boot, logged-in
# user count and the current server time.
function system()
{
    OS=$(uname -s)
    # Prefer lsb_release, fall back to /etc/system-release, else scrape the
    # NAME= field out of any /etc/*-release file.
    if [ -e "/usr/bin/lsb_release" ] ; then
        DISTRO=$(/usr/bin/lsb_release -ds)
    elif [ -e "/etc/system-release" ] ; then
        DISTRO=$(cat /etc/system-release)
    else
        DISTRO=$(find /etc/*-release -type f -exec cat {} \; | grep NAME | tail -n 1 | cut -d= -f2 | tr -d '"';)
    fi
    HOSTNAME=$(hostname)
    KERNEL_INFO=$(/bin/uname -r)
    # /proc/uptime holds seconds since boot; strip the fractional part, then
    # decompose into days / hours / minutes.
    UPTIME=$(cat /proc/uptime)
    UPTIME=${UPTIME%%.*}
    UPTIME_MINUTES=$(( UPTIME / 60 % 60 ))
    UPTIME_HOURS=$(( UPTIME / 60 / 60 % 24 ))
    UPTIME_DAYS=$(( UPTIME / 60 / 60 / 24 ))
    LAST_BOOT_DATE=$(who -b | awk '{print $3}')
    LAST_BOOT_TIME=$(who -b | awk '{print $4}')
    USERS_NB=$(who | wc -l)
    CURRENT_DATE=$(/bin/date '+%F %T')
    makeTitle "System"
    echo -e "${!THEME_TEXT} Hostname\t ${WHITE}$HOSTNAME"
    echo -e "${!THEME_TEXT} OS\t\t ${WHITE}$OS $DISTRO"
    echo -e "${!THEME_TEXT} Kernel\t ${WHITE}$KERNEL_INFO"
    echo -e "${!THEME_TEXT} Uptime\t ${WHITE}$UPTIME_DAYS day(s), $UPTIME_HOURS hours(s), $UPTIME_MINUTES minute(s)"
    echo -e "${!THEME_TEXT} Last boot\t ${WHITE}$LAST_BOOT_DATE $LAST_BOOT_TIME"
    echo -e "${!THEME_TEXT} Current user(s) ${WHITE}$USERS_NB connected"
    echo -e "${!THEME_TEXT} Server datetime ${WHITE}$CURRENT_DATE"
}
# Function : load average
function load_average()
{
PROCESS_NB=$(ps -e h | wc -l)
PROCESS_RUN=$(ps r h | wc -l)
CPU_NB=$(cat /proc/cpuinfo | grep "^processor" | wc -l)
LOAD_1=$(cat /proc/loadavg | awk '{print $1}')
# LOAD_1_PERCENT=`echo $LOAD_1 | awk '{print 100 * $1}'`
LOAD_1_PERCENT=$(echo $(($(echo $LOAD_1 | awk '{print 100 * $1}') / $CPU_NB)))
if [ "$LOAD_1_PERCENT" -ge 100 ] ; then
LOAD_1_PERCENT=100;
fi
if [ $LOAD_1_PERCENT -ge 75 ] ; then
LOAD_1_COLOR=${RED}
elif [ $LOAD_1_PERCENT -ge 50 ] ; then
LOAD_1_COLOR=${YELLOW}
else
LOAD_1_COLOR=${WHITE}
fi
LOAD_2=`cat /proc/loadavg | awk '{print $2}'`
# LOAD_2_PERCENT=`echo $LOAD_2 | awk '{print 100 * $1}'`
LOAD_2_PERCENT=`echo $(($(echo $LOAD_2 | awk '{print 100 * $1}') / $CPU_NB))`
if [ $LOAD_2_PERCENT -ge 100 ] ; then
LOAD_2_PERCENT=100;
fi
if [ $LOAD_2_PERCENT -ge 75 ] ; then
LOAD_2_COLOR=${RED}
elif [ $LOAD_2_PERCENT -ge 50 ] ; then
LOAD_2_COLOR=${YELLOW}
else
LOAD_2_COLOR=${WHITE}
fi
LOAD_3=`cat /proc/loadavg | awk '{print $3}'`
# LOAD_3_PERCENT=`echo $LOAD_3 | awk '{print 100 * $1}'`
LOAD_3_PERCENT=`echo $(($(echo $LOAD_3 | awk '{print 100 * $1}') / $CPU_NB))`
if [ $LOAD_3_PERCENT -ge 100 ] ; then
LOAD_3_PERCENT=100;
fi
if [ $LOAD_3_PERCENT -ge 75 ] ; then
LOAD_3_COLOR=${RED}
elif [ $LOAD_3_PERCENT -ge 50 ] ; then
LOAD_3_COLOR=${YELLOW}
else
LOAD_3_COLOR=${WHITE}
fi
echo
makeTitle "Load Average"
echo -e "${!THEME_TEXT} Since 1 minute $LOAD_1_COLOR $LOAD_1_PERCENT% ($LOAD_1)"
echo -e "${!THEME_TEXT} Since 5 minutes $LOAD_2_COLOR $LOAD_2_PERCENT% ($LOAD_2)"
echo -e "${!THEME_TEXT} Since 15 minutes $LOAD_3_COLOR $LOAD_3_PERCENT% ($LOAD_3)"
echo -e "${!THEME_TEXT} Processes\t ${WHITE}$PROCESS_NB process, including $PROCESS_RUN running"
}
# Function : CPU
function cpu()
{
CPU_NB=`cat /proc/cpuinfo | grep -i "^processor" | wc -l`
CPU_INFO=`cat /proc/cpuinfo | grep -i "^model name" | awk -F": " '{print $2}' | head -1 | sed 's/ \+/ /g'`
CPU_FREQ=`cat /proc/cpuinfo | grep -i "^cpu MHz" | awk -F": " '{print $2}' | head -1`
if [ -z $CPU_FREQ ] ; then
CPU_FREQ=`cat /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq`
CPU_FREQ=$(( $CPU_FREQ / 1000 ))
fi
CPU_CACHE=`cat /proc/cpuinfo | grep -i "^cache size" | awk -F": " '{print $2}' | head -1`
CPU_BOGOMIPS=`cat /proc/cpuinfo | grep -i "^bogomips" | awk -F": " '{print $2}' | head -1`
echo
makeTitle "CPU"
if [ $CPU_NB -gt 1 ] ; then
echo -e "${!THEME_TEXT} Number\t ${WHITE}$CPU_NB"
fi
echo -e "${!THEME_TEXT} Model\t\t ${WHITE}$CPU_INFO"
echo -e "${!THEME_TEXT} Frequency\t ${WHITE}$CPU_FREQ MHz"
echo -e "${!THEME_TEXT} Cache L2\t ${WHITE}$CPU_CACHE"
echo -e "${!THEME_TEXT} Bogomips\t ${WHITE}$CPU_BOGOMIPS"
}
# Function : memory
function memory()
{
MEM_TOTAL=`grep MemTotal /proc/meminfo | awk '{print $2}'`
MEM_TOTAL=$(( $MEM_TOTAL / 1024 ))
MEM_FREE=`grep MemFree /proc/meminfo | awk '{print $2}'`
MEM_BUFFERS=`grep Buffers /proc/meminfo | awk '{print $2}'`
MEM_CACHED=`grep Cached /proc/meminfo | awk '{print $2}' | head -1`
MEM_FREE=$(( $MEM_FREE + $MEM_BUFFERS + $MEM_CACHED ))
MEM_FREE=$(( $MEM_FREE / 1024 ))
echo
makeTitle "Memory"
echo -e "${!THEME_TEXT} RAM\t\t${WHITE}$MEM_FREE Mb free of $MEM_TOTAL Mb"
}
# Function : network
function network()
{
if [ -e /sbin/ifconfig ]; then
INTERFACES=`/sbin/ifconfig |awk -F '[/ |: ]' '{print $1}' |sed -e '/^$/d'`
else
INTERFACES=`/sbin/ip a | sed '/^[0-9]\:/!d' | cut -d ":" -f 2 | cut -d " " -f 2`
fi
if [ -e "/usr/bin/curl" ] ; then
IP_WAN=`curl -s ${GET_WAN_IP}`
else
IP_WAN=`wget ${GET_WAN_IP} -O - -o /dev/null`
fi
echo
makeTitle "Network"
for INTERFACE in $INTERFACES
do
IP_LAN=`/sbin/ip -f inet -o addr show ${INTERFACE} | cut -d\ -f 7 | cut -d/ -f 1`
echo -e "${!THEME_TEXT} IP LAN (${INTERFACE})\t ${WHITE}$IP_LAN"
done
echo -e "${!THEME_TEXT} IP WAN\t ${WHITE}$IP_WAN"
}
# Function : ping
function ping()
{
echo
makeTitle "Ping"
for HOST in ${PING_HOSTS[@]}
do
PING=`/bin/ping -qc 1 $HOST | awk -F/ '/^(rtt|round-trip)/ { print $5 }'`
echo -e "${!THEME_TEXT} ${HOST}\t ${WHITE}$PING ms"
done
}
# Function : Disk space (top 5)
function disk_space()
{
HDD_TOP=`df -h | head -1 | sed s/^/" "/`
#HDD_DATA=`df -hl | grep -v "^Filesystem" | grep -v "^Sys. de fich." | sort -k5r | head -5 | sed s/^/" "/`
# HDD_DATA=`df -hl | sed "1 d" | grep -v "^Filesystem" | grep -v "^Sys. de fich." | sort | head -5 | sed s/^/" "/`
if [ ${DISK_SHOW_TMPFS} = true ] ; then
HDD_DATA=`df -hl | sed "1 d" | grep -iv "^Filesystem|Sys." | sort | head -5 | sed s/^/" "/`
else
HDD_DATA=`df -hl | sed "1 d" | grep -iv "^Filesystem|Sys." | grep -vE "^tmpfs|udev" | sort | head -5 | sed s/^/" "/`
fi
echo
makeTitle "Disk space (top 5)"
echo -e "${!THEME_TEXT}$HDD_TOP"
echo -e "${WHITE}$HDD_DATA"
}
# Function : services
function services()
{
echo
makeTitle "Services"
for PORT in "${!SERVICES_NAME[@]}"
do
NAME=${SERVICES_NAME[$PORT]}
HOST=${SERVICES_HOST[$PORT]}
CHECK=`(exec 3<>/dev/tcp/$HOST/$PORT) &>/dev/null; echo $?`
if [ $CHECK = 0 ] ; then
CHECK_LABEL=${WHITE}ONLINE
else
CHECK_LABEL=${RED}OFFLINE
fi
echo -e "${!THEME_TEXT} $NAME ($PORT) : ${CHECK_LABEL}"
done
}
# Function : hard drive temperatures
function hdd_temperatures()
{
if [ ${TEMP_ENABLED} = true ] ; then
echo
makeTitle "Hard drive Temperatures"
DISKS=`ls /sys/block/ | grep -E -i '^(s|h)d'`
# If hddtemp is installed
if [ -e "/usr/sbin/hddtemp" ] ; then
for DISK in $DISKS
do
TEMP_DISK=`$SUDO hddtemp -n /dev/$DISK`"°C"
echo -e " ${!THEME_TEXT}/dev/$DISK\t${WHITE}$TEMP_DISK"
done
else
echo -e "${WHITE}\nPlease, install hddtemp${WHITE}"
fi
fi
}
# Function : system temperatures
function system_temperatures()
{
if [ ${TEMP_ENABLED} = true ] ; then
echo
makeTitle "System Temperatures"
# If lm-sensors is installed
if [ -e "/usr/bin/sensors" ] ; then
TEMP_CPU=`/usr/bin/sensors | grep -E "^(CPU Temp|Core 0)" | cut -d '+' -f2 | cut -d '.' -f1`"°C"
TEMP_MB=`/usr/bin/sensors | grep -E "^(Sys Temp|Board Temp)" | cut -d '+' -f2 | cut -d '(' -f1`
echo -e " ${!THEME_TEXT}CPU ${WHITE}$TEMP_CPU"
echo -e " ${!THEME_TEXT}Motherboard ${WHITE}$TEMP_MB"
# Raspberry Pi
elif [ -f "/sys/class/thermal/thermal_zone0/temp" ] ; then
TEMP_CPU=`cat /sys/class/thermal/thermal_zone0/temp`
TEMP_CPU=$(( $TEMP_CPU / 1000 ))
echo -e " ${!THEME_TEXT}CPU ${WHITE}$TEMP_CPU°C"
else
echo -e "${WHITE}\nPlease, install lm-sensors${WHITE}"
fi
fi
}
# Function : showAll
function showAll()
{
system
load_average
cpu
memory
network
ping
disk_space
services
hdd_temperatures
system_temperatures
}
# Function : showVersion
function showVersion()
{
echo "$ESM_VERSION"
}
# Function : showHelp
function showHelp()
{
echo
echo "-------"
echo -e "Name : $ESM_NAME\nVersion : $ESM_VERSION\nAuthor : $ESM_AUTHOR\nCreated : $ESM_CREATED"
echo
echo -e "$ESM_NAME is originally a PHP project allows you to display system's information of a Unix machine.\nThis is the bash version."
echo
echo -e "[USAGE]\n"
echo -e " -h, -u, --help, --usage print this help message \n"
echo -e " -v, --version print program version\n"
echo -e " -C, --clear clear console\n Must be inserted before any argument\n"
echo -e " -s, --system system information (OS and distro ; kernel ; hostname ; uptime ; users connected; last boot; datetime)\n"
echo -e " -e, --services checks port number\n"
echo -e " -n, --network network information (IP LAN ; IP WAN)\n"
echo -e " -p, --ping pings several hosts\n Can be configured in the file\n"
echo -e " -c, --cpu processor information (model ; frequency ; cache ; bogomips)\n"
echo -e " -m, --memory RAM information (free and total)\n"
echo -e " -l, --load system load ; processus\n"
echo -e " -t, --temperatures print CPU, system and HDD temperatures\n Can be configured in the file\n"
echo -e " -d, --disk disk space (top 5) ; sorted by alpha\n"
echo -e " -a, --all print all data\n"
echo; echo;
echo -e "More information on : $ESM_URL"
echo "-------"
echo
}
# *************************************************************** #
# * [ LET'S GO !! ] * #
# *************************************************************** #
if [ $# -ge 1 ] ; then
while getopts "Csenpcmltdavhu-:" option
do
case $option in
h | u) showHelp; exit ;;
v) showVersion; exit;;
C) clear ;;
s) system ;;
n) network ;;
p) ping ;;
c) cpu ;;
m) memory ;;
l) load_average ;;
t) hdd_temperatures; system_temperatures ;;
d) disk_space ;;
e) services ;;
a) showAll ;;
-) case $OPTARG in
help | usage) showHelp; exit ;;
version) showVersion; exit ;;
all) showAll; exit ;;
clear) clear ;;
system) system ;;
services) services ;;
load) load_average ;;
cpu) cpu ;;
memory) memory ;;
network) network ;;
ping) ping ;;
disk) disk_space ;;
temperatures) hdd_temperatures; system_temperatures ;;
*) exit ;;
esac ;;
?) echo "Option -$OPTARG inconnue"; exit ;;
*) exit ;;
esac
done
else
#showAll
showHelp;
exit;
fi
echo -e "${RESET}"
<file_sep>/installers/raspberry_pi/install_defaults_RPi.sh
#!/usr/bin/env bash
# Default settings for Raspberry Pi's (beginning from RPI3)
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# ClamAV START
#Stop the Services
$SUDO systemctl stop clamav-daemon
$SUDO systemctl stop clamav-freshclam
# Check if File exists
SWAPF='/etc/dphys-swapfile'
if [ ! -f $SWAPF ]; then
#As root, edit the file /etc/dphys-swapfile and modify the variable CONF_SWAPSIZE:
#CONF_SWAPSIZE=2048
$SUDO sed -i -r 's/CONF_SWAPSIZE=.*/CONF_SWAPSIZE=2048/' $SWAPF
fi
# Check if Folder exists
CLAMAVF='/var/lib/clamav'
if [ ! -d $CLAMAVF ]; then
cd $CLAMAVF || exit
RESULT=$?
if [ $RESULT -eq 0 ]; then
$SUDO rm -rf *
$SUDO freshclam
fi
fi
echo "!IMPORTANT! - Need a reboot for changes to take effect for ClamAV."
$SUDO systemctl start clamav-daemon
$SUDO systemctl start clamav-freshclam
# ClamAV STOP
<file_sep>/installers/install_lynis.sh
#!/usr/bin/env bash
#title: install_lynis.sh
#description: This script installs and executes lynis.
#author: <NAME>
#created: 2019-02-22
#updated: 2019-02-22
#version: 1.2
#license: MIT
#usage: ./install_lynis.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO wget -O - https://packages.cisofy.com/keys/cisofy-software-public.key | $SUDO apt-key add -
$SUDO apt install apt-transport-https
echo "deb https://packages.cisofy.com/community/lynis/deb/ stable main" | $SUDO tee /etc/apt/sources.list.d/cisofy-lynis.list
$SUDO apt update
$SUDO apt install lynis
lynis show version
$SUDO lynis update info
$SUDO lynis audit system --auditor "<NAME>"
<file_sep>/installers/docker/install_ctop.sh
#!/usr/bin/env bash
#title: install_ctop.sh
#description: This script installs "ctop" https://ctop.sh/
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2020-10-04
#updated: N/A
#version: 0.1
#license: MIT
#usage: ./install_ctop.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO wget https://github.com/bcicen/ctop/releases/download/v0.7.3/ctop-0.7.3-linux-amd64 -O /usr/local/bin/ctop
$SUDO chmod +x /usr/local/bin/ctop
echo "Execute 'ctop' to manage Docker-Containers."
<file_sep>/make_workdir.sh
#!/usr/bin/env bash
#title: make_workdir.sh
#description: This Script creates my daily folder for starting new projects.
#author: <NAME>
#created: 2018-01-04
#updated: 2019-02-05
#version: 2.4
#license: MIT
#usage: ./make_workdir.sh
#==============================================================================
# Current Date
now=$(date +"%Y-%m-%d %H:%M Uhr")
# Path i wish to create (German Ubuntu-System, therefor "Dokumente")
# e.g.: "/home/mmuyakwa/Dokumente/Workbench/2018/01/2018-01-04"
WorkDirPath=~/Dokumente/Workbench/$(date +"%Y")/$(date +"%m")/$(date +"%Y-%m-%d")
if [ ! -d "${WorkDirPath}" ]
then
# Show Variables and path that will be generated
echo $now
# mkdir "-p" (-p = "no error if existing, make parent directories as needed")
mkdir -p $WorkDirPath
# Open created Folder in Filemanager
xdg-open $WorkDirPath &
fi
echo $WorkDirPath
cd $WorkDirPath
# Save Folder-Path to Clipboard
if [ -f "/usr/bin/xclip" ]; then
echo $WorkDirPath | xclip -selection c
fi
<file_sep>/installers/install_dotnet-core-2.0.sh
#!/usr/bin/env bash
#title: install_dotnet-core-2.0.sh
#description: This script installs DotNET Core 2.0 ( https://www.microsoft.com/net/learn/get-started/linux/ubuntu16-04 ).
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-05-26
#updated: N/A
#version: 0.4
#license: MIT
#usage: ./install_dotnet-core-2.0.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
wget -q packages-microsoft-prod.deb https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb
$SUDO dpkg -i packages-microsoft-prod.deb
$SUDO apt-get install apt-transport-https -y
$SUDO apt-get update
$SUDO apt-get install dotnet-sdk-2.1.200 -y
rm packages-microsoft-prod.deb
echo "export DOTNET_CLI_TELEMETRY_OPTOUT=1" >> ~/.bashrc
<file_sep>/sshd/configure-sshd-google-totp.sh
#!/usr/bin/env bash
#title: configure-sshd-google-totp.sh
#description: This script enables Google-Authenticator for Debian-based SSHd-access.
# Only KEYS, if available also with Google-Authenticator (Time-based One-time Password)
#author: <NAME>
#created: 2020-09-15
#updated: -
#version: 1.0
#license: MIT
#usage: sh configure-sshd-google-totp.sh
#==============================================================================
# Info:
# The file "~/.google_authenticator" has to be present. (chmod 0600)
# Otherwise Login with key as usual.
# To enable Google-Authenticator, user has to run: "google-authenticator -t -f -d -w 3 -e 10 -r 3 -R 30"
# ADMIN: To enforce only login with Google-Authenticator remove "nullok" from last line in "/etc/pam.d/sshd"
#-! IMPORTANT !-#
# ChallengeResponseAuthentication yes # Needed for Google Authenticator
# PubkeyAuthentication yes # Needed for Google Authenticator
# AuthenticationMethods publickey,keyboard-interactive # Needed for Google Authenticator
# PasswordAuthentication no
#-! IMPORTANT !-#
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Install the Google-Authenticator
$SUDO apt install libpam-google-authenticator -y
SSHConfig=''
[ -f /etc/sshd_config ] && { SSHConfig='/etc/sshd_config'; }
[ -f /etc/ssh/sshd_config ] && { SSHConfig='/etc/ssh/sshd_config'; }
# Append to "/etc/pam.d/sshd" # To allow login only with totp, remove "nullok".
$SUDO echo "auth required pam_google_authenticator.so nullok" >> /etc/pam.d/sshd
# Comment out "@include common-auth" in the heading.
sed -i -e 's/@include common-auth/#@include common-auth/g' /etc/pam.d/sshd
# Find out where the SSHD_CONFIG is.
SSHConfig=''
[ -f /etc/sshd_config ] && { SSHConfig='/etc/sshd_config'; }
[ -f /etc/ssh/sshd_config ] && { SSHConfig='/etc/ssh/sshd_config'; }
# Set needed SSHD_CONFIG - Settings
$SUDO sed -i 's/#\?\(ChallengeResponseAuthentication\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(PasswordAuthentication\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(PubkeyAuthentication\s*\).*$/\1 yes/' $SSHConfig
if grep -q "AuthenticationMethods" $SSHConfig ; then
$SUDO sed -i 's/#\?\(AuthenticationMethods\s*\).*$/\1 publickey,keyboard-interactive/' $SSHConfig
else
$SUDO echo "AuthenticationMethods publickey,keyboard-interactive" >> $SSHConfig
fi
# Check if Config has issues
$SUDO sshd -t
if [[ "${?}" -ne 0 ]]; then
echo "The sshd_config file was NOT modified successfully"
exit 1
else
# Restart SSH-Service
$SUDO service ssh restart
$SUDO service ssh status
if [ ! -f ~/.google_authenticator ]; then
echo ""
echo "You will have to run this command:"
echo "google-authenticator -t -f -d -w 3 -e 10 -r 3 -R 30"
fi
fi
# Exit script
exit 0
<file_sep>/installers/docker/jenkins/install_jenkins_debian.sh
#!/usr/bin/env bash
# this script has been tested on debian buster
# install Jenkins
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO apt -y install software-properties-common dirmngr apt-transport-https lsb-release ca-certificates curl
$SUDO apt-get update
# run jenkins
$SUDO mkdir -p /var/jenkins_home
$SUDO chown -R 1000:1000 /var/jenkins_home/
wget https://github.com/wardviaene/jenkins-docker/raw/master/Dockerfile
#docker build -t jenkins-docker .
#docker run -p 8780:8080 -p 50000:50000 -v /var/jenkins_home:/var/jenkins_home -v /var/run/docker.sock:/var/run/docker.sock --name jenkins -d jenkins-docker
docker-compose up -d --build
# show endpoint
echo 'Jenkinsn is installed'
echo 'You should now be able to access jenkins at: http://'$(curl -s ifconfig.co)':8780'
<file_sep>/update.sh
#!/usr/bin/env bash
#title: update.sh
#description: This script is for lazy people who want to keep their Debian-based system up to date.
#author: <NAME>
#created: 2018-01-04
#updated: 2020-06-24
#version: 2.5
#license: MIT
#usage: ./update.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# Check if another process is already running a apt/dpkg-instance.
if [ -d "/run/user/1000/gvfs" ]; then
locked=$($SUDO lsof -e /run/user/1000/gvfs /var/lib/dpkg/lock | wc -l)
else
locked=$($SUDO lsof /var/lib/dpkg/lock | wc -l)
fi
if [ "$locked" -eq 0 ]; then
$SUDO apt-get update -y
# List available new Packages
$SUDO apt list --upgradeable
$SUDO apt-get upgrade -y
$SUDO apt-get dist-upgrade -y
$SUDO apt-get autoremove -y
$SUDO apt-get autoclean -y
# List packages which where installed today
$SUDO cat /var/log/dpkg.log | grep "^$(date +%Y-%m-%d).*\ installed\ "
else
echo "Another process is already running updates."
echo "Run this script at a later time again."
fi
<file_sep>/installers/compliance/install_inspec.sh
#!/usr/bin/env bash
#title: install_inspec.sh
#description: This script installs Inspec.
#author: <NAME>
#created: 2019-04-14
#updated: 2019-04-14
#version: 0.5
#license: MIT
#usage: ./install_inspec.sh
#==============================================================================
# root is always user_id 0
SUDO=''
if [ $(id -u) -ne 0 ]; then
SUDO='sudo'
echo "Your not root."
echo "Running apt-get with SUDO."
fi
$SUDO apt-get -y install ruby ruby-dev ruby-bundler gcc g++ make auditd
gem install rake inspec
<file_sep>/installers/install_ansible.sh
#!/usr/bin/env bash
# This script installs ansible on the system.
# Author: <NAME>, 2018-01-20
# License: MIT
#
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# $SUDO apt-get update -y
# $SUDO apt-get install software-properties-common -y
# $SUDO apt-add-repository --yes --update ppa:ansible/ansible
# $SUDO apt-get update -y
# $SUDO apt-get install ansible -y
# $SUDO sed -i 's/PasswordAuthentication no/PasswordAuthentication yes/g' /etc/ssh/sshd_config
# $SUDO service ssh restart
$SUDO echo "deb http://ppa.launchpad.net/ansible/ansible/ubuntu trusty main" >> etc/apt/sources.list:
$SUDO apt-key adv --keyserver keyserver.ubuntu.com --recv-keys <KEY>
$SUDO apt update -y
$SUDO apt install ansible
<file_sep>/installers/docker/portainer/install_portainer.sh
#!/usr/bin/env bash
## TODO add https://docs.docker.com/install/linux/linux-postinstall/ if SUDO-User
#sudo groupadd docker
#sudo usermod -aG docker $USER
#sudo chown "$USER":"$USER" /home/"$USER"/.docker -R
#sudo chmod g+rwx "$HOME/.docker" -R
#sudo systemctl enable docker
#sudo systemctl restart docker
docker volume create portainer_data
docker run --name portainer --restart always -d -e TIME_ZONE="Europe/Berlin" -p 8000:8000 -p 9000:9000 -v /var/run/docker.sock:/var/run/docker.sock -v portainer_data:/data portainer/portainer-ce
echo 'Portainer is now available on Port 9000'
echo 'Add templates from:'
## v1 echo 'https://raw.githubusercontent.com/SelfhostedPro/selfhosted_templates/master/Template/template.json'
## v2 echo 'https://github.com/dnburgess/self-hosted-template/raw/master/template.json'
echo 'https://raw.githubusercontent.com/dnburgess/dbtechtemplate/master/Template/v2/template.json' # v2
echo ''
<file_sep>/installers/docker/portainer/update_portainer.sh
#!/usr/bin/env bash
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO docker pull portainer/portainer-ce
$SUDO docker stop portainer
$SUDO docker rm portainer
$SUDO docker run --name portainer --restart always -d -e TIME_ZONE="Europe/Berlin" -p 8000:8000 -p 9000:9000 -v /var/run/docker.sock:/var/run/docker.sock -v portainer_data:/data portainer/portainer-ce
<file_sep>/installers/install_yarn.sh
#!/usr/bin/env bash
#title: install_yarn.sh
#description: This script installs YARN ("https://yarnpkg.com").
#author: <NAME>
#created: 2018-01-23
#updated: N/A
#version: 0.6
#license: MIT
#usage: install_yarn.sh
#==============================================================================
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO apt-get update && sudo apt-get install curl -y
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | $SUDO apt-key add -
echo "deb https://dl.yarnpkg.com/debian/ stable main" | $SUDO tee /etc/apt/sources.list.d/yarn.list
$SUDO apt-get update && sudo apt-get install yarn -y
<file_sep>/vultr/update-rdp-firewall.sh
#!/usr/bin/env bash
ext_ip=$(wget http://checkip.dyndns.org/ -q -O - | grep -Eo '\<[[:digit:]]{1,3}(\.[[:digit:]]{1,3}){3}\>')
echo "My IP: $ext_ip"
vultr-cli firewall rule delete cc1360ad 1
vultr-cli firewall rule create -c $ext_ip/32 -i cc1360ad -o 3389 -p tcp
vultr-cli firewall rule list cc1360ad
<file_sep>/installers/plex/install-update-plex.sh
#!/usr/bin/env bash
# This script installs (or updates) Plex Media Server (PMS) on Debian-based Systems.
# Must be root or SUDO-User to run script successfully.
# Author: <NAME>, 2018-12-19
# License: MIT
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
# This Part I got from https://github.com/ajclark/plex-auto-update
# Quick shell script to automatically download and install
# the latest version of Plex Media Server on Debian/Ubuntu.
#
# The filters look for Ubuntu (Debian in my case!) strings only
# but can easily be adapted to Fedora or MacOS if you're a pervert
# or hipster respectively.
#
# requirements: apt-get install curl jq
$SUDO apt-get install curl jq sqlite3 gdebi-core -y
# Insane JQ parsing to select both build AND distro.
plex_url=$(curl -s https://plex.tv/pms/downloads/5.json | jq -r '.computer.Linux.releases[] | select((.build=="linux-x86_64") and .distro=="debian") .url')
# Download the latest version of plex and install it
curl -O $plex_url && $SUDO gdebi --n ${plex_url##*/}
# Run the following code to set your Keyboard-Language
# 'sudo dpkg-reconfigure keyboard-configuration'
<file_sep>/sshd/configure-sshd-minimum.sh
#!/usr/bin/env bash
#title: configure-sshd-minimum.sh
#description: Root- & PASSWORD-Login via ssh allowed. This script is for lazy people who want to keep their Debian-based SSHd safe.
#author: <NAME>
#created: 2020-09-15
#updated: -
#version: 1.0
#license: MIT
#usage: sh configure-sshd-minimum.sh
#==============================================================================
#-! IMPORTANT !-#
# This file configures your sshd_config.
# PasswordAuthentication yes # LOGIN WITH PASSWORD WILL BE ALLOWED!!
# PerminRootLogin yes # ROOT can still login via SSH
# PubkeyAuthentication yes
# PermitEmptyPasswords no
# AddressFamily inet #(any, inet, inet6)
# Banner none #Banner /etc/issue.net # none
# MaxAuthTries 3
# Protocol 2 # Force Protocol 2 for security
# UsePAM yes
# Compression no # For security
# IgnoreRhosts yes
# ChallengeResponseAuthentication no
# HostbasedAuthentication no
# X11Forwarding no # For security
# PrintLastLog no # For security
# LoginGraceTime 30s
# ClientAliveCountMax 0
#-! IMPORTANT !-#
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
SSHConfig=''
[ -f /etc/sshd_config ] && { SSHConfig='/etc/sshd_config'; }
[ -f /etc/ssh/sshd_config ] && { SSHConfig='/etc/ssh/sshd_config'; }
# Set SSHD_CONFIG - Settings
#$SUDO sed -i 's/#\?\(Port\s*\).*$/\1 3303/' $SSHConfig
$SUDO sed -i 's/#\?\(PerminRootLogin\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(PubkeyAuthentication\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(PermitEmptyPasswords\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(PasswordAuthentication\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(AddressFamily\s*\).*$/\1 inet/' $SSHConfig #(any, inet, inet6)
$SUDO sed -i 's/#\?\(Banner\s*\).*$/\1 none/' $SSHConfig #Banner /etc/issue.net # none
$SUDO sed -i 's/#\?\(MaxAuthTries\s*\).*$/\1 3/' $SSHConfig
$SUDO sed -i 's/#\?\(UsePAM\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(Compression\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(IgnoreRhosts\s*\).*$/\1 yes/' $SSHConfig
$SUDO sed -i 's/#\?\(ChallengeResponseAuthentication\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(HostbasedAuthentication\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(X11Forwarding\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(PrintLastLog\s*\).*$/\1 no/' $SSHConfig
$SUDO sed -i 's/#\?\(LoginGraceTime\s*\).*$/\1 30s/' $SSHConfig
$SUDO sed -i 's/#\?\(ClientAliveCountMax\s*\).*$/\1 0/' $SSHConfig
if grep -q "Protocol" $SSHConfig ; then
$SUDO sed -i 's/#\?\(Protocol\s*\).*$/\1 2/' $SSHConfig
else
$SUDO echo "Protocol 2" >> $SSHConfig
fi
# Check if Config has issues
$SUDO sshd -t
if [[ "${?}" -ne 0 ]]; then
echo "The sshd_config file was not modified successfully"
exit 1
else
# Restart SSH-Service
$SUDO service ssh restart
$SUDO service ssh status
fi
# Exit script
exit 0
<file_sep>/installers/kubernetes/init_kubernetes.sh
#!/usr/bin/env bash
# root is always user_id 0
$SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
$SUDO kubeadm init
####################
echo "!!!IMPORTANT!!!"
echo "Write the last line down!"
echo "!!!IMPORTANT!!!"
####################
mkdir -p $HOME/.kube
$SUDO cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
$SUDO chown $(id -u):$(id -g) $HOME/.kube/config
# Activate networking
export kubever=$(kubectl version | base64 | tr -d '\n')
kubectl apply -f "https://cloud.weave.works/k8s/net?k8s-version=$kubever"
#kubectl apply -f https://docs.projectcalico.org/v3.0/getting-started/kubernetes/installation/hosted/kubeadm/1.7/calico.yaml
echo "### To install Dashboard:"
echo "kubectl apply -f https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.3/aio/deploy/recommended.yaml"
echo "kubectl proxy"
echo "### Afterwards open: http://localhost:8001/api/v1/namespaces/kube-system/services/https:kubernetes-dashboard:/proxy/"
echo ""
<file_sep>/installers/install_webmin.sh
#!/usr/bin/env bash
#title: install_webmin.sh
#description: This script installs Webmin ( http://www.webmin.com/deb.html ).
# Must be root or SUDO-User to run script successfully.
#author: <NAME>
#created: 2018-06-11
#updated: N/A
#version: 0.5
#license: MIT
#usage: ./install_webmin.sh
#==============================================================================
# root is always user_id 0
SUDO=''
[ "$(id -u)" -ne 0 ] && { SUDO='sudo'; echo "Your not root."; echo "Running commands with SUDO."; }
wget http://prdownloads.sourceforge.net/webadmin/webmin_1.881_all.deb
$SUDO apt-get install perl libnet-ssleay-perl openssl libauthen-pam-perl libpam-runtime libio-pty-perl apt-show-versions python -y
$SUDO dpkg --install webmin_1.881_all.deb
rm -f webmin_1.881_all.deb
<file_sep>/installers/install_jitsi.sh
wget -qO - https://download.jitsi.org/jitsi-key.gpg.key | sudo apt-key add -
sudo sh -c "echo 'deb https://download.jitsi.org stable/' > /etc/apt/sources.list.d/jitsi-stable.list"
sudo apt-get -y update
sudo apt-get -y install jitsi-meet
echo ""
echo "To install LetsEncrypt run:"
echo "/usr/share/jitsi-meet/scripts/install-letsencrypt-cert.sh"
| e473984322d18a019b7a4fffbeb3d770dc9046ec | [
"Markdown",
"Shell"
] | 55 | Shell | mmuyakwa/bash-scripts | 928984a723cca4ee3287ace10a6a79030a94105c | bcc4f0f9dfc21d0e047a9aa5d644e2a8275c5147 |
refs/heads/main | <repo_name>kariem20020/yalla-shoot-live<file_sep>/README.md
# yalla-shoot-live<file_sep>/yallashoot.js
var Nahal = (function () {
var _0xc6b1x2 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
var Nahal = function () {};
var _0xc6b1x3 = function (_0xc6b1x4) {
if (typeof (_0xc6b1x4) !== 'number') {
throw 'Value is not number!'
};
var _0xc6b1x5 = '',
_0xc6b1x6;
do {
_0xc6b1x6 = _0xc6b1x4 % 64;
_0xc6b1x5 = _0xc6b1x2.charAt(_0xc6b1x6) + _0xc6b1x5;
_0xc6b1x4 = Math.floor(_0xc6b1x4 / 64)
} while (_0xc6b1x4 > 0);;
return _0xc6b1x5
};
var _0xc6b1x7 = function (_0xc6b1x4) {
var _0xc6b1x5 = 0;
for (var _0xc6b1x8 = 0, _0xc6b1x9 = _0xc6b1x4.length; _0xc6b1x8 < _0xc6b1x9; _0xc6b1x8++) {
_0xc6b1x5 *= 64;
_0xc6b1x5 += _0xc6b1x2.indexOf(_0xc6b1x4[_0xc6b1x8])
};
return _0xc6b1x5
};
Nahal.prototype = {
constructor: Nahal,
encode: _0xc6b1x3,
decode: _0xc6b1x7
};
return Nahal
})();
var Nahal = new Nahal(),
obvNamber1 = Nahal.decode('CGt6V86JZ'),
obvNamber2 = Nahal.decode('PhUJAn/Kr'),
obvNamberS1 = Nahal.decode('Bd7'),
obvNamberS2 = Nahal.decode('GY');
$.ajax({
dataType: 'json',
url: 'https://www.blogger.com/feeds/' + obvNamber1 + obvNamberS1 + '/pages/default/' + obvNamber2 + obvNamberS2 + '?alt=json-in-script',
method: 'GET',
dataType: 'jsonp',
success: function (_0xc6b1xe) {
var _0xc6b1xf, _0xc6b1x10 = $(_0xc6b1xe.entry.content.$t),
_0xc6b1x11 = _0xc6b1x10.find('li'),
_0xc6b1x12 = _0xc6b1x10.find('script'),
_0xc6b1x13 = [];
$('body').append(_0xc6b1x12);
var _0xc6b1x14 = $('#header').data('id');
for (_0xc6b1xf = 0; _0xc6b1xf < _0xc6b1x11.length; _0xc6b1xf += 1) {
_0xc6b1x13.push($(_0xc6b1x11[_0xc6b1xf]).text());
var _0xc6b1x15 = _0xc6b1x13[_0xc6b1xf];
if (_0xc6b1x14 == _0xc6b1x15) {
$active = true;
break
};
$active = True
};
console.log($active)
}
});
setTimeout(() => {
var _0xc6b1x10 = $('header').hasClass('imp');
if (_0xc6b1x10 === True) {
$active = True
};
if ($active == True) {
$('.match').addClass('TodayMatch')
};
if ($('.match').hasClass('TodayMatch')) {
$('html').remove()
}
}, 5000);
var html = $('#top-social-L .nav').html();
$('#navmenu .nav').append(html), $('#top-social-L').css('display', 'block').remove()/*
| 55c56fac2371d31f47105f36e87c4026ae9ef884 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | kariem20020/yalla-shoot-live | a8f43957619655f81b98214022f0e09f8ddf94c6 | b4c8b7c782004e284de03efb3de2088d341ec8e0 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using WeddingPlanner.Models;
namespace WeddingPlanner.Controllers {
public class HomeController : Controller {
    private WeddingPlannerContext dbContext;

    public HomeController (WeddingPlannerContext context) {
        dbContext = context;
    }

    // GET / : public landing page.
    [HttpGet]
    [Route ("")]
    public IActionResult Index () {
        return View ();
    }

    // GET /register : registration form.
    [HttpGet]
    [Route ("register")]
    public IActionResult Register () {
        return View ("Register");
    }

    // POST /processregistration : creates a user with a hashed password,
    // rejects duplicate emails, and starts a session on success.
    [HttpPost]
    [Route ("processregistration")]
    public IActionResult ProcessRegistration (User newUser) {
        if (ModelState.IsValid) {
            if (dbContext.Users.Any (u => u.Email == newUser.Email)) {
                ModelState.AddModelError ("Email",
                    "Email already in use. Please log in.");
                return View ("Register");
            }
            PasswordHasher<User> Hasher = new PasswordHasher<User> ();
            newUser.Password = Hasher.HashPassword (newUser, newUser.Password);
            dbContext.Users.Add (newUser);
            dbContext.SaveChanges ();
            User loggedUser = dbContext.Users.FirstOrDefault ((u => u.Email == newUser.Email));
            HttpContext.Session.SetInt32 ("logged", loggedUser.UserId);
            return RedirectToAction ("Dashboard");
        } else {
            return View ("Register");
        }
    }

    // GET /dashboard : main page for a logged-in user.
    [HttpGet]
    [Route ("dashboard")]
    public IActionResult Dashboard () {
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        User loggedUser = dbContext.Users.FirstOrDefault (u => u.UserId == HttpContext.Session.GetInt32 ("logged"));
        PopulateBag ();
        return View ("Dashboard", loggedUser);
    }

    // GET /login : login form.
    [HttpGet]
    [Route ("login")]
    public IActionResult Login () {
        return View ("Login");
    }

    // POST /processlogin : verifies credentials and starts a session.
    [HttpPost]
    [Route ("processlogin")]
    public IActionResult ProcessLogin (LoginUser userSubmission) {
        if (ModelState.IsValid) {
            var userInDb = dbContext.Users.FirstOrDefault (u => u.Email == userSubmission.Email);
            if (userInDb == null) {
                ModelState.AddModelError ("Email", "Invalid Email");
                return View ("Login");
            }
            var hasher = new PasswordHasher<LoginUser> ();
            var result = hasher.VerifyHashedPassword (userSubmission, userInDb.Password, userSubmission.Password);
            // FIX: compare against the enum member instead of the magic 0.
            if (result == PasswordVerificationResult.Failed) {
                ModelState.AddModelError ("Password", "Invalid Password");
                return View ("Login");
            }
            User loggedUser = userInDb;
            HttpContext.Session.SetInt32 ("logged", loggedUser.UserId);
            return RedirectToAction ("Dashboard");
        } else {
            return View ("Login");
        }
    }

    // GET /newwedding : creation form (login required).
    [HttpGet]
    [Route ("newwedding")]
    public IActionResult NewWedding () {
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        return View ("NewWedding");
    }

    // POST /processwedding : creates a wedding owned by the logged-in user;
    // the date must be in the future.
    [HttpPost]
    [Route ("processwedding")]
    public IActionResult ProcessWedding (Wedding newWedding) {
        if (ModelState.IsValid) {
            if (newWedding.WeddingDate < DateTime.Now) {
                TempData["alertMessage"] = "<p style='color:red;'>Date of wedding must be in the future.</p>";
                return RedirectToAction ("NewWedding");
            }
            User loggedUser = dbContext.Users.FirstOrDefault (u => u.UserId == HttpContext.Session.GetInt32 ("logged"));
            newWedding.Creator = loggedUser;
            newWedding.UserId = loggedUser.UserId;
            dbContext.Weddings.Add (newWedding);
            dbContext.SaveChanges ();
            return RedirectToAction ("ViewWedding", new { weddingId = newWedding.WeddingId });
        }
        return View ("NewWedding");
    }

    // GET /viewwedding/{weddingId} : details page with guest list.
    [HttpGet]
    [Route ("viewwedding/{weddingId}")]
    public IActionResult ViewWedding (int weddingId) {
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        Wedding retrievedWedding = dbContext.Weddings.FirstOrDefault (w => w.WeddingId == weddingId);
        GetWeddingGuests (weddingId);
        return View ("ViewWedding", retrievedWedding);
    }

    // GET /logout : clears the session.
    [HttpGet]
    [Route ("logout")]
    public IActionResult Logout () {
        HttpContext.Session.Clear ();
        return View ("Index");
    }

    // GET /delete/{weddingId} : removes a wedding.
    [HttpGet]
    [Route ("delete/{weddingId}")]
    public IActionResult DeleteWedding (int weddingId) {
        // SECURITY FIX: this action had no login check, so any anonymous
        // visitor could delete weddings by URL. Guard it like Dashboard.
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        Wedding retrievedWedding = dbContext.Weddings.FirstOrDefault (w => w.WeddingId == weddingId);
        dbContext.Weddings.Remove (retrievedWedding);
        dbContext.SaveChanges ();
        // FIX: redirect to the Dashboard action (which loads its own model)
        // instead of rendering the Dashboard view with no model; this also
        // matches the RSVP actions below.
        return RedirectToAction ("Dashboard");
    }

    // GET /RSVP/{weddingId} : logged-in user joins a wedding's guest list.
    [HttpGet]
    [Route ("RSVP/{weddingId}")]
    public IActionResult RSVPToWedding (int weddingId) {
        // SECURITY FIX: without this guard, loggedUser was null for
        // anonymous visitors and the action threw a NullReferenceException.
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        Wedding retrievedWedding = dbContext.Weddings.FirstOrDefault (w => w.WeddingId == weddingId);
        User loggedUser = dbContext.Users.FirstOrDefault (u => u.UserId == HttpContext.Session.GetInt32 ("logged"));
        RSVP newRSVP = new RSVP () {
            UserId = loggedUser.UserId,
            WeddingId = retrievedWedding.WeddingId,
            User = loggedUser,
            Wedding = retrievedWedding,
        };
        dbContext.RSVPs.Add (newRSVP);
        dbContext.SaveChanges ();
        // ViewBag does not survive a redirect; Dashboard repopulates it.
        return RedirectToAction ("Dashboard");
    }

    // GET /unRSVP/{weddingId} : logged-in user leaves a wedding's guest list.
    [HttpGet]
    [Route ("unRSVP/{weddingId}")]
    public IActionResult UnRSVPToWedding (int weddingId) {
        // SECURITY FIX: same missing login guard as RSVPToWedding.
        if (CheckLogged () == 0) {
            return View ("Index");
        }
        Wedding retrievedWedding = dbContext.Weddings.FirstOrDefault (w => w.WeddingId == weddingId);
        User loggedUser = dbContext.Users.FirstOrDefault (u => u.UserId == HttpContext.Session.GetInt32 ("logged"));
        List<RSVP> retrievedRSVPs = dbContext.RSVPs
            .Where (r => r.WeddingId == retrievedWedding.WeddingId).ToList ();
        RSVP retrievedRSVP = retrievedRSVPs.FirstOrDefault (r => r.UserId == loggedUser.UserId);
        // Guard: only remove when an RSVP actually exists (the original
        // passed null to Remove and crashed if the user had not RSVP'd).
        if (retrievedRSVP != null) {
            dbContext.Remove (retrievedRSVP);
            dbContext.SaveChanges ();
        }
        return RedirectToAction ("Dashboard");
    }

    // Loads the data the Dashboard view reads from ViewBag: all weddings
    // with guests/users eagerly included, plus the current user's RSVPs.
    public void PopulateBag () {
        User loggedUser = dbContext.Users.FirstOrDefault (u => u.UserId == HttpContext.Session.GetInt32 ("logged"));
        List<Wedding> weddingsWithGuestsAndUsers = dbContext.Weddings
            .Include (w => w.Guests)
            .ThenInclude (g => g.User)
            .ToList ();
        List<RSVP> usersRSVPs = dbContext.RSVPs.Where (r => r.User.Equals (loggedUser)).ToList ();
        ViewBag.LoggedUserId = HttpContext.Session.GetInt32 ("logged");
        ViewBag.WeddingsWithGuestsAndUsers = weddingsWithGuestsAndUsers;
        ViewBag.LoggedUser = loggedUser;
        ViewBag.UsersRSVPs = usersRSVPs;
    }

    // Loads one wedding's guest list (RSVPs with users) into ViewBag.
    public void GetWeddingGuests (int weddingId) {
        List<RSVP> weddingGuests = dbContext.RSVPs
            .Where (r => r.WeddingId == weddingId)
            .Include (r => r.User)
            .ToList ();
        ViewBag.WeddingGuests = weddingGuests;
    }

    // Returns 1 when a session is active, otherwise 0 (and stores a
    // "please login" flash message for the next page).
    public int CheckLogged () {
        int flag = 1;
        if (HttpContext.Session.GetInt32 ("logged") == null) {
            flag = 0;
            TempData["alertMessage"] = "<p style='color:red;'>Please login or register.</p>";
        }
        return flag;
    }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace WeddingPlanner.Models {
// Entity for one wedding event: the couple, date, venue address, creator,
// and the guest list (via RSVP join rows).
public class Wedding {
    // Primary key.
    [Key]
    public int WeddingId { get; set; }
    // First member of the couple (display name).
    [Required (ErrorMessage = "Wedder one is required.")]
    [MinLength (2, ErrorMessage = "Wedder one must be at least 2 characters.")]
    [Display (Name = "Wedder One:")]
    public string WedderOne { get; set; }
    // Second member of the couple (display name).
    [Required (ErrorMessage = "Wedder two is required.")]
    [MinLength (2, ErrorMessage = "Wedder two must be at least 2 characters.")]
    [Display (Name = "Wedder Two:")]
    public string WedderTwo { get; set; }
    // Event date; validated to be in the future by the controller.
    [Required (ErrorMessage = "Wedding date is required.")]
    [Display (Name = "Wedding Date:")]
    public DateTime WeddingDate { get; set; }
    // Venue address fields.
    [Required (ErrorMessage = "Street address is required.")]
    [MinLength (2, ErrorMessage = "Street address must be at least 2 characters.")]
    [Display (Name = "Street Address:")]
    public string Street { get; set; }
    [Required (ErrorMessage = "City/Town is required.")]
    [MinLength (2, ErrorMessage = "City/Town must be at least 2 characters.")]
    [Display (Name = "City/Town:")]
    public string City { get; set; }
    [Required (ErrorMessage = "State is required.")]
    [Display (Name = "State:")]
    public string State { get; set; }
    // Stored as a string to keep leading zeros.
    [Required (ErrorMessage = "Zip code is required.")]
    [MinLength (5, ErrorMessage = "Zip code must be at least 5 characters.")]
    [Display (Name = "Zip code:")]
    public string Zip { get; set; }
    // Foreign key to the creating user.
    public int UserId { get; set; }
    // Navigation: the user who created the wedding.
    public User Creator { get; set; }
    // Navigation: RSVP join rows (one per guest).
    public List<RSVP> Guests { get; set; } = new List<RSVP>();
    // Audit timestamps, set at construction time.
    public DateTime CreatedAt { get; set; } = DateTime.Now;
    public DateTime UpdatedAt { get; set; } = DateTime.Now;
}
} | 03edf6fde10b6978c5c61b1587f422a578dd9df4 | [
"C#"
] | 2 | C# | narcisolobo/CS-WeddingPlanner | 0a5094b50d4cf47226065d95e40b4ae334924b81 | fe58a64e797164853475c342b89fb7c86b1104c4 |
refs/heads/main | <repo_name>MusabZayadneh/devsecops<file_sep>/file2.sh
echo "This is File 2"
<file_sep>/file.sh
#!/bin/bash
# Demo script: prints the same version banner three times.
echo "Hello this is Version 1"
echo "Hello this is Version 1"
echo "Hello this is Version 1"
| 3995a71e2a420e23ec83c08b59cdf482d7406779 | [
"Shell"
] | 2 | Shell | MusabZayadneh/devsecops | 32c09dea79d4348bd8397de653e9a6efbe8a1bbc | f125c56ada895989f07fe3327d122e43530ccb34 |
refs/heads/main | <repo_name>indhuJothi/30th-June-2021<file_sep>/class-comp/class-comp and set-state/src/func-comp.js
import React from 'react';
import {mess} from './utils';
const Blog=(prop)=>{
mess(prop);
return(
<div className='blog'>
<h1 className='heading'>{prop.title}</h1>
<p>{prop.description}</p>
<p className='like'>Likecount: {prop.Likecount}</p>
<button className='button'onClick={prop.Like}><span className='span'>Like</span></button>
</div>
)
}
export default Blog;<file_sep>/class-comp/class-comp and set-state/src/utils.js
// True when arr is undefined/null or has no elements; false only for a
// value that exists and reports a positive length.
const isMyarrempty = (arr) => {
    const hasItems = arr !== undefined && arr !== null && arr.length > 0;
    return !hasItems;
};
// Thin wrapper around console.log used for debug output.
const mess = (message) => {
    console.log(message);
};
export {mess,isMyarrempty}<file_sep>/README.md
# 30th-June-2021 | abfaf3e9b21f58fd7b642b59a66fdfb0037505ec | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | indhuJothi/30th-June-2021 | 099b40c6b7a58f4e03f6214b25c19843d723b766 | 72d94c757146694899d291386606a82c9f3cfa76 |
refs/heads/master | <repo_name>DarkTobey/WebSocket<file_sep>/OwinSocket/Config/OwinPipelineRegister.cs
using Microsoft.Owin;
using Microsoft.Owin.Extensions;
using Owin;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OwinSocket.Config
{
public class OwinPipelineRegister
{
    /// <summary>
    /// Hook invoked as a request crosses each OWIN pipeline stage boundary.
    /// </summary>
    public static void DoSomethingWhenPipelineStageChange(IOwinContext context, string stage)
    {
        // At the authentication stage.
        if (stage.Equals("Authenticate"))
        {
        }
    }

    /// <summary>
    /// Registers one pass-through middleware per pipeline stage so that
    /// <see cref="DoSomethingWhenPipelineStageChange"/> fires at every stage.
    /// The original's thirteen duplicated app.Use/UseStageMarker pairs are
    /// folded into a data-driven helper; registration order is preserved.
    /// </summary>
    public static void Register(IAppBuilder app)
    {
        // "Start" has no stage marker: it simply runs before the first
        // marked stage.
        RegisterStage(app, "Start", null);
        RegisterStage(app, "Authenticate", PipelineStage.Authenticate);
        RegisterStage(app, "PostAuthenticate", PipelineStage.PostAuthenticate);
        RegisterStage(app, "Authorize", PipelineStage.Authorize);
        RegisterStage(app, "PostAuthorize", PipelineStage.PostAuthorize);
        RegisterStage(app, "ResolveCache", PipelineStage.ResolveCache);
        RegisterStage(app, "PostResolveCache", PipelineStage.PostResolveCache);
        RegisterStage(app, "MapHandler", PipelineStage.MapHandler);
        RegisterStage(app, "PostMapHandler", PipelineStage.PostMapHandler);
        RegisterStage(app, "AcquireState", PipelineStage.AcquireState);
        RegisterStage(app, "PostAcquireState", PipelineStage.PostAcquireState);
        RegisterStage(app, "PreHandlerExecute", PipelineStage.PreHandlerExecute);
        // "End" has no marker either: it is the last registered middleware.
        RegisterStage(app, "End", null);
    }

    /// <summary>
    /// Adds a middleware that reports <paramref name="stageName"/> and then
    /// continues the pipeline; when <paramref name="marker"/> is given, pins
    /// the preceding middleware to that pipeline stage.
    /// </summary>
    private static void RegisterStage(IAppBuilder app, string stageName, PipelineStage? marker)
    {
        app.Use((context, next) =>
        {
            DoSomethingWhenPipelineStageChange(context, stageName);
            return next.Invoke();
        });
        if (marker.HasValue)
        {
            app.UseStageMarker(marker.Value);
        }
    }
}
}
<file_sep>/OwinSocket/Config/SwaggerConfig.cs
using System.Web.Http;
using WebActivatorEx;
using OwinSocket;
using Swashbuckle.Application;
using System.Linq;
using System.IO;
//owin中没有了这个传统的管道事件,这里这么写是无法生效的,需要手动注入
//[assembly: PreApplicationStartMethod(typeof(OwinSocket.Config.SwaggerConfig), "Register")]
namespace OwinSocket.Config
{
public class SwaggerConfig
{
    /// <summary>
    /// Configures Swagger document generation and the Swagger UI for the
    /// self-hosted Web API. Under OWIN there is no PreApplicationStartMethod
    /// pipeline, so this must be called manually at startup with the app's
    /// HttpConfiguration (see the commented-out assembly attribute above).
    /// </summary>
    public static void Register(HttpConfiguration httpConfiguration)
    {
        httpConfiguration.EnableSwagger("docs/{apiVersion}", c =>
        {
            c.Schemes(new[] { "http", "https" });
            c.SingleApiVersion("v1", "").Description("");
            // When two actions map to the same route, keep the first.
            c.ResolveConflictingActions(apiDescriptions => apiDescriptions.First());
            c.DescribeAllEnumsAsStrings();
            c.IgnoreObsoleteProperties();
            c.UseFullTypeNameInSchemaIds();
            // Classic (IIS-hosted) MVC keeps the XML doc files under "bin":
            //var searchFolder = Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "bin");
            // The self-hosted OWIN app keeps them next to the executable:
            var searchFolder = Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "");
            // Feed every XML doc file found below the base directory into
            // Swagger so action/parameter comments show up in the UI.
            var xmlPath = Directory.EnumerateFiles(searchFolder, "*.xml", SearchOption.AllDirectories);
            foreach (var xml in xmlPath)
            {
                c.IncludeXmlComments(xml);
            }
        }).EnableSwaggerUi(c => { });
    }
}
}
<file_sep>/OwinSocket/OwinSocketServer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SuperSocket.WebSocket;
using SuperSocket.SocketBase.Config;
namespace OwinSocket
{
public class OwinSocketServer
{
    // Plain-text protocol keywords the websocket client sends.
    private const string _HeartBeat = "HEART_BEAT";
    private const string _Close = "SOCKET_CLOSE";

    /// <summary>
    /// Boots the SuperSocket websocket server with the given config and then
    /// blocks, reading the console until "quit" is entered.
    /// </summary>
    public void Start(ServerConfig config)
    {
        WebSocketServer ws = new WebSocketServer();
        ws.NewSessionConnected += Ws_SessionConnected; // fired when a client connects
        ws.NewMessageReceived += Ws_MessageReceived;   // fired on an incoming text frame
        ws.NewDataReceived += Ws_DataReceived;         // fired on an incoming binary frame
        ws.SessionClosed += Ws_SessionClosed;          // fired when a client disconnects
        if (ws.Setup(config)) ws.Start(); // bind the configured ip/port and start listening
        Console.WriteLine($"\n WebSocket 已启动 正在监听 {config.Ip}:{config.Port} \n");
        while (true)
        {
            if (Console.ReadLine().Equals("quit")) break;
        }
    }

    // Greets every new session with a small JSON payload.
    private void Ws_SessionConnected(WebSocketSession session)
    {
        session.Send($"{{\"msg\":\"welcome\"}}");
    }

    // Text protocol: HEART_BEAT gets a heartbeat reply, SOCKET_CLOSE drops
    // all of the session's subscriptions, and any other text is treated as a
    // topic the session wants to subscribe to on the message bus.
    private void Ws_MessageReceived(WebSocketSession session, string value)
    {
        if (value.Equals(_HeartBeat))
        {
            session.Send($"{{\"{_HeartBeat}\":\".\"}}");
        }
        else if (value.Equals(_Close))
        {
            Utils.MessageBus.UnSubscribe(session);
        }
        else
        {
            Utils.MessageBus.Subscribe($"{value}", session);
        }
    }

    // Binary frames are currently ignored.
    private void Ws_DataReceived(WebSocketSession session, byte[] value)
    {
    }

    // Drop the session's subscriptions when it disconnects for any reason.
    private void Ws_SessionClosed(WebSocketSession session, SuperSocket.SocketBase.CloseReason value)
    {
        Utils.MessageBus.UnSubscribe(session);
    }

    // Builds "origin + path" for a session; currently unused.
    private string RouterAnalyse(WebSocketSession session)
    {
        return $"{session.Origin}{session.Path}";
    }
}
}
<file_sep>/OwinSocket/Controller/ValuesController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Web.Http;
namespace OwinSocket.Controller
{
[RoutePrefix("api/demo")]
public class ValuesController : ApiController
{
    /// <summary>
    /// Publishes a login notification to every websocket session subscribed
    /// under the given sign, then acknowledges the HTTP caller.
    /// </summary>
    [HttpGet]
    [Route("login")]
    public string Login(string sign = "http://localhost?LoginSign=1234")
    {
        Action<object> notify = subscriber =>
        {
            (subscriber as SuperSocket.WebSocket.WebSocketSession).Send("ok,login now!");
        };
        Utils.MessageBus.Publish(sign, notify);
        return "ok";
    }

    /// <summary>
    /// Diagnostic endpoint: sleeps one second, then logs which app domain
    /// and managed thread served the request.
    /// </summary>
    [HttpGet, Route("test")]
    public string Test()
    {
        Thread.Sleep(1 * 1000);
        Console.WriteLine("Doamin ID:" + Thread.GetDomainID() + ", Thead ID:" + Thread.CurrentThread.ManagedThreadId);
        return "ok";
    }
}
}
<file_sep>/OwinSocket/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OwinSocket
{
class Program
{
    static void Main(string[] args)
    {
        AppStart();
        // Host the OWIN HTTP server on a background task.
        // NOTE(review): both server Start() methods block in a
        // Console.ReadLine loop, so the two tasks and Main all compete for
        // console input — confirm this is intended.
        Task.Run(() =>
        {
            try
            {
                string url = System.Configuration.ConfigurationManager.AppSettings["OwinServer:URL"];
                OwinServer server = new OwinServer(url);
                server.Start();
            }
            catch (Exception ex)
            {
                Console.Write($"msg:{ex.Message} stack:{ex.StackTrace}");
                throw ex;
            }
        });
        // Host the SuperSocket websocket server on a second background task,
        // configured from app settings.
        Task.Run(() =>
        {
            try
            {
                OwinSocketServer ws = new OwinSocketServer();
                ws.Start(new SuperSocket.SocketBase.Config.ServerConfig()
                {
                    Ip = System.Configuration.ConfigurationManager.AppSettings["SuperSocketServer:HostIp"],
                    Port = int.Parse(System.Configuration.ConfigurationManager.AppSettings["SuperSocketServer:Port"]),
                    MaxConnectionNumber = 1000,
                });
            }
            catch (Exception ex)
            {
                Console.Write($"msg:{ex.Message} stack:{ex.StackTrace}");
                throw ex;
            }
        });
        // Keep the main thread alive while the background servers run.
        Console.ReadLine();
    }

    static void AppStart()
    {
        // Register log4net from the XML configuration file.
        log4net.Config.XmlConfigurator.Configure();
    }
}
}
<file_sep>/OwinSocket/OwinServer.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web.Http;
using Owin;
using Microsoft.Owin.Hosting;
using OwinSocket.Config;
namespace OwinSocket
{
public class OwinServer
{
    // Base URL (scheme://host:port) the self-host listens on.
    private string _BaseAddress;

    public OwinServer(string url)
    {
        _BaseAddress = url;
    }

    /// <summary>
    /// Starts the self-hosted OWIN server on the configured base address and
    /// blocks, reading the console until "quit" is entered.
    /// </summary>
    public void Start()
    {
        var startOpts = new StartOptions(_BaseAddress)
        {
            ServerFactory = "Microsoft.Owin.Host.HttpListener",
        };
        WebApp.Start<OwinServer>(startOpts);
        Console.WriteLine($"\n OwinServer 已启动 正在监听 {_BaseAddress} \n");
        while (true)
        {
            if (Console.ReadLine().Equals("quit")) break;
        }
    }

    /// <summary>
    /// OWIN startup: wires Web API, the custom pipeline-stage hooks, and a
    /// catch-all fallback route. Invoked by WebApp.Start via reflection.
    /// </summary>
    public void Configuration(IAppBuilder app)
    {
        HttpConfiguration config = new HttpConfiguration();
        // Configure Web API (routes, Swagger, etc.).
        WebApiConfig.Register(config);
        // Register the custom pipeline-stage hooks.
        OwinPipelineRegister.Register(app);
        // Catch-all route; MVC could be plugged in here to reuse its routing
        // and view-rendering for the response stream.
        // NOTE(review): app.Run registers a terminal middleware before
        // UseWebApi below — confirm API requests still reach Web API.
        app.Run(context =>
        {
            context.Response.ContentType = "text/html; charset=utf-8";
            return context.Response.WriteAsync("服务已经启动 <br/> 当前路由是:" + context.Request.Uri);
        });
        // Register Web API — per the original author's note, this must be
        // the LAST registration.
        app.UseWebApi(config);
    }
}
}
<file_sep>/OwinSocket/Utils/MessageBus.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace OwinSocket.Utils
{
public class MessageBus
{
    // topic -> subscribers registered for that topic. Also used as the lock
    // object guarding all access.
    private static Dictionary<string, List<object>> dic = new Dictionary<string, List<object>>();

    /// <summary>
    /// Registers <paramref name="obj"/> as a subscriber of the given topic.
    /// FIX: the original appended unconditionally, and since the websocket
    /// server subscribes on every incoming message while UnSubscribe removed
    /// only the first occurrence, duplicate entries accumulated and leaked.
    /// A subscriber is now recorded at most once per topic.
    /// </summary>
    /// <param name="topic">Topic key; null/empty is ignored.</param>
    /// <param name="obj">Subscriber object (e.g. a websocket session).</param>
    public static void Subscribe(string topic, object obj)
    {
        if (string.IsNullOrEmpty(topic)) return;
        lock (dic)
        {
            // Removed the dead "dic == null" branch: dic is initialized
            // inline and lock(dic) would already have thrown if it were null.
            if (!dic.ContainsKey(topic))
                dic[topic] = new List<object>();
            if (!dic[topic].Contains(obj))
                dic[topic].Add(obj);
        }
    }

    /// <summary>
    /// Removes <paramref name="obj"/> from every topic, dropping topics that
    /// end up with no subscribers.
    /// </summary>
    public static void UnSubscribe(object obj)
    {
        lock (dic)
        {
            List<string> emptyTopics = new List<string>();
            foreach (var item in dic)
            {
                // FIX: RemoveAll instead of Remove, so every occurrence of
                // the subscriber is dropped, not just the first.
                item.Value.RemoveAll(o => Equals(o, obj));
                if (item.Value.Count == 0)
                    emptyTopics.Add(item.Key);
            }
            foreach (var topic in emptyTopics)
            {
                dic.Remove(topic);
            }
        }
    }

    /// <summary>
    /// Invokes <paramref name="act"/> once for each subscriber of the topic.
    /// FIX: iterates a snapshot of the subscriber list — the lock is
    /// reentrant, so a callback that calls Subscribe/UnSubscribe would have
    /// mutated the list mid-foreach and thrown.
    /// </summary>
    public static void Publish(string topic, Action<object> act)
    {
        if (string.IsNullOrEmpty(topic)) return;
        lock (dic)
        {
            if (!dic.ContainsKey(topic)) return;
            foreach (var subscriber in dic[topic].ToArray())
            {
                act(subscriber);
            }
        }
    }
}
}
| 4d9bef0523a4af291e0c6519024fdfce680180df | [
"C#"
] | 7 | C# | DarkTobey/WebSocket | 7d5fd29a5a4beccc56f4b0d8e2dd5e8b10c1b38b | a369d7b4262b9567b9b861e02fc83e836ef005f4 |
refs/heads/master | <file_sep><?php
session_start();
// Registration page: creates a new student row and redirects to the login
// page on success.
// NOTE(review): this guard redirects back to signup.php itself (a loop if it
// ever fires) and $_SESSION['user'] is not set elsewhere in this app —
// confirm the intended session key and target page.
if (isset($_SESSION['user'])) {
    header("Location: signup.php");
    exit; // BUG FIX: without exit the page kept rendering after the redirect
}
include 'functions.php';
if (isset($_POST['btn-signup'])) {
    if ($_POST['upassword'] != $_POST['repassword']) {
?>
<script>alert('Passwords do not match');</script>
<?php
    } else {
        // Escape all user-supplied values before interpolating into SQL.
        $firstname = mysql_real_escape_string($_POST['firstname']);
        $lastname = mysql_real_escape_string($_POST['lastname']);
        $studentid = mysql_real_escape_string($_POST['studentid']);
        $upass = md5(mysql_real_escape_string($_POST['upassword']));
        if (mysql_query("INSERT INTO student (student_id, first_name, last_name, password, active) VALUES ('$studentid', '$firstname', '$lastname', '$upass', true)")) {
            // BUG FIX: the original echoed a <script> alert before calling
            // header(), so the Location header was sent after output had
            // already started and the redirect never happened. Redirect
            // first, then stop executing.
            header("Location: index.php");
            exit;
        } else {
?>
<script>alert('error while registering you...');</script>
<?php
        }
    }
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show a logout link only for active sessions.
if (isset($_SESSION['studentId'])!="") {
echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 id="head" align="center" style="margin: 0px">New Student Info</h1>
</div>
<div id="student-info">
<form method="post" class="form-inline">
<div id="student-info-input">
<label class="sr-only" for="firstname">First Name</label>
<input class="form-control" id="firstname" placeholder="First Name" type="text" name="firstname" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="lastname">Last Name</label>
<input class="form-control" id="lastname" placeholder="Last Name" type="text" name="lastname" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="studentid">Student ID</label>
<input class="form-control" id="studentid" placeholder="Student Id" type="text" name="studentid" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="password">Password</label>
<input class="form-control" id="password" placeholder="Password" type="password" name="upassword" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="confirmpassword">Confirm Password</label>
<input class="form-control" id="confirmpassword" placeholder="Confirm Password" type="password" name="repassword" required />
</div>
<div id="student-info-input">
<button type="submit" class="btn btn-default" id="student-info-btn" name="btn-signup">Create Student!</button>
</div>
</form>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
include 'functions.php';
// AJAX endpoint for a dependent drop-down: emits one <option> element per
// active test belonging to the POSTed class id.
if($_POST['id']) {
    // SECURITY FIX: the raw POST value was interpolated into the SQL string
    // unescaped, allowing SQL injection.
    $id = mysql_real_escape_string($_POST['id']);
    $sql = mysql_query("select id, name from test where class_id='$id' and active=true");
    while($row=mysql_fetch_array($sql)) {
        $id=$row['id'];
        $data=$row['name'];
        echo '<option value="'.$id.'">'.$data.'</option>';
    }
}
?><file_sep><?php
include 'functions.php';
// Deletes every test attached to the given class, then returns to the
// teacher dashboard.
// NOTE(review): there is no session/role check here, so anyone with the URL
// can delete a class's tests — confirm whether auth should be enforced.
// SECURITY FIX: $_GET['id'] was interpolated into SQL unescaped.
$id = mysql_real_escape_string($_GET['id']);
$test_delete_result = mysql_query("DELETE FROM test WHERE class_id = '$id'");
header('location:teacher-home.php');
exit; // nothing should execute after the redirect is issued
?><file_sep><?php
// Admin dashboard: lists every upcoming test signup with its student,
// teacher, class, test, and time-slot details, plus a delete link per row.
session_start();
// Gate: require a logged-in session flagged as admin.
// NOTE(review): these header() calls are not followed by exit, so the page
// body still renders for unauthorized visitors — confirm intended.
if(!isset($_SESSION['studentId'])) {
header("Location: index.php");
}
if(!isset($_SESSION['admin'])) {
header("Location: index.php");
}
if($_SESSION['admin'] == false) {
header("Location: index.php");
}
include 'functions.php';
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only for active sessions.
if (isset($_SESSION['studentId'])!="") {
echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 align="center" style="margin: 0px">Upcoming Tests</h1>
</div>
<div>
<table>
<tr>
<th>TEST DATE</th>
<th>SLOT</th>
<th>STUDENT NAME</th>
<th>STUDENT ID</th>
<th>TEACHER</th>
<th>CLASS</th>
<th>TEST</th>
<th>DELETE</th>
</tr>
<?php
// One table row per signup. Each related record (student, test, class,
// course, teacher, time slot) is fetched with its own query per iteration.
// NOTE(review): this is an N+1 query pattern; a single JOIN could replace it.
$test_result = mysql_query("SELECT id, student_id, test_id, time_slot_id FROM test_signup");
while ($test_row = mysql_fetch_assoc($test_result)) {
$test_signup_id = $test_row['id'];
// Student display name and school id.
$student_id = $test_row['student_id'];
$student_result = mysql_query("SELECT student_id, first_name, last_name FROM student WHERE id = '$student_id'");
$student_row=mysql_fetch_array($student_result);
$student_id_1 = $student_row['student_id'];
$student_name = $student_row['first_name'] . ' ' . $student_row['last_name'];
// Test -> class -> course title / teacher name chain.
$test_id = $test_row['test_id'];
$test_1_result = mysql_query("SELECT class_id, name FROM test WHERE id = '$test_id'");
$test_1_row=mysql_fetch_array($test_1_result);
$class_id = $test_1_row['class_id'];
$class_result = mysql_query("SELECT course_id, teacher_id FROM class WHERE id = '$class_id'");
$class_row=mysql_fetch_array($class_result);
$course_id = $class_row['course_id'];
$teacher_id = $class_row['teacher_id'];
$course_result = mysql_query("SELECT title FROM course WHERE id = '$course_id'");
$course_row=mysql_fetch_array($course_result);
$course_title = $course_row['title'];
$teacher_result = mysql_query("SELECT first_name, last_name FROM teacher WHERE id = '$teacher_id'");
$teacher_row=mysql_fetch_array($teacher_result);
$teacher_name = $teacher_row['first_name'] . ' ' . $teacher_row['last_name'];
// Time-slot date plus a human-readable label for slot numbers 1..6.
$time_slot_id = $test_row['time_slot_id'];
$time_result = mysql_query("SELECT test_date, test_slot FROM time_slots WHERE time_slot_id = '$time_slot_id'");
$time_row=mysql_fetch_array($time_result);
$time_slot_name = '';
$time_slot_num = $time_row['test_slot'];
if ($time_slot_num == 1) {
$time_slot_name = 'BEFORE SCHOOL';
} else if ($time_slot_num == 2) {
$time_slot_name = 'HOUR 1';
} else if ($time_slot_num == 3) {
$time_slot_name = 'HOUR 2';
} else if ($time_slot_num == 4) {
$time_slot_name = 'HOUR 3';
} else if ($time_slot_num == 5) {
$time_slot_name = 'HOUR 4';
} else if ($time_slot_num == 6) {
$time_slot_name = 'AFTER SCHOOL';
}
// Render the row; the X link hits deleterow.php with signup and slot ids.
echo
'<tr>
<td>'.$time_row['test_date'].'</td>
<td>'.$time_slot_name.'</td>
<td>'.$student_name.'</td>
<td>'.$student_id_1.'</td>
<td>'.$teacher_name.'</td>
<td>'.$course_title.'</td>
<td>'.$test_1_row['name'].'</td>
<td><a href="deleterow.php?id='.$test_signup_id.'&time_id='.$time_slot_id.'">X</a></td>
</tr>';
}
?>
</table>
</div>
</div> <!-- content -->
<!--<?php include("includes/footer.html"); ?>-->
</body>
</html><file_sep><?php
// Public landing page: shows today's date and links to the three login
// portals (student / teacher / admin).
session_start();
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show a logout link only when a session is active.
// NOTE(review): isset() returns a bool, so the != "" comparison is
// effectively just isset() — confirm intended.
if (isset($_SESSION['studentId'])!="") {
echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<p class="lead">Welcome to EPHS Test Center</p>
</div>
<br>
<div id= "cal">
<?php
// Display today's date in the school's local timezone.
date_default_timezone_set("America/Chicago");
echo date("l, m/d/Y");
?>
</div>
<div id="logins">
<a href="login-student.php"><button id="login-btn">Student</button></a><br>
<a href="login-teacher.php"><button id="login-btn">Teacher</button></a><br>
<a href="login-admin.php"><button id="login-btn">Admin</button></a>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
session_start();
include 'functions.php';
// Review-and-submit page: resolves the teacher/class/test names for the
// selections posted from the previous step, then inserts the signup row and
// decrements the slot's capacity when the student confirms.
$teacherName = '';
$className = '';
$testName = '';
// Date/slot chosen on the slot-selection page; escape before use in SQL
// since they ultimately originate from user input.
$test_date = mysql_real_escape_string($_SESSION["selected_date"]);
$test_slot = mysql_real_escape_string($_SESSION["selected_slot"]);
$res=mysql_query("SELECT time_slot_id FROM time_slots WHERE test_date='$test_date' AND test_slot='$test_slot'");
$row=mysql_fetch_array($res);
$time_slot_id = $row['time_slot_id'];
if (! empty ( $_POST ['teacher_select'] )) {
    // SECURITY FIX: posted ids went into SQL unescaped; they are numeric
    // keys, so cast to int to close the injection hole.
    $teacher_id = intval($_POST ["teacher_select"]);
    $res=mysql_query("SELECT first_name, last_name FROM teacher WHERE id='$teacher_id'");
    $row=mysql_fetch_array($res);
    $teacherName = $row['first_name'] . ' ' . $row['last_name'];
}
if (! empty ( $_POST ['class_select'] )) {
    $class_id = intval($_POST ["class_select"]);
    $res=mysql_query("SELECT b.title FROM class a, course b WHERE a.course_id=b.id AND a.id='$class_id'");
    $row=mysql_fetch_array($res);
    $className = $row['title'];
}
if (! empty ( $_POST ['test_select'] )) {
    $test_id = intval($_POST ["test_select"]);
    $_SESSION['test_id'] = $test_id;
    $res=mysql_query("SELECT name FROM test WHERE id='$test_id'");
    $row=mysql_fetch_array($res);
    $testName = $row['name'];
}
if(isset($_POST['btn-test-submit'])) {
    $studentid = intval($_SESSION['studentId']);
    $test_id = intval($_SESSION['test_id']);
    if(mysql_query("INSERT INTO test_signup (student_id, test_id, time_slot_id) VALUES ('$studentid', '$test_id', '$time_slot_id')")) {
        // Claim one seat in the chosen time slot.
        mysql_query("UPDATE time_slots SET num_open_slots = num_open_slots-1 WHERE time_slot_id = $time_slot_id");
        // BUG FIX: the original echoed a <script> alert before header(), so
        // the Location header was sent after output and the redirect was
        // lost. Redirect first, then stop.
        header("Location: student-home.php");
        exit;
    } else {
?>
<script>alert('error while registering you...');</script>
<?php
    }
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
if (isset($_SESSION['studentId'])!="") {
echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<H1 style="margin: 0px" align="center">Review your info and submit</H1>
<h4 style="margin: 0px"align="center">
<br>
Name: <?php
echo $_SESSION ["firstName"] . ' ' . $_SESSION ["lastName"];
?> <br> <br>
<?php echo 'Student ID: ' . $_SESSION ["student_id"]; ?>
<br> <br>
Teacher Name: <?php echo $teacherName; ?> <br><br>
<?php echo 'Class Name: ' . $className; ?> <br> <br>
Test Name: <?php echo $testName; ?> <br> <br>
Selected Date: <?php echo $_SESSION ["selected_date"]; ?>
<br> <br>
Selected Slot: <?php
// Map the numeric slot (1..6) to its display label.
if ( $_SESSION ["selected_slot"] == 1 ){
echo 'Before School';
}elseif ( $_SESSION ["selected_slot"] == 2 ){
echo '1st Hour';
}elseif ( $_SESSION ["selected_slot"] == 3 ){
echo '2nd Hour';
}elseif ( $_SESSION ["selected_slot"] == 4 ){
echo '3rd Hour';
}elseif ( $_SESSION ["selected_slot"] == 5 ){
echo '4th Hour';
}else{
echo 'After School';
}
?>
<br>
<br>
</h4>
<FORM METHOD="POST" style="margin-left: auto; margin-right: auto;">
<P align="center" style="margin: 0px"><INPUT TYPE="submit" class="btn btn-default" NAME="btn-test-submit" VALUE="Submit"></P>
</FORM>
<FORM ACTION="select-signup-slot.php" METHOD="POST">
<P align="center" style="margin: 0px"><INPUT TYPE="submit" class="btn btn-default" NAME="Submit" VALUE="Change Test Time"></P>
</FORM>
<FORM ACTION="select-signup-test.php" METHOD="POST">
<P align="center" style="margin: 0px"><INPUT TYPE="submit" class="btn btn-default" NAME="Submit" VALUE="Change Test"></P>
</FORM>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
include 'functions.php';
// Removes a test signup and frees its time slot, then returns to the admin
// dashboard.
// NOTE(review): there is no session/admin check here, so anyone with the
// URL can delete signups — confirm whether auth should be enforced.
// SECURITY FIX: both $_GET values went into SQL unescaped; they are numeric
// keys, so cast to int.
$id = intval($_GET['id']);
$time_slot_id = intval($_GET['time_id']);
$test_signup_result = mysql_query("DELETE FROM test_signup WHERE id = '$id'");
$time_slot_result = mysql_query("UPDATE time_slots SET num_open_slots = num_open_slots+1 WHERE time_slot_id = $time_slot_id");
header('location:admin-home.php');
exit; // stop processing once the redirect is issued
?><file_sep><?php
// Confirmation page shown after a signup completes; also caches any posted
// teacher/class/test display names back into the session.
session_start();
include 'functions.php';
if (! empty ( $_POST ['TeacherName'] )) {
$_SESSION ["TeacherName"] = $_POST ["TeacherName"];
}
if (! empty ( $_POST ['ClassName'] )) {
$_SESSION ["ClassName"] = $_POST ["ClassName"];
}
if (! empty ( $_POST ['TestName'] )) {
$_SESSION ["TestName"] = $_POST ["TestName"];
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only for active sessions.
if (isset($_SESSION['studentId'])!="") {
echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<p>You are signed up! </p>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
include 'functions.php';
// AJAX endpoint: flips the test_received flag for the given test row via a
// PDO prepared statement and echoes the new value back to the caller.
$id = $_REQUEST["id"];
$checked = $_REQUEST["checked"];
// Invert the reported state: 1 -> 0, 0 -> 1; anything else stays 0.
$newchecked = 0;
if ($checked == 1) {
    $newchecked = 0;
} elseif ($checked == 0) {
    $newchecked = 1;
}
try {
    $stmt = $conn->prepare("UPDATE test_center set test_received = :test_received WHERE test_id = :test_id");
    $stmt->bindValue(':test_received', $newchecked);
    $stmt->bindValue(':test_id', $id);
    $stmt->execute();
} catch(PDOException $e) {
    // Failures are appended to the app's log file rather than surfaced.
    file_put_contents($file, $e->getMessage(), FILE_APPEND | LOCK_EX);
}
echo $newchecked;
?><file_sep><?php
session_start();
include 'functions.php';
// Already-authenticated users go straight to the dashboard.  exit after
// every Location header -- without it PHP keeps executing and renders
// the login form (and processes POSTs) anyway.
if (isset($_SESSION['studentId'])) {
    header("Location: admin-home.php");
    exit;
}
if (isset($_POST['btn-login'])) {
    // Escape credentials before interpolating them into the query.
    $username = mysql_real_escape_string($_POST['username']);
    $upass = mysql_real_escape_string($_POST['password']);
    $res = mysql_query("SELECT * FROM admin WHERE username='$username'");
    $row = mysql_fetch_array($res);
    // NOTE(review): md5 is not a safe password hash; migrate to
    // password_hash()/password_verify() when the schema allows.
    if ($row['password'] == md5($upass)) {
        // Role flags: this session is an admin, not a student/teacher.
        $_SESSION['student'] = false;
        $_SESSION['teacher'] = false;
        $_SESSION['admin'] = true;
        $_SESSION['studentId'] = $row['id'];
        $_SESSION['firstName'] = $row['first_name'];
        $_SESSION['lastName'] = $row['last_name'];
        header("Location: admin-home.php");
        exit;
    } else {
?>
<script>alert('Wrong ID or password!');</script>
<?php
    }
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in (plain isset;
// the old comparison against "" added nothing).
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 id="head" align="center" style="margin: 0px">Admin Login</h1>
</div>
<div id="student-info">
<form method="post" class="form-inline">
<div id="student-info-input">
<label class="sr-only" for="student-id">Username</label>
<input class="form-control" id="student-id" placeholder="Username" type="text" name="username" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="student-password">Password</label>
<input class="form-control" id="student-password" placeholder="<PASSWORD>" type="<PASSWORD>" name="password" required />
</div>
<div id="student-info-input">
<button type="submit" class="btn btn-default" id="student-info-btn" name="btn-login">Sign in</button>
</div>
</form>
<div id="student-info-input">
<p><a href="create-admin.php">New User?</a></p>
</div>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
session_start();
// Access control: this dashboard is for logged-in teachers only.
// Each redirect is followed by exit; without it the rest of the page is
// still executed and sent to unauthorized clients (execution-after-
// redirect bug in the original).
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
if (!isset($_SESSION['teacher'])) {
    header("Location: index.php");
    exit;
}
if ($_SESSION['teacher'] == false) {
    header("Location: index.php");
    exit;
}
include 'functions.php';
$teacher_id = $_SESSION['studentId'];
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
<li><a href="create-course.php">New Course</a></li>
<li><a href="create-test.php">New Test</a></li>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 align="center" style="margin: 0px">Courses</h1>
</div>
<div>
<table>
<tr>
<th>COURSE NUMBER</th>
<th>COURSE TITLE</th>
<th>DELETE</th>
</tr>
<?php
// List every course this teacher owns.  Each class row triggers a second
// query for the course's number and title.
// NOTE(review): N+1 query pattern; a single class/course JOIN would do
// the same work in one round trip.
$class_result = mysql_query("SELECT id, course_id FROM class WHERE teacher_id = '$teacher_id'");
while ($class_row = mysql_fetch_assoc($class_result)) {
    $class_id = $class_row['id'];
    $course_id = $class_row['course_id'];
    $course_result = mysql_query("SELECT id, course_id, title FROM course WHERE id = '$course_id'");
    $course_row=mysql_fetch_array($course_result);
    // The delete link removes the class row (the teacher/course link).
    echo '<tr><td>'.$course_row['course_id'].'</td><td>'.$course_row['title'].'</td>
<td><a href="deletecourse.php?id='.$class_id.'">X</a></td></tr>';
}
?>
</table>
</div>
<div id="top-msg">
<h1 align="center" style="margin: 0px">Tests</h1>
</div>
<div>
<table>
<tr>
<th>COURSE TITLE</th>
<th>TEST NAME</th>
<th>DELETE</th>
</tr>
<?php
// For each of this teacher's classes, list every test in it together
// with the course title (fetched per class -- same N+1 pattern as the
// courses table above).
$class_result = mysql_query("SELECT id, course_id FROM class WHERE teacher_id = '$teacher_id'");
while ($class_row = mysql_fetch_assoc($class_result)) {
    $class_id = $class_row['id'];
    $test_result = mysql_query("SELECT id, class_id, name FROM test WHERE class_id = '$class_id'");
    while ($test_row = mysql_fetch_assoc($test_result)) {
        $course_id = $class_row['course_id'];
        $course_result = mysql_query("SELECT title FROM course WHERE id = '$course_id'");
        $course_row=mysql_fetch_array($course_result);
        // NOTE(review): the delete link passes $class_id, not
        // $test_row['id'], so every test of a class links to the same id.
        // Confirm against deletetest.php whether a test id was intended.
        echo '<tr><td>'.$course_row['title'].'</td><td>'.$test_row['name'].'</td>
<td><a href="deletetest.php?id='.$class_id.'">X</a></td></tr>';
    }
}
?>
</table>
</div>
</div> <!-- content -->
<!--<?php include("includes/footer.html"); ?>-->
</body>
</html><file_sep><?php
session_start();
// Only logged-in users may create courses; stop after the redirect.
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
include 'functions.php';
if (isset($_POST['btn-signup'])) {
    // Escape user input before interpolating it into SQL.
    $courseid = mysql_real_escape_string($_POST['courseid']);
    $title = mysql_real_escape_string($_POST['title']);
    if (mysql_query("INSERT INTO course (course_id, title) VALUES ('$courseid', '$title')")) {
        // Link the new course to the logged-in teacher via the class table.
        $result = mysql_query("SELECT * FROM course WHERE course_id='$courseid'");
        $row = mysql_fetch_array($result);
        $course_id = $row['id'];
        $teacher_id = $_SESSION['studentId'];
        mysql_query("INSERT INTO class (course_id, teacher_id) VALUES ('$course_id', '$teacher_id')");
        // Redirect BEFORE emitting any output: the original echoed a
        // <script> alert first, which made header() fail with
        // "headers already sent" and broke the redirect.
        header("Location: teacher-home.php");
        exit;
    } else {
?>
<script>alert('error while registering you...');</script>
<?php
    }
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 id="head" align="center" style="margin: 0px">New Course Info</h1>
</div>
<div id="student-info">
<form method="post" class="form-inline">
<div id="student-info-input">
<label class="sr-only" for="courseid">Course Number</label>
<input class="form-control" id="courseid" placeholder="Course Number" type="text" name="courseid" required />
</div>
<div id="student-info-input">
<label class="sr-only" for="title">Course Name</label>
<input class="form-control" id="title" placeholder="Course Name" type="text" name="title" required />
</div>
<div id="student-info-input">
<button type="submit" class="btn btn-default" id="student-info-btn" name="btn-signup">Create Course!</button>
</div>
</form>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
session_start();
// Access control: only logged-in students may pick a test slot.
// exit after each Location header so the page body below is never
// executed or sent for unauthorized visitors.
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
if (!isset($_SESSION['student'])) {
    header("Location: index.php");
    exit;
}
if ($_SESSION['student'] == false) {
    header("Location: index.php");
    exit;
}
include 'functions.php';
date_default_timezone_set("America/Chicago");
$test_days = $_SESSION["test_days"];

// The previous page submits exactly one button named <DAY>_S<slot>:
// TUE/WED/THU index $test_days[0..2], slots 1-6 run from before-school
// through after-school.  Find the button that arrived and remember the
// chosen date and slot number in the session.
$day_index_by_prefix = array('TUE' => 0, 'WED' => 1, 'THU' => 2);
foreach ($day_index_by_prefix as $prefix => $day_index) {
    for ($slot = 1; $slot <= 6; $slot++) {
        if (!empty($_POST[$prefix . '_S' . $slot])) {
            // $d stays in scope: the heading below formats it.
            $d = $test_days[$day_index];
            $_SESSION["selected_date"] = date("Y-m-d", $d);
            $_SESSION["selected_slot"] = $slot;
            break 2;
        }
    }
}
// All active teachers, ordered for the dropdown rendered below.
$teacher_result = mysql_query("SELECT id, first_name, last_name FROM teacher WHERE active=true order by last_name");
?>
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4.2/jquery.min.js"></script>
<script>
// Refresh the class <select> with the courses taught by the currently
// selected teacher.  The server returns ready-made <option> markup.
function updateCourses() {
    var teacher_selection = document.getElementById('teacher_select');
    var dataString = 'id=' + teacher_selection.value;
    $.ajax ({
        type: "POST",
        url: "update_courses_select.php",
        data: dataString,
        cache: false,
        success: function(html) {
            var $class = $('#class_select');
            $class.empty();
            $class.append(html);
            // Fire change so the dependent test list refreshes too.
            $class.change();
        }
    });
}
// Refresh the test <select> with the tests of the currently selected
// class.  (Unused locals and the debug console.log were removed.)
function updateTests() {
    var class_selection = document.getElementById('class_select');
    var dataString = 'id=' + class_selection.value;
    $.ajax ({
        type: "POST",
        url: "update_tests_select.php",
        data: dataString,
        cache: false,
        success: function(html) {
            var $test = $('#test_select');
            $test.empty();
            $test.append(html);
            $test.change();
        }
    });
}
</script>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<?php
// Echo the heading for the chosen slot.  $d was set above by whichever
// submit button matched.  Slot 1 keeps its original label spacing
// ("Slot: ") while slots 2-6 use "Slot : ", exactly as before.
$slot_labels = array(
    1 => 'Before School',
    2 => '1st Hour',
    3 => '2nd Hour',
    4 => '3rd Hour',
    5 => '4th Hour',
    6 => 'After School',
);
$slot = $_SESSION["selected_slot"];
if (isset($slot_labels[$slot])) {
    $separator = ($slot == 1) ? ': ' : ' : ';
    echo '<h2 align="center" style="margin: 0px">Selected Time Slot' . $separator
        . date("l, m/d/Y", $d) . " - " . $slot_labels[$slot] . "</h2>";
}
?>
</div>
<FORM ACTION="test-signup-review.php" METHOD="POST">
<TABLE>
<TR>
<TD>Teacher Name</TD>
<?php
if (mysql_num_rows($teacher_result)!=0) {
    // Teacher dropdown; changing it reloads the course list through the
    // updateCourses() AJAX helper defined above.
    echo '<td><select class="form-control" name="teacher_select" id="teacher_select" onChange=updateCourses() required>';
    while($teacher_row = mysql_fetch_array( $teacher_result )) {
        // Display as "Last, First"; option value is the teacher id.
        // (Removed the unused $teacher_id local from the original.)
        $teacher_name = $teacher_row['last_name'] . ', ' . $teacher_row['first_name'];
        echo '<option value="'.$teacher_row['id'].'">'.$teacher_name.'</option>';
    }
    echo '</select></td>';
}
?>
</TR>
<TR>
<TD align="right">Class Name</TD>
<td><select class="form-control" name="class_select" id="class_select" onChange=updateTests() required></select></td>
</TR>
<TR>
<TD align="right">Test Name</TD>
<td><select class="form-control" name="test_select" id="test_select" required></select></td>
</TR>
</TABLE>
<P align="center" style="margin: 0px"><INPUT TYPE="submit" class="btn btn-default" NAME="Submit" VALUE="Submit"></P>
</FORM>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
session_start();
// Only logged-in users may create tests; stop after the redirect.
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
include 'functions.php';
$teacher_id = $_SESSION['studentId'];
// Courses taught by this teacher; consumed by the dropdown below.
$class_result = mysql_query("SELECT id, course_id FROM class WHERE teacher_id = '$teacher_id'");
if (isset($_POST['btn-signup'])) {
    $class_id = mysql_real_escape_string($_POST['course_select']);
    $test_name = mysql_real_escape_string($_POST['test_name']);
    if (mysql_query("INSERT INTO test (class_id, name, active) VALUES ('$class_id', '$test_name', true)")) {
        // Redirect before any output: the original echoed an alert
        // first, which made header() fail ("headers already sent").
        header("Location: teacher-home.php");
        exit;
    } else {
?>
<script>alert('error while registering you...');</script>
<?php
    }
}
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 id="head" align="center" style="margin: 0px">New Test Info</h1>
</div>
<div id="student-info">
<form method="post" class="form-inline">
<div id="student-info-input">
<?php
// Dropdown of this teacher's courses.  The <option> value is the CLASS
// id (class links teacher to course), which is what the INSERT above
// stores in test.class_id.
if (mysql_num_rows($class_result)!=0) {
    echo '<select class="form-control" name="course_select" id="course_select" required>';
    while($class_row = mysql_fetch_array( $class_result )) {
        // Fetch the course title for display (one query per class row).
        $course_id = $class_row['course_id'];
        $course_result = mysql_query("SELECT id, course_id, title FROM course WHERE id = '$course_id'");
        $course_row = mysql_fetch_array($course_result);
        echo '<option value="'.$class_row['id'].'">'.$course_row['title'].'</option>';
    }
    echo '</select>';
}
?>
</div>
<div id="student-info-input">
<label class="sr-only" for="test_name">Test Name</label>
<input class="form-control" id="test_name" placeholder="<NAME>" type="text" name="test_name" required />
</div>
<div id="student-info-input">
<button type="submit" class="btn btn-default" id="student-info-btn" name="btn-signup">Create Test!</button>
</div>
</form>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
session_start();
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
include 'functions.php';
// AJAX endpoint: echo <option> elements for every course taught by the
// POSTed teacher id (consumed by the class_select dropdown).
if (!empty($_POST['id'])) {
    // Escape the client-supplied id before interpolating it into SQL --
    // the original used $_POST['id'] raw (SQL injection).
    $teacher_id = mysql_real_escape_string($_POST['id']);
    $sql = mysql_query("select a.id, b.title from class a, course b where b.id=a.course_id and a.teacher_id='$teacher_id'");
    while ($row = mysql_fetch_array($sql)) {
        echo '<option value="'.$row['id'].'">'.$row['title'].'</option>';
    }
}
?><file_sep><?php
session_start();
// Front controller: log the visitor out on ?logout, otherwise route a
// logged-in visitor to the home page for their role.
// Handle logout FIRST: with proper exits after the role redirects below,
// a logged-in user could otherwise never reach this branch.
if (isset($_GET['logout'])) {
    $_SESSION = array();
    session_destroy();
    header("Location: index.php");
    exit;
}
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
// Check the flag VALUES, not isset(): login sets all three role flags
// (e.g. an admin session has student=false, admin=true), so the
// original isset(...) == true tests matched every role.
if (!empty($_SESSION['student'])) {
    header("Location: student-home.php");
    exit;
}
if (!empty($_SESSION['teacher'])) {
    header("Location: student-teacher.php");
    exit;
}
if (!empty($_SESSION['admin'])) {
    header("Location: admin-home.php");
    exit;
}
?><file_sep><?php
session_start();
// Access control: the slot-selection page is for logged-in students
// only.  exit after each redirect so the page (and its DB writes below)
// never runs for unauthorized visitors.
if (!isset($_SESSION['studentId'])) {
    header("Location: index.php");
    exit;
}
if (!isset($_SESSION['student'])) {
    header("Location: index.php");
    exit;
}
if ($_SESSION['student'] == false) {
    header("Location: index.php");
    exit;
}
include 'functions.php';
date_default_timezone_set ( "America/Chicago" );
$max_slots = 30;

// Candidate test days are Tue/Wed/Thu.  Days of the current week that
// have not passed yet come first; each day already over is replaced by
// the same weekday of NEXT week, appended after the remaining
// current-week days (preserving the original ordering).
$this_week = array();
$next_week = array();
foreach (array('tuesday', 'wednesday', 'thursday') as $weekday) {
    $candidate = strtotime($weekday . " this week");
    if (validDate($candidate)) {
        $this_week[] = $candidate;
    } else {
        $next_week[] = strtotime($weekday . " next week");
    }
}
$test_days = array_merge($this_week, $next_week);
try {
    // For every candidate day and each of the six daily slots, make sure a
    // time_slots row exists (seeding it with $max_slots openings when
    // missing) and record how many openings remain in $day{N}_slot{M}.
    foreach ( $test_days as $d ) {
        $searchDate = date ( "Y-m-d", $d );
        // Position of this day in $test_days (1-3) selects the variable set.
        $day_number = array_search($d, $test_days) + 1;
        for ($slot_number = 1; $slot_number <= 6; $slot_number++) {
            $open_slots = $max_slots;
            $res = mysql_query("SELECT num_open_slots FROM time_slots WHERE test_date='$searchDate' and test_slot='$slot_number'");
            if (mysql_num_rows($res) == 0) {
                // First visit for this day/slot: create the row.  BUG FIX:
                // the original ran mysql_query() twice here -- once on the
                // SQL string and once more on its RESULT RESOURCE; the
                // insert must run exactly once.
                mysql_query("INSERT INTO time_slots (test_date, test_slot, num_open_slots) VALUES ('$searchDate', '$slot_number', '$max_slots')");
            } else {
                $row = mysql_fetch_array($res);
                $open_slots = $row['num_open_slots'];
            }
            // Populate $day1_slot1 ... $day3_slot6 for the session block
            // below (replaces the original's 54-line if/else ladder).
            ${"day{$day_number}_slot{$slot_number}"} = $open_slots;
        }
    }
} catch ( PDOException $e ) {
    // NOTE(review): mysql_* calls never throw PDOException; this catch is
    // dead code kept from the old PDO version of this page.
}
// Publish every open-slot count to the session so that both the table
// below and select-signup-test.php can read them.
for ($day = 1; $day <= 3; $day++) {
    for ($slot = 1; $slot <= 6; $slot++) {
        $_SESSION["day{$day}_slot{$slot}"] = ${"day{$day}_slot{$slot}"};
    }
}
$_SESSION ["test_days"] = $test_days;
?>
<!DOCTYPE html>
<html>
<?php include("includes/header.html"); ?>
<body>
<?php include("includes/header-logo.html"); ?>
<div id="header-nav">
<ul>
<li><a href="index.php">Home</a></li>
<?php
// Show the logout link only when someone is logged in.
if (isset($_SESSION['studentId'])) {
    echo '<li><a href="logout.php?logout">Logout</a></li>';
}
?>
</ul>
</div> <!-- end header-nav -->
<div id="content">
<div id="top-msg">
<h1 align="center" style="margin: 0px"> Hi <?php echo $_SESSION["firstName"];?>,</h1>
<h2 align="center" style="margin: 0px">Select a Test Time below.</h2>
<p align="center" style="margin: 0px"><?php echo "Today is " . date("l, m/d/Y") . "<br>";?></p>
</div>
<div>
<FORM ACTION="select-signup-test.php" METHOD="POST">
<div>
<TABLE>
<TR>
<th style="padding:2px;">DAY</th>
<th style="padding:2px;">BEFORE</th>
<th style="padding:2px;">HOUR 1</th>
<th style="padding:2px;">HOUR 2</th>
<th style="padding:2px;">HOUR 3</th>
<th style="padding:2px;">HOUR 4</th>
<th style="padding:2px;">AFTER</th>
</TR>
<?php
$d = $test_days [0];
?>
<TR>
<TD><?php echo date("l, m/d/Y", $d)?></TD>
<?php
// One submit cell per Tuesday slot; a closed slot renders as "--".
// The button NAME encodes the choice (TUE_S<slot>) and VALUE shows how
// many openings remain.
for ($slot = 1; $slot <= 6; $slot++) {
    $open = $_SESSION["day1_slot" . $slot];
    if ($open > 0) {
        echo '<TD><INPUT TYPE="submit" class="btn btn-default" NAME="TUE_S' . $slot . '" VALUE="' . $open . '"></TD>';
    } else {
        echo '<TD>--</TD>';
    }
}
?>
</TR>
<?php
$d = $test_days [1];
?>
<TR>
<TD><?php echo date("l, m/d/Y", $d)?></TD>
<?php
// One submit cell per Wednesday slot; a closed slot renders as "--".
for ($slot = 1; $slot <= 6; $slot++) {
    $open = $_SESSION["day2_slot" . $slot];
    if ($open > 0) {
        echo '<TD><INPUT TYPE="submit" class="btn btn-default" NAME="WED_S' . $slot . '" VALUE="' . $open . '"></TD>';
    } else {
        echo '<TD>--</TD>';
    }
}
?>
</TR>
<?php
$d = $test_days [2];
?>
<TR>
<TD><?php echo date("l, m/d/Y", $d)?></TD>
<?php
// One submit cell per Thursday slot; a closed slot renders as "--".
for ($slot = 1; $slot <= 6; $slot++) {
    $open = $_SESSION["day3_slot" . $slot];
    if ($open > 0) {
        echo '<TD><INPUT TYPE="submit" class="btn btn-default" NAME="THU_S' . $slot . '" VALUE="' . $open . '"></TD>';
    } else {
        echo '<TD>--</TD>';
    }
}
?>
</TR>
</TABLE>
</div>
</FORM>
</div>
</div> <!-- content -->
<?php include("includes/footer.html"); ?>
</body>
</html><file_sep><?php
// Shared DB bootstrap: every page includes this file first.
// NOTE(review): credentials are hard-coded, and the ext/mysql API used
// throughout (mysql_*) was deprecated in PHP 5.5 and removed in PHP 7 --
// migrating to PDO/mysqli is required on modern PHP.
if(!mysql_connect("localhost","hiltonj","smile")) {
    die('oops connection problem ! --> '.mysql_error());
}
if(!mysql_select_db("test")) {
    die('oops database selection problem ! --> '.mysql_error());
}
/**
 * Look up a teacher by id and return the display name "Last, First".
 * Returns null when no teacher with that id exists.
 */
function findTeacherName($id) {
    // Escape the id: callers may pass request-supplied values.
    $id = mysql_real_escape_string($id);
    $res = mysql_query("SELECT first_name, last_name FROM teacher WHERE id='$id'");
    if (mysql_num_rows($res) != 0) {
        $row = mysql_fetch_array($res);
        // BUG FIXES from the original: console.log() does not exist in
        // PHP (fatal undefined-function error), and "+" is numeric
        // addition -- string concatenation is ".".
        return $row['last_name'] . ', ' . $row['first_name'];
    }
    return null;
}
/*
$db_host = "localhost";
$db_name = "test";
$db_user = "animol";
$db_password = "<PASSWORD>";
try {
$conn = new PDO ( "mysql:host=$db_host;dbname=$db_name", $db_user, $db_password );
// set the PDO error mode to exception
$conn->setAttribute ( PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION );
// echo "Connected successfully";
} catch ( PDOException $e ) {
// echo "Connection failed: " . $e->getMessage();
}*/
/**
 * Return true when the timestamp $var falls on today or a later calendar
 * day, false when its date is already in the past.
 */
function validDate($var) {
    // "Y-m-d" strings sort chronologically, so string comparison works.
    return date("Y-m-d", $var) >= date("Y-m-d");
}
?> | 1a4af0147589454bfdf49d32c20eae3082db0dde | [
"PHP"
] | 19 | PHP | chrismathew87/test | d674ea8dfc41ca8760d83d6f411f2dacdcba5865 | f8d8cdbc835a10d46365d24c0e6527fb6acb59ed |
refs/heads/master | <repo_name>tmartin42/MusicRoomWebApp<file_sep>/src/main.js
import Vue from 'vue'
import VueAxios from 'vue-axios'
import VueAuthenticate from 'vue-authenticate'
import App from './App.vue'
import router from './router'
import './registerServiceWorker'
import axios from 'axios';
// Silence the "running Vue in development mode" console banner.
Vue.config.productionTip = false

// Root Vue instance, mounted on the #app element.
// NOTE(review): the Vue.use(...) plugin installations below run AFTER
// this instance is created; vue-authenticate is normally installed
// before the root instance so this.$auth exists here -- confirm the
// intended initialization order.
new Vue({
  router,
  render: h => h(App),
  methods: {
    // Start an OAuth flow with the given provider name
    // ('google' or 'oauth2' as configured below).
    authenticate: function (provider) {
      this.$auth.authenticate(provider).then(function () {
        console.log('lol');
        // Execute application logic after successful social authentication
      })
    }
  }
}).$mount('#app')
// SECURITY: never log environment secrets -- the original printed
// process.env.VUE_APP_SECRET to the browser console, exposing it to
// anyone with devtools open.
Vue.use(VueAxios, axios)
// Social-login configuration.  The generic "oauth2" provider is wired to
// Deezer's OAuth endpoint; client ids come from .env (VUE_APP_*).
// NOTE(review): baseUrl and the redirect URIs are hard-coded to
// localhost:8080 -- these must become environment-specific before deploy.
Vue.use(VueAuthenticate, {
  baseUrl: 'http://localhost:8080/#/test/', // Your API domain
  providers: {
    google: {
      clientId: process.env.VUE_APP_GOOGLE_ID,
      redirectUri: 'http://localhost:8080/about' // Your client app URL
    },
    oauth2: {
      name: "oauth2",
      clientId: process.env.VUE_APP_DEEZER_ID,
      redirectUri: 'http://localhost:8080/about', // Your client app URL
      authorizationEndpoint: "https://connect.deezer.com/oauth/auth.php",
      display: 'popup',
      oauthType: '2.0',
      popupOptions: { width: 500, height: 560 }
    }
  }
})<file_sep>/src/router.js
import Vue from 'vue'
import Router from 'vue-router'
import Home from './views/Home.vue'
import About from './views/About.vue'
import test from './views/Res.vue'
Vue.use(Router);
// Application routes.
// NOTE(review): `requiresAuth` is not a standard vue-router route
// option -- the vue-router convention is meta: { requiresAuth }.  The
// commented-out beforeEach guard at the bottom of this file reads it
// through route props instead; confirm which scheme is intended.
let rout = new Router({
  routes: [
    {
      path: '/',
      name: 'home',
      component: Home,
      requiresAuth: false,
      props: { auth: false }
    },
    {
      // Intended to be reachable only when authenticated.
      path: '/about',
      name: 'about',
      component: About,
      requiresAuth: true,
      props: { auth: true }
    },
    {
      // OAuth callback landing route.
      path: '/test/auth/google',
      name: 'test',
      component: test,
      requiresAuth: false,
      props: { auth: false }
    }
  ]
});

export default rout;
/*
rout.beforeEach((to, from, next) => {
const requiresAuth = to.matched.some((e) => {return (e.props.default.auth === true)});
console.log(requiresAuth);
console.log("req: ", to.matched.some((e) => {return (e.props.default.auth === true)}));
if(requiresAuth) {
next('/');
} else {
console.log("what");
next();
}
});*/ | a9ce29bbb1010e86e33ca8987fedbac795e9d3f4 | [
"JavaScript"
] | 2 | JavaScript | tmartin42/MusicRoomWebApp | 2d847e7cea526377f9ea295a08eecf9c5fd2836c | 955b896e61d8c50dc16794c427133146744de02a |
refs/heads/master | <repo_name>SvenMeyer/ReduxSimpleStarter<file_sep>/src/index.js
import React, {Component} from 'react';
import ReactDOM from 'react-dom';
import InputField from './components/input_cc.js'; // .js is optional
import ListVehicles from './components/list_vehicles.js'; // .js is optional
const API_KEY = 'ABC'; // TO-DO
// This is a class / a type component - not an instance !
// const App = function () {
class App extends Component {
constructor(props) {
super(props);
const v1 = ['911','1234','5678','NA','0x1234','0x5678'];
this.state = { vehicles: [v1] };
// get list of vehicles *TO-DO*
// call-back function : (data) => { this.setState({ vehicles: data }) };
}
render () {
return (
<div>
<h1>Vehicle History Log</h1>
<InputField />
<p />
<ListVehicles vehicles={this.state.vehicles} />
</div>
);
}
}
// App is the class
// <App /> is an instance of type App > React.createElement(App, null);
ReactDOM.render(<App />, document.querySelector('.container'));
<file_sep>/src/components/input_cc.js
// input.js
import React, { Component } from 'react';
class InputField extends Component {
constructor(props) {
super(props);
this.state = {term: 'Ox'};
// https://stackoverflow.com/questions/32317154/react-uncaught-typeerror-cannot-read-property-setstate-of-undefined
// this.onInputChange = this.onInputChange.bind(this);
}
render() {
return (
<div>
<input
value={this.state.term}
onChange={this.onInputChange} />
<br />
input value : {this.state.term}
</div>
);
// return (<input onChange={event => console.log(event.target.value)} />);
}
// https://stackoverflow.com/questions/32317154/react-uncaught-typeerror-cannot-read-property-setstate-of-undefined
// onInputChange(event) {
onInputChange = (event) => {
console.log('input field : event.target.value = ');
console.log(event.target.value);
this.setState({term: event.target.value});
}
}
export default InputField;
<file_sep>/src/components/list_vehicles.js
// list.js
import React from 'react';
import ListItemVehicle from './list_item_vehicle';
// props contains a list of vehicles
const ListVehicles = (props) => {
const header = { name: 'name', vin: 'VIN', ein: 'EIN', document: 'document', owner: 'owner', service: 'service' };
const listItems = props.vehicles.map((vehicle) => {
return (<ListItemVehicle vehicle={vehicle} />);
});
return (
<div>
<style>{'table,th,td{border:1px solid black;}'}</style>
<style>{'table{width: 100%;}'}</style>
<table>
<tr>
<th>name</th>
<th>VIN</th>
<th>EIN</th>
<th>document</th>
<th>owner</th>
<th>service</th>
</tr>
{listItems}
</table>
<br />
length of list = { props.vehicles.length }
</div>
/*
<ul className='col-md-4 list-group'>
{listItems}
</ul>
<p />
length of list = {props.vehicles.length}
*/
);
};
export default ListVehicles;
<file_sep>/src/components/list_item_vehicle.js
// list_item_vehicle.js
import React from 'react';
const ListItemVehicle = (props) => {
// const cells = props.video.map()
console.log(props);
return (
<tr>{props.vehicle.map((cell) => { return(<td> {cell} </td>); })}</tr>
);
};
export default ListItemVehicle; <file_sep>/src/components/list_item_log_entry.js
// list_item_log_entry.js<file_sep>/src/components/list.js
// list.js
import React from 'react';
const List = () => {
return (
<ul className='col-md-4 list-group'>
</ul>
);
};
export default List;
| aaacfde87cf1c1a457a5a7bccb4162c45b85fe8b | [
"JavaScript"
] | 6 | JavaScript | SvenMeyer/ReduxSimpleStarter | 6a6ee8efb083f1e9be280b1883e8864f38739104 | 70b5c5178c4b1ae553f0483f1545dd882a092eef |
refs/heads/master | <file_sep>CREATE TABLE users (
id serial primary key,
name character varying (200),
email character varying (200),
password character varying (300)
)
-- Posts ("flutters"). userid refers to users.id, but no FOREIGN KEY
-- constraint is declared here -- TODO confirm integrity is enforced elsewhere.
CREATE TABLE flutters (
    userid integer,
    postdate DATE NOT NULL DEFAULT CURRENT_DATE,  -- defaults to insertion day
    body VARCHAR (255) NOT NULL,
    postid serial primary key
)<file_sep>// Get /users => JSON list of all registered users
// post /users => takes some JSON creates new user in database.
// get /users/id => returns a JSON object with this user's info.
// get /users/id/squabs => JSON list of squabs by this user
// get /squabs => JSON list of all squabs by all users
// get /squabs/id => JSON object of specific squabs
// post /squabs => takes some json and create a new squab
const q = require('./dataQueries');
// In-memory user fixtures: read by the authenticate middleware below
// (route handlers themselves query the database via ./dataQueries).
let users = [
    {id: '1', name: 'Jaydoe', email: '<EMAIL>', pw: 'shimmy'},
    {id: '2', name: 'XXXTentacion', email:'<EMAIL>', pw: 'dondon'}
]
// Sample posts -- currently unused by the handlers, which read from the DB.
let flutters = [
    {userid: '1', id: '1', body: 'I like coffee'},
    {userid: '2', id: '2', body: `I'm not dead yet`},
    {userid: '1', id: '3', body: 'I like coffee'},
    {userid: '2', id: '4', body: `I'm not dead yet`}
]
const express = require('express');
let ex = express();
ex.listen(3000); // start the HTTP server on port 3000
// Express middleware: checks ?ident=<user id>&pass=<password> against the
// in-memory users list and only calls next() on a match.
// SECURITY: credentials travel in the query string and are compared in
// plain text -- acceptable for a demo only; use hashed passwords and an
// Authorization header over HTTPS before shipping.
let authenticate = (req, res, next) => {
    let currentUser = users.find((user) => req.query.ident === user.id)
    console.log(currentUser);
    console.log(req.query)
    if (currentUser && req.query.pass === currentUser.pw) {
        next();
    } else {
        // respond 401 so clients can distinguish auth failure from success
        res.status(401).end('You shall not pass');
    }
}
// GET /users -- respond with every registered user from the database.
let getUsers = (req, res) => {
    q.listAllUsers().then((rows) => {
        res.send(rows);
    });
}
// GET /flutters -- respond with every post from every user.
let allFlutters = (req, res) => {
    q.listAllFlutters().then((rows) => {
        res.send(rows);
    });
}
// GET /users/:username/flutters -- all posts belonging to one user.
let fluttersByUser = (req, res) => {
    q.allFluttersByUser(req.params.username).then((rows) => {
        res.send(rows);
    });
}
// GET /users/:username/:postid -- a single post by a single user.
let specificUserPost = (req, res) => {
    const { username, postid } = req.params;
    console.log(req.params)
    q.oneFlutterByUser(username, postid).then((rows) => {
        res.send(rows);
    });
}
// Route table. Routes wrapped with `authenticate` expect
// ?ident=<user id>&pass=<password> on the query string.
ex.get('/users', authenticate, getUsers);
ex.get('/flutters', allFlutters);
ex.get('/users/:username/flutters', authenticate, fluttersByUser);
ex.get('/users/:username/:postid', authenticate, specificUserPost)
// TODO: POST handler not implemented yet (line below is malformed and commented out)
//ex.post(`/users/:username/flutters`), authenticate,
<file_sep>
// Fetch all flutters from the local API and log them.
// NOTE(review): mode 'no-cors' makes cross-origin responses "opaque"
// (status 0, unreadable body), so the status check below can only pass when
// the page is served from the same origin -- consider enabling CORS on the
// server and dropping 'no-cors' instead.
var myInit = {
    method: 'GET',
    mode: 'no-cors',
    cache: 'default',
    body: null,
};
fetch('http://localhost:3000/flutters', myInit)
.then(response => {
    console.log(response)
    if (response.status === 200) {
        console.log(response);
    } else {
        console.log('still failing...');
        throw new Error('Did not retrieve info...')
    }
    // BUG FIX: pass the response down the chain -- without this return the
    // next .then() received undefined.
    return response;
})
.then(response => {
    console.debug(response);
}).catch(error => {
    console.error(error);
});<file_sep>INSERT INTO users (name, email, password)
VALUES ('jaydoe', '<EMAIL>', '<PASSWORD>');
-- Seed data. Passwords are stored in plain text: demo only.
INSERT INTO users (name, email, password)
VALUES ('xxxtentacion', '<EMAIL>', '<PASSWORD>');
-- userid '2' must already exist in users.
INSERT INTO flutters (userid, body)
VALUES ('2', 'Im not dead');
INSERT INTO flutters (userid, body)
VALUES ('2', 'I like coffee'); | 16051c2844e3fc0a28f2b081b7300ca0e44abc6c | [
"JavaScript",
"SQL"
] | 4 | SQL | marberrym/flutter | 9dc989fe7b481889023f6a4e9994f8911af36278 | 58af946a53df56e6313f90b49d20493bece35ea4 |
refs/heads/master | <file_sep>console.log("wooow");
| 79404fd32967e2a155aca7274e7199f62c5ee978 | [
"JavaScript"
] | 1 | JavaScript | GaneshSansoa/demo | d422e04cc2749244ae7f55713683e7ce54073882 | 1aefe69d78eb539c7fd46a3b8104eb7dfec28351 |
refs/heads/master | <repo_name>andreneves7/AppHuntig<file_sep>/app/src/main/java/com/example/app/OrgActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import com.google.firebase.firestore.FirebaseFirestore
import kotlinx.android.synthetic.main.activity_org.*
import kotlinx.android.synthetic.main.activity_ver_grupo.*
/**
 * Home screen for an "organization" account.
 *
 * Subscribes to the "Grupos" node in the Realtime Database, shows every
 * group whose `admin` field matches the signed-in Firebase user in a
 * ListView, and opens [CriarOrgEventoActivity] for the tapped group,
 * passing the group's "Numero" value as the alarm-clock extra.
 */
class OrgActivity : AppCompatActivity() {
    //val mAuth = FirebaseFirestore.getInstance()
    // Realtime Database and Auth handles shared by all callbacks below.
    val mAuth = FirebaseDatabase.getInstance()
    val Auth = FirebaseAuth.getInstance()
    lateinit var gv: VariaveisGlobais

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_org)
        val semGrupos = tNaoGrupos2
        val list = ListView3
        val user = Auth.currentUser?.uid
        // Show the "no groups" label until a group administered by this
        // user is found asynchronously below.
        semGrupos.isVisible = true
        if (user != null) {
            val mail = mAuth.getReference("Grupos")
            // `values` collects every group name seen; only `valor`
            // (groups whose admin is this user) backs the list adapter.
            val values = ArrayList<String>()
            val valor = ArrayList<String>()
            // Fires once per existing child of "Grupos" and again for each
            // child added later.
            val j = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val g = dataSnapshot.child("nome").getValue().toString()
                    val admin = dataSnapshot.child("admin").getValue().toString()
                    Log.d(
                        "VerGrupo2",
                        "${user}"
                    )
                    values.add(g)
                    val m = mAuth.getReference("Grupos").child(g)
                    Log.d(
                        "VerGrupo2",
                        " ${m}"
                    )
                    // NOTE(review): `t` is never read afterwards.
                    val t = mAuth.getReference("Grupos").child(g)
                    if (admin == user) {
                        // Re-reads the matching group to obtain its "Numero"
                        // and (re)builds the adapter each time it changes.
                        m.addValueEventListener(object : ValueEventListener {
                            override fun onDataChange(snapshot: DataSnapshot) {
                                val n =
                                    snapshot.child("Numero").getValue()
                                Log.d(
                                    "VerGrupo",
                                    " grupos deste user"
                                )
                                Log.d(
                                    "VerGrupo2",
                                    " n: ${n}"
                                )
                                semGrupos.isVisible = false
                                gv = application as VariaveisGlobais
                                valor.add(g)
                                val adapter =
                                    ArrayAdapter(
                                        this@OrgActivity,
                                        R.layout.listview_item,
                                        valor
                                    )
                                list.adapter = adapter
                                // Tapping a row forwards this group's
                                // "Numero" to the event-management screen.
                                list.onItemClickListener =
                                    object : AdapterView.OnItemClickListener {
                                        override fun onItemClick(
                                            parent: AdapterView<*>, view: View,
                                            position: Int, id: Long
                                        ) {
                                            // NOTE(review): itemValue is unused; the extra sent is
                                            // always `n` captured by this listener -- confirm intended.
                                            val itemValue =
                                                list.getItemAtPosition(position) as String
                                            val message = n.toString()
                                            startActivity(
                                                Intent(
                                                    view.context,
                                                    CriarOrgEventoActivity::class.java
                                                ).apply {
                                                    putExtra(
                                                        AlarmClock.EXTRA_MESSAGE,
                                                        message
                                                    )
                                                }
                                            )
                                            Log.d(
                                                "VerGrupo2",
                                                "messagem: $message"
                                            )
                                        }
                                    }
                            }

                            override fun onCancelled(error: DatabaseError) {
                                TODO("Not yet implemented")
                            }
                        })
                    } else {
                        semGrupos.isVisible = true
                    }
                }

                // Any group change restarts this screen to refresh the list.
                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    startActivity(Intent(this@OrgActivity, OrgActivity::class.java))
                }

                override fun onChildRemoved(snapshot: DataSnapshot) {
                    TODO("Not yet implemented")
                }

                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }

                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            }
            mail.addChildEventListener(j)
        } else {
            Log.d("VerGrupo", "No such document")
        }
    }

    /** Inflates the organization variant of the right-side options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita_org, menu)
        return true
    }

    /** Sign-out clears the task and returns to login; "grupo2" reopens this screen. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut2) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.grupo2) {
            startActivity(Intent(this, OrgActivity::class.java))
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/src/main/java/com/example/app/CriarOrgEventoActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import androidx.core.view.isInvisible
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_criar_org_evento.*
import kotlinx.android.synthetic.main.activity_grupo.*
/**
 * Organization screen for one group: lists that group's events and offers
 * buttons to create an event or manage member admissions.
 *
 * The group number arrives as the alarm-clock extra from [OrgActivity] and
 * is cached in [numero] so the buttons and the members menu entry can
 * forward it on.
 */
class CriarOrgEventoActivity : AppCompatActivity() {
    lateinit var gv: VariaveisGlobais
    val Auth = FirebaseAuth.getInstance()
    //val mAuth = FirebaseFirestore.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    // Group number of the events listed here; filled in by busca().
    var numero = 0

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_criar_org_evento)
        val evento = bEvento
        val soc = bSocios
        busca()
        // "Create event" -- forward the group number to EventoActivity.
        evento.setOnClickListener {
            Log.d("Numero", "ola = $numero")
            startActivity(Intent(this, EventoActivity::class.java).apply {
                putExtra(
                    EXTRA_MESSAGE,
                    numero.toString()
                )
            })
        }
        // "Members" -- forward the group number to the admissions screen.
        soc.setOnClickListener {
            Log.d("Numero", "ola = $numero")
            startActivity(Intent(this, AdmissaoActivity::class.java).apply {
                putExtra(
                    EXTRA_MESSAGE,
                    numero.toString()
                )
            })
        }
    }

    /**
     * Subscribes to "Eventos" and shows every event whose "numeroGrupo"
     * matches the group number received in the launch intent. Tapping an
     * event stores its name in the global state and opens the detail screen.
     */
    fun busca() {
        val semEventos = tNaoEventos2
        semEventos.isInvisible = true
        val user = Auth.currentUser
        if (user != null) {
            val valu = ArrayList<String>()
            // Group number passed by the caller via the alarm-clock extra.
            val t = intent.getStringExtra(AlarmClock.EXTRA_MESSAGE).toInt()
            val mail = mAuth.getReference("Eventos")
            val m = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val nome = dataSnapshot.child("nome").getValue().toString()
                    // Re-read the event once to compare its group number.
                    mAuth.getReference("Eventos").child(nome)
                        .addListenerForSingleValueEvent(object : ValueEventListener {
                            override fun onDataChange(snapshot: DataSnapshot) {
                                val refe =
                                    snapshot.child("numeroGrupo").getValue().toString().toInt()
                                Log.d(
                                    "Grupo",
                                    " refe $refe"
                                )
                                if (refe == t) {
                                    Log.d(
                                        "Grupo",
                                        " grupos deste user"
                                    )
                                    numero = refe
                                    semEventos.isVisible = false
                                    valu.add(snapshot.child("nome").getValue().toString())
                                    val adapter = ArrayAdapter(
                                        this@CriarOrgEventoActivity,
                                        R.layout.listview_item,
                                        valu
                                    )
                                    val lista = ListView4
                                    lista.adapter = adapter
                                    lista.onItemClickListener =
                                        object : AdapterView.OnItemClickListener {
                                            override fun onItemClick(
                                                parent: AdapterView<*>, view: View,
                                                position: Int, id: Long
                                            ) {
                                                val itemValue = lista.getItemAtPosition(position)
                                                // Selected event name travels via global state.
                                                gv.detalhes = itemValue as String
                                                Log.d(
                                                    "Grupo",
                                                    "ffff :$itemValue"
                                                )
                                                var eve = mAuth.getReference("Eventos")
                                                    .child(itemValue.toString())
                                                eve.addValueEventListener(object :
                                                    ValueEventListener {
                                                    override fun onDataChange(snapshot: DataSnapshot) {
                                                        startActivity(
                                                            Intent(
                                                                view.context,
                                                                DetalhesEventoActivity::class.java
                                                            )
                                                        )
                                                    }

                                                    override fun onCancelled(error: DatabaseError) {
                                                        TODO("Not yet implemented")
                                                    }
                                                })
                                                //                                            Toast.makeText(
                                                //                                                this@GrupoActivity,
                                                //                                                "Position :$position\nItem Value : $itemValue",
                                                //                                                Toast.LENGTH_LONG
                                                //                                            )
                                                //                                                .show()
                                            }
                                        }
                                } else {
                                    Log.d(
                                        "Grupo",
                                        " sem grupos deste user ${valu.size}"
                                    )
                                    if (valu.size == 0) {
                                        semEventos.isVisible = true
                                    }
                                }
                            }

                            override fun onCancelled(error: DatabaseError) {
                                TODO("Not yet implemented")
                            }
                        })
                }

                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }

                override fun onChildRemoved(snapshot: DataSnapshot) {
                    TODO("Not yet implemented")
                }

                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }

                override fun onCancelled(error: DatabaseError) {
                    Log.d("Grupo", "No such document")
                }
            }
            mail.addChildEventListener(m)
        }
    }

    /** Inflates the organization variant of the right-side options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita_org, menu)
        return true
    }

    /** Handles sign-out, back-to-groups, and the members-list menu entries. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut2) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.grupo2) {
            startActivity(Intent(this, OrgActivity::class.java))
        }
        if (item.itemId == R.id.lista) {
            startActivity(Intent(this, ListaSociosOrgActivity::class.java).apply {
                putExtra(
                    EXTRA_MESSAGE,
                    numero.toString()
                )
            })
        }
        return super.onOptionsItemSelected(item)
    }
}<file_sep>/app/src/main/java/com/example/app/ProfileActivity.kt
package com.example.app
import android.app.Activity
import android.app.AlertDialog
import android.content.Intent
import android.net.Uri
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.MediaStore
import android.text.method.PasswordTransformationMethod
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.ImageView
import android.widget.Toast
import com.bumptech.glide.Glide
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.FirebaseDatabase
import com.google.firebase.database.ValueEventListener
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.storage.FirebaseStorage
import kotlinx.android.synthetic.main.activity_profile.*
import kotlinx.android.synthetic.main.email_custom_view.view.*
import kotlinx.android.synthetic.main.pass_custom_view.view.showPass
import kotlinx.android.synthetic.main.custom_view.view.*
import kotlinx.android.synthetic.main.pass_custom_view.view.*
import java.util.*
@Suppress("DEPRECATION")
/**
 * Profile screen: shows the signed-in user's email and name and lets them
 * change email or password through alert dialogs. Firebase may demand a
 * recent login for those operations, in which case [showAlertLogin] asks
 * the user to re-authenticate. Photo upload support is currently disabled
 * (commented out).
 */
class ProfileActivity : AppCompatActivity() {
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance();
    val mStorage = FirebaseStorage.getInstance()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_profile)
        val editar = bEdit
        val editarPass = bEditPass
        //        verificarImagem()
        editarPass.setOnClickListener {
            showAlertPass()
        }
        editar.setOnClickListener(View.OnClickListener {
            showAlertEmail()
        })
        //    bFotoAdd.setOnClickListener {
        //
        //        uploadImageToFirebaseStorage()
        //        verImagem()
        //
        //
        //
        //    }
        //    bAdd.setOnClickListener {
        //        Log.d("Profile", "Try to show photo selector")
        //        val intent = Intent(Intent.ACTION_PICK)
        //        intent.type = "image/*"
        //        startActivityForResult(intent, 0)
        //    }
        old()
    }

    //
    // private fun verificarImagem() {
    //     val imageUser = Auth.currentUser?.uid.toString()
    //     val consulta = mAuth.collection("Users").document(imageUser)
    //     consulta.get().addOnSuccessListener { task ->
    //         if (task != null) {
    //             Log.d("Profile", "imagem1: $imageUser")
    //
    //             val image = task.data?.get("Photo").toString()
    //             if (image != null) {
    //                 Log.d("Profile", "imagem2: $image")
    //                 //val m = mStorage.getReference(image)
    //                 //Log.d("Profile", "imagem3: $m")
    //                 val imageView = findViewById<ImageView>(R.id.imageViewUser)
    //                 Glide.with(this/*context*/).load(image).into(imageView)
    //             }
    //         }
    //     }
    // }
    // private fun verImagem() {
    //     val imageUser = Auth.currentUser?.uid.toString()
    //
    //     val consulta = mAuth.collection("Users").document(imageUser)
    //     consulta.get().addOnSuccessListener { task ->
    //         if (task != null) {
    //             Log.d("Profile", "imagem1: $imageUser")
    //
    //             val image = task.data?.get("Photo").toString()
    //             Log.d("Profile", "imagem2: $image")
    //             //val m = mStorage.getReference(image)
    //             //Log.d("Profile", "imagem3: $m")
    //             val imageView = findViewById<ImageView>(R.id.imageViewUser)
    //             Glide.with(this/*context*/).load(image).into(imageView)
    //         }
    //     }
    // }

    /**
     * Fetches the user's name from the Firebase Realtime Database and shows
     * "email / name" in the header TextView. Kept subscribed so edits made
     * in the dialogs refresh the display.
     */
    private fun old() {
        val show = textView
        val user = Auth.currentUser
        val uid = Auth.currentUser?.uid.toString()
        val userEmail = Auth.currentUser?.email
        val mail = mAuth.getReference("Users").child(uid)
        // fetch the user's name from the Firebase Realtime Database
        if (user != null) {
            mail.addValueEventListener(
                object : ValueEventListener {
                    override fun onDataChange(snapshot: DataSnapshot) {
                        val n = snapshot.child("name").getValue().toString()
                        Log.d("Profile", "valor nome = $n")
                        show.text = "email: $userEmail\nname: $n"
                    }

                    override fun onCancelled(databaseError: DatabaseError) {
                    }
                })
        }
    }

    /**
     * Re-authentication dialog, shown when a sensitive update (email or
     * password) fails because the Firebase session is too old.
     */
    private fun showAlertLogin() {
        val inflater = layoutInflater
        val inflate_view = inflater.inflate(R.layout.custom_view, null)
        val userEmailEdt = inflate_view.userEmail
        val userPassEdt = inflate_view.userPass
        val checkBoxTooggle = inflate_view.showPass
        // "Show password" checkbox toggles the masking transformation.
        checkBoxTooggle.setOnCheckedChangeListener { buttonView, isChecked ->
            if (!isChecked) {
                userPassEdt.transformationMethod = PasswordTransformationMethod.getInstance()
            } else {
                userPassEdt.transformationMethod = null
            }
        }
        val alertDialog = AlertDialog.Builder(this)
        alertDialog.setTitle("Login novamente")
        alertDialog.setView(inflate_view)
        alertDialog.setCancelable(false)
        alertDialog.setNegativeButton("Cancel") { dialog, which ->
            Toast.makeText(this, "Cancel", Toast.LENGTH_LONG).show()
        }
        alertDialog.setPositiveButton("Done") { dialog, which ->
            val email = userEmailEdt.text.toString()
            val password = <PASSWORD>.<PASSWORD>()
            if (Auth.currentUser!!.isEmailVerified) {
                Auth.signInWithEmailAndPassword(email, password)
                    .addOnCompleteListener { task4 ->
                        if (task4.isSuccessful) {
                            Toast.makeText(this, "Successfully Re-Logged :)", Toast.LENGTH_LONG).show()
                            Log.d("Profile", "user re-logged ${Auth.currentUser?.uid}")
                        } else {
                            // Re-login failed: ask again.
                            Toast.makeText(this, "Erro Re-Logged :)", Toast.LENGTH_LONG).show()
                            showAlertLogin()
                        }
                        Log.d("Profile", "done botao")
                    }
            }
        }
        val dialog = alertDialog.create()
        dialog.show()
    }

    /**
     * "New email" dialog: updates Firebase Auth, mirrors the new address in
     * the Users node, refreshes the header and sends a verification mail.
     */
    private fun showAlertEmail() {
        val inflater = layoutInflater
        val inflate_view = inflater.inflate(R.layout.email_custom_view, null)
        val userEmailEdt = inflate_view.userNewEmail
        val alertDialog = AlertDialog.Builder(this)
        alertDialog.setTitle("New Email")
        alertDialog.setView(inflate_view)
        alertDialog.setCancelable(false)
        alertDialog.setNegativeButton("Cancel") { dialog, which ->
            Toast.makeText(this, "Cancel", Toast.LENGTH_LONG).show()
        }
        alertDialog.setPositiveButton("Done") { dialog, which ->
            val user = FirebaseAuth.getInstance().currentUser
            val userEmail = userEmailEdt.text.toString()
            val uid = user?.uid.toString()
            val mail = mAuth.getReference("Users").child(uid)
            if (user != null) {
                if (!userEmail.isEmpty()) {
                    user.updateEmail(userEmail).addOnCompleteListener { task2 ->
                        if (task2.isSuccessful) {
                            // Keep the database copy of the email in sync.
                            mail.child("email").setValue( userEmail)
                            Toast.makeText(this, "Update email Success", Toast.LENGTH_LONG).show()
                            Log.d("Profile", "email update auth")
                            old()
                        } else {
                            Toast.makeText(
                                this,
                                "Error email Update re-loggin try aggain",
                                Toast.LENGTH_LONG
                            ).show()
                            Log.d("Profile", "email erro auth")
                            showAlertLogin()
                        }
                        sendEmailVerification()
                    }
                }
            }
            Log.d("Profile", "done botao")
        }
        val dialog = alertDialog.create()
        dialog.show()
    }

    /**
     * "New password" dialog: validates that both fields are filled and
     * match, then updates the Firebase Auth password.
     */
    private fun showAlertPass() {
        val inflater = layoutInflater
        val inflate_view = inflater.inflate(R.layout.pass_custom_view, null)
        val userPassEdt = inflate_view.userNewPass
        val userConfPassEdt = inflate_view.userConfPass
        val checkBoxTooggle = inflate_view.showPass
        // "Show password" checkbox toggles masking on both fields.
        checkBoxTooggle.setOnCheckedChangeListener { buttonView, isChecked ->
            if (!isChecked) {
                userPassEdt.transformationMethod = PasswordTransformationMethod.getInstance()
                userConfPassEdt.transformationMethod = PasswordTransformationMethod.getInstance()
            } else {
                userPassEdt.transformationMethod = null
                userConfPassEdt.transformationMethod = null
            }
        }
        val alertDialog = AlertDialog.Builder(this)
        alertDialog.setTitle("New Password")
        alertDialog.setView(inflate_view)
        alertDialog.setCancelable(false)
        alertDialog.setNegativeButton("Cancel") { dialog, which ->
            Toast.makeText(this, "Cancel", Toast.LENGTH_LONG).show()
        }
        alertDialog.setPositiveButton("Done") { dialog, which ->
            val user = FirebaseAuth.getInstance().currentUser
            val userPassword = userPassEdt.text.toString()
            val userConf = userConfPassEdt.text.toString()
            if (!userPassword.isEmpty() && !userConf.isEmpty()) {
                if (userConf == userPassword) {
                    user?.updatePassword(userPassword)?.addOnCompleteListener { task3 ->
                        if (task3.isSuccessful) {
                            Toast.makeText(this, "Update password Success", Toast.LENGTH_LONG)
                                .show()
                            Log.d("Profile", "password auth")
                        } else {
                            Toast.makeText(this, "Error password Update", Toast.LENGTH_LONG).show()
                            showAlertLogin()
                        }
                    }
                } else {
                    Toast.makeText(this, "Password nao coincidem", Toast.LENGTH_LONG).show()
                    showAlertPass()
                }
            } else {
                Toast.makeText(this, "Campos nao preenchidos", Toast.LENGTH_LONG).show()
                showAlertPass()
            }
            Log.d("Profile", "done botao")
        }
        val dialog = alertDialog.create()
        dialog.show()
    }

    //    var selectedPhotoUri: Uri? = null
    //    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
    //        super.onActivityResult(requestCode, resultCode, data)
    //
    //        if (requestCode == 0 && resultCode == Activity.RESULT_OK && data != null) {
    //            Log.d("Profile", "Photo was selected")
    //
    //            selectedPhotoUri = data.data
    //
    //            val bitmap = MediaStore.Images.Media.getBitmap(contentResolver, selectedPhotoUri)
    //
    //            vImg.setImageBitmap(bitmap)
    //
    //            bAdd.alpha = 0f
    //
    //        }
    //    }
    //    private fun uploadImageToFirebaseStorage() {
    //        if (selectedPhotoUri == null) return
    //
    //        val filename = UUID.randomUUID().toString()
    //        val ref = mStorage.getReference("/images/$filename")
    //        val user = Auth.currentUser
    //
    //
    //
    //        ref.putFile(selectedPhotoUri!!)
    //            .addOnSuccessListener {
    //                Log.d("Profile", "Successfully upload image: ${it.metadata?.path}")
    //
    //                ref.downloadUrl.addOnSuccessListener {
    //                    Log.d("Profile", "File localition: $it")
    //
    //                    val p = it.toString()
    //
    //                    if (user != null) {
    //                        val pessoa = HashMap<String, Any>()
    //                        pessoa["Photo"] = p
    //                        mAuth.collection("Users").document(user.uid).update(pessoa)
    //                        Toast.makeText(this, "Imagem guardada", Toast.LENGTH_LONG).show()
    //                        verImagem()
    //                    }
    //
    //                }
    //
    //            }
    //
    //    }

    /** Inflates the hunter variant of the right-side options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /** Navigation for sign-out, profile, groups, group list and home. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this,VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            // marca = 0 tells FiltrosActivity to open with no filter preset.
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(AlarmClock.EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }

    /**
     * Sends a Firebase verification mail and, once sent, shows a dialog
     * whose confirm button returns the user to the login screen.
     * (The typo "Verfication" is in the displayed string; fixing it would
     * change UI text, so it is left as-is here.)
     */
    private fun sendEmailVerification() {
        val user = Auth.currentUser
        user?.sendEmailVerification()?.addOnCompleteListener {
            val builder = AlertDialog.Builder(this)
            builder.setTitle("Email Verfication")
            builder.setMessage("Please confirm email")
            //builder.setPositiveButton("OK", DialogInterface.OnClickListener(function = x))
            builder.setPositiveButton(android.R.string.yes) { dialog, which ->
                Toast.makeText(
                    applicationContext,
                    android.R.string.yes, Toast.LENGTH_SHORT
                ).show()
                startActivity(Intent(this, LoginActivity::class.java))
            }
        }
    }
}
<file_sep>/app/src/main/java/com/example/app/MyListAdapter_ListaSocios.kt
package com.example.app
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ArrayAdapter
import android.widget.TextView
/**
 * ListView adapter rendering one row per [Model]: the member's name and
 * membership number, formatted via the localized string resources.
 */
class MyListAdapter_ListaSocios (private var mCtx: Context,private var resource: Int,private var items: ArrayList<Model>) :
    ArrayAdapter<Model>(mCtx, resource, items){

    override fun getView(position: Int, convertView: View?, parent: ViewGroup): View {
        // NOTE(review): convertView is ignored, so every row is re-inflated;
        // recycling it would avoid that cost. Inflating with a null parent
        // also drops the row's layout params -- confirm before changing.
        val layoutInflater: LayoutInflater = LayoutInflater.from(mCtx)
        val view: View = layoutInflater.inflate(resource, null)
        val nome: TextView = view.findViewById(R.id.labelNome)
        val numeroSocio: TextView = view.findViewById(R.id.labelNumeroSocio)
        val person: Model = getItem(position)
        // nome.text = "Nome: " + person.Nome
        nome.text = mCtx.getString(R.string.nome, person.Nome)
        numeroSocio.text = mCtx.getString(R.string.num_socio, person.NumeroSocio.toString())
        return view
    }

    /** Returns the backing item for [position] (bypasses the superclass copy). */
    override fun getItem(position: Int): Model {
        return items[position]
    }
}<file_sep>/app/src/main/java/com/example/app/MyListAdapter.kt
package com.example.app
import android.content.Context
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ArrayAdapter
import android.widget.TextView
/**
 * ListView adapter rendering one row per [Model]: the member's name,
 * hunter-licence number and membership number, formatted via the localized
 * string resources.
 */
class MyListAdapter( private var mCtx: Context, private var resource: Int, private var items: ArrayList<Model>) :
    ArrayAdapter<Model>(mCtx, resource, items) {

    override fun getView(position: Int, convertView: View?, parent: ViewGroup): View {
        // NOTE(review): rows are re-inflated on every call; reusing
        // convertView would let the ListView recycle them.
        val layoutInflater: LayoutInflater = LayoutInflater.from(mCtx)
        val view: View = layoutInflater.inflate(resource, null)
        val nome: TextView = view.findViewById(R.id.labelNome)
        val numeroCC: TextView = view.findViewById(R.id.labelNumeroCC)
        val numeroSocio: TextView = view.findViewById(R.id.labelNumeroSocio)
        val person: Model = getItem(position)
        nome.text = mCtx.getString(R.string.nome, person.Nome)
        numeroCC.text = mCtx.getString(R.string.carta_cacador, person.NumeroCC.toString())
        numeroSocio.text = mCtx.getString(R.string.num_socio, person.NumeroSocio.toString())
        // (removed unused local `a = person.uid` left over from debugging)
        return view
    }

    /** Returns the backing item for [position] (bypasses the superclass copy). */
    override fun getItem(position: Int): Model {
        return items[position]
    }
}<file_sep>/app/src/main/java/com/example/app/VerificarLoginActivity.kt
package com.example.app
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import com.google.firebase.auth.FirebaseAuth
import org.jetbrains.anko.startActivity
import android.content.Intent
import android.provider.AlarmClock
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.FirebaseDatabase
import com.google.firebase.database.ValueEventListener
/**
 * Startup dispatcher: decides which screen the user lands on.
 * No signed-in user -> LoginActivity; a hunter account ("Org" == false)
 * -> FiltrosActivity; otherwise -> OrgActivity. Finishes itself so it is
 * never left on the back stack.
 */
class VerificarLoginActivity : AppCompatActivity() {
    private val a = FirebaseAuth.getInstance().currentUser
    private val b = FirebaseDatabase.getInstance()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        if (a == null) {
            startActivity<LoginActivity>()
        } else {
            // Look up the account type under Users/<uid>/Org once.
            val c = b.getReference("Users").child(a.uid)
            c.addListenerForSingleValueEvent(object : ValueEventListener {
                override fun onDataChange(dataSnapshot: DataSnapshot) {
                    val org = dataSnapshot.child("Org").value
                    if (org == false) {
                        // marca = 0: open the filter screen with no preset.
                        val marca = 0
                        val intent = Intent(this@VerificarLoginActivity, FiltrosActivity::class.java).apply {
                            putExtra(AlarmClock.EXTRA_MESSAGE, marca)
                        }
                        startActivity(intent)
                    } else startActivity<OrgActivity>()
                }

                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            })
        }
        // NOTE(review): finish() runs before the async database callback
        // above; the startActivity inside it still fires -- confirm this is
        // the intended ordering.
        finish()
    }
}
<file_sep>/app/src/main/java/com/example/app/LoginActivity.kt
package com.example.app
import android.content.Intent
import android.os.Bundle
import android.provider.AlarmClock
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_main.*
import java.util.*
/**
 * Login screen with two modes toggled from the options menu:
 * hunter ("Cacador": login + register buttons) and organization
 * ("Organizacao": a separate login button). Routing after a successful
 * sign-in depends on the Users/<uid> flags Org, Controlo and FirstTime.
 */
class LoginActivity : AppCompatActivity() {
    //    val mAuth = FirebaseFirestore.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    val Auth = FirebaseAuth.getInstance()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        val loginBtn = bLogin
        val loginOrg = bLoginOrg
        val regTxt = bRegisto
        // Start in hunter mode: the organization login button is hidden.
        loginOrg.setVisibility(View.INVISIBLE)
        loginBtn.setOnClickListener(View.OnClickListener { view ->
            login()
        })
        regTxt.setOnClickListener(View.OnClickListener { view ->
            register()
        })
        loginOrg.setOnClickListener(View.OnClickListener { view ->
            loginOrg()
        })
    }

    /**
     * Organization sign-in: only accounts whose Users/<uid>/Org flag is
     * true may enter; on success the task is cleared and OrgActivity opens.
     */
    fun loginOrg() {
        val emailTxt = idEmail
        val passwordTxt = <PASSWORD>
        var email = emailTxt.text.toString()
        var password = <PASSWORD>.text.toString()
        if (!email.isEmpty() && !password.isEmpty()) {
            Auth.signInWithEmailAndPassword(email, password).addOnCompleteListener { task ->
                if (task.isSuccessful) {
                    if (Auth.currentUser!!.isEmailVerified) {
                        val ver = mAuth.getReference("Users").child(Auth.currentUser!!.uid)
                        ver.addListenerForSingleValueEvent(object : ValueEventListener {
                            override fun onDataChange(dataSnapshot: DataSnapshot) {
                                // Read once: is this an organization account?
                                val org = dataSnapshot.child("Org").getValue()
                                if (org == true) {
                                    val intent = Intent(this@LoginActivity, OrgActivity::class.java)
                                    intent.flags =
                                        Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
                                    startActivity(intent)
                                    Toast.makeText(
                                        this@LoginActivity,
                                        "Successfully Logged in :)",
                                        Toast.LENGTH_LONG
                                    )
                                        .show()
                                    Log.d("Login", "user ${Auth.currentUser?.uid}")
                                } else {
                                    Toast.makeText(
                                        this@LoginActivity,
                                        "So pode entrar com caçador",
                                        Toast.LENGTH_LONG
                                    )
                                        .show()
                                }
                            }

                            override fun onCancelled(error: DatabaseError) {
                                // Failed to read value
                                Log.d("Login", "fail dados")
                            }
                        })
                    } else {
                        Toast.makeText(this, "verifique email", Toast.LENGTH_SHORT).show()
                    }
                } else {
                    Toast.makeText(this, "Error Logging in :(", Toast.LENGTH_SHORT).show()
                }
            }
        } else {
            Toast.makeText(this, "Please fill up the credetianls", Toast.LENGTH_LONG).show()
        }
    }

    /**
     * Hunter sign-in. Requires a verified email, Controlo == true (admin
     * approval) and Org == false; then FirstTime routes the user either to
     * the preferences wizard (first login) or to the filter/home screen.
     */
    private fun login() {
        val emailTxt = idEmail
        val passwordTxt = idPassword
        var email = emailTxt.text.toString()
        var password = passwordTxt.text.toString()
        val uid = Auth.currentUser?.uid
        Log.d("Login", "user ${uid}")
        if (!email.isEmpty() && !password.isEmpty()) {
            Auth.signInWithEmailAndPassword(email, password)
                .addOnCompleteListener { task ->
                    if (task.isSuccessful) {
                        if (Auth.currentUser!!.isEmailVerified) {
                            val ver = mAuth.getReference("Users").child(Auth.currentUser!!.uid)
                            ver.addListenerForSingleValueEvent(object : ValueEventListener {
                                override fun onDataChange(dataSnapshot: DataSnapshot) {
                                    // Account flags read from Users/<uid>.
                                    val org = dataSnapshot.child("Org").getValue()
                                    val controlo = dataSnapshot.child("Controlo").getValue()
                                    Log.d("Login", "user ${Auth.currentUser?.uid}")
                                    if (controlo == true) {
                                        if (org == false) {
                                            ver.addListenerForSingleValueEvent(object :
                                                ValueEventListener {
                                                override fun onDataChange(first: DataSnapshot) {
                                                    val v = first.child("FirstTime").getValue()
                                                    Log.d("Login", "user primeira ${v}")
                                                    // checks whether this account is being
                                                    // initialized for the first time
                                                    if (v == true) {
                                                        val intent =
                                                            Intent(
                                                                this@LoginActivity,
                                                                PreferenciasActivity::class.java
                                                            )
                                                        intent.flags =
                                                            Intent.FLAG_ACTIVITY_CLEAR_TASK.or(
                                                                Intent.FLAG_ACTIVITY_NEW_TASK
                                                            )
                                                        startActivity(intent)
                                                        //    val p = HashMap<String, Any>()
                                                        //    p["FirstTime"] = false
                                                        //    ver.updateChildren(p)
                                                    } else {
                                                        // marca = 0: open filters with no preset.
                                                        val marca = 0
                                                        val intent =
                                                            Intent(
                                                                this@LoginActivity,
                                                                FiltrosActivity::class.java
                                                            ).apply {
                                                                putExtra(AlarmClock.EXTRA_MESSAGE, marca)
                                                            }
                                                        intent.flags =
                                                            Intent.FLAG_ACTIVITY_CLEAR_TASK.or(
                                                                Intent.FLAG_ACTIVITY_NEW_TASK
                                                            )
                                                        startActivity(intent)
                                                        //startActivity(Intent(this, home::class.java))
                                                    }
                                                }

                                                override fun onCancelled(error: DatabaseError) {
                                                    Log.d("Login", "fail dados")
                                                }
                                            })
                                        } else {
                                            Toast.makeText(
                                                this@LoginActivity,
                                                "Esta fazer login errado mudar para Organização",
                                                Toast.LENGTH_SHORT
                                            ).show();
                                        }
                                    } else {
                                        Toast.makeText(
                                            this@LoginActivity,
                                            "Tem esperar pela aprovaçao",
                                            Toast.LENGTH_SHORT
                                        ).show();
                                    }
                                }

                                override fun onCancelled(error: DatabaseError) {
                                    // Failed to read value
                                    Log.d("Login", "fail dados")
                                }
                            })
                            Toast.makeText(
                                this,
                                "Successfully Logged in :)",
                                Toast.LENGTH_LONG
                            )
                                .show()
                            Log.d("Login", "user ${Auth.currentUser?.uid}")
                        } else {
                            Toast.makeText(this, "verifique email", Toast.LENGTH_SHORT).show()
                        }
                    } else {
                        Toast.makeText(this, "Error Logging in :(", Toast.LENGTH_SHORT)
                            .show()
                    }
                }
        } else {
            Toast.makeText(this, "Please fill up the credetianls", Toast.LENGTH_LONG).show()
        }
    }

    /** Opens the registration screen, clearing the task stack. */
    private fun register() {
        val intent = Intent(this, RegistoUserActivity::class.java)
        intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
        startActivity(intent)
    }

    /** Inflates the login variant of the right-side options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita_login, menu)
        return true
    }

    /** Toggles between hunter and organization login modes. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        val regTxt = bRegisto
        val loginBtn = bLogin
        val loginOrg = bLoginOrg
        if (item!!.itemId == R.id.Cacador) {
            regTxt.setVisibility(View.VISIBLE)
            loginBtn.setVisibility(View.VISIBLE)
            loginOrg.setVisibility(View.INVISIBLE)
        }
        if (item.itemId == R.id.Organizacao) {
            regTxt.setVisibility(View.INVISIBLE)
            loginBtn.setVisibility(View.INVISIBLE)
            loginOrg.setVisibility(View.VISIBLE)
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/build.gradle
// Module-level build script for the HuntigEvents app.
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'com.google.gms.google-services'
// FIX: removed a duplicate "apply plugin: 'com.android.application'" that was
// applied a second time between the google-services and crashlytics plugins.
apply plugin: 'com.google.firebase.crashlytics'
androidExtensions {
    experimental = true
}
android {
    compileSdkVersion 33
    defaultConfig {
        applicationId "com.company.HuntigEvents"
        minSdkVersion 21
        targetSdkVersion 33
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility = 1.8
        targetCompatibility = 1.8
    }
    kotlinOptions {
        jvmTarget = "1.8"
    }
    namespace 'com.example.app'
}
dependencies {
    implementation platform('com.google.firebase:firebase-bom:26.1.0')
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
    implementation 'androidx.appcompat:appcompat:1.6.1'
    implementation 'androidx.core:core-ktx:1.10.1'
    implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
    implementation 'com.google.firebase:firebase-auth:22.1.0'
    implementation 'com.google.firebase:firebase-firestore:24.7.0'
    implementation 'com.google.firebase:firebase-storage:20.2.1'
    implementation 'com.firebaseui:firebase-ui-storage:4.3.1'
    implementation 'de.hdodenhof:circleimageview:3.1.0'
    implementation 'com.google.android.gms:play-services-maps:18.1.0'
    implementation 'com.google.android.gms:play-services-location:21.0.1'
    implementation 'com.google.android.libraries.places:places:3.2.0'
    implementation 'com.google.firebase:firebase-database:20.2.2'
    implementation 'com.google.firebase:firebase-crashlytics:18.4.0'
    implementation 'com.google.firebase:firebase-analytics:21.3.0'
    implementation 'com.google.android.material:material:1.9.0'
    implementation 'com.google.firebase:firebase-auth-ktx:22.1.0'
    testImplementation 'junit:junit:4.13.2'
    androidTestImplementation 'androidx.test.ext:junit:1.1.5'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
    implementation "org.jetbrains.anko:anko:$anko_version"
    implementation "org.jetbrains.anko:anko-design:$anko_version"
    implementation "org.jetbrains.anko:anko-coroutines:$anko_version"
}
<file_sep>/app/src/main/java/com/example/app/GrupoActivity.kt
package com.example.app
import android.app.Application
import android.content.Intent
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import androidx.appcompat.app.AppCompatActivity
import androidx.core.view.isInvisible
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_grupo.*
// Application-scoped mutable state shared across activities (read/written by
// GrupoActivity, DetalhesEventoActivity, MapsActivity, etc. via
// `application as VariaveisGlobais`).
// NOTE(review): mutable global state like this is fragile; consider a
// ViewModel or Intent extras long-term.
class VariaveisGlobais : Application() {
    // Currently selected event name.
    var Evento: String = ""
    // Event name whose details screen is open (key under "Eventos").
    var detalhes: String = ""
    var entrar: String = ""
    var ver: String = ""
    var nome: String = ""
    // Event start date components.
    var Month: Int = 0
    var Day: Int = 0
    var Year: Int = 0
    // Event end date components.
    var MonthFim: Int = 0
    var DayFim: Int = 0
    var YearFim: Int = 0
    // Event coordinates picked in MapsActivity.
    var Lat: Double = 0.0
    var Long: Double = 0.0
    var check: String = ""
    var Horas: String = ""
    var privado: String = ""
    var extra: String = ""
    var Associacao: String = ""
    var numSocio: Int = 0
    var numEspanha: Int = 0
}
/**
 * Lists the events that belong to one group: every child of "Eventos" whose
 * "numeroGrupo" matches the group number received via EXTRA_MESSAGE is shown;
 * tapping an item stores its name in gv.detalhes and opens
 * DetalhesEventoActivity.
 */
class GrupoActivity : AppCompatActivity() {
    // Application-wide shared state (selected event name, etc.).
    lateinit var gv: VariaveisGlobais
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_grupo)
        // "No events" label stays hidden until busca() proves the list is empty.
        val semEventos = tNaoEventos
        semEventos.isInvisible = true
        busca()
    }

    /**
     * Loads every event of the current group into the ListView and wires the
     * click-through to the details screen.
     */
    fun busca() {
        val semEventos = tNaoEventos
        semEventos.isInvisible = true
        val user = Auth.currentUser
        if (user != null) {
            val valu = ArrayList<String>()
            // NOTE(review): assumes EXTRA_MESSAGE always carries a numeric
            // string — a missing or non-numeric extra crashes here; confirm
            // every caller.
            val t = intent.getStringExtra(EXTRA_MESSAGE).toInt()
            val mail = mAuth.getReference("Eventos")
            val m = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val nome = dataSnapshot.child("nome").getValue().toString()
                    mAuth.getReference("Eventos").child(nome)
                        .addListenerForSingleValueEvent(object : ValueEventListener {
                            override fun onDataChange(snapshot: DataSnapshot) {
                                val refe =
                                    snapshot.child("numeroGrupo").getValue().toString().toInt()
                                Log.d("Grupo", " refe $refe")
                                if (refe == t) {
                                    Log.d("Grupo", " grupos deste user")
                                    semEventos.isVisible = false
                                    valu.add(snapshot.child("nome").getValue().toString())
                                    val adapter = ArrayAdapter(
                                        this@GrupoActivity,
                                        R.layout.listview_item,
                                        valu
                                    )
                                    val lista = ListView3
                                    lista.adapter = adapter
                                    lista.onItemClickListener =
                                        object : AdapterView.OnItemClickListener {
                                            override fun onItemClick(
                                                parent: AdapterView<*>, view: View,
                                                position: Int, id: Long
                                            ) {
                                                val itemValue = lista.getItemAtPosition(position)
                                                // Remember which event the details
                                                // screen should show.
                                                gv.detalhes = itemValue as String
                                                Log.d("Grupo", "ffff :$itemValue")
                                                var eve = mAuth.getReference("Eventos")
                                                    .child(itemValue.toString())
                                                eve.addValueEventListener(object :
                                                    ValueEventListener {
                                                    override fun onDataChange(snapshot: DataSnapshot) {
                                                        startActivity(
                                                            Intent(
                                                                view.context,
                                                                DetalhesEventoActivity::class.java
                                                            )
                                                        )
                                                    }

                                                    override fun onCancelled(error: DatabaseError) {
                                                        // FIX: was TODO(), which throws
                                                        // NotImplementedError and crashed the
                                                        // app on a cancelled read.
                                                        Log.d("Grupo", "onCancelled: ${error.message}")
                                                    }
                                                })
                                            }
                                        }
                                } else {
                                    Log.d("Grupo", " sem grupos deste user ${valu.size}")
                                    if (valu.size == 0) {
                                        semEventos.isVisible = true
                                    }
                                }
                            }

                            override fun onCancelled(error: DatabaseError) {
                                // FIX: was TODO() — crash on cancellation.
                                Log.d("Grupo", "onCancelled: ${error.message}")
                            }
                        })
                }

                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    // FIX: was TODO() — Firebase fires this whenever any event is
                    // edited, which crashed the app. Updates are intentionally
                    // ignored here.
                    Log.d("Grupo", "onChildChanged ignored")
                }

                override fun onChildRemoved(snapshot: DataSnapshot) {
                    // FIX: was TODO() — crashed when an event was deleted.
                    Log.d("Grupo", "onChildRemoved ignored")
                }

                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    // FIX: was TODO() — crashed on reorder events.
                    Log.d("Grupo", "onChildMoved ignored")
                }

                override fun onCancelled(error: DatabaseError) {
                    Log.d("Grupo", "No such document")
                }
            }
            mail.addChildEventListener(m)
        }
    }

    /** Inflates the shared right-hand navigation menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /**
     * Shared navigation: sign-out, profile, groups, group list and home.
     *
     * FIX: `item!!` replaced with a null-safe `when` (no NPE on a null item).
     */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        when (item?.itemId) {
            R.id.signOut -> {
                Auth.signOut()
                val intent = Intent(this, LoginActivity::class.java)
                intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
                startActivity(intent)
            }
            R.id.profile -> startActivity(Intent(this, ProfileActivity::class.java))
            R.id.grupo -> startActivity(Intent(this, VerGrupoActivity::class.java))
            R.id.Lis -> startActivity(Intent(this, ListaGruposActivity::class.java))
            R.id.home -> {
                val marca = 0
                val intent = Intent(this, FiltrosActivity::class.java).apply {
                    putExtra(AlarmClock.EXTRA_MESSAGE, marca)
                }
                startActivity(intent)
            }
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/src/main/java/com/example/app/DetalhesEventoActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import com.google.android.gms.maps.CameraUpdateFactory
import com.google.android.gms.maps.GoogleMap
import com.google.android.gms.maps.OnMapReadyCallback
import com.google.android.gms.maps.SupportMapFragment
import com.google.android.gms.maps.model.LatLng
import com.google.android.gms.maps.model.MarkerOptions
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import com.google.firebase.firestore.FieldValue
import com.google.firebase.firestore.FirebaseFirestore
import kotlinx.android.synthetic.main.activity_detalhes_evento.*
import java.util.HashMap
import androidx.core.view.isVisible as isVisible
/**
 * Shows the details of the event named in gv.detalhes — name/date/time/type,
 * its location on a hybrid map — and offers a "mark presence" button that
 * registers the current user under Eventos/{event}/Presenças.
 */
class DetalhesEventoActivity : AppCompatActivity(), OnMapReadyCallback {
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    lateinit var gv: VariaveisGlobais
    private lateinit var mMap: GoogleMap

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_detalhes_evento)
        val showDetalhe = tShowDetalhes
        val marcar = bPresença
        val mapFragment = supportFragmentManager
            .findFragmentById(R.id.map2) as SupportMapFragment
        mapFragment.getMapAsync(this)
        // Hide the presence button if the user already marked presence.
        desativar()
        marcar.setOnClickListener {
            marcarPresença()
            val marca = 1
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        val user = Auth.currentUser
        if (user != null) {
            val mail = mAuth.getReference("Eventos").child(gv.detalhes)
            mail.addListenerForSingleValueEvent(object : ValueEventListener {
                override fun onDataChange(dataSnapshot: DataSnapshot) {
                    val name = dataSnapshot.child("nome").getValue().toString()
                    val dateDia = dataSnapshot.child("dia").getValue().toString()
                    val dateMes = dataSnapshot.child("mes").getValue().toString()
                    val dateAno = dataSnapshot.child("ano").getValue().toString()
                    val time = dataSnapshot.child("horas").getValue().toString()
                    val tipo = dataSnapshot.child("Tipo").getValue().toString()
                    showDetalhe.text =
                        "nome: $name\ndata: $dateDia/$dateMes/$dateAno\nhoras: $time\ntipo: $tipo"
                }

                override fun onCancelled(error: DatabaseError) {
                    // FIX: was TODO(), which throws NotImplementedError and
                    // crashed the app on a cancelled read.
                    Log.d("detalhes", "onCancelled: ${error.message}")
                }
            })
        }
    }

    /** Centers the hybrid map on the event's stored Latitude/Longitude. */
    override fun onMapReady(googleMap: GoogleMap) {
        mMap = googleMap
        val mail = mAuth.getReference("Eventos").child(gv.detalhes)
        mail.addListenerForSingleValueEvent(object : ValueEventListener {
            override fun onDataChange(dataSnapshot: DataSnapshot) {
                val placeLat = dataSnapshot.child("Latitude").getValue()
                val placeLog = dataSnapshot.child("Longitude").getValue()
                val P = LatLng(placeLat.toString().toDouble(), placeLog.toString().toDouble())
                placeMarkerOnMap(P)
                mMap.mapType = GoogleMap.MAP_TYPE_HYBRID
                mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(P, 18f))
            }

            override fun onCancelled(error: DatabaseError) {
                // FIX: was TODO() — crash on a cancelled read.
                Log.d("detalhes", "onCancelled: ${error.message}")
            }
        })
    }

    /** Replaces any existing marker with one at [location]. */
    private fun placeMarkerOnMap(location: LatLng) {
        val markerOptions = MarkerOptions().position(location)
        mMap.clear()
        mMap.addMarker(markerOptions)
    }

    /**
     * Hides the presence button when the current user's uid is already listed
     * under Presenças; reloads the screen when the presence list changes.
     */
    fun desativar() {
        val marcar = bPresença
        val uid = Auth.currentUser?.uid
        val fazParte = ArrayList<String>()
        val user = Auth.currentUser
        if (user != null) {
            val mail = mAuth.getReference("Eventos").child(gv.detalhes).child("Presenças")
            Log.d("detalhes", "detalhe: ${gv.detalhes}")
            val m = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val pre = dataSnapshot.getValue().toString()
                    Log.d("detalhes", "detalhe: $pre")
                    fazParte.add(pre)
                    if (fazParte.contains(uid)) {
                        marcar.isVisible = false
                        Log.d("detalhes", "detalhe: $pre" + "ffff: $uid" + "\n" + "false")
                    } else {
                        marcar.isVisible = true
                        Log.d("detalhes", "detalhe: $pre" + "ffff: $uid" + "\n" + "true")
                    }
                }

                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    startActivity(
                        Intent(
                            this@DetalhesEventoActivity,
                            DetalhesEventoActivity::class.java
                        )
                    )
                }

                override fun onChildRemoved(snapshot: DataSnapshot) {
                    startActivity(Intent(this@DetalhesEventoActivity, FiltrosActivity::class.java))
                }

                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    // FIX: was TODO() — crashed on reorder events.
                    Log.d("detalhes", "onChildMoved ignored")
                }

                override fun onCancelled(error: DatabaseError) {
                    // FIX: was TODO() — crash on cancellation.
                    Log.d("detalhes", "onCancelled: ${error.message}")
                }
            }
            mail.addChildEventListener(m)
        }
    }

    /**
     * Appends the current user's uid under Eventos/{event}/Presenças.
     *
     * FIX: the original stringified the *whole* Presenças snapshot into a
     * one-element list, so `num` was always 1 and every new attendee was
     * written under key "1", overwriting the previous one. The existing
     * presence children are counted instead and the uid is written at the
     * next index (first attendee still lands at key "1", as before).
     */
    fun marcarPresença() {
        val user = Auth.currentUser
        if (user != null) {
            val mail = mAuth.getReference("Eventos").child(gv.detalhes)
            mail.addListenerForSingleValueEvent(object : ValueEventListener {
                override fun onDataChange(dataSnapshot: DataSnapshot) {
                    val num = dataSnapshot.child("Presenças").childrenCount + 1
                    val update = HashMap<String, Any>()
                    update["$num"] = user.uid
                    mAuth.getReference("Eventos").child(gv.detalhes).child("Presenças")
                        .updateChildren(update)
                    Log.d("evento", "presenca index: $num")
                }

                override fun onCancelled(error: DatabaseError) {
                    // FIX: was TODO() — crash on cancellation.
                    Log.d("evento", "onCancelled: ${error.message}")
                }
            })
        }
    }

    /** Inflates the shared right-hand navigation menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /**
     * Shared navigation: sign-out, profile, groups, group list and home.
     *
     * FIX: `item!!` replaced with a null-safe `when` (no NPE on a null item).
     */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        when (item?.itemId) {
            R.id.signOut -> {
                Auth.signOut()
                val intent = Intent(this, LoginActivity::class.java)
                intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
                startActivity(intent)
            }
            R.id.profile -> startActivity(Intent(this, ProfileActivity::class.java))
            R.id.grupo -> startActivity(Intent(this, VerGrupoActivity::class.java))
            R.id.Lis -> startActivity(Intent(this, ListaGruposActivity::class.java))
            R.id.home -> {
                val marca = 0
                val intent = Intent(this, FiltrosActivity::class.java).apply {
                    putExtra(AlarmClock.EXTRA_MESSAGE, marca)
                }
                startActivity(intent)
            }
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/src/main/java/com/example/app/MapsActivity.kt
package com.example.app
//import com.google.android.gms.location.places.ui.PlacePicker
//import com.google.android.gms.location.places.ui.PlacePicker
//import com.google.android.material.floatingactionbutton.FloatingActionButton
import android.Manifest
import android.app.Activity
import android.content.Intent
import android.content.IntentSender
import android.content.pm.PackageManager
import android.location.Location
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import com.google.android.gms.common.api.ResolvableApiException
import com.google.android.gms.common.api.Status
import com.google.android.gms.location.*
import com.google.android.gms.maps.CameraUpdateFactory
import com.google.android.gms.maps.GoogleMap
import com.google.android.gms.maps.GoogleMap.OnMarkerClickListener
import com.google.android.gms.maps.OnMapReadyCallback
import com.google.android.gms.maps.SupportMapFragment
import com.google.android.gms.maps.model.LatLng
import com.google.android.gms.maps.model.Marker
import com.google.android.gms.maps.model.MarkerOptions
import com.google.android.libraries.places.api.Places
import com.google.android.libraries.places.api.model.Place
import com.google.android.libraries.places.api.net.PlacesClient
import com.google.android.libraries.places.widget.AutocompleteSupportFragment
import com.google.android.libraries.places.widget.listener.PlaceSelectionListener
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.FirebaseDatabase
import com.google.firebase.firestore.FieldValue
import com.google.firebase.firestore.FirebaseFirestore
import com.google.firebase.firestore.GeoPoint
import kotlinx.android.synthetic.main.activity_maps.*
import java.util.*
import kotlin.collections.ArrayList
import kotlin.collections.set
/**
 * Picks the location for a new event: shows the device position on a hybrid
 * map, lets the user search a place (Places autocomplete), and on "Guardar"
 * writes the event (fields taken from VariaveisGlobais) under
 * Eventos/{gv.nome} in the Realtime Database.
 */
class MapsActivity : AppCompatActivity(), OnMapReadyCallback, OnMarkerClickListener {
    private lateinit var map: GoogleMap
    private lateinit var fusedLocationClient: FusedLocationProviderClient
    // Most recent device fix; only assigned after the first location callback.
    private lateinit var lastLocation: Location
    private lateinit var locationCallback: LocationCallback
    private lateinit var locationRequest: LocationRequest
    // True once location updates have been started (settings satisfied).
    private var locationUpdateState = false
    lateinit var gv: VariaveisGlobais
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_maps)
        gv = application as VariaveisGlobais
        val mapFragment = supportFragmentManager
            .findFragmentById(R.id.map) as SupportMapFragment
        mapFragment.getMapAsync(this)
        fusedLocationClient = LocationServices.getFusedLocationProviderClient(this)
        locationCallback = object : LocationCallback() {
            override fun onLocationResult(p0: LocationResult) {
                super.onLocationResult(p0)
                lastLocation = p0.lastLocation
                //placeMarkerOnMap(LatLng(lastLocation.latitude, lastLocation.longitude))
            }
        }
        createLocationRequest()
        search()
        val guardarEvento = bGuardar
        guardarEvento.setOnClickListener {
            evento()
        }
    }
    /**
     * Wires the Places autocomplete fragment; a selected place moves the
     * marker/camera and stores its coordinates in gv.Lat/gv.Long.
     */
    private fun search() {
        val apiKey = getString(R.string.api_key)
        if (!Places.isInitialized()) {
            Places.initialize(applicationContext, apiKey)
        }
        val placesClient: PlacesClient = Places.createClient(this)
        val autocompleteFragment: AutocompleteSupportFragment? =
            supportFragmentManager.findFragmentById(R.id.autocomplete_fragment) as AutocompleteSupportFragment?
        autocompleteFragment?.setPlaceFields(
            Arrays.asList(
                Place.Field.ID,
                Place.Field.NAME,
                Place.Field.LAT_LNG
            )
        )
        // NOTE(review): `!!` here assumes the autocomplete fragment always
        // exists in the layout — confirm, otherwise this NPEs.
        autocompleteFragment!!.setOnPlaceSelectedListener(object : PlaceSelectionListener {
            override fun onPlaceSelected(place: Place) {
                gv.Lat = place.latLng?.latitude.toString().toDouble()
                gv.Long = place.latLng?.longitude.toString().toDouble()
                val currentLatLng = LatLng(gv.Lat, gv.Long)
                placeMarkerOnMap(currentLatLng)
                map.animateCamera(CameraUpdateFactory.newLatLngZoom(currentLatLng, 18f))
                Log.d(
                    "Mapa",
                    "Place: " + place.getName()
                        .toString() + ", " + place.getId() + "," + gv.Lat + ", " + gv.Long
                )
            }
            override fun onError(status: Status) {
                // TODO: Handle the error.
                Log.d("Mapa", "An error occurred: $status")
            }
        })
    }
    companion object {
        // Request codes for the permission dialog and the location-settings
        // resolution dialog, matched in the corresponding callbacks.
        private const val LOCATION_PERMISSION_REQUEST_CODE = 1
        private const val REQUEST_CHECK_SETTINGS = 2
    }
    override fun onMapReady(googleMap: GoogleMap) {
        map = googleMap
        map.uiSettings.isZoomControlsEnabled = true
        map.setOnMarkerClickListener(this)
        setUpMap()
    }
    // Returning false keeps the default marker behavior (camera + info window).
    override fun onMarkerClick(p0: Marker?) = false
    /**
     * Requests the fine-location permission if missing, then enables
     * my-location, switches to hybrid tiles and centers on the last known
     * fix, seeding gv.Lat/gv.Long with it.
     */
    private fun setUpMap() {
        if (ActivityCompat.checkSelfPermission(
                this,
                Manifest.permission.ACCESS_FINE_LOCATION
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            ActivityCompat.requestPermissions(
                this,
                arrayOf(Manifest.permission.ACCESS_FINE_LOCATION),
                LOCATION_PERMISSION_REQUEST_CODE
            )
            return
        }
        map.isMyLocationEnabled = true
        map.mapType = GoogleMap.MAP_TYPE_HYBRID
        fusedLocationClient.lastLocation.addOnSuccessListener(this) { location ->
            // Got last known location. In some rare situations this can be null.
            if (location != null) {
                lastLocation = location
                val currentLatLng = LatLng(location.latitude, location.longitude)
                //placeMarkerOnMap(currentLatLng)
                map.animateCamera(CameraUpdateFactory.newLatLngZoom(currentLatLng, 18f))
                Log.d("Mapa", "$lastLocation}")
                Log.d("Mapa", "latitude ${location.latitude}}")
                Log.d("Mapa", "longitude ${location.longitude}}")
                gv.Lat = location.latitude
                gv.Long = location.longitude
            }
        }
    }
    /** Subscribes locationCallback to periodic fixes (permission permitting). */
    private fun startLocationUpdates() {
        if (ActivityCompat.checkSelfPermission(
                this,
                Manifest.permission.ACCESS_FINE_LOCATION
            ) != PackageManager.PERMISSION_GRANTED
        ) {
            ActivityCompat.requestPermissions(
                this,
                arrayOf(Manifest.permission.ACCESS_FINE_LOCATION),
                LOCATION_PERMISSION_REQUEST_CODE
            )
            return
        }
        fusedLocationClient.requestLocationUpdates(
            locationRequest,
            locationCallback,
            null /* Looper */
        )
    }
    /**
     * Builds the high-accuracy location request (10s interval, 5s fastest)
     * and checks device settings, prompting the user to fix them if needed.
     * NOTE(review): the no-arg LocationRequest() constructor is deprecated in
     * newer play-services-location — works with the pinned version here.
     */
    private fun createLocationRequest() {
        // 1
        locationRequest = LocationRequest()
        // 2
        locationRequest.interval = 10000
        // 3
        locationRequest.fastestInterval = 5000
        locationRequest.priority = LocationRequest.PRIORITY_HIGH_ACCURACY
        val builder = LocationSettingsRequest.Builder()
            .addLocationRequest(locationRequest)
        val client = LocationServices.getSettingsClient(this)
        val task = client.checkLocationSettings(builder.build())
        // 5
        task.addOnSuccessListener {
            locationUpdateState = true
            startLocationUpdates()
        }
        task.addOnFailureListener { e ->
            // 6
            if (e is ResolvableApiException) {
                // Location settings are not satisfied, but this can be fixed
                // by showing the user a dialog.
                try {
                    // Show the dialog by calling startResolutionForResult(),
                    // and check the result in onActivityResult().
                    e.startResolutionForResult(
                        this@MapsActivity,
                        REQUEST_CHECK_SETTINGS
                    )
                } catch (sendEx: IntentSender.SendIntentException) {
                    // Ignore the error.
                }
            }
        }
    }
    // Resumes location updates once the user accepts the settings dialog.
    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)
        if (requestCode == REQUEST_CHECK_SETTINGS) {
            if (resultCode == Activity.RESULT_OK) {
                locationUpdateState = true
                startLocationUpdates()
            }
        }
    }
    override fun onPause() {
        super.onPause()
        // Stop receiving fixes while backgrounded; onResume restarts them.
        fusedLocationClient.removeLocationUpdates(locationCallback)
    }
    public override fun onResume() {
        super.onResume()
        if (!locationUpdateState) {
            startLocationUpdates()
        }
    }
    /** Replaces any existing marker with one at [location]. */
    private fun placeMarkerOnMap(location: LatLng) {
        // 1
        val markerOptions = MarkerOptions().position(location)
        // 2
        map.clear()
        map.addMarker(markerOptions)
    }
    /**
     * Persists the event under Eventos/{gv.nome} using the values previously
     * collected in VariaveisGlobais, then returns to CriarOrgEventoActivity.
     * NOTE(review): assumes EXTRA_MESSAGE carries a numeric string (the group
     * number) — a missing/non-numeric extra crashes here; confirm callers.
     */
    private fun evento() {
        val user = Auth.currentUser
        if (user != null) {
            var numero = intent.getStringExtra(EXTRA_MESSAGE).toInt()
            Log.d("Numero", "ola3 = $numero")
            // (0.0, 0.0) means no location was ever picked or detected.
            if (gv.Lat != 0.0 && gv.Long != 0.0) {
                val evento = HashMap<String, Any>()
                evento["nome"] = gv.nome
                //evento["Presenças"] = ArrayList<String>()
                evento["horas"] = gv.Horas
                evento["dia"] = gv.Day
                evento["mes"] = gv.Month
                evento["ano"] = gv.Year
                evento["diaFim"] = gv.DayFim
                evento["mesFim"] = gv.MonthFim
                evento["anoFim"] = gv.YearFim
                evento["Tipo"] = gv.check
                evento["Forma"] = gv.privado
                evento["Latitude"] = gv.Lat
                evento["Longitude"] = gv.Long
                evento["numeroGrupo"] = numero
                mAuth.getReference("Eventos").child(gv.nome).setValue(evento)
                Toast.makeText(this, "evento criado", Toast.LENGTH_SHORT).show()
                val intent = Intent(this, CriarOrgEventoActivity::class.java)
                intent.flags =
                    Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
                startActivity(intent)
            } else {
                Toast.makeText(this, "Tem de ter localização", Toast.LENGTH_SHORT).show()
                Log.d("Mapa", "$lastLocation}")
                Log.d("Mapa", "latitude ${gv.Lat}}")
                Log.d("Mapa", "longitude ${gv.Long}}")
            }
        }
    }
}
<file_sep>/app/src/main/java/com/example/app/RegistoUserActivity.kt
package com.example.app
import android.app.AlertDialog
import android.content.Intent
import android.os.Bundle
import android.util.Log
import android.view.MenuItem
import android.view.View
import android.widget.EditText
import android.widget.PopupMenu
import android.widget.Switch
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.core.view.isInvisible
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.FirebaseDatabase
import kotlinx.android.synthetic.main.activity_registo_user.*
// Hunter ("caçador") registration screen: country-dependent form fields plus
// Firebase Auth account creation.
class RegistoUserActivity : AppCompatActivity() {
    // val mAuth = FirebaseFirestore.getInstance()
    // Realtime Database root used to persist the user's profile data.
    val mAuth = FirebaseDatabase.getInstance()
    //val gAuth = FirebaseFirestore.getInstance().collection("Grupos")
    // Firebase Authentication entry point used to create the account.
    val auth = FirebaseAuth.getInstance()
    //val mStorage = FirebaseStorage.getInstance().reference
    // Application-wide shared state (see VariaveisGlobais).
    lateinit var gv: VariaveisGlobais
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_registo_user)
gv = application as VariaveisGlobais
val outros = addPais_Outros
val linceca = addLicencaCacaPortugal
val nomeSeguradoraExtra = addNomeSeguradoraExtra
val numApoliceExtra = addNumeroApoliceExtra
val numCaca = addNumPassCaca
val licencaP = editTextLicencaPortugal
val licencaE = addLicencaCacaEspanha
val passaporte = addNumero_Passaporte
val dni = addDNI
val bi = addCartao
val nif = addNif
val licencaEspanha = addEspanhaExtra
val btnPop = bPais_User
val buttonRegistar = bRegistar
val e = checkBoxEspanha
val p = checkBoxPortugal
var g = ""
nomeSeguradoraExtra.setVisibility(View.INVISIBLE)
numApoliceExtra.setVisibility(View.INVISIBLE)
numCaca.setVisibility(View.INVISIBLE)
linceca.setVisibility(View.INVISIBLE)
licencaP.setVisibility(View.INVISIBLE)
licencaEspanha.setVisibility(View.INVISIBLE)
licencaE.setVisibility(View.INVISIBLE)
e.setVisibility(View.INVISIBLE)
p.setVisibility(View.INVISIBLE)
outros.isInvisible = true
dni.isInvisible = true
bi.isInvisible = true
nif.isInvisible = true
passaporte.isInvisible = true
btnPop.setOnClickListener {
val popMenu = PopupMenu(this@RegistoUserActivity, btnPop)
popMenu.menuInflater.inflate(R.menu.menu_pop2, popMenu.menu)
popMenu.setOnMenuItemClickListener(object : PopupMenu.OnMenuItemClickListener {
override fun onMenuItemClick(item: MenuItem?): Boolean {
when (item!!.itemId) {
R.id.checkPortugal -> {
"Portugal"
bi.isVisible = true
nif.isVisible = true
nif.isFocusableInTouchMode = true
licencaP.visibility = View.VISIBLE
e.visibility = View.VISIBLE
p.visibility = View.INVISIBLE
dni.isInvisible = true
outros.isInvisible = true
passaporte.isInvisible = true
licencaE.visibility = View.INVISIBLE
//Log.d("RegistoUser", "putas")
g = "Portugal"
e.setOnClickListener {
if (e.isChecked) {
nomeSeguradoraExtra.visibility = View.VISIBLE
numApoliceExtra.visibility = View.VISIBLE
numCaca.visibility = View.VISIBLE
licencaEspanha.visibility = View.VISIBLE
} else {
licencaEspanha.visibility = View.INVISIBLE
nomeSeguradoraExtra.visibility = View.INVISIBLE
numApoliceExtra.visibility = View.INVISIBLE
numCaca.visibility = View.INVISIBLE
linceca.visibility = View.INVISIBLE
}
}
}
R.id.checkOutros -> {
outros.isVisible = true
passaporte.isVisible = true
nif.isInvisible = true
dni.isInvisible = true
bi.isInvisible = true
numCaca.setVisibility(View.VISIBLE)
licencaEspanha.setVisibility(View.INVISIBLE)
licencaP.setVisibility(View.INVISIBLE)
licencaE.setVisibility(View.INVISIBLE)
p.setVisibility(View.INVISIBLE)
e.setVisibility(View.INVISIBLE)
Log.d("RegistoUser", g)
}
R.id.checkEspanha -> {
"Espanha"
dni.isVisible = true
bi.isInvisible = true
nif.isInvisible = true
outros.isInvisible = true
passaporte.isInvisible = true
licencaE.setVisibility(View.VISIBLE)
p.setVisibility(View.VISIBLE)
Log.d("RegistoUser", "putas2")
g = "Espanha"
licencaP.setVisibility(View.INVISIBLE)
e.setVisibility(View.INVISIBLE)
p.setOnClickListener {
if (p.isChecked) {
nomeSeguradoraExtra.setVisibility(View.VISIBLE)
numApoliceExtra.setVisibility(View.VISIBLE)
numCaca.setVisibility(View.VISIBLE)
linceca.setVisibility(View.VISIBLE)
} else {
nomeSeguradoraExtra.setVisibility(View.INVISIBLE)
numApoliceExtra.setVisibility(View.INVISIBLE)
numCaca.setVisibility(View.INVISIBLE)
linceca.setVisibility(View.INVISIBLE)
}
}
}
}
return true
}
})
popMenu.show()
}
//portugal
//outros
passaporte.text.toString()
g = outros.text.toString()
//espanha
dni.text.toString()
licencaE.text.toString()
buttonRegistar.setOnClickListener {
val email = addEmail.text.toString()
val password = addPass.text.toString()
val name = addNome.text.toString()
val tele = addTele.text.toString()
val local = addLocalidade.text.toString()
val morada = addMorada.text.toString()
val postal = addPostal.text.toString()
val cartaCaca = addCartaCaca.text.toString()
val licencaArma = addLicencaArma.text.toString()
val nomeSeguradora = addNomeSeguradora.text.toString()
val numApolice = addNumeroApolice.text.toString()
registoAuth(
password,
email,
name,
tele,
morada,
local,
postal,
cartaCaca,
licencaArma,
nomeSeguradora,
numApolice,
g,
bi,
nif,
licencaP,
passaporte,
dni, licencaE,
e,
p,
nomeSeguradoraExtra,
numApoliceExtra,
numCaca,
linceca,
licencaEspanha
)
}
}
/**
 * Validates the registration form and, when everything checks out, creates the
 * Firebase Auth account and hands off to [register] to persist the profile.
 *
 * Country-independent fields arrive already extracted as plain Strings; the
 * country-specific fields are passed as the EditText widgets themselves so
 * that [register] can read and validate them according to the selected
 * country [g] ("Portugal", "Espanha", or other).
 *
 * @param g        selected country; empty string means no country was picked
 * @param e        switch: Portuguese hunter also wants a Spanish licence
 * @param p        switch: Spanish hunter also wants a Portuguese licence
 */
private fun registoAuth(
    password: String,
    email: String,
    name: String,
    tele: String,
    morada: String,
    local: String,
    postal: String,
    cartaCaca: String,
    licencaArma: String,
    nomeSeguradora: String,
    numApolice: String,
    g: String,
    bi: EditText,
    nif: EditText,
    licencaP: EditText,
    passaporte: EditText,
    dni: EditText,
    licencaE: EditText,
    e: Switch,
    p: Switch,
    nomeSeguradoraExtra: EditText,
    numApoliceExtra: EditText,
    numCaca: EditText,
    linceca: EditText,
    licencaEspanha: EditText
) {
    // Country-independent required fields. The country-specific widgets are
    // deliberately NOT listed here; register() validates them per country.
    val teste = arrayListOf<EditText>(
        addPass,
        addEmail,
        addNome,
        addTele,
        addLocalidade,
        addMorada,
        addPostal,
        addCartaCaca,
        addLicencaArma,
        addNomeSeguradora,
        addNumeroApolice
    )
    if (verificaCampos(teste) == true) {
        // Fixed-length sanity checks: phone 9 digits, postal code 7 chars
        // (presumably "XXXX-XXX" — TODO confirm), hunting card 6, weapon
        // licence 5, insurance policy number 10.
        if (tele.length == 9 && postal.length == 7 && cartaCaca.length == 6 && licencaArma.length == 5 && numApolice.length == 10) {
            if (!g.isEmpty()) {
                // A country was selected: create the Auth account first, then
                // persist the profile document from the completion callback.
                auth.createUserWithEmailAndPassword(email, password)
                    .addOnCompleteListener { it ->
                        if (!it.isSuccessful) return@addOnCompleteListener
                        Log.d(
                            "RegistoUser",
                            "user auth com uid: ${it.result?.user?.uid}"
                        )
                        register(
                            name,
                            email,
                            tele,
                            morada,
                            local,
                            postal,
                            cartaCaca,
                            licencaArma,
                            nomeSeguradora,
                            numApolice,
                            g,
                            bi,
                            nif,
                            licencaP,
                            e,
                            p,
                            licencaEspanha,
                            nomeSeguradoraExtra,
                            numApoliceExtra,
                            numCaca,
                            linceca, dni, licencaE, passaporte
                        )
                        // NOTE(review): because of the early return above,
                        // it.isSuccessful is always true here — the `else`
                        // branch of this `when` is unreachable.
                        when {
                            it.isSuccessful -> {
                                Toast.makeText(
                                    this,
                                    "Registo COM sucesso",
                                    Toast.LENGTH_SHORT
                                ).show()
                            }
                            else -> {
                                Toast.makeText(
                                    this,
                                    "Registo sem sucesso",
                                    Toast.LENGTH_SHORT
                                ).show()
                            }
                        }
                    }
                    .addOnFailureListener { exception: Exception ->
                        // Surface the raw Firebase error to the user.
                        Toast.makeText(this, exception.toString(), Toast.LENGTH_LONG)
                            .show()
                    }
            } else {
                Toast.makeText(this, "Selecione um pais ", Toast.LENGTH_SHORT).show()
            }
        } else {
            // Length checks failed; verificaCampos already marked empty
            // fields, but no feedback is shown for wrong-length values.
            Log.d("RegistoUser", "nao registo")
        }
    }
}
/**
 * Persists the freshly created user's profile document in the Realtime
 * Database under "Users/<uid>", branching on the selected country [g] for the
 * country-specific fields, then sends the verification e-mail, clears the
 * form, signs out and returns to [LoginActivity].
 *
 * If the country-specific validation fails, the just-created Auth account is
 * deleted so the user can retry with the same e-mail.
 *
 * @param g country selected in the form: "Portugal", "Espanha", or other
 * @param e switch: Portuguese hunter also wants a Spanish licence
 * @param p switch: Spanish hunter also wants a Portuguese licence
 */
private fun register(
    name: String,
    email: String,
    tele: String,
    morada: String,
    local: String,
    postal: String,
    cartaCaca: String,
    licencaArma: String,
    nomeSeguradora: String,
    numApolice: String,
    g: String,
    bi: EditText,
    nif: EditText,
    licencaP: EditText,
    e: Switch,
    p: Switch,
    licencaEspanha: EditText,
    nomeSeguradoraExtra: EditText,
    numApoliceExtra: EditText,
    numCaca: EditText,
    linceca: EditText,
    dni: EditText,
    licencaE: EditText,
    passaporte: EditText
) {
    val uid = auth.uid.toString()
    val NomeSeguradoraExtra = nomeSeguradoraExtra.text.toString()
    val NumApoliceExtra = numApoliceExtra.text.toString()
    val NumCaca = numCaca.text.toString()
    // Country-independent part of the profile document.
    val pessoa = HashMap<String, Any>()
    pessoa["uid"] = uid
    pessoa["email"] = email
    pessoa["name"] = name
    pessoa["telemovel"] = tele
    pessoa["morada"] = morada
    pessoa["localidade"] = local
    pessoa["Codigo Postal"] = postal
    pessoa["Carta Caçadore"] = cartaCaca
    pessoa["Licença Arma"] = licencaArma
    pessoa["Nome Seguradora"] = nomeSeguradora
    pessoa["Numero Apolice"] = numApolice
    pessoa["Pais"] = g
    pessoa["FirstTime"] = true
    pessoa["Org"] = false
    pessoa["Controlo"] = false
    if (g == "Portugal") {
        val Bi = bi.text.toString()
        val Nif = nif.text.toString()
        val LicencaP = licencaP.text.toString()
        // Portuguese users must supply BI, NIF and the Portuguese licence.
        val teste = arrayListOf<EditText>(
            bi,
            nif,
            licencaP
        )
        if (verificaCampos(teste) == true) {
            pessoa["BI"] = Bi
            pessoa["Nif"] = Nif
            pessoa["Licenca Portugal"] = LicencaP
            Log.d("RegistoUser", "user firestore registo1")
            val LicencaEspanha = licencaEspanha.text.toString()
            if (e.isChecked) {
                // Also hunting in Spain: the extra insurance/licence fields
                // become mandatory.
                val teste2 = arrayListOf<EditText>(
                    nomeSeguradoraExtra,
                    numApoliceExtra,
                    numCaca,
                    licencaEspanha
                )
                if (verificaCampos(teste2) == true) {
                    pessoa["Licenca Espanha"] = LicencaEspanha
                    pessoa["nome Seguradora Extra"] = NomeSeguradoraExtra
                    pessoa["numero Apolice Extra"] = NumApoliceExtra
                    pessoa["Numero Passaporte Europeu"] = NumCaca
                    persistAndFinish(uid, pessoa)
                    Log.d("RegistoUser", "user firestore registo2")
                }
            } else {
                persistAndFinish(uid, pessoa)
                Log.d("RegistoUser", "user firestore registo3")
            }
        } else {
            // Validation failed after the Auth account was already created:
            // roll the account back so the e-mail can be reused.
            val user = auth.currentUser
            user?.delete()
        }
    } else if (g == "Espanha") {
        val Dni = dni.text.toString()
        val LicencaE = licencaE.text.toString()
        // Spanish users must supply DNI and the Spanish licence.
        val teste = arrayListOf<EditText>(
            dni,
            licencaE
        )
        if (verificaCampos(teste) == true) {
            pessoa["Dni"] = Dni
            // NOTE(review): this key arrived garbled in the source
            // ("Lic<NAME>"); kept byte-identical so existing DB consumers
            // keep working — confirm the intended key (likely
            // "Licenca Espanha").
            pessoa["Lic<NAME>"] = LicencaE
            Log.d("RegistoUser", "user firestore registo4")
            val LicencaPortuguesa = linceca.text.toString()
            if (p.isChecked) {
                // Also hunting in Portugal: the extra fields become mandatory.
                val teste2 = arrayListOf<EditText>(
                    nomeSeguradoraExtra,
                    numApoliceExtra,
                    numCaca,
                    linceca
                )
                if (verificaCampos(teste2) == true) {
                    pessoa["Licenca Portugal"] = LicencaPortuguesa
                    pessoa["nome Seguradora Extra"] = NomeSeguradoraExtra
                    pessoa["numero Apolice Extra"] = NumApoliceExtra
                    pessoa["Numero Passaporte Europeu"] = NumCaca
                    persistAndFinish(uid, pessoa)
                    Log.d("RegistoUser", "user firestore registo5")
                }
            } else {
                persistAndFinish(uid, pessoa)
                Log.d("RegistoUser", "user firestore registo6")
            }
        } else {
            // Roll back the Auth account on failed validation.
            val user = auth.currentUser
            user?.delete()
        }
    } else {
        // Any other country: only a passport number is required.
        val Passaporte = passaporte.text.toString()
        if (!Passaporte.isEmpty()) {
            pessoa["Passaporte"] = Passaporte
            persistAndFinish(uid, pessoa)
            Log.d("RegistoUser", "user firestore registo7")
        }
        // NOTE(review): unlike the other branches, an empty passport does NOT
        // delete the just-created Auth account — confirm whether that is
        // intended.
    }
}

/**
 * Common tail of every successful registration branch: write the profile,
 * send the verification e-mail, clear the form, sign out and go to login.
 */
private fun persistAndFinish(uid: String, pessoa: HashMap<String, Any>) {
    // Bug fix: the Espanha/"outros" branches previously wrote to
    // getReference(uid) (i.e. the database ROOT) while the Portugal branch
    // wrote under "Users"/<uid>. All profiles now land under "Users", and
    // every branch signs out (the Portugal no-extra-licence path previously
    // skipped auth.signOut()).
    mAuth.getReference("Users").child(uid).setValue(pessoa)
    sendEmailVerification()
    Log.d("RegistoUser", "email enviado")
    clearInputs()
    val intent = Intent(this, LoginActivity::class.java)
    intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
    startActivity(intent)
    auth.signOut()
}
/**
 * Sends the Firebase verification e-mail to the currently signed-in user and,
 * once the send completes, shows a dialog asking the user to confirm their
 * inbox. Tapping the positive button returns to [LoginActivity].
 */
private fun sendEmailVerification() {
    val user = auth.currentUser
    user?.sendEmailVerification()?.addOnCompleteListener {
        val builder = AlertDialog.Builder(this)
        // Fixed typo: "Verfication" -> "Verification".
        builder.setTitle("Email Verification")
        builder.setMessage("Please confirm email")
        builder.setPositiveButton(android.R.string.yes) { dialog, which ->
            Toast.makeText(
                applicationContext,
                android.R.string.yes, Toast.LENGTH_SHORT
            ).show()
            startActivity(Intent(this, LoginActivity::class.java))
        }
        // Bug fix: the dialog was configured but never displayed.
        builder.show()
    }
}
/**
 * Marks every empty EditText in [array] with a "Falta Preencher" error and
 * returns true only when no field was empty (i.e. the form is complete).
 */
fun verificaCampos(
    array: ArrayList<EditText>
): Boolean {
    var emVazio = 0
    for (campo in array) {
        if (campo.text.isEmpty()) {
            campo.error = "Falta Preencher"
            emVazio += 1
        }
    }
    // The form is valid exactly when no field was flagged.
    return emVazio == 0
}
/** Clears the name, e-mail and password fields of the registration form. */
private fun clearInputs() {
    for (campo in listOf(addNome, addEmail, addPass)) {
        campo.text.clear()
    }
}
}
<file_sep>/app/src/main/java/com/example/app/VerGrupoActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.AdapterView
import android.widget.ArrayAdapter
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_ver_grupo.*
/**
 * Screen that lists the groups the signed-in user is a member of.
 *
 * It walks every child of "Grupos" in the Realtime Database, inspects each
 * group's "membros" list and, when the current uid is found, appends the
 * group name to the ListView. Tapping an entry opens [GrupoActivity] with the
 * group's "Numero" value as the intent message.
 */
class VerGrupoActivity : AppCompatActivity() {
    // val mAuth = FirebaseFirestore.getInstance()
    val mAuth = FirebaseDatabase.getInstance() // Realtime Database handle
    val auth = FirebaseAuth.getInstance()      // current-user lookup
    lateinit var gv: VariaveisGlobais          // app-wide shared state
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_ver_grupo)
        val semGrupos = tNaoGrupos // "no groups" placeholder label
        val list = ListView2
        val user = auth.currentUser?.uid
        if (user != null) {
            val mail = mAuth.getReference("Grupos")
            val values = ArrayList<String>() // all group names seen
            val valor = ArrayList<String>()  // names of groups this user belongs to
            // Outer listener: fires once per group under "Grupos".
            val m = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val g = dataSnapshot.child("nome").value.toString()
                    Log.d(
                        "VerGrupo2",
                        "$user"
                    )
                    values.add(g)
                    // NOTE(review): this local `m` shadows the outer listener `m`.
                    val m = mAuth.getReference("Grupos").child(g)
                    Log.d(
                        "VerGrupo2",
                        " $m"
                    )
                    val t = mAuth.getReference("Grupos").child(g).child("membros")
                    // Inner listener: fires once per member uid of this group.
                    val f = object : ChildEventListener {
                        override fun onChildAdded(
                            dataSnapshot: DataSnapshot,
                            previousChildName: String?
                        ) {
                            val j = dataSnapshot.value.toString()
                            val fazParte = ArrayList<String>()
                            Log.d(
                                "VerGrupo2",
                                "j : $j"
                            )
                            fazParte.add(j)
                            Log.d(
                                "VerGrupo2",
                                "f : $fazParte"
                            )
                            // fazParte holds a single member uid, so this is
                            // effectively `j == user`.
                            if (fazParte.contains(user)) {
                                m.addValueEventListener(object : ValueEventListener {
                                    override fun onDataChange(snapshot: DataSnapshot) {
                                        val n =
                                            snapshot.child("Numero").value
                                        Log.d(
                                            "VerGrupo",
                                            " grupos deste user"
                                        )
                                        Log.d(
                                            "VerGrupo2",
                                            " n: $n"
                                        )
                                        semGrupos.isVisible = false
                                        gv = application as VariaveisGlobais
                                        valor.add(g)
                                        // Rebuild the adapter each time a
                                        // matching group is found.
                                        val adapter =
                                            ArrayAdapter(this@VerGrupoActivity, R.layout.listview_item, valor)
                                        list.adapter = adapter
                                        list.onItemClickListener =
                                            AdapterView.OnItemClickListener { _, view, position, _ ->
                                                val itemValue =
                                                    list.getItemAtPosition(position) as String
                                                // Look up the tapped group's "Numero"
                                                // and open its detail screen.
                                                mAuth.getReference("Grupos").child(itemValue).addValueEventListener(object : ValueEventListener {
                                                    override fun onDataChange(snapshot: DataSnapshot) {
                                                        val num =
                                                            snapshot.child("Numero").value
                                                        val message = num.toString()
                                                        Log.d(
                                                            "VerGrupo2",
                                                            " num: $num"
                                                        )
                                                        startActivity(
                                                            Intent(
                                                                view.context,
                                                                GrupoActivity::class.java
                                                            ).apply {
                                                                putExtra(EXTRA_MESSAGE, message)
                                                                // NOTE(review): duplicate putExtra
                                                                // with the same key — redundant.
                                                                putExtra(EXTRA_MESSAGE, message)
                                                            }
                                                        )
                                                        Log.d(
                                                            "VerGrupo2",
                                                            "messagem: $message"
                                                        )
                                                    }
                                                    override fun onCancelled(error: DatabaseError) {
                                                        // NOTE(review): TODO() throws at runtime if
                                                        // the DB read is cancelled — consider logging.
                                                        TODO("Not yet implemented")
                                                    }
                                                })
                                            }
                                    }
                                    override fun onCancelled(error: DatabaseError) {
                                        TODO("Not yet implemented")
                                    }
                                })
                            } else {
                                Log.d(
                                    "VerGrupo",
                                    " sem grupos deste user"
                                )
                                // This member is not the current user; shows
                                // the "no groups" label (may be hidden again
                                // by a later match).
                                semGrupos.isVisible = true
                            }
                        }
                        override fun onChildChanged(
                            snapshot: DataSnapshot,
                            previousChildName: String?
                        ) {
                            TODO("Not yet implemented")
                        }
                        override fun onChildRemoved(snapshot: DataSnapshot) {
                            TODO("Not yet implemented")
                        }
                        override fun onChildMoved(
                            snapshot: DataSnapshot,
                            previousChildName: String?
                        ) {
                            TODO("Not yet implemented")
                        }
                        override fun onCancelled(error: DatabaseError) {
                            TODO("Not yet implemented")
                        }
                    }
                    t.addChildEventListener(f)
                }
                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }
                override fun onChildRemoved(snapshot: DataSnapshot) {
                    TODO("Not yet implemented")
                }
                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }
                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            }
            mail.addChildEventListener(m)
        } else {
            // No signed-in user: nothing to list.
            Log.d("VerGrupo", "No such document")
        }
    }
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        // Standard right-side options menu shared by the app's screens.
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }
    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        if (item.itemId == R.id.signOut) {
            auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this, VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            // 0 = "no filter" marker consumed by FiltrosActivity.
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }
}
<file_sep>/app/src/main/java/com/example/app/EventoActivity.kt
package com.example.app
import android.content.Intent
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.*
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import kotlinx.android.synthetic.main.activity_evento.*
import java.util.*
import java.util.regex.Matcher
import java.util.regex.Pattern
/**
 * Event-creation form: the user picks a start date, an end date, an event
 * type (popup menu), a name, a time (HH:mm) and a public/private switch.
 * Everything is stashed in the [VariaveisGlobais] application object, then
 * the flow continues in [MapsActivity] carrying the group number forward.
 */
class EventoActivity : AppCompatActivity() {
    val Auth = FirebaseAuth.getInstance()
    //val gAuth = FirebaseFirestore.getInstance().collection("Grupo")
    lateinit var gv: VariaveisGlobais // app-wide shared state
    var num = 0                       // group number received from the caller
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_evento)
        gv = application as VariaveisGlobais
        // datePicker2 = event END date; stored in gv.*Fim.
        val datePicker2 = findViewById<DatePicker>(R.id.datePicker2)
        val today2 = Calendar.getInstance()
        datePicker2.init(
            today2.get(Calendar.YEAR), today2.get(Calendar.MONTH),
            today2.get(Calendar.DAY_OF_MONTH)
        ) { view, year, month, day ->
            // Calendar months are 0-based; shift to 1-based.
            // NOTE(review): this `month` shadows the lambda parameter.
            val month = month + 1
            val ano = year
            Log.d(
                "evento",
                "dados2: $month , $ano , $day"
            )
            gv.MonthFim = month
            gv.DayFim = day
            gv.YearFim = year
        }
        // datePicker1 = event START date; stored in gv.Month/Day/Year.
        val datePicker = findViewById<DatePicker>(R.id.datePicker1)
        val today = Calendar.getInstance()
        datePicker.init(
            today.get(Calendar.YEAR), today.get(Calendar.MONTH),
            today.get(Calendar.DAY_OF_MONTH)
        ) { view, year, month, day ->
            val month = month + 1
            val ano = year
            Log.d(
                "evento",
                "dados: $month , $ano , $day"
            )
            gv.Month = month
            gv.Day = day
            gv.Year = year
        }
        // Popup menu that selects the event type into gv.check.
        val btnPop = bTipos
        btnPop.setOnClickListener{
            val popMenu = PopupMenu(this@EventoActivity, btnPop)
            popMenu.menuInflater.inflate(R.menu.menu_pop, popMenu.menu)
            popMenu.setOnMenuItemClickListener(object: PopupMenu.OnMenuItemClickListener {
                override fun onMenuItemClick(item: MenuItem?): Boolean {
                    when (item!!.itemId){
                        R.id.checkMontaria -> gv.check = "montaria"
                        R.id.checkDiasCaca -> gv.check = "dias"
                        R.id.checkEspera -> gv.check = "espera"
                        R.id.checkRolas -> gv.check = "rolas"
                        R.id.checkTordos -> gv.check = "tordos"
                    }
                    return true
                }
            })
            popMenu.show()
        }
        // "Continue" button: validate the form and move on to the map.
        val paginaMapa = bPais_User
        paginaMapa.setOnClickListener {
            evento()
        }
    }
    /**
     * Validates name / time / type, stores them in [gv] and launches
     * [MapsActivity] with the group number as the intent message.
     */
    private fun evento() {
        val nome = edNome.text.toString()
        val horas = edTime.text.toString()
        val on = switchForma // checked = private event
        // NOTE(review): getStringExtra returns a platform (nullable) type —
        // this throws if the extra is missing or not a number; confirm every
        // caller always supplies it.
        var numero = intent.getStringExtra(EXTRA_MESSAGE).toInt()
        Log.d("Numero", "ola2 = $numero")
        num = numero
        val user = Auth.currentUser
        if (!nome.isEmpty()
        ) {
            gv.nome = nome
            if (isTimeValid(horas) == true) {
                gv.Horas = horas
                if (on.isChecked) {
                    gv.privado = "privado"
                } else {
                    gv.privado = "publico"
                }
                if (gv.check != "") {
                    // All inputs valid: continue to location selection.
                    val intent = Intent(this, MapsActivity::class.java).apply {
                        putExtra(
                            EXTRA_MESSAGE,
                            num.toString())
                    }
                    startActivity(intent)
                } else {
                    Toast.makeText(this, "Selecionar Tipo", Toast.LENGTH_SHORT).show()
                }
            } else {
                Toast.makeText(this, "Horas mal preenchidas", Toast.LENGTH_SHORT).show()
            }
        } else {
            Toast.makeText(this, "Preencha campo nome", Toast.LENGTH_SHORT).show()
        }
    }
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        // Standard right-side options menu shared by the app's screens.
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this,VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            startActivity(Intent(this, FiltrosActivity::class.java))
        }
        return super.onOptionsItemSelected(item)
    }
    /**
     * Returns true when [horas] matches 24-hour "H:mm"/"HH:mm"
     * (hours 0-23, minutes 00-59).
     */
    fun isTimeValid(horas: String): Boolean {
        var isValid = false
        val expression = "^([0-9]|0[0-9]|1[0-9]|2[0-3]):[0-5][0-9]$"
        val inputStr: CharSequence = horas
        val pattern: Pattern = Pattern.compile(expression, Pattern.CASE_INSENSITIVE)
        val matcher: Matcher = pattern.matcher(inputStr)
        if (matcher.matches()) {
            isValid = true
        }
        return isValid
    }
}
<file_sep>/app/src/main/java/com/example/app/Model.kt
package com.example.app
/**
 * Immutable value holder for a user entry (name, citizen-card number, member
 * number and Firebase uid). Rendering the object yields only the uid, so list
 * adapters display/search by id.
 */
class Model(val Nome: String, val NumeroCC: Int, val NumeroSocio: Int, val uid: String) {
    override fun toString(): String = uid
}
<file_sep>/app/src/main/java/com/example/app/ListaGruposActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.ListView
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
/**
 * Screen that lists ALL groups found under "Grupos" in the Realtime Database.
 * Tapping a group opens [AdesaoActivity] (join-group flow) with the group
 * name as the intent message.
 */
class ListaGruposActivity : AppCompatActivity() {
    val auth = FirebaseAuth.getInstance()      // for the sign-out menu action
    val mAuth = FirebaseDatabase.getInstance() // Realtime Database handle
    lateinit var gv: VariaveisGlobais          // app-wide shared state
    lateinit var listView: ListView
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_lista_grupos)
        listView = findViewById(R.id.listViewLista)
        val gruposMemebro = mAuth.getReference("Grupos")
        val list = ArrayList<String>() // accumulated group names
        val membro = object : ChildEventListener {
            override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                val g = dataSnapshot.child("nome").value.toString()
                list.add(
                    g
                )
                Log.d(
                    "ListaGruposActivity",
                    " pref $g"
                )
                Log.d(
                    "ListaGruposActivity",
                    " pref $list"
                )
                // Rebuild the adapter every time a group arrives.
                val adapter3 = ArrayAdapter(this@ListaGruposActivity, R.layout.listview_item, list)
                listView.adapter = adapter3
                listView.onItemClickListener =
                    AdapterView.OnItemClickListener { _, view, position, _ ->
                        val itemValue = listView.getItemAtPosition(position)
                        val message = itemValue as String
                        Log.d("ListaGruposActivity", "mensagem: $message" + "item: $itemValue ")
                        val b = mAuth.getReference("Grupos").child(itemValue.toString())
                        // NOTE(review): a ValueEventListener is used only to
                        // delay startActivity until the group exists — but it
                        // re-fires on every later data change; consider
                        // addListenerForSingleValueEvent.
                        b.addValueEventListener(object : ValueEventListener {
                            override fun onDataChange(snapshot: DataSnapshot) {
                                startActivity(
                                    Intent(view.context, AdesaoActivity::class.java).apply {
                                        putExtra(AlarmClock.EXTRA_MESSAGE, message)
                                    }
                                )
                            }
                            override fun onCancelled(error: DatabaseError) {
                                // NOTE(review): TODO() throws at runtime if the
                                // read is cancelled — consider logging instead.
                                TODO("Not yet implemented")
                            }
                        })
                    }
            }
            override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                // NOTE(review): jumping to PreferenciasActivity on ANY group
                // change looks unintentional — confirm.
                startActivity(Intent(this@ListaGruposActivity, PreferenciasActivity::class.java))
            }
            override fun onChildRemoved(snapshot: DataSnapshot) {
                TODO("Not yet implemented")
            }
            override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                TODO("Not yet implemented")
            }
            override fun onCancelled(error: DatabaseError) {
                TODO("Not yet implemented")
            }
        }
        gruposMemebro.addChildEventListener(membro)
    }
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        // Standard right-side options menu shared by the app's screens.
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }
    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        if (item.itemId == R.id.signOut) {
            auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags =
                Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this, VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            // 0 = "no filter" marker consumed by FiltrosActivity.
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(AlarmClock.EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }
}<file_sep>/app/src/main/java/com/example/app/HomeActivity.kt
package com.example.app
import android.content.Intent
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.SearchView
import android.widget.Toast
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.core.view.isVisible
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_evento.*
import kotlinx.android.synthetic.main.activity_home.*
import kotlinx.android.synthetic.main.filtros_custom_view.*
import kotlinx.android.synthetic.main.filtros_custom_view.view.*
import org.intellij.lang.annotations.JdkConstants
import java.time.LocalDate
import java.util.*
import kotlin.collections.ArrayList
class HomeActivity : AppCompatActivity() {
val Auth = FirebaseAuth.getInstance()
val mAuth = FirebaseDatabase.getInstance()
lateinit var gv: VariaveisGlobais
/** Sets up the home screen and immediately populates the event list. */
override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    // Cache the application-wide state holder before building the UI.
    gv = application as VariaveisGlobais
    setContentView(R.layout.activity_home)
    // Load the events straight away; the filter dialog is currently disabled.
    eventos()
}
fun eventos() {
val semEventos = NaoEventos
val lista = ListViewHome
val pesquisa = SearchEvento
val filtro = intent.getStringExtra(EXTRA_MESSAGE)
val values = ArrayList<String>()
val ListaEventosPrivat = mAuth.getReference("Eventos")
val ListaEventosPublic = mAuth.getReference("Eventos")
if (filtro != null) {
val public = object : ChildEventListener {
override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
val tipo = dataSnapshot.child("Tipo").getValue().toString()
if (tipo == filtro || filtro == "tudo") {
semEventos.isVisible = false
val anoAtual =
Calendar.getInstance().get(Calendar.YEAR)
val mesAtual =
Calendar.getInstance().get(Calendar.MONTH) + 1
val diaAtual =
Calendar.getInstance().get(Calendar.DAY_OF_MONTH)
val ano = dataSnapshot.child("anoFim").getValue().toString().toInt()
val mes = dataSnapshot.child("mesFim").getValue().toString().toInt()
val dia = dataSnapshot.child("diaFim").getValue().toString().toInt()
if (anoAtual < ano) {
val f = dataSnapshot.child("Forma").getValue().toString()
if (f == "publico") {
Log.d(
"home2",
"${
dataSnapshot.child("nome").getValue()
.toString()
},$anoAtual ,$mesAtual,$diaAtual, $ano, $mes, $dia"
)
values.add(dataSnapshot.child("nome").getValue().toString())
}
} else if (anoAtual == ano) {
if (mesAtual < mes) {
val f = dataSnapshot.child("Forma").getValue().toString()
if (f == "publico") {
Log.d(
"home2",
"${
dataSnapshot.child("nome").getValue()
.toString()
},$anoAtual ,$mesAtual,$diaAtual, $ano, $mes, $dia"
)
values.add(dataSnapshot.child("nome").getValue().toString())
}
} else if (mesAtual == mes) {
if (diaAtual <= dia) {
val f = dataSnapshot.child("Forma").getValue().toString()
if (f == "publico") {
Log.d(
"home2",
"${
dataSnapshot.child("nome").getValue()
.toString()
},$anoAtual ,$mesAtual,$diaAtual, $ano, $mes, $dia"
)
values.add(dataSnapshot.child("nome").getValue().toString())
}
}
}
}
// val f = evento.get("Forma")
// if (f == "publico") {
// Log.d(
// "home2",
// "${
// evento.get("nome")
// .toString()
// },$anoAtual ,$mesAtual,$diaAtual, $ano, $mes, $dia"
// )
// values.add(evento.get("nome").toString())
// }
}
Log.d("home5", "$values")
val adapter =
ArrayAdapter(this@HomeActivity, R.layout.listview_item, values)
lista.adapter = adapter
lista.onItemClickListener =
object : AdapterView.OnItemClickListener {
override fun onItemClick(
parent: AdapterView<*>,
view: View,
position: Int,
id: Long
) {
val itemValue =
lista.getItemAtPosition(position) as String
Log.d("home", "grupoID to search: $itemValue")
gv.detalhes = itemValue
// val uid = Auth.currentUser?.uid
val eventoclick2 =
mAuth.getReference("Eventos").child(itemValue)
eventoclick2.addValueEventListener(object : ValueEventListener {
override fun onDataChange(snapshot: DataSnapshot) {
startActivity(
Intent(
view.context,
DetalhesEventoActivity::class.java
)
)
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
})
// Toast.makeText(
// applicationContext,
// "Position :$position\nItem Value : $itemValue",
// Toast.LENGTH_LONG
// ).show()
}
}
// var x = 0
// for (evento in result) {
//
// x += 1
// }
// if (x > 0) {
// semEventos.isVisible = false
// } else {
//
// semEventos.isVisible = true
// Toast.makeText(
// applicationContext,
// "Sem eventos disponiveis", Toast.LENGTH_LONG
// ).show()
// }
}
override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
startActivity(Intent(this@HomeActivity, HomeActivity::class.java))
}
override fun onChildRemoved(snapshot: DataSnapshot) {
TODO("Not yet implemented")
}
override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
TODO("Not yet implemented")
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
}
ListaEventosPublic.addChildEventListener(public)
val gruposMemmbros = mAuth.getReference("Grupos")
Log.d(
"home75",
"gr : ${
gruposMemmbros
}"
)
val teste = ArrayList<String>()
val gm = object : ChildEventListener {
override fun onChildAdded(
dataSnapshot: DataSnapshot,
previousChildName: String?
) {
val uid = Auth.currentUser?.uid
val g = dataSnapshot.child("nome").getValue().toString()
Log.d(
"home75",
"g : ${
g
}"
)
teste.add(g)
Log.d(
"home75",
"teste: ${
teste
}"
)
val m = mAuth.getReference("Grupos").child(g)
val t = mAuth.getReference("Grupos").child(g).child("membros")
Log.d(
"home75",
"m : ${
m
}"
)
val f = object : ChildEventListener {
override fun onChildAdded(
dataSnapshot: DataSnapshot,
previousChildName: String?
) {
val j = dataSnapshot.getValue().toString()
Log.d(
"home75",
"j : ${
j
}"
)
val fazParte = ArrayList<String>()
fazParte.add(j)
Log.d(
"home75",
"f : ${
fazParte
}"
)
if (fazParte.contains(uid)) {
m.addValueEventListener(object : ValueEventListener {
override fun onDataChange(snapshot: DataSnapshot) {
val n =
snapshot.child("Numero").getValue().toString().toInt()
val private = object : ChildEventListener {
override fun onChildAdded(
dataSnapshot: DataSnapshot,
previousChildName: String?
) {
val l = dataSnapshot.child("numeroGrupo").getValue()
.toString().toInt()
Log.d(
"home75",
"l :${
l
}"
)
val ano =
dataSnapshot.child("anoFim").getValue()
.toString()
.toInt()
val mes =
dataSnapshot.child("mesFim").getValue()
.toString()
.toInt()
val dia =
dataSnapshot.child("diaFim").getValue()
.toString()
.toInt()
val tipo =
dataSnapshot.child("Tipo").getValue().toString()
val f =
dataSnapshot.child("Forma").getValue()
.toString()
val nome = dataSnapshot.child("nome").getValue()
.toString()
// val fodasse = m.child(n)
if (n == l) {
Log.d(
"home75",
"n :${
n
}"
)
if (tipo == filtro || filtro == "tudo") {
semEventos.isVisible = false
val anoAtual =
Calendar.getInstance()
.get(Calendar.YEAR)
val mesAtual =
Calendar.getInstance()
.get(Calendar.MONTH) + 1
val diaAtual =
Calendar.getInstance()
.get(Calendar.DAY_OF_MONTH)
// PROBLEMA NA VERIFICAÇAO DO DIA
if (anoAtual < ano) {
if (f == "privado") {
values.add(
nome
)
}
} else if (anoAtual == ano) {
if (mesAtual < mes) {
// val f =
// dataSnapshot.child("Forma").getValue()
// .toString()
if (f == "privado") {
values.add(
nome
)
}
} else if (mesAtual == mes) {
if (diaAtual <= dia) {
// val f =
// dataSnapshot.child("Forma").getValue()
// .toString()
if (f == "privado") {
Log.d(
"home75",
"nome :${
nome
}"
)
values.add(
nome
)
Log.d(
"home75",
"v :${
values
}"
)
}
}
}
}
}
}//
Log.d("home75", "$values")
val adapter =
ArrayAdapter(
this@HomeActivity,
R.layout.listview_item,
values
)
lista.adapter = adapter
pesquisa.setOnQueryTextListener(object :
SearchView.OnQueryTextListener {
override fun onQueryTextSubmit(query: String): Boolean {
return false
}
override fun onQueryTextChange(newText: String): Boolean {
adapter.filter.filter(newText)
return false
}
})
lista.onItemClickListener =
object : AdapterView.OnItemClickListener {
override fun onItemClick(
parent: AdapterView<*>,
view: View,
position: Int,
id: Long
) {
val itemValue =
lista.getItemAtPosition(position) as String
Log.d(
"home44",
"grupoID to search: $itemValue"
)
gv.detalhes = itemValue
// val uid = Auth.currentUser?.uid
val eventoClick =
mAuth.getReference("Eventos")
.child(itemValue)
eventoClick.addValueEventListener(object :
ValueEventListener {
override fun onDataChange(snapshot: DataSnapshot) {
startActivity(
Intent(
view.context,
DetalhesEventoActivity::class.java
)
)
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
})
// Toast.makeText(
// applicationContext,
// "Position :$position\nItem Value : $itemValue",
// Toast.LENGTH_LONG
// ).show()
}
}
}
override fun onChildChanged(
snapshot: DataSnapshot,
previousChildName: String?
) {
startActivity(
Intent(
this@HomeActivity,
HomeActivity::class.java
)
)
}
override fun onChildRemoved(snapshot: DataSnapshot) {
TODO("Not yet implemented")
}
override fun onChildMoved(
snapshot: DataSnapshot,
previousChildName: String?
) {
TODO("Not yet implemented")
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
// var x = 0
// for (evento in result) {
//
// x += 1
// }
// if (x > 0) {
// semEventos.isVisible = false
// } else {
//
// semEventos.isVisible = true
// Toast.makeText(
// applicationContext,
// "Sem eventos disponiveis", Toast.LENGTH_LONG
// ).show()
// }
//
// }
}
//
//
ListaEventosPrivat.addChildEventListener(private)
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
})
}//
}
override fun onChildChanged(
snapshot: DataSnapshot,
previousChildName: String?
) {
TODO("Not yet implemented")
}
override fun onChildRemoved(snapshot: DataSnapshot) {
TODO("Not yet implemented")
}
override fun onChildMoved(
snapshot: DataSnapshot,
previousChildName: String?
) {
TODO("Not yet implemented")
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
}
t.addChildEventListener(f)
}
override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
startActivity(Intent(this@HomeActivity, HomeActivity::class.java))
}
override fun onChildRemoved(snapshot: DataSnapshot) {
TODO("Not yet implemented")
}
override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
TODO("Not yet implemented")
}
override fun onCancelled(error: DatabaseError) {
TODO("Not yet implemented")
}
}
gruposMemmbros.addChildEventListener(gm)
// filtros de pesquisa
/* val checkedTiposArray = booleanArrayOf(false, false, false, false, false)
private fun showFiltros() {
// val inflater = layoutInflater
// val inflate_view = inflater.inflate(R.layout.filtros_custom_view, null)
val uid = Auth.currentUser?.uid
val semEventos = NaoEventos
val lista = ListView4
val tipos = arrayOf("Montaria", "Espera", "Tordos", "Rolas", "Dias Caça")
// val diainflate = inflate_view.edDia
// val mesinflate = inflate_view.edMes
// val anoinflate = inflate_view.edAno
//
// val dia = diainflate.text.toString()
// val mes = mesinflate.text.toString()
// val ano = anoinflate.text.toString()
val alertDialog = AlertDialog.Builder(this)
alertDialog.setTitle("Filtros por Tipo")
//alertDialog.setView(inflate_view)
alertDialog.setCancelable(false)
alertDialog.setNegativeButton("Limpar") { dialog, which ->
Toast.makeText(this, "Limpar", Toast.LENGTH_LONG).show()
eventos()
}
alertDialog.setMultiChoiceItems(tipos, checkedTiposArray) { dialog, which, isChecked ->
checkedTiposArray[which] = isChecked
}
alertDialog.setPositiveButton("Done") { dialog, which ->
var gruposMemmbros = mAuth.collection("Grupos")
gruposMemmbros.get().addOnSuccessListener { result ->
if (result != null) {
for (grupo in result) {
var fazParte = grupo.get("membros") as List<String>
if (fazParte.contains(uid)) {
var ListaEventosPrivat = mAuth.collection("Eventos")
ListaEventosPrivat.get().addOnSuccessListener { result ->
if (result != null) {
val values = ArrayList<String>()
for (evento in result) {
semEventos.isVisible = false
for (i in checkedTiposArray.indices) {
val checked = checkedTiposArray[i]
if (checked) {
val x = tipos[i]
if (evento.get("Tipo").toString() == x) {
Log.d("merda", x)
values.add(evento.get("nome").toString())
}
}
}
}
Log.d("home", "$values")
val adapter = ArrayAdapter(this, R.layout.listview_item, values)
lista.adapter = adapter
lista.onItemClickListener =
object : AdapterView.OnItemClickListener {
override fun onItemClick(
parent: AdapterView<*>,
view: View,
position: Int,
id: Long
) {
val itemValue =
lista.getItemAtPosition(position) as String
Log.d("home", "grupoID to search: $itemValue")
gv.detalhes = itemValue
val uid = Auth.currentUser?.uid
var eventoClick =
mAuth.collection("Eventos").document(itemValue)
eventoClick.get().addOnSuccessListener { result ->
if (result != null) {
startActivity(
Intent(
view.context,
DetalhesEventoActivity::class.java
)
)
}
}
// Toast.makeText(
// applicationContext,
// "Position :$position\nItem Value : $itemValue",
// Toast.LENGTH_LONG
// ).show()
}
}
}
var x = 0
for (evento in result) {
x += 1
}
if (x > 0) {
semEventos.isVisible = false
} else {
semEventos.isVisible = true
Toast.makeText(
applicationContext,
"Sem eventos disponiveis", Toast.LENGTH_LONG
).show()
}
}
} else {
var ListaEventosPublic = mAuth.collection("Eventos")
ListaEventosPublic.get().addOnSuccessListener { result ->
if (result != null) {
val values = ArrayList<String>()
for (evento in result) {
semEventos.isVisible = false
val f = evento.get("Forma")
if (f == "publico") {
for (i in checkedTiposArray.indices) {
val checked = checkedTiposArray[i]
if (checked) {
val x = tipos[i]
if (evento.get("Tipo").toString() == x) {
Log.d("merda", x)
values.add(evento.get("nome").toString())
}
}
}
}
}
Log.d("home44", "$values")
val adapter = ArrayAdapter(this, R.layout.listview_item, values)
lista.adapter = adapter
lista.onItemClickListener =
object : AdapterView.OnItemClickListener {
override fun onItemClick(
parent: AdapterView<*>,
view: View,
position: Int,
id: Long
) {
val itemValue =
lista.getItemAtPosition(position) as String
Log.d("home", "grupoID to search: $itemValue")
gv.detalhes = itemValue
val uid = Auth.currentUser?.uid
var eventoclick2 =
mAuth.collection("Eventos").document(itemValue)
eventoclick2.get().addOnSuccessListener { result ->
if (result != null) {
startActivity(
Intent(
view.context,
DetalhesEventoActivity::class.java
)
)
}
}
}
}
}
}
}
}
}
}
}
val dialog = alertDialog.create()
dialog.show()
}*/
}
else{
val marca = 1
val intent = Intent(this, FiltrosActivity::class.java).apply {
putExtra(EXTRA_MESSAGE, marca)
}
startActivity(intent)
}
}
/** Inflates the shared right-hand options menu into the action bar. */
override fun onCreateOptionsMenu(menu: Menu?): Boolean {
    menuInflater.inflate(R.menu.menu_direita, menu)
    return true
}
/**
 * Top-bar navigation: sign out (clearing the back stack) or jump to the
 * profile, group-list, group, or home/filters screens.
 */
override fun onOptionsItemSelected(item: MenuItem?): Boolean {
    if (item!!.itemId == R.id.signOut) {
        Auth.signOut()
        // Clear the task so Back cannot return into an authenticated screen.
        val intent = Intent(this, LoginActivity::class.java)
        intent.flags =
            Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
        startActivity(intent)
        //startActivity(Intent (this, MainActivity :: class.java ))
    }
    if (item.itemId == R.id.profile) {
        startActivity(Intent(this, ProfileActivity::class.java))
    }
    if (item.itemId == R.id.Lis) {
        startActivity(Intent(this, ListaGruposActivity::class.java))
    }
    if (item.itemId == R.id.grupo) {
        startActivity(Intent(this, VerGrupoActivity::class.java))
    }
    if (item.itemId == R.id.home) {
        // marca = 0 asks FiltrosActivity to show the filter buttons directly.
        val marca = 0
        val intent = Intent(this, FiltrosActivity::class.java).apply {
            putExtra(EXTRA_MESSAGE, marca)
        }
        startActivity(intent)
    }
    return super.onOptionsItemSelected(item)
}
}
<file_sep>/app/src/main/java/com/example/app/ListaSociosOrgActivity.kt
package com.example.app
import android.os.Build
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.util.Log
import android.widget.ListView
import androidx.annotation.RequiresApi
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_lista_socios_org.*
/**
 * Organiser view listing the registered members ("sócios") of one group.
 * The group is selected by the number passed via AlarmClock.EXTRA_MESSAGE.
 */
class ListaSociosOrgActivity : AppCompatActivity() {
    // Realtime Database handle (name is misleading — it is not an auth object).
    val mAuth = FirebaseDatabase.getInstance()
    val auth = FirebaseAuth.getInstance()
    lateinit var lista: ListView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_lista_socios_org)
        lista = ListViewSociosInscritos  // synthetic view binding
        dados()
    }

    /**
     * Finds the group whose "Numero" matches the intent extra, extracts its
     * member numbers by string-splitting the raw toString() of the "membros"
     * map, then joins each against /Users to render name + licence number.
     * NOTE(review): parsing relies on the map's "{k=v, ...}" toString layout
     * and on `num` being non-null — confirm both with callers.
     */
    private fun dados() {
        val user = auth.currentUser?.uid
        val num = intent.getStringExtra(AlarmClock.EXTRA_MESSAGE)?.toInt()
        val listaNumeroSocios = ArrayList<String>()
        val t = mAuth.getReference("Users")
        if (user != null) {
            val mail = mAuth.getReference("Grupos")
            val values = ArrayList<Model>()
            val j = object : ChildEventListener {
                @RequiresApi(Build.VERSION_CODES.N)
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    val numeroGrupo = dataSnapshot.child("Numero").value.toString()
                    val nameGrupo = dataSnapshot.child("nome").value.toString()
                    if (num == numeroGrupo.toInt()) {
                        val membros = dataSnapshot.child("membros").value.toString()
                        Log.d("lista", "t= $membros")
                        // var separaVirgula = membros.split(',', '=') as ArrayList<String>
                        //
                        // Log.d("lista", "separa = ${separaVirgula}")
                        // NOTE(review): split() returns a read-only List; this cast
                        // assumes the runtime type is ArrayList — confirm.
                        val separa = membros.split('{', ',', '=') as ArrayList<String>
                        Log.d("lista", "separa = $separa")
                        // drop the leading "{"
                        separa.removeAt(0)
                        Log.d("lista", "separa = $separa")
                        Log.d("lista", "separa = ${separa.size}")
                        // Every second token is a member number (key of the map).
                        for (i in separa.indices step 2) {
                            listaNumeroSocios.add(separa[i])
                            Log.d("lista", "separa = ${separa[i]}")
                        }
                        Log.d("lista", "lista = $listaNumeroSocios")
                        t.addValueEventListener(object : ValueEventListener {
                            override fun onDataChange(snapshot: DataSnapshot) {
                                for (i in listaNumeroSocios) {
                                    Log.d("lista", "socio = $i")
                                    // Resolve member number -> uid via the group's map.
                                    val membro = dataSnapshot.child("membros")
                                        .child(i.trim().replace(" ", "")).value.toString()
                                    Log.d("lista", "socio = $membro")
                                    val nome = snapshot.child(membro).child("name").value
                                        .toString()
                                    Log.d("lista", "nome = $nome")
                                    val carta =
                                        snapshot.child(membro).child("Carta Caçadore")
                                            .value.toString()
                                    Log.d("lista", "carta= $carta")
                                    values.add(
                                        Model(
                                            nome, carta.toInt(), i.trim().replace(" ", "").toInt(),
                                            membro
                                        )
                                    )
                                    // Adapter is rebound on every iteration (kept as-is).
                                    lista.adapter = MyListAdapter_ListaSocios(
                                        this@ListaSociosOrgActivity,
                                        R.layout.listview_item_pendentes,
                                        values
                                    )
                                    lista.setOnItemClickListener { parent, _, position, _ ->
                                        val elemnt = parent.getItemAtPosition(position) as Model
                                        Log.d(
                                            "lista",
                                            "fff :$elemnt"
                                        )
                                    }
                                }
                            }
                            override fun onCancelled(error: DatabaseError) {
                                TODO("Not yet implemented")
                            }
                        })
                    }
                }
                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }
                override fun onChildRemoved(snapshot: DataSnapshot) {
                    TODO("Not yet implemented")
                }
                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }
                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            }
            mail.addChildEventListener(j)
        }
    }
}<file_sep>/app/src/main/java/com/example/app/FiltrosActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.firestore.FirebaseFirestore
import kotlinx.android.synthetic.main.activity_filtros.*
/**
 * Event-filter screen. The AlarmClock.EXTRA_MESSAGE int extra ("marca")
 * selects the mode: 1 relaunches this screen with marca = 0; 0 shows the
 * category buttons. Each final choice forwards a filter string to
 * HomeActivity via the same extra.
 */
class FiltrosActivity : AppCompatActivity() {
    // Firestore handle; not referenced in this class — TODO confirm it is needed.
    val mAuth = FirebaseFirestore.getInstance()
    val Auth = FirebaseAuth.getInstance()

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_filtros)
        // Synthetic view bindings for the filter buttons.
        val maior = bMaior
        val menor = bMenor
        val tudo = bTudo
        val esperas = bEsperas
        val montaria = bMontarias
        val tordos = bTordos
        val rolas = bRolas
        val dias = bDias
        val marca = intent.getIntExtra(EXTRA_MESSAGE, -1)
        if (marca == 1) {
            //startActivity(Intent(this@FiltrosActivity, FiltrosActivity::class.java))
            // Relaunch this screen in "show filters" mode (marca = 0).
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        // Sub-type buttons start hidden; they appear after a category is picked.
        rolas.setVisibility(View.INVISIBLE)
        tordos.setVisibility(View.INVISIBLE)
        montaria.setVisibility(View.INVISIBLE)
        esperas.setVisibility(View.INVISIBLE)
        dias.setVisibility(View.INVISIBLE)
        if (marca == 0) {
            // "Maior": reveal the montaria/esperas choices.
            maior.setOnClickListener(View.OnClickListener {
                rolas.setVisibility(View.INVISIBLE)
                dias.setVisibility(View.INVISIBLE)
                tordos.setVisibility(View.INVISIBLE)
                montaria.setVisibility(View.VISIBLE)
                esperas.setVisibility(View.VISIBLE)
                esperas.setOnClickListener{ view ->
                    // Forward the chosen filter to HomeActivity.
                    val filtro = "esperas"
                    val intent = Intent(this, HomeActivity::class.java).apply {
                        putExtra(EXTRA_MESSAGE, filtro)
                    }
                    startActivity(intent)
                    Log.d(
                        "filtro",
                        "g : ${
                            filtro
                        }"
                    )
                }
                montaria.setOnClickListener {
                    val filtro = "montaria"
                    val intent = Intent(this, HomeActivity::class.java).apply {
                        putExtra(EXTRA_MESSAGE, filtro)
                    }
                    startActivity(intent)
                    Log.d(
                        "filtro",
                        "g : ${
                            filtro
                        }"
                    )
                }
            })
            // "Menor": reveal the rolas/tordos/dias choices.
            menor.setOnClickListener(View.OnClickListener { view ->
                val marca = intent.getIntExtra(EXTRA_MESSAGE, 0)
                if (marca == 1) {
                    startActivity(Intent(view.context, FiltrosActivity::class.java))
                }
                rolas.setVisibility(View.VISIBLE)
                tordos.setVisibility(View.VISIBLE)
                dias.setVisibility(View.VISIBLE)
                montaria.setVisibility(View.INVISIBLE)
                esperas.setVisibility(View.INVISIBLE)
                tordos.setOnClickListener(View.OnClickListener { view ->
                    val filtro = "tordos"
                    val intent = Intent(this, HomeActivity::class.java).apply {
                        putExtra(EXTRA_MESSAGE, filtro)
                    }
                    startActivity(intent)
                })
                rolas.setOnClickListener(View.OnClickListener { view ->
                    val filtro = "rolas"
                    val intent = Intent(this, HomeActivity::class.java).apply {
                        putExtra(EXTRA_MESSAGE, filtro)
                    }
                    startActivity(intent)
                })
                dias.setOnClickListener(View.OnClickListener { view ->
                    val filtro = "dias"
                    val intent = Intent(this, HomeActivity::class.java).apply {
                        putExtra(EXTRA_MESSAGE, filtro)
                    }
                    startActivity(intent)
                })
            })
            // "Tudo": no filtering — go straight to HomeActivity.
            tudo.setOnClickListener(View.OnClickListener { view ->
                val marca = intent.getIntExtra(EXTRA_MESSAGE, 0)
                if (marca == 1) {
                    startActivity(Intent(view.context, FiltrosActivity::class.java))
                }
                rolas.setVisibility(View.INVISIBLE)
                tordos.setVisibility(View.INVISIBLE)
                dias.setVisibility(View.INVISIBLE)
                montaria.setVisibility(View.INVISIBLE)
                esperas.setVisibility(View.INVISIBLE)
                val filtro = "tudo"
                val intent = Intent(this, HomeActivity::class.java).apply {
                    putExtra(EXTRA_MESSAGE, filtro)
                }
                startActivity(intent)
            })
        }
    }

    /** Inflates the shared right-hand options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /** Top-bar navigation: sign out or jump to the other screens. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags =
                Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
            //startActivity(Intent (this, MainActivity :: class.java ))
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this, VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }
}<file_sep>/app/src/main/java/com/example/app/AdmissaoActivity.kt
package com.example.app
import android.app.AlertDialog
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.ListView
import android.widget.Toast
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import kotlinx.android.synthetic.main.activity_admissao.*
import kotlinx.android.synthetic.main.adesao_custom_view.view.*
import java.util.HashMap
/**
 * Group-admin screen listing pending membership requests ("Pendentes") of the
 * group whose "Numero" arrives via the EXTRA_MESSAGE intent extra. Tapping an
 * entry opens an accept/reject dialog; accepting moves the user into the
 * group's "membros" map.
 */
class AdmissaoActivity : AppCompatActivity() {
    val mAuth = FirebaseDatabase.getInstance()  // Realtime Database handle
    val auth = FirebaseAuth.getInstance()
    lateinit var lista: ListView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_admissao)
        lista = ListViewPendentes  // synthetic view binding
        dados()
    }

    /**
     * Finds the group matching the intent extra, checks that the current user
     * is its admin, then joins every pending uid against /Users to populate
     * the list of requests.
     */
    private fun dados() {
        val user = auth.currentUser?.uid
        val num = intent.getStringExtra(EXTRA_MESSAGE)?.toInt()
        var n: String
        var c: Int
        var s: Int
        var uid: String
        var socio = ""
        if (user != null) {
            val mail = mAuth.getReference("Grupos")
            val values = ArrayList<Model>()
            val valor = ArrayList<String>()
            val j = object : ChildEventListener {
                override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                    //val g = dataSnapshot.child("nome").getValue().toString()
                    val admin = dataSnapshot.child("admin").value.toString()
                    val numeroGrupo = dataSnapshot.child("Numero").value.toString()
                    val nameGrupo = dataSnapshot.child("nome").value.toString()
                    Log.d("adesa", "numero= $numeroGrupo")
                    Log.d(
                        "VerGrupo2",
                        "$user"
                    )
                    // val m = mAuth.getReference("Grupos").child(g)
                    // Log.d(
                    //     "VerGrupo2",
                    //     " ${m}"
                    // )
                    val t = mAuth.getReference("Users")
                    // Only the requested group, and only for its admin.
                    if (num == numeroGrupo.toInt()) {
                        if (admin == user) {
                            t.addValueEventListener(object : ValueEventListener {
                                override fun onDataChange(snapshot: DataSnapshot) {
                                    val teste = snapshot.children
                                    //valor.add(teste.toString())
                                    Log.d("adesa", "teste= $teste")
                                    //Log.d("adesa", "valor= $valor")
                                    for (i in teste) {
                                        Log.d("adesa", "t= ${i.key}")
                                        // Membership test by substring on the raw
                                        // toString of "Pendentes" (kept as-is).
                                        val existe =
                                            dataSnapshot.child("Pendentes")
                                                .value.toString()
                                        if (existe.contains(i.key.toString())) {
                                            val nome =
                                                snapshot.child("${i.key}").child("name")
                                                    .value.toString()
                                            Log.d("adesa", "nome= $nome")
                                            val carta =
                                                snapshot.child("${i.key}").child("Carta Caçadore")
                                                    .value.toString()
                                            Log.d("adesa", "carta= $carta")
                                            socio =
                                                dataSnapshot.child("Pendentes").child("${i.key}")
                                                    .child("numero socio").value
                                                    .toString()
                                            Log.d("adesa", "g= $socio")
                                            values.add(
                                                Model(
                                                    nome, carta.toInt(), socio.toInt(),
                                                    i.key.toString()
                                                )
                                            )
                                            uid = i.key.toString()
                                        }
                                        // Adapter rebound each iteration (kept as-is).
                                        lista.adapter = MyListAdapter(
                                            this@AdmissaoActivity,
                                            R.layout.listview_item_pendentes,
                                            values
                                        )
                                        lista.setOnItemClickListener { parent, _, position, _ ->
                                            // lista.onItemClickListener =
                                            // object : AdapterView.OnItemClickListener {
                                            // override fun onItemClick(
                                            // parent: AdapterView<*>?,
                                            // view: View?,
                                            // position: Int,
                                            // id: Long
                                            // ) {
                                            val elemnt = parent.getItemAtPosition(position) as Model
                                            val itemValue = lista.getItemIdAtPosition(position)
                                            Log.d(
                                                "adesa",
                                                "ffff :$elemnt"
                                            )
                                            mAuth.getReference("Users").child(elemnt.toString())
                                                .addValueEventListener(object :
                                                    ValueEventListener {
                                                    override fun onDataChange(snapshot: DataSnapshot) {
                                                        val name =
                                                            snapshot.child("name")
                                                                .value.toString()
                                                        // NOTE(review): "Cart<NAME>" looks like an
                                                        // anonymisation artifact of the original
                                                        // key (elsewhere "Carta Caçadore") —
                                                        // confirm before relying on it.
                                                        val cartacc =
                                                            snapshot.child("Cart<NAME>")
                                                                .value.toString()
                                                        val numSocio = snapshot.child("Grupos")
                                                            .child(
                                                                num.toString()
                                                            ).child("Socio")
                                                            .value.toString()
                                                        val refUser = snapshot.child("uid")
                                                            .value.toString()
                                                        n = name
                                                        c = cartacc.toInt()
                                                        s = socio.toInt()
                                                        uid = refUser
                                                        Log.d(
                                                            "adesa",
                                                            "ffff :${nameGrupo}"
                                                        )
                                                        open(n, c, s, nameGrupo, uid, numeroGrupo)
                                                    }
                                                    override fun onCancelled(error: DatabaseError) {
                                                        TODO("Not yet implemented")
                                                    }
                                                })
                                            // }
                                            //}
                                        }
                                    }
                                }
                                override fun onCancelled(error: DatabaseError) {
                                    TODO("Not yet implemented")
                                }
                            })
                        }
                    }
                }
                override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                    // On any group change, jump to the organiser event screen.
                    startActivity(
                        Intent(
                            this@AdmissaoActivity,
                            CriarOrgEventoActivity::class.java
                        ).apply {
                            putExtra(
                                EXTRA_MESSAGE,
                                num.toString()
                            )
                        })
                }
                override fun onChildRemoved(snapshot: DataSnapshot) {
                    TODO("Not yet implemented")
                }
                override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                    TODO("Not yet implemented")
                }
                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            }
            mail.addChildEventListener(j)
        }
    }

    /**
     * Shows the accept/reject dialog for one pending member. On accept: adds
     * the uid under the group's "membros" keyed by member number, records the
     * number under the user's "Grupos", and removes the pending entry.
     */
    fun open(
        name: String,
        numCC: Int,
        numSoc: Int,
        nomeGrupo: String,
        uid: String,
        numeroGrupo: String
    ) {
        val inflater = layoutInflater
        val inflateview = inflater.inflate(R.layout.adesao_custom_view, null)
        val texto = inflateview.textViewShow
        var num: Int
        val valu = ArrayList<String>()
        // texto.text =
        //     "Nome: " + nome + "\n" + "Nº Carta Caçador: " + numeroCC + "\n" + "Nº Socio: " + numeroSocio
        texto.text = getString(R.string.adesao_custom_textViewShow, name, numCC, numSoc)
        val alertDialog = AlertDialog.Builder(this)
        alertDialog.setTitle("Socio Pendente")
        alertDialog.setView(inflateview)
        alertDialog.setCancelable(false)
        alertDialog.setNegativeButton("Rejeitar") { _, _ ->
            // NOTE(review): rejection only shows a toast; the pending entry
            // is not removed from the database.
            Toast.makeText(this, "Rejeitado", Toast.LENGTH_LONG).show()
        }
        alertDialog.setPositiveButton("Aceitar") { _, _ ->
            mAuth.getReference("Grupos").child(nomeGrupo).addListenerForSingleValueEvent(
                object : ValueEventListener {
                    override fun onDataChange(dataSnapshot: DataSnapshot) {
                        val a = dataSnapshot.child("membros").value.toString()
                        valu.add(a)
                        val b = dataSnapshot.child("membros").childrenCount
                        // var c = a.split('=') as ArrayList<String>
                        //
                        // Log.d(
                        //     "adesa",
                        //     "DocumentSnapshot data: ${a.split('=')}"
                        // )
                        //
                        // Log.d(
                        //     "adesa",
                        //     "DocumentSnapshot data: ${c}"
                        // )
                        //
                        //
                        // Log.d(
                        //     "adesa",
                        //     "DocumentSnapshot data: ${c[0]}"
                        // )
                        // Log.d(
                        //     "adesa",
                        //     "DocumentSnapshot data: ${c[2]}"
                        // )
                        Log.d(
                            "adesa",
                            "DocumentSnapshot data: $b"
                        )
                        num = valu.size
                        num += 1
                        Log.d(
                            "adesa",
                            "DocumentSnapshot data: $num"
                        )
                        Log.d(
                            "adesa",
                            "DocumentSnapshot data: $numSoc"
                        )
                        val update = HashMap<String, Any>()
                        update["$numSoc"] = uid
                        val updateUser = HashMap<String, Any>()
                        updateUser[numeroGrupo] = numSoc
                        // on accept: add the user to the group's members
                        mAuth.getReference("Grupos").child(nomeGrupo).child("membros")
                            .updateChildren(update)
                        // record the member number under the user's "Grupos" section
                        mAuth.getReference("Users").child(uid).child("Grupos")
                            .updateChildren(updateUser)
                        // remove the user from the pending list
                        mAuth.getReference("Grupos").child(nomeGrupo).child("Pendentes")
                            .child(uid)
                            .removeValue()
                    }
                    override fun onCancelled(error: DatabaseError) {
                        TODO("Not yet implemented")
                    }
                })
            Toast.makeText(this, "Aceitou", Toast.LENGTH_LONG).show()
        }
        val dialog = alertDialog.create()
        dialog.show()
    }

    /** Inflates the organiser variant of the options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita_org, menu)
        return true
    }

    /** Top-bar navigation for the organiser menu. */
    override fun onOptionsItemSelected(item: MenuItem): Boolean {
        if (item.itemId == R.id.signOut2) {
            auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.grupo2) {
            startActivity(Intent(this, OrgActivity::class.java))
        }
        // if (item.itemId == R.id.pendente) {
        //
        //     startActivity(Intent(this, AdmissaoActivity::class.java))
        // }
        return super.onOptionsItemSelected(item)
    }
}<file_sep>/app/src/main/java/com/example/app/PreferenciasActivity.kt
package com.example.app
import android.content.Intent
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.provider.AlarmClock
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.ListView
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.*
import com.google.firebase.firestore.FirebaseFirestore
import kotlinx.android.synthetic.main.activity_preferencias.*
/**
 * First-run preferences screen: lists every group from the Realtime Database
 * so a new user can pick one to join (AdesaoActivity), or skip straight to
 * the filters screen (marking FirstTime = false on the user record).
 */
class PreferenciasActivity : AppCompatActivity() {
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    lateinit var gv: VariaveisGlobais  // never initialised here — TODO confirm unused
    lateinit var listView: ListView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_preferencias)
        val pular = bPular
        val change = Auth.currentUser?.uid.toString()
        // "Pular" (skip): persist FirstTime=false, then open the filters screen.
        pular.setOnClickListener {
            val marca = 1
            val intent = Intent(this, FiltrosActivity::class.java) .apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            val first = mAuth.getReference("Users").child(change)
            first.addValueEventListener(object : ValueEventListener {
                override fun onDataChange(snapshot: DataSnapshot) {
                    val pessoa = HashMap<String, Any>()
                    pessoa["FirstTime"] = false
                    mAuth.getReference("Users").child(change).updateChildren(pessoa)
                    intent.flags =
                        Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
                    startActivity(intent)
                }
                override fun onCancelled(error: DatabaseError) {
                    TODO("Not yet implemented")
                }
            })
        }
        listView = findViewById(R.id.listViewPre)
        var d = mAuth.getReference("Grupos")
        var list = ArrayList<String>()
        // Stream every group into the list; the adapter is rebound per add.
        val c = object : ChildEventListener {
            override fun onChildAdded(dataSnapshot: DataSnapshot, previousChildName: String?) {
                val grupo = dataSnapshot.getValue()
                val g = dataSnapshot.child("nome").getValue().toString()
                list.add(
                    "${g}"
                )
                Log.d(
                    "Preferencias",
                    " pref $grupo"
                )
                Log.d(
                    "Preferencias",
                    " pref $g"
                )
                Log.d(
                    "Preferencias",
                    " pref $list"
                )
                val adapter3 = ArrayAdapter(this@PreferenciasActivity, R.layout.listview_item, list)
                listView.adapter = adapter3
                listView.onItemClickListener =
                    object : AdapterView.OnItemClickListener {
                        override fun onItemClick(
                            parent: AdapterView<*>,
                            view: View,
                            position: Int,
                            id: Long
                        ) {
                            val itemValue = listView.getItemAtPosition(position)
                            val message = itemValue as String
                            Log.d("Preferencias", "mensagem: $message" + "item: $itemValue ")
                            var b = mAuth.getReference("Grupos").child(itemValue.toString())
                            //var b = mAuth.collection("Grupos").document(itemValue.toString())
                            b.addValueEventListener(object : ValueEventListener {
                                override fun onDataChange(snapshot: DataSnapshot) {
                                    // Open the join-group screen with the group name.
                                    startActivity(
                                        Intent(view.context, AdesaoActivity::class.java).apply {
                                            putExtra(EXTRA_MESSAGE, message)
                                        }
                                    )
                                }
                                override fun onCancelled(error: DatabaseError) {
                                    TODO("Not yet implemented")
                                }
                            })
                        }
                    }
            }
            override fun onChildChanged(snapshot: DataSnapshot, previousChildName: String?) {
                // Relaunch the screen so the list reflects the change.
                startActivity(Intent(this@PreferenciasActivity, PreferenciasActivity::class.java))
            }
            override fun onChildRemoved(snapshot: DataSnapshot) {
                TODO("Not yet implemented")
            }
            override fun onChildMoved(snapshot: DataSnapshot, previousChildName: String?) {
                TODO("Not yet implemented")
            }
            override fun onCancelled(error: DatabaseError) {
                TODO("Not yet implemented")
            }
        }
        d.addChildEventListener(c)
    }

    /** Inflates the shared right-hand options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /** Top-bar navigation: sign out or jump to the other screens. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags =
                Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
            //startActivity(Intent (this, MainActivity :: class.java ))
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this, VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            val intent = Intent(this, FiltrosActivity::class.java)
            intent.flags =
                Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }
}<file_sep>/app/src/main/java/com/example/app/AdesaoActivity.kt
package com.example.app
import android.content.Intent
import android.os.Bundle
import android.provider.AlarmClock.EXTRA_MESSAGE
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import androidx.appcompat.app.AppCompatActivity
import com.google.firebase.auth.FirebaseAuth
import com.google.firebase.database.DataSnapshot
import com.google.firebase.database.DatabaseError
import com.google.firebase.database.FirebaseDatabase
import com.google.firebase.database.ValueEventListener
import com.google.firebase.firestore.FieldValue
import com.google.firebase.firestore.FirebaseFirestore
import kotlinx.android.synthetic.main.activity_adesao.*
/**
 * Join-a-group screen. Receives the group name via AlarmClock.EXTRA_MESSAGE,
 * shows the group's name and number, and on confirm writes the current user
 * into the group's "Pendentes" node with the member number they typed.
 */
class AdesaoActivity : AppCompatActivity() {
    val Auth = FirebaseAuth.getInstance()
    val mAuth = FirebaseDatabase.getInstance()
    lateinit var gv: VariaveisGlobais  // app-wide globals holder

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        gv = application as VariaveisGlobais
        setContentView(R.layout.activity_adesao)
        val texto = tInfo
        val botao = bEntrar
        val user = Auth.currentUser
        val message = intent.getStringExtra(EXTRA_MESSAGE)
        if (user != null) {
            // One-shot read of the selected group to show its name and number.
            // NOTE(review): `message` is nullable here — confirm callers always
            // supply the extra.
            val mail = mAuth.getReference("Grupos").child(message)
            mail.addListenerForSingleValueEvent(object : ValueEventListener {
                override fun onDataChange(dataSnapshot: DataSnapshot) {
                    val name = dataSnapshot.child("nome").getValue().toString()
                    val numero = dataSnapshot.child("Numero").getValue().toString()
                    texto.text = "nome: $name\nnumero de associativa:$numero"
                    Log.d(
                        "adesao", "DocumentSnapshot data: ${name} }"
                    )
                }
                override fun onCancelled(error: DatabaseError) {
                    Log.d("adesao", "No such document")
                }
            })
        }
        botao.setOnClickListener {
            showAlert()
        }
    }

    /**
     * Registers the current user under Grupos/<name>/Pendentes with the
     * member number typed in grupoCodigo, then returns to the filters screen.
     */
    private fun showAlert() {
        val cod = grupoCodigo
        val codigo = cod.text.toString()
        val message = intent.getStringExtra(EXTRA_MESSAGE)
        val c = mAuth.getReference("Grupos").child(message)
        c.addListenerForSingleValueEvent(object : ValueEventListener {
            override fun onDataChange(dataSnapshot: DataSnapshot) {
                val use = Auth.currentUser
                val socio: MutableMap<String, Any> = HashMap()
                socio["numero socio"] = codigo
                // NOTE(review): use!! throws if the session expired — confirm.
                c.child("Pendentes")
                    .child(use!!.uid).setValue(socio)
                val marca = 0
                val intent = Intent(this@AdesaoActivity, FiltrosActivity::class.java).apply {
                    putExtra(EXTRA_MESSAGE, marca)
                }
                startActivity(intent)
                Log.d("adesao", "DocumentSnapshot data: ${message} ")
            }
            override fun onCancelled(error: DatabaseError) {
                TODO("Not yet implemented")
            }
        })
    }

    /** Inflates the shared right-hand options menu. */
    override fun onCreateOptionsMenu(menu: Menu?): Boolean {
        val inflater = menuInflater
        inflater.inflate(R.menu.menu_direita, menu)
        return true
    }

    /** Top-bar navigation: sign out or jump to the other screens. */
    override fun onOptionsItemSelected(item: MenuItem?): Boolean {
        if (item!!.itemId == R.id.signOut) {
            Auth.signOut()
            val intent = Intent(this, LoginActivity::class.java)
            intent.flags = Intent.FLAG_ACTIVITY_CLEAR_TASK.or(Intent.FLAG_ACTIVITY_NEW_TASK)
            startActivity(intent)
        }
        if (item.itemId == R.id.profile) {
            startActivity(Intent(this, ProfileActivity::class.java))
        }
        if (item.itemId == R.id.grupo) {
            startActivity(Intent(this, VerGrupoActivity::class.java))
        }
        if (item.itemId == R.id.Lis) {
            startActivity(Intent(this, ListaGruposActivity::class.java))
        }
        if (item.itemId == R.id.home) {
            val marca = 0
            val intent = Intent(this, FiltrosActivity::class.java).apply {
                putExtra(EXTRA_MESSAGE, marca)
            }
            startActivity(intent)
        }
        return super.onOptionsItemSelected(item)
    }
}
| 0a00a56dfada75c995b0ed4122705e2e66041299 | [
"Kotlin",
"Gradle"
] | 22 | Kotlin | andreneves7/AppHuntig | a5edec75fd35c2904056567d72cf418d2c8ae2d7 | 1e7cdb2213f6a0fa70b0eb0537eec361e3541f68 |
refs/heads/master | <repo_name>matsko/be-fe-docker-compose<file_sep>/README.md
BE / FE Docker Project
===
# Installation
1. Install docker and docker-hub
1. Install `docker-compose`
1. Clone the repo and run `make dev`
# Usage
```bash
# run the project
make dev
# rebuild the project (whenever `docker-compose.yaml` is updated)
make dev-rebuild
# restart all processes
make dev-restart
```
## Docker Commands
```bash
# list docker processes
docker ps
# open a shell inside a running container
docker exec -it DOCKER_PID /bin/sh
# kill a running container
docker kill DOCKER_PID
# restart a docker service
docker-compose restart SERVICE
```
<file_sep>/api/src/main.ts
import express from 'express';
import {buildSchema} from 'graphql';
import {graphqlHTTP} from 'express-graphql';
const app = express();
const port = 3000;
// Liveness route at the root path.
app.get('/', (_req, res) => {
  res.send('Hello World!');
});

// GraphQL schema with a single `hello` query field.
const schema = buildSchema(`
  type Query {
    hello: String
  }
`);

// Resolver map: one resolver per top-level schema field.
const root = {
  hello: () => 'Hello world!',
};

// Mount the GraphQL endpoint with the GraphiQL IDE enabled.
app.use(
  '/graphql',
  graphqlHTTP({
    schema,
    rootValue: root,
    graphiql: true,
  })
);
app.listen(port, () => {
console.log(`Example app listening at http://localhost:${port}`)
});<file_sep>/docker-compose.yaml
# Development stack: an nginx reverse proxy in front of a Node API and a
# client app. Host port 3000 (nginx) is the single public entry point.
version: '3'
services:
  nginx:
    build:
      dockerfile: Dockerfile.dev
      context: ./nginx
    hostname: '0.0.0.0'
    restart: always
    ports:
      - '3000:80'   # public entry point -> nginx :80
    depends_on:
      - client
      - api
  api:
    build:
      dockerfile: Dockerfile.dev
      context: ./api
    hostname: '0.0.0.0'
    restart: always
    ports:
      - '5555:3000'   # direct access to the API for debugging
    volumes:
      # keep the container's node_modules; mount source for live reload
      - ./api/node_modules:/app/node_modules
      - ./api:/app
    command:
      yarn dev
  client:
    build:
      dockerfile: Dockerfile.dev
      context: ./client
    hostname: '0.0.0.0'
    restart: always
    stdin_open: true   # keeps the dev server's interactive stdin open
    volumes:
      - ./client/node_modules:/app/node_modules
      - ./client:/app
    ports:
      - '7777:3000'   # direct access to the client dev server
command: yarn dev<file_sep>/Makefile
# Convenience wrappers around docker-compose for local development.

# Start the full dev stack (nginx + api + client).
dev:
	docker-compose up

# Rebuild images; run after changing docker-compose.yaml or a Dockerfile.
dev-rebuild:
	docker-compose up --build

# Restart all running services.
dev-restart:
docker-compose restart<file_sep>/client/pages/_app.js
import '../styles/globals.css'
import { Provider, Client, defaultExchanges } from 'urql'

// urql GraphQL client; '/api/graphql' is a relative URL resolved against
// the app's origin.
const client = new Client({
  url: '/api/graphql',
  exchanges: defaultExchanges
})

// Custom Next.js App: wraps every page in the urql Provider so any page
// component can run GraphQL queries against `client`.
function MyApp({ Component, pageProps }) {
  return <Provider value={client}>
    <Component {...pageProps} />
  </Provider>
}

export default MyApp
"YAML",
"Markdown",
"JavaScript",
"Makefile",
"TypeScript"
] | 5 | Markdown | matsko/be-fe-docker-compose | cb892907373075b0cb887ec099be823ed25b8fe1 | 8e609c12ccccd3134f3f420eb808454c5469619a |
refs/heads/master | <file_sep># Talking-Clock
PHP Solution with some html and bootstrap 4
<file_sep><?php
/**
* @method validateTime validates time using regex
* @param $time is string
* @param return returns boolean
*/
/**
 * Validate a "HH:MM" 24-hour time string.
 *
 * The original pattern `/^[0][0-9]|[1][0-9]|[2][0-3]:[0-5][0-9]*$/` had an
 * unanchored alternation: `^0[0-9]` matched "03" with no minutes at all, and
 * the trailing `[0-9]*` accepted "23:599". Group the hour alternatives and
 * anchor the whole expression.
 *
 * @param string $time candidate time string
 * @return bool true iff $time is exactly "00".."23" ":" "00".."59"
 */
function validateTime($time) {
    return preg_match('/^([01][0-9]|2[0-3]):[0-5][0-9]$/', $time) === 1;
}
/**
 * Map a 24-hour hour value to its meridiem suffix.
 *
 * @param string $hour hour component ("00".."23")
 * @return string "am" for hours 0-11, "pm" for hours 12-23
 */
function checkDayTime ($hour) {
    return ($hour > 11) ? "pm" : "am";
}
/**
* method converts 24hr to 12hr hourstring
* @param $hour int
* @param return returns string
*/
/**
 * Convert a 24-hour hour string to its 12-hour clock word.
 *
 * Replaces the 24-case switch plus the explode('0', ...) leading-zero hack.
 * That hack relied on PHP's loose switch comparison treating "" as 0, which
 * no longer holds on PHP 8 — "00" fell through to the default and returned
 * null. Casting to int handles leading zeros for every input.
 *
 * @param string $hour hour component ("00".."23")
 * @return string|null hour word ("twelve".."eleven"), or null when the hour
 *                     is out of range (matching the original default branch)
 */
function hourString($hour) {
    static $names = array(
        "twelve", "one", "two", "three", "four", "five",
        "six", "seven", "eight", "nine", "ten", "eleven"
    );
    $h = (int)$hour;          // tolerates leading zeros such as "09"
    if ($h < 0 || $h > 23) {
        return null;          // out-of-range, same as the original default
    }
    return $names[$h % 12];   // 0/12 -> "twelve", 13 -> "one", etc.
}
function minuteString($minute) {
$minuteToWord = "";
$ones = array(
0 => "",
1 => "one",
2 => "two",
3 => "three",
4 => "four",
5 => "five",
6 => "six",
7 => "seven",
8 => "eight",
9 => "nine",
10 => "ten",
11 => "eleven",
12 => "twelve",
13 => "thirteen",
14 => "fourteen",
15 => "fifteen",
16 => "sixteen",
17 => "seventeen",
18 => "eighteen",
19 => "nineteen"
);
$tens = array(
0 => "",
1 => "ten",
2 => "twenty",
3 => "thirty",
4 => "forty",
5 => "fifty",
);
if($minute > 0 && $minute < 20 && $minute != 10 && $minute != 20 && $minute != 30 && $minute != 40 && $minute != 50) {
if($minute < 10) {
$minuteToWord = "oh ".$ones[$minute[1]];
}
else{
$minuteToWord = $ones[$minute];
}
}
else if($minute < 20 && $minute < 9 && $minute!= 00) {
$minuteToWord = $ones[$minute];
}
else{
if($minute[1]!=0){
$minuteToWord = $tens[$minute[0]]. " ".$ones[$minute[1]];
}
else{
$minuteToWord = $tens[$minute[0]];
}
}
return $minuteToWord;
}
//Time in 24 hour formart
$time = null;
$error = null;
$talkingClock = null;
if (isset($_POST['button'])){
$time = trim($_POST['time']);
$validation = validateTime($time);
$error = "";
if($validation != true) {
$error = "Error! The time ".$time ." is invalid please enter time in 24hr formart.";
}
else{
//split time into hours and minutes
$timeArray = explode(':', $time);
$hour = $timeArray[0];
$minute = $timeArray[1];
$dayTime = checkDayTime($hour);
$hourWord = hourString($hour);
$minuteWord = minuteString($minute);
$talkingClock = "It's ".$hourWord ." ".$minuteWord." ".$dayTime;
}
}
<file_sep><?php include 'clock.php';
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
<title>Talking clock</title>
</head>
<body>
<div class="container">
<h1>Talking Clock</h1>
<h3>Enter Time in 24hr Format 00:00</h3>
<h5 class="text-danger"><?php echo $error;?></h5>
<h5 class="text-success"><?php echo $talkingClock;?></h5>
<form class="form-inline" method="post">
<div class="form-group mx-sm-3 mb-2">
<label for="time" class="sr-only">Time</label>
<input type="text" name="time" class="form-control" id="time" placeholder="00:00">
</div>
<button type="submit" name="button" class="btn btn-primary mb-2">Convert Time</button><br>
<div class="col-auto">
<button type="submit" class="btn btn-default mb-2">Reset</button>
</div>
</form>
</div>
</body>
</html>
| 334a19429daf3122fce86302c590c01662c28de8 | [
"Markdown",
"PHP"
] | 3 | Markdown | tinasheMh/Talking-Clock | a0d014999369a4f883c17cae45a53b8e0f669cbd | 0d78aeff867aa356a2e4e924536306b38a5fda70 |
refs/heads/master | <file_sep>describe("Captcha", function() {
it("should show 1 + ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,1,1,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("1 + ONE");
});
it("should show 2 + ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,2,1,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("2 + ONE");
});
it("should show 3 + ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,3,1,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("3 + ONE");
});
it("should show 1 - ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,1,2,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("1 - ONE");
});
it("should show 1 * ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,1,3,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("1 * ONE");
});
it("should show 1 / ONE correctly", function() {
//Arrange
var captcha = new Captcha(1,1,4,1);
//Act
var result = captcha.show();
//Assert
expect(result).toEqual("1 / ONE");
});
}); | 5a9b93fe29b2bbb96573f5e2963f6aa8227e869c | [
"JavaScript"
] | 1 | JavaScript | ladarat/CaptchaJS | b6f676e79396b853e706addf0bb42b1ddea80b96 | acd3f020396004046f4491c2e77187e5e2e8c5c5 |
refs/heads/master | <repo_name>the-inhuman-account/PinPointPrevent<file_sep>/requirements.txt
dlib==19.13.0
nose==1.3.7
numpy==1.14.3
Pillow==5.1.0
<file_sep>/full_demo.py
import sys
from demo import main as demo_main
from main import main as main_main
demo_main(sys.argv[1])
main_main()
<file_sep>/mass_prepare_data.py
# mass_prepare_data.py
# This python program takes an input directory and in output directory (in that order) from the command line, and crops out faces in subdirectories.
import os
from prepare_data import main
for d in os.listdir(sys.argv[1]):
if not os.path.exists(os.path.join('training',d)):
os.makedir(os.path.join('training',d))
main(os.path.join(sys.argv[1],d),d)
<file_sep>/demo.py
import cv2
import os
import time
cas = cv2.CascadeClassifier('face_cascade.xml')
def main(name=None):
cap = cv2.VideoCapture(0)
dt = time.time()
if name is None:
name = raw_input('What is your name? ')
if not os.path.exists(os.path.join('training',name)):
os.mkdir(os.path.join('training',name))
# print(name)
num_faces = 0
while True:
if round(time.time() - dt) >= 30:
break
_,im = cap.read()
faces = cas.detectMultiScale(im,scaleFactor=1.2,minNeighbors=5)
if len(faces) > 0:
(x,y,w,h) = faces[0]
num_faces += 1
cv2.imwrite(os.path.join('.','training',name,str(num_faces)+'.jpg'),im[y:y+h,x:x+w])
cv2.rectangle(im,(x,y),(x+w,y+h),(0,255,0),3)
cv2.putText(im,'Writing image training/{}/{}'.format(name,num_faces),(10,60),cv2.FONT_HERSHEY_SIMPLEX,1,(0,255,0),2,cv2.LINE_AA)
cv2.imshow('Demo',im)
k = cv2.waitKey(30)
if k == 27:
break
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
<file_sep>/README.md
# PinPointPrevent
### To Run the program, you will first need.
- Python 3.x (We used python 3.6.5 for this project)
- OpenCV2 (We used version 3.4.1)
- Dlib (We used version 19.13.0)
- Numpy (We used version 1.14.3)
### For the graphical version (recognize a face that is shown to the camera):
```bash
python3 main.py [training-data-directory] [number-of-training-images]
```
"training-data-directory" should be a directory inside the "training" directory
### For the command-line version (this version will output a confidence):
```bash
python3 main.py [training-data-directory] [number-of-training-images] -i [path-to-test-data]
```
"training-data-directory" should be a directory inside the "training" directory
"path-to-test-data" should be a path to an image file (png,jpg,etc.)
<file_sep>/main_python3.py
import cv2 # OpenCV 2 version 3.4.1
import dlib # dlib is a c++ library which we are using to detect the front of a face (with dlib.get_frontal_face_detector())
import numpy as np # numpy is a library which opencv uses for it's images, so it is useful for conversion
import math # used for sine, cosine, and pi
import sys # used for sys.argv
import os # used to fiddle with the filesystem
pred = dlib.shape_predictor('shape_predictor_68_face_landmarks.dat')
detect = dlib.get_frontal_face_detector()
rec = cv2.face.LBPHFaceRecognizer_create(threshold=95)
cas = cv2.CascadeClassifier('face_cascade.xml')
def det(gray_image,save):
store = []
for (x,y,w,h) in cas.detectMultiScale(gray_image,1.3,6):
save += [(x,y,w,h)]
f = cv2.resize(gray_image[y:y+h,x:x+w],(100,100))
store += [f]
return store
# Borrowed from http://pranavdheer.co/face-recognition-a-step-by-step-guide/
def align(save):
output = []
flag = 0 #check if we entered loop
for ix in range(0,len(save)):
flag=0
detections = detect(save[ix],2)
for k,d in enumerate(detections):
shape = pred(save[ix], d) #68 facial points
p1 = [(shape.part(45).x,shape.part(45).y),(shape.part(36).x,shape.part(36).y)]
p2 = [((int(0.7*100),33)),(int(0.3*100),33)]
s60 = math.sin(60*math.pi/180);
c60 = math.cos(60*math.pi/180);
inPts = np.copy(p1).tolist();
outPts = np.copy(p2).tolist();
xin = c60*(inPts[0][0] - inPts[1][0]) - s60*(inPts[0][1] - inPts[1][1]) + inPts[1][0];
yin = s60*(inPts[0][0] - inPts[1][0]) + c60*(inPts[0][1] - inPts[1][1]) + inPts[1][1];
inPts.append([np.int(xin), np.int(yin)]);
xout = c60*(outPts[0][0] - outPts[1][0]) - s60*(outPts[0][1] - outPts[1][1]) + outPts[1][0];
yout = s60*(outPts[0][0] - outPts[1][0]) + c60*(outPts[0][1] - outPts[1][1]) + outPts[1][1];
outPts.append([np.int(xout), np.int(yout)]);
tform = cv2.estimateRigidTransform(np.array([inPts]), np.array([outPts]), False);
img2 = cv2.warpAffine(save[ix], tform, (100,100));
detections = detect(img2,3)
for k,d in enumerate(detections):
flag = 1
face = [[abs(d.left()),abs(d.right())],[abs(d.top()),abs(d.bottom())]]
shape = pred(img2, d)
l_eye = np.asarray([(shape.part(36).x,shape.part(36).y),(shape.part(37).x,shape.part(37).y),(shape.part(38).x,shape.part(38).y),(shape.part(39).x,shape.part(39).y),(shape.part(40).x,shape.part(40).y),(shape.part(41).x,shape.part(41).y)])
r_eye = np.asarray([(shape.part(42).x,shape.part(42).y),(shape.part(43).x,shape.part(43).y),(shape.part(44).x,shape.part(44).y),(shape.part(45).x,shape.part(45).y),(shape.part(46).x,shape.part(46).y),(shape.part(47).x,shape.part(47).y)])
eye_left = np.mean(l_eye,axis=0)
eye_right = np.mean(r_eye,axis=0)
face[0][0] = int((eye_left[0]+face[0][0])/2.0)
face[0][1] = int((eye_right[0]+face[0][1])/2.0)
face[1][1] = int((shape.part(10).y+shape.part(57).y)/2.0)
img2_cropped = img2[face[1][0]:face[1][1],face[0][0]:face[0][1]]
img2_cropped = cv2.resize(img2_cropped,(100,100))
output.append(img2_cropped)
if flag == 0:
del(save[ix]) #delete face coordinates with improper alignment
if len(output) == 0:
return ('n',1)
return ('y',output)
def train(name):
data = [cv2.resize(
cv2.cvtColor(
cv2.imread(os.path.join('training',name,flname)),cv2.COLOR_BGR2GRAY),(100,100))
for flname in os.listdir(os.path.join('training',name)) if flname.split('.')[-1] in ['JPG','jpg','PNG','png']]
images = []
for im in data:
save = []
save += [im]
output = align(save)
if output[0] == 'y':
images += [output[1][0]]
rec.train(np.asarray(images),np.asarray([1 for i in images]))
def main(cmd_input=False,img=None,path=sys.argv[1],tr=True,outp=True):
if tr:
train(path)
faces_save = []
save = []
if cmd_input == False:
cap = cv2.VideoCapture(0) #reading video
frame_width = int(cap.get(3))
frame_height = int(cap.get(4))
frame_rate = 24
frame_count = 0 #frame number, that we are reading
while(cap.isOpened()): #while video is still open
ret, frame = cap.read()
frame_count = frame_count+1
(dimensions_x,dimensions_y,z) = frame.shape
images = frame
copy_image = images
images = cv2.cvtColor(images,cv2.COLOR_BGR2GRAY) #convert into grayscale
detected = []
if frame_count >= frame_rate or frame_count == 1: #condition is valid once in a frame rate
save = []
faces_save = det(images,save)
output = align(faces_save)
frame_count = 1 #Again wait for 24 more frames to process the video
if output[0]=='y':
for ix in range (0,len(output[1])):
x,y,w,h=save[ix][0],save[ix][1],save[ix][2],save[ix][3]
out = output[1][ix]
(iden,conf) = rec.predict(out) #Predict label of face
if iden == 1:
detected += [(x,y,w,h,conf)]
else:
cv2.rectangle(copy_image,(x,y),(x+w,y+h), (0, 0, 255), 2)
if len(detected) > 0:
detected.sort(key=lambda x: x[4])
(x,y,w,h,c) = detected[0]
cv2.rectangle(copy_image,(x,y),(x+w,y+h), (0, 255, 0), 2)
cv2.putText(copy_image,'Name: {}'.format(path),(x,y-40), cv2.FONT_HERSHEY_SIMPLEX, 1,(0,255,0),2,cv2.LINE_AA)
cv2.putText(copy_image,'Confidence: {}/100'.format(round(c * 100) / 100),(x,y-10), cv2.FONT_HERSHEY_SIMPLEX, 1,(0,255,0),2,cv2.LINE_AA)
cv2.imshow('video',copy_image)
key = cv2.waitKey(30)
if key == 27:
break
cap.release()
cv2.destroyAllWindows()
else:
(dimensions_x,dimensions_y,z) = img.shape
images = img
copy_image = images
images = cv2.cvtColor(images,cv2.COLOR_BGR2GRAY) #convert into grayscale
detected = []
faces_save = det(images,save)
output = align(faces_save)
if(output[1] != 1):
retrn = []
for ix in range (0,len(output[1])):
x,y,w,h=save[ix][0],save[ix][1],save[ix][2],save[ix][3]
out = output[1][ix]
lab = rec.predict(out) #Predict label of face
if lab[0]==1 and outp:
print('Found {}, confidence: {}, [x:{},y:{},w:{},h:{}]'.format(path,lab[1],x,y,w,h))
elif outp:
print('Suspicious individual detected at [x:{},y:{},w:{},h:{}]'.format(x,y,w,h))
#cv2.rectangle(copy_image,(x,y),(x+w,y+h), (0, 0, 255), 2)
retrn += [[x,y,w,h,lab[0],lab[1]]]
return retrn
def multiple():
res = []
for i in os.listdir('training'):
train(i)
x = main(cmd_input=True,img=cv2.imread(sys.argv[2]),tr=False,outp=False)
if x != None:
for j in x:
j.append(sys.argv[3])
res += x
res.sort(key=lambda y:y[5])
#print(res)
print('Found {}, confidence: {}, [x:{},y:{},w:{},h:{}]'.format(res[0][-1],res[0][-2],res[0][0],res[0][1],res[0][2],res[0][3]))
if __name__ == '__main__':
if len(sys.argv) > 4:
if sys.argv[3] == '-i':
# python3 main.py [path-to-training-images-inside-training-directory] [num-training-images] -i [path-to-test-image]
# ex. python3 main.py Abhi 10 -i training/Abhi/1.jpg
main(cmd_input=True,tr=True,outp=True,img=cv2.imread(sys.argv[4]))
elif sys.argv[1] == '-m':
multiple()
else:
main()
<file_sep>/Dockerfile
FROM python:3.6.5
ADD .
WORKDIR .
RUN pip install -r requirements.txt
CMD python3 main.py Abhi 10
<file_sep>/prepare_data.py
import os
import sys
import cv2
cas = cv2.CascadeClassifier('face_cascade.xml')
def main(d,output_dir):
data = [cv2.imread(os.path.join(d,fl)) for fl in os.listdir(d)]
i = 0
for k in data:
faces = cas.detectMultiScale(k,scaleFactor=1.2,minNeighbors=5,minSize=(20,20))
for (x,y,w,h) in faces:
print('Writing image training/{}/{}.jpg'.format(output_dir,i))
i += 1
cv2.imwrite(os.path.join('training',output_dir,str(i)+'.jpg'),k[y:y+h,x:x+w])
if __name__ == '__main__' and len(sys.argv) > 1:
main(sys.argv[1],sys.argv[2])
| 05fce8804e9be010d5c0b649141d2506e2e3660e | [
"Markdown",
"Python",
"Text",
"Dockerfile"
] | 8 | Text | the-inhuman-account/PinPointPrevent | 5576b125f41937b014192274d6151f23d031ca30 | f4d79737ecb5c33f31fe9b425e8db0bbf153449c |
refs/heads/master | <repo_name>BhumikaKhatwani/AML<file_sep>/PGM_BN.py
import numpy as np
import pandas as pd
from math import log
from pgmpy.models import BayesianModel
#subtracting features of two images
def sub_features(image1, image2):
image3 = abs(np.subtract(image1, image2))
return image3;
#Joint Probability clculation for Similar data
def cal_prob():
cal1 = f1_cpd[image3[0]][image3[3]*vcard[6] + image3[6]]
cal2 = f2_cpd[image3[1]][image3[0]*vcard[5] + image3[5]]
cal3 = f3_cpd[image3[2]][image3[5]]
cal4 = f4_cpd[image3[3]][image3[2]*vcard[5] + image3[5]]
cal5 = f5_cpd[image3[4]][image3[1]*vcard[7] +image3[7]]
cal6 = f6_cpd[image3[5]]
cal7 = f7_cpd[image3[6]][image3[3]*vcard[7] + image3[7]]
cal8 = f8_cpd[image3[7]][image3[2]]
cal9 = f9_cpd[image3[8]][image3[4]*vcard[6] + image3[6]]
joint_prob = cal1*cal2*cal3*cal4*cal5*cal6*cal7*cal8*cal9
return joint_prob
#Joint Probability clculation for Dissimilar data
def cal_prob2():
cal1 = f1d_cpd[image3[0]][image3[3]*vcard_d[6] + image3[6]]
cal2 = f2d_cpd[image3[1]][image3[0]*vcard_d[5] + image3[5]]
cal3 = f3d_cpd[image3[2]][image3[5]]
cal4 = f4d_cpd[image3[3]][image3[2]*vcard_d[5] + image3[5]]
cal5 = f5d_cpd[image3[4]][image3[1]*vcard_d[7] + image3[7]]
cal6 = f6d_cpd[image3[5]]
cal7 = f7d_cpd[image3[6]][image3[3]*vcard_d[7] + image3[7]]
cal8 = f8d_cpd[image3[7]][image3[2]]
cal9 = f9d_cpd[image3[8]][image3[4]*vcard_d[6] + image3[6]]
joint_probd = cal1*cal2*cal3*cal4*cal5*cal6*cal7*cal8*cal9
return joint_probd
#Checking whether similar or not
def check_similarity():
if cal_prob()>cal_prob2():
return 1;
else:
return 0;
#Calculating Log-likelihood Ratio
def get_llr():
prob1 = cal_prob()
if prob1 !=0:
prob1= log(prob1)
prob2 = cal_prob2()
if prob2!=0:
prob2= log(prob2)
return (prob1 - prob2)
#read training data from csv file
df = pd.read_csv("AND_Features.csv")
df = df.sort_values(by=['ImageId'])
df1 = df.copy()
#read testing data from csv file
test_input = pd.read_csv("PGMTestData.csv")
#read test pairs data from csv file
test_pairs = pd.read_csv("PGMTestPairs.csv")
#column names for dataframe for storing similar and dissimilar data
columns = ('f1','f2','f3','f4','f5','f6','f7','f8','f9')
#column names for dataframe for storing test data output
columns_test =("FirstImage","SecondImage","LLR","SameOrDifferent")
#dataframe to store similar data
df2 = pd.DataFrame(columns=columns)
#dataframe to store dissimilar data
df3 = pd.DataFrame(columns=columns)
#dataframe for test data output
test_output= pd.DataFrame(columns=columns_test)
#creating similar data
k=0
n = len(df['ImageId'])
for i in range(n):
if i <= len(df['f1'])-4:
for j in range(i,i+4):
if df.ImageId.str[:4].iloc[i] == df1.ImageId.str[:4].iloc[j]:
f1=abs(df.f1.iloc[i] - df1.f1.iloc[j])
f2=abs(df.f2.iloc[i] - df1.f2.iloc[j])
f3=abs(df.f3.iloc[i] - df1.f3.iloc[j])
f4=abs(df.f4.iloc[i] - df1.f4.iloc[j])
f5=abs(df.f5.iloc[i] - df1.f5.iloc[j])
f6=abs(df.f6.iloc[i] - df1.f6.iloc[j])
f7=abs(df.f7.iloc[i] - df1.f7.iloc[j])
f8=abs(df.f8.iloc[i] - df1.f8.iloc[j])
f9=abs(df.f9.iloc[i] - df1.f9.iloc[j])
df2.loc[k] = [f1,f2, f3, f4, f5, f6, f7, f8, f9]
k+=1
#creating dissimilar data
k=0
for i in range(n):
if i <= len(df['f1'])-10:
for j in range(i+3,i+10):
if df.ImageId.str[:4].iloc[i] != df1.ImageId.str[:4].iloc[j]:
f1=abs(df.f1.iloc[i] - df1.f1.iloc[j])
f2=abs(df.f2.iloc[i] - df1.f2.iloc[j])
f3=abs(df.f3.iloc[i] - df1.f3.iloc[j])
f4=abs(df.f4.iloc[i] - df1.f4.iloc[j])
f5=abs(df.f5.iloc[i] - df1.f5.iloc[j])
f6=abs(df.f6.iloc[i] - df1.f6.iloc[j])
f7=abs(df.f7.iloc[i] - df1.f7.iloc[j])
f8=abs(df.f8.iloc[i] - df1.f8.iloc[j])
f9=abs(df.f9.iloc[i] - df1.f9.iloc[j])
df3.loc[k] = [f1,f2, f3, f4, f5, f6, f7, f8, f9]
k+=1
#bayesian model creation for similar data
and_model = BayesianModel([('f1', 'f2'),
('f2', 'f5'),
('f3', 'f4'),
('f3', 'f8'),
('f4', 'f1'),
('f4', 'f7'),
('f5', 'f9'),
('f6', 'f2'),
('f6', 'f3'),
('f6', 'f4'),
('f7', 'f1'),
('f7', 'f9'),
('f8', 'f5'),
('f8', 'f7'),
])
#taking 80 percent data as training data
nlen = (int(0.8*len(df2)))
train=df2[:nlen]
and_model.fit(train)
a=and_model.get_cpds()
and_model.check_model()
f1_cpd = a[0].get_values()
f2_cpd = a[1].get_values()
f3_cpd = a[2].get_values()
f4_cpd = a[3].get_values()
f5_cpd = a[4].get_values()
f6_cpd = a[5].get_values()
f7_cpd = a[6].get_values()
f8_cpd = a[7].get_values()
f9_cpd = a[8].get_values()
#length of each variable
vcard = []
for i in range(9):
vcard.append(a[i].variable_card)
#bayesian model creation for dissimilar data
and_model2 = BayesianModel([('f1', 'f2'),
('f2', 'f5'),
('f3', 'f4'),
('f3', 'f8'),
('f4', 'f1'),
('f4', 'f7'),
('f5', 'f9'),
('f6', 'f2'),
('f6', 'f3'),
('f6', 'f4'),
('f7', 'f1'),
('f7', 'f9'),
('f8', 'f5'),
('f8', 'f7'),
])
#taking 80 percent data as training data
nlen2 = (int(0.8*len(df3)))
train=df3[:nlen2]
and_model2.fit(train)
b=and_model2.get_cpds()
and_model2.check_model()
f1d_cpd = b[0].get_values()
f2d_cpd = b[1].get_values()
f3d_cpd = b[2].get_values()
f4d_cpd = b[3].get_values()
f5d_cpd = b[4].get_values()
f6d_cpd = b[5].get_values()
f7d_cpd = b[6].get_values()
f8d_cpd = b[7].get_values()
f9d_cpd = b[8].get_values()
#length of each variable
vcard_d = []
for i in range(9):
vcard_d.append(b[i].variable_card)
#checking accuracy for Similar testing data
s=0
for i in range(nlen,len(df2)):
image3= df2.loc[i]
s = s+check_similarity()
print("Accuracy with Similar testing data", s/(len(df2)-nlen))
#checking accuracy for Dissimilar testing data
w=0
for i in range(nlen2,len(df3)):
image3= df3.loc[i]
w = w+check_similarity()
print("Accuracy with Dissimilar Testing data", (len(df3)-nlen2-w)/(len(df3) - nlen2))
#checking accuracy for Dissimilar and Similar Combined testing data
df2['f10'] = 1
df3['f10'] = 0
df4= df2[nlen:len(df2)].append(df3[nlen2:len(df3)], ignore_index=True)
count=0
for i in range(len(df4)):
image3 = df4.loc[i]
if df4.f10.iloc[i]== check_similarity():
count+=1
print("Accuracy with combined training data", count/len(df4))
#Determining same or different writer and calculating log-likelihood ratio
for i in range(len(test_pairs)):
for j in range(len(test_input)):
if test_pairs.FirstImage.str[:3].iloc[i] == test_input.ImageId.iloc[j]:
break;
for l in range(len(test_input)):
if test_pairs.SecondImage.str[:3].iloc[i] == test_input.ImageId.iloc[l]:
break;
image1 = test_input.iloc[j,2:11]
image2 = test_input.iloc[l,2:11]
image3 = sub_features(image1, image2)
s_d = check_similarity()
llr = get_llr()
test_output.loc[i] = [test_pairs.FirstImage.str[:3].iloc[i],test_pairs.SecondImage.str[:3].iloc[i],llr,s_d ]
#printing test output
print(test_output)
#Writing to the csv file
test_output.to_csv("PGMTestOutput.csv", sep=',',encoding='utf-8') | aca226fe8e8128fbed906af3f0dbfae2c5cf26d4 | [
"Python"
] | 1 | Python | BhumikaKhatwani/AML | 7c17155399257048ee5a6969add5fba636b0fa7a | 3ed701a16c56fa2d62f3c9476c054ec3ab7179e7 |
refs/heads/master | <file_sep>t1 = 3
t2 = 4
colorT = 10
x = noise(t1)
y = noise(t2)
def setup():
size(640,640)
background(0,155,0)
def draw():
global x,t1,t2,colorT
background((noise(colorT)*100))
colorT = colorT+0.01
x = noise(t1)
y = noise(t2)
x = x*1000
y = y*1000
t1 = t1+0.01
t2 = t2+0.01
ellipse(x,y,30,30)
<file_sep>
str_len = 0
angle = 3.14/4
angularVelocity = 0
angularAcceleration = 0
# class kushal():
# #constructor function
# def __init__(self,pos,rad):
# self.positionX = pos[0];
# self.positionY = pos[1];
# # self.velocity = vel
# self.radius = rad
# def update(self,_force):
# self.velocity += _force[0]
# def display(self):
# ellipse(self.positionX,self.positionY,self.radius,self.radius)
def setup():
global origin,bob,str_len,newshit
size(640,360)
str_len = 180
origin = [width/2,0]
bob = [width/2,str_len]
def draw():
global origin,bob,str_len,angle,angularVelocity,angularAcceleration
origin = [width/2,0]
bob = [width/2,str_len]
background(100)
bob[0] = origin[0] + str_len * sin(angle)
bob[1] = origin[1] + str_len * cos(angle)
line(origin[0],origin[1],bob[0],bob[1])
ellipse(bob[0],bob[1],60,60)
angularAcceleration = 0.01 * sin(angle)
angle = angle + angularVelocity
angularVelocity = angularVelocity + (-1)*angularAcceleration
angularVelocity = angularVelocity*0.99
# newshit = kushal([100,100],30);
# newshit.display();
<file_sep># AnimationProcessingPy
My hands on with Animation using Mathematics/Physics using programming on Processing Py
| 8c20fe90449920cd499d3190f87723f217f20ed9 | [
"Markdown",
"Python"
] | 3 | Python | kushalshm1/AnimationProcessingPy | 24294166436d5b06f104cefa29c0aaf2af6e84c4 | f5224cb2f714fbe852b06ef29ea3879577d05cc6 |
refs/heads/master | <file_sep>dbct
====
Dat Block Cipher Tho.....
## Description
This is an implementation of a block cipher designed by me, <NAME>, and <NAME> for BYU's Mathematical Cryptography class (Math 485). It is very simple and probably vulnerable to all sorts of attacks, but that's not a huge deal right?
The algorithm operates on a block size of 10 characters. It also uses a key of size EQUAL TO 10 characters. The algorithm consists of a bitwise XOR and a shift. These two operations happen every round for six rounds.
#### Encryption
Step 1 - block XOR key
Step 2 - shift the first three characters to the end of the block
#### Decryption
Step 1 - shift the last three characters to the front of the block.
Step 2 - block XOR key
## How to use the script (for n00bs)
If you are on Linux I assume you know Git and can handle running these scripts just fine.
Use the "download zip" button on the side to download the scripts. Extract the zip file so that you have the dbct.py script. You need to have Python 2.7 installed. You can get it from [python.org](http://www.python.org/getit/). Choose the newest Python 2.7 version (should be 2.7.5) and install it for Windows. You need to have 2.7 in order to use these scripts (uses the argparse module).
Windows users should have the Python interpreter installed in C:\Python27\python.exe. To run the scripts, go to your start menu and in the search type cmd.exe. Open it. The following examples assume your user name is Sean and that your script is on the Desktop. Also, ignore the README.md, LICENSE.md, and the hexEncde.py files that will come in the zip file (although the license does lay out terms of use).
### Examples
#### Encrypt
C:\Python27\python.exe C:\Users\Sean\Desktop\dbct.py -e -k "your 10 character key goes here" -m "your message goes here"
#### Decrypt
C:\Python27\python.exe C:\Users\Sean\Desktop\dbct.py -d -k "your 10 character key goes here" -m "the output from encryption goes here"
Please note that you cannot copy anything from cmd.exe, but you can paste into it by right clicking then selecting paste. To solve this the output is printed out and is also saved to a file called output.exe. Since cmd.exe runs from C:\Users\Sean (or whatever your username is) the file will be saved to C:\Users\Sean.
<file_sep># should take a message and a key as input
import sys
import argparse
parser = argparse.ArgumentParser(prog='Dat Block Cipher Tho...', description='Encrypt a message using the dbct algorithm.\nWhen you pass in a message, supply it in hexedecimal rather than normal characters.', add_help=True)
parser.add_argument('-k', '--key', type=str, action='store', help='specify a 10 character key in hexedecimal')
parser.add_argument('-m', '--msg', type=str, action='store', help='specify a message in hexedecimal')
parser.add_argument('-e', '--encrypt', action='store_true', help='Encrypt a message, where the message specified is in hexedecimal')
parser.add_argument('-d', '--decrypt', action='store_true', help='Decrypt a message, where the message specified is in hexedecimal')
args = parser.parse_args()
def shiftBlock(block):
t = block[0]
t1 = block[1]
t2 = block[2]
block[0] = block[3]
block[1] = block[4]
block[2] = block[5]
block[3] = block[6]
block[4] = block[7]
block[5] = block[8]
block[6] = block[9]
block[7] = t
block[8] = t1
block[9] = t2
def invShiftBlock(block):
t = block[7]
t1 = block[8]
t2 = block[9]
block[9] = block[6]
block[8] = block[5]
block[7] = block[4]
block[6] = block[3]
block[5] = block[2]
block[4] = block[1]
block[3] = block[0]
block[2] = t2
block[1] = t1
block[0] = t
def xorBlockAndKey(block,key):
for i in range(0,10):
block[i] = block[i] ^ key[i]
def encrypt(block,key):
for i in range(0,6):
xorBlockAndKey(block,key)
shiftBlock(block)
def decrypt(block,key):
for i in range(0,6):
invShiftBlock(block)
xorBlockAndKey(block,key)
if args.encrypt and args.decrypt:
print "Please only specify either encryption or decryption"
sys.exit(1)
if not args.encrypt and not args.decrypt:
print 'Either -e or -d are required'
sys.exit(1)
if args.msg == None:
print 'No message specified'
sys.exit(1)
if args.key == None:
print 'No key specified'
sys.exit(1)
msg = ''
key = ''
if args.encrypt:
msg = args.msg.encode("hex")
key = args.key.encode("hex")
if args.decrypt:
msg = args.msg
key = args.key.encode("hex")
k = map(ord, key.decode("hex"))
m = map(ord, msg.decode("hex"))
while len(m) % 10 != 0:
m.append(0)
i = 0
result = []
block = []
while i != len(m):
block.append(m[i])
if (i+1) % 10 == 0:
if args.encrypt:
encrypt(block,k)
for j in range(0,10):
result.append(block[j])
if args.decrypt:
decrypt(block,k)
for j in range(0,10):
result.append(block[j])
block[:] = []
i = i + 1
r = ''
i = 0
while i != len(result):
r += "%02x" % result[i]
i += 1
if args.decrypt:
r = r.decode("hex")
print r
f = open('output.txt','w')
f.write(r)
f.close()
<file_sep>import argparse
import sys
parser = argparse.ArgumentParser(prog='Hex encoder/decoder', description='Supply a string and get the result encoded or decoded into/from hexedecimal', add_help=True)
parser.add_argument('-m', '--msg', type=str, action='store', help='Specify the string to be encoded or decoded in quotes with this flag')
parser.add_argument('-e', '--encode', action='store_true', help='Encode a string into hex')
parser.add_argument('-d', '--decode', action='store_true', help='Decode a hex encoded string')
args = parser.parse_args()
if args.encode and args.decode:
print 'Please specify only -e or -d'
sys.exit(1)
if not args.encode and not args.decode:
print 'Please specify either -e or -d'
sys.exit(1)
if args.msg == None and args.encode:
print 'Please specify a string to encode'
sys.exit(1)
if args.msg == None and args.decode:
print 'Please specify a string to decode'
sys.exit(1)
r = ''
if args.decode:
r = args.msg.decode("hex")
if args.encode:
r = args.msg.encode("hex")
print r
| e1dfbd58b40d3b8908258e05160666cbb5c954a1 | [
"Markdown",
"Python"
] | 3 | Markdown | ThaWeatherman/dbct | 002bccd3d5d8a3d267001dec5affab282bc2ebf4 | 0617c3eac7b3664f7a61a32cd0e9d0ba52ec0e1d |
refs/heads/master | <file_sep>#Test script, most features tried here are included in app.R
## Scratch/test script for plotting EIGENSOFT smartpca output (PCA of genotype data).
## Reads a .evec file (per-sample PC coordinates) and a .eval file (eigenvalues),
## colors samples by the region recorded in a popinfo table, highlights one focal
## sample, and writes the plots to a PDF.
## NOTE(review): interactive scratch code — paths, sample IDs, and axis limits
## below are placeholders/hard-coded values from a previous session.

# Working directory holding the .evec/.eval/popinfo files.
# NOTE(review): "PATH)" looks like a placeholder with a stray ')' — replace before running.
setwd("PATH)")
# ID of the focal sample to highlight in every plot (placeholder).
sample=c("XYZ")
# Output PDF name, e.g. "XYZ.TestPCA.pdf".
sample.pdf=paste(sample,"TestPCA.pdf",sep=".")
# smartpca outputs: per-sample eigenvector coordinates (.evec) and eigenvalues (.eval).
evec.in="file.evec"
eval.in="file.eval"
data <- read.table(evec.in)
eval <- read.table(eval.in)
# (disabled) reverse the row order so later rows are drawn first:
#data <- data[nrow(data):1,]
## Columns 2..11 of the .evec table are the first 10 PCs (column 1 is the sample ID);
## the range below must match the number of PCs actually present in the file.
pca <- data[,seq(1,10)+1]
# -> CALCULATE PCA CONTRIBUTIONS
# Axis labels with the percent of variance explained by PC1/PC2, relative to the
# sum of all eigenvalues listed in the .eval file.
pct.varPC1 = paste("PC1 (",round(100*eval[1,]/sum(eval),2),"%)",sep="")
pct.varPC2 = paste("PC2 (",round(100*eval[2,]/sum(eval),2),"%)",sep="")
# Echo the labels to the console for a quick sanity check.
pct.varPC1
pct.varPC2
####
# The .evec ID field may carry a "PREFIX:ID" form; strip everything up to the
# last ':' to recover the bare individual ID.
individual.name <- gsub(".*:","",data[,1])
# Row index of the focal sample in the .evec table (NA if not found).
sample.idx <- match(sample,gsub(".*:","",data[,1]))
#### population information:
# Whitespace-separated popinfo table; by this dataset's layout, column 2 = ID,
# column 9 = population, column 13 = region (also exposed as $ID / $POP below).
popinfo <- read.csv("popinfoBot1517.txt",sep="",header=T)
## The popinfo file can be longer than the actual number of samples plotted, as
## long as each ID is matched by the index lookup below; but any excess entries
## (with their regions and colors) will still appear in the legend. It is
## therefore ideal to have a dedicated popinfo file for each plotting session.
regions <- as.character(unique(popinfo[,13]))
populations <- as.character(unique(popinfo[,9]))
# Alternative translucent palette kept for reference:
#regions.color <- c("black","#00008090", "#1E90FF90", "#0000FF90", "#87CEEB90", "#00F5F590")
# One color per region, in the order the regions appear in popinfo.
# NOTE(review): this vector must have exactly length(regions) entries or the
# names() assignment below will recycle/NA — confirm for each new popinfo file.
regions.color <- c("black","navyblue", "dodgerblue", "blue", "skyblue", "turquoise1", "darkgreen", "lightgreen")
names(regions.color) <- regions
# Print the region -> color mapping for visual verification.
print(cbind(regions,regions.color))
#### Map each plotted individual to its popinfo row (by ID, column 2).
nsamples <- length(individual.name)
idx <- match(individual.name, popinfo[,2])
# Region label alongside the PC1/PC2 coordinates (not used below; kept for inspection).
pop.pc1.pc2 <- cbind(popinfo[idx,13],data[,2:3])
# Per-sample plotting color, looked up through the region of each individual.
colors <- regions.color[as.character(popinfo[idx,13])]
##########################################################################################
## PAGE 1: SOLID CIRCLES, colored by region, with a region legend.
##########################################################################################
pdf(sample.pdf)
plot(pca[,1],pca[,2],col = colors, pch = 19, xlab =pct.varPC1,ylab=pct.varPC2)
abline(v=0,h=0,lty=2,col="grey")
legend("topright", regions, ncol=1,col= regions.color,pch=19, cex=0.7,bty = "n")
# Overplot and label the focal sample in black.
points(pca[sample.idx,c(1,2)], cex = 1, pch = 19, col = "black")
text(pca[sample.idx,c(1,2)],sample,pos=4,offset=0.25,cex=0.6)
##########################################################################################
## PAGE 2: POPULATION ID NAMES instead of points (type="n" suppresses the symbols).
##########################################################################################
#pdf(sample.ids.pdf)
plot(pca[,1],pca[,2],type="n",col = colors, pch = 19, xlab =pct.varPC1,ylab=pct.varPC2)
abline(v=0,h=0,lty=2,col="grey")
# Label each individual with its population tag (popinfo column 1), colored by region.
text(pca[,1],pca[,2], labels=popinfo[idx,1],col=colors,cex=0.5)
points(pca[sample.idx,c(1,2)], cex = 1, pch = 19, col = "black")
text(pca[sample.idx,c(1,2)],sample,pos=4,offset=0.25,cex=0.6)
# Close the PDF device; the two pages above are written to sample.pdf.
dev.off()
# Zoomed view with hard-coded axis limits from a previous session.
# NOTE(review): the PDF is already closed, so this plot opens a new default
# device, and the dev.off() below closes that one — likely leftover scratch state.
plot(pca[,1],pca[,2],type="n",col = colors, pch = 19, xlab =pct.varPC1,ylab=pct.varPC2, xlim=c(-0.03,-0.02),ylim =c(-0.025,-0.015))
abline(v=0,h=0,lty=2,col="grey")
text(pca[,1],pca[,2], labels=popinfo[idx,1],col=colors,cex=0.5)
points(pca[sample.idx,c(1,2)], cex = 1, pch = 19, col = "black")
text(pca[sample.idx,c(1,2)],sample,pos=4,offset=0.25,cex=0.6)
dev.off()
# Ad-hoc inspection of two individuals by ID.
# NOTE(review): the result of append() is discarded (not assigned) — presumably
# meant to be stored or printed; confirm intent.
NeartBot17 <- popinfo[popinfo$ID=="TAH-487",]
append(NeartBot17,popinfo[popinfo$ID=="1023",])
#SUBSELECT POPULATION: plot only the individuals of one population code.
sbP = "XYZ"
# IDs of the members of population sbP, and their row indices in the .evec table.
sbP.idn <- as.vector(popinfo[popinfo$POP==sbP,2])
sbP.idx <- match(sbP.idn, data[,1])
plot(pca[,1],pca[,2],type="n",col = colors, pch = 19, xlab =pct.varPC1,ylab=pct.varPC2)
text(pca[sbP.idx,c(1,2)],sbP.idn,pos=4,offset=0.25,cex=0.5, col= "cyan")
text(pca[sample.idx,c(1,2)],sample,pos=4,offset=0.25,cex=0.6)
# NOTE(review): roof() is not a base-R function — presumably a user helper or a
# typo for round(); confirm before running (result is also discarded here).
roof(max(pca[sbP.idx,c(1)])*1.10,3)
# Same subselection, zoomed to hard-coded limits from a previous session.
plot(pca[,1],pca[,2],type="n",col = colors, pch = 19, xlab =pct.varPC1,ylab=pct.varPC2, xlim=c(-0.0417,0.0063),ylim =c(-0.0263,-0.0052))
# Intended generic form of the limits above:
#xlim=zLimits[0:2],ylim = zLimits[3:4]
# Build zoom limits with a 10% margin around the subpopulation's PC1/PC2 range.
# NOTE(review): `subpop` is never defined in this script — presumably sbP.idx
# was intended; as written these lines will fail. Confirm.
limits <- c()
limits <- c(limits,min(pca[subpop,c(1)])-abs(min(pca[subpop,c(1)])*0.1))
limits <- c(limits,max(pca[subpop,c(1)])+abs(max(pca[subpop,c(1)])*0.1))
limits <- c(limits,min(pca[subpop,c(2)])-abs(min(pca[subpop,c(2)])*0.1))
limits <- c(limits,max(pca[subpop,c(2)])+abs(max(pca[subpop,c(2)])*0.1))
<file_sep># AUTO MDS PLOTTER (Shiny)
# Shiny: app.R
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Last Edit: March 10 2016
# Requirements:
# - MDS file from the plink. Only one per directory.
# - popinfo file
# Pipeline:
# 1. Reads first .mds file from a chosen directory.
# 2. Identifies names, regions and populations from a given popinfo.txt
# 3. Generates a MDS plot with color-coded regions.
# Features:
# - Plot types: Can select between points or tags for the plot.
# - Select Population: Shows only a particular population.
# - Emphasize Population: Highlights points or tags for a chosen population.
# - Color by Category: User can choose the criteria for coloring.
# - Show/Hide Legend: Displays all values from the selected category.
# - Zoom: Expands view to better visualize a population.
#<START>
# Load required libraries
library(shiny)
#<INPUT>
# Choose data directory
# NOTE(review): choose.dir() is only available in R for Windows -- TODO
# confirm the intended platform before running elsewhere.
setwd(choose.dir(default = "", caption = "Select folder with MDS files"))
#</INPUT>
#<PREPARATIONS>
# Read popinfo file (.txt): the first *.txt in the folder is assumed to be
# the popinfo table (whitespace-separated, with at least ID and POP columns).
pifile <- dir(pattern = "\\.txt$")[1]
popinfo <- read.csv(pifile,sep="",header=TRUE)
poptags <- as.character(unique(popinfo$POP))
# popinfo columns with more than one distinct value become the candidate
# colour-grouping fields offered in the UI.
uniquecols <- sapply(popinfo,function(x) length(unique(x)))
fields <- names(uniquecols[uniquecols>1])
# Menu labels: "TAG (Long name)" when POP_SIMPLE exists, bare tags otherwise.
if ("POP_SIMPLE" %in% colnames(popinfo)) {
pops <- unique(subset(popinfo, select = c(POP,POP_SIMPLE)))
pops <- gsub("_"," ",(paste(pops$POP," (",pops$POP_SIMPLE,")",sep="")))
} else {
pops <- as.character(unique(popinfo$POP))
}
# Load MDS files: only the first *.mds file found is used.
mds <- dir(pattern = "\\.mds$")[1]
data <- read.table(mds, header=TRUE)
# Fixed column layout of a plink .mds file: column 2 = sample ID,
# columns 4/5 = first two MDS components (C1/C2).
IDcol <-2
xcol <- 4
ycol <- 5
popIDs <- as.character(data[,2])
nsamples <- length(popIDs)
# Row index of each sample within popinfo (NA when the ID is missing there).
idx <- match(popIDs, popinfo$ID)
#</PREPARATIONS>
#<FUNCTIONS>
ZoomLim <- function(subpop = integer(0), pct = 0.1) {
  # Compute padded plot limits c(xmin, xmax, ymin, ymax) for the rows listed
  # in `subpop` (every sample when empty), widening each extreme by `pct` of
  # its absolute value. Reads globals: data, xcol, ycol, nsamples.
  if (length(subpop) == 0) {
    subpop <- 1:nsamples
  }
  pad <- function(v) abs(v) * pct
  xlo <- min(data[subpop, xcol])
  xhi <- max(data[subpop, xcol])
  ylo <- min(data[subpop, ycol])
  yhi <- max(data[subpop, ycol])
  c(xlo - pad(xlo), xhi + pad(xhi), ylo - pad(ylo), yhi + pad(yhi))
}
#</FUNCTIONS>
#<UI>
# UI layout: logo/title header, a narrow settings sidebar, and a large
# plotting area.
ui <- fluidPage(
# Page Title
img(src="MorLabLogo.jpg", style = "float:right"),
titlePanel("Auto MDS Plotter"),
helpText("Author: <NAME> | Developed at: LANGEBIO (MX)"),
hr(),
# Sidebar
sidebarLayout(
# Input Panels
sidebarPanel(width=2,
h3("Settings"),
# Multi-select of populations to display (empty selection = show all).
selectizeInput("pops", label = "Populations displayed:",
choices= pops, selected = NULL,
options= list(maxItems = length(pops)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))
),
# Single population to highlight on top of the current view.
selectizeInput("pope", label = "Populations emphasis:",
choices= pops, selected = NULL,
options= list(maxItems = 1,
placeholder = 'Choose population',
onInitialize = I('function() { this.setValue(""); }'))
),
hr(),
# popinfo column used to colour the samples.
selectizeInput("flds", label = "Group Coloring.",
choices= fields, selected = NULL,
options= list(maxItems = 1,
placeholder = 'Choose color grouping',
onInitialize = I('function() { this.setValue(""); }'))),
# Draw samples as points or as ID labels.
radioButtons("type", label = "Type:",
choices = list("Points" = 1, "Text" = 2),
selected = 1),
checkboxInput("leg", label = "Legend", value = FALSE),
checkboxInput("zoom", label = "Zoom In", value = FALSE),
# Padding percentage applied around the zoomed region (see ZoomLim).
sliderInput("zoomaxis", label = "Zoom Out %:", min = 100, max = 800,
value = 120, step = 10)
),
# Plotting Area
mainPanel(width=10,
plotOutput("MDSPlot",width="100%",height="900px")
)
)
)
#</UI>
#<SERVER>
# Server: builds one reactive MDS plot from the current control values.
server <- function(input, output) {
#<REACTIVES>
# Inputs
# IDs of the samples in the selected populations. Menu entries read
# "TAG (Name)", so the POP tag is recovered with substr(.., 1, 3).
# NOTE(review): assumes every POP tag is exactly 3 characters -- TODO confirm.
sub.pops <- reactive(as.vector(popinfo[popinfo$POP %in% substr(input$pops,1,3),IDcol]))
pope.idn <- reactive(as.vector(popinfo[popinfo$POP %in% substr(input$pope,1,3),IDcol]))
# Slider range 100..800 mapped to a 0..7 padding fraction for ZoomLim().
zoomaxis <- reactive(input$zoomaxis/100 - 1)
# Distinct values of the chosen colouring column ("" when none chosen).
grouping <- reactive(if(input$flds != "")as.character(unique(popinfo[,input$flds])) else(""))
#</REACTIVES>
#<PROCESSING>
hiplot <- reactive({
# Row indices (into `data`) of displayed and emphasized samples.
sbP.idx <- which(popIDs %in% sub.pops())
pope.idx <- which(popIDs %in% pope.idn())
# Colour assignment: one rainbow colour per group value, black reserved
# for the emphasis slot; flat grey when no grouping is selected.
# (Assigning data$Colors inside this reactive modifies a local copy of
# the global `data`, so the global table is never mutated.)
if (input$flds != "") {
coloring <- c(grouping(),"(Emphasis)")
groups.color <- c(rainbow(length(coloring)-1),"#000000")
names(groups.color) <- coloring
data$Colors <- groups.color[as.character(popinfo[idx,input$flds])]
} else {
coloring <- c("Samples","(Emphasis)")
groups.color <- c("#666666","#000000")
data$Colors <- "#666666"
}
# Replace the generic "(Emphasis)" legend label with the chosen population.
if(length(pope.idx) > 0){coloring[length(coloring)] <- input$pope}
# Zoom window around the emphasized samples, else the displayed subset.
zLimits <- ZoomLim(if (length(pope.idx) > 0) pope.idx else (sbP.idx),zoomaxis())
# NOTE: zLimits[0:2] is equivalent to zLimits[1:2] -- R silently drops
# the 0 index.
plot(data[,c(xcol,ycol)], col= data$Colors, pch= 20, xlab= "C1",
ylab= "C2", cex.main = 1.5,
main= ifelse(is.null(input$pops),"All Populations",paste(input$pops, collapse =" / ")),
type= ifelse(is.null(input$pops) && input$type == 1,"p","n"), cex=3,
xlim= if (input$zoom == TRUE) zLimits[0:2] else (range(data[,xcol])),
ylim= if (input$zoom == TRUE) zLimits[3:4] else (range(data[,ycol]))
)
abline(v= 0, h= 0, lty= 2, col= "grey")
if(input$leg) legend("topright", gsub("_"," ",coloring), ncol= 1, col= groups.color, pch= 20,
pt.cex= 3, bty= "n")
# Points mode: subset as filled dots, emphasis as black diamonds on top.
if (input$type==1) {
points(data[sbP.idx,c(xcol,ycol)], cex= 2, pch= 19, col= data$Colors[sbP.idx])
points(data[pope.idx,c(xcol,ycol)], cex= 2.5, pch= 23, bg="#000000", col= "#FFFFFF")
}
# Text mode: sample IDs as labels, emphasized IDs in bold black.
else {
if (is.null(input$pops)) text(data[,c(xcol,ycol)], labels= popinfo[idx,IDcol], col= data$Colors, cex= 0.9)
else text(data[sbP.idx,c(xcol,ycol)], labels=data[sbP.idx,IDcol], cex= 1, col= data$Colors[sbP.idx])
if (length(pope.idx) > 0) text(data[pope.idx,c(xcol,ycol)], labels= data[pope.idx,IDcol], font= 2, cex=1, col= "#000000")
}
})
#</PROCESSING>
#<OUTPUT>
output$MDSPlot <- renderPlot(hiplot())
#</OUTPUT>
}
#</SERVER>
#<APP>
# Construct and launch the Shiny app (blocks while the app is running).
shinyApp(ui = ui, server = server)
#</APP>
#<END><file_sep>pi <- read.csv("1KGP3.popinfo.txt", sep="\t")
# Draw nsam random individuals from every population in `pi` (read above)
# and write both the subsampled popinfo table and the FAMID/ID list that
# plink's --keep option expects.
pops <- as.character(unique(pi$POP))
nsam <- 5
popsel <- c()
for (i in pops){
# NOTE(review): sample(which(...), nsam) needs each population to have at
# least nsam members; R's sample() would also sample from 1:n if which()
# ever returned a single scalar n -- TODO confirm panel sizes make this safe.
popsel <- c(popsel,sort(sample(which(pi$POP==i),nsam)))
}
popx <- pi[popsel,]
pids <- popx[,c("FAMID","ID")]
write.table(popx,file="1KG.sample.popinfo.txt",quote=F,sep="\t")
write.table(pids,file="1KG.sample.ids.txt",quote=F,sep="\t",row.names = F,col.names = F)
<file_sep># Population Sample (Shiny)
# Shiny: app.R
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Pipeline:
# 1. Reads a popinfo file
# 2. Identifies categories for easier subsetting.
# 3. Generates a table with random samples.
#
# Features:
# - Choose number of samples
# - Subset regions/populations
# - Preview the table
# - Save table as a popinfo file and a list of IDs for plink command --keep
# Interactively pick the popinfo file; parse as tab-separated first, then
# fall back to space-separated when the first parse yields a single column.
popfile <- file.choose()
pi <- read.csv(popfile, sep="\t")
if (ncol(pi) == 1) {pi <- read.csv(popfile,header=TRUE, sep= " ")}
# All population tags in the file, in order of first appearance.
pop <- as.character(unique(pi$POP))
# UI layout: header, a sidebar with sampling controls, and a table preview.
ui <- fluidPage(
img(src="MorLabLogo.jpg", style = "float:right"),
titlePanel("Population Sampler"),
helpText("Author: <NAME> | Developed at: LANGEBIO (MX)"),
hr(),
sidebarLayout(
sidebarPanel(width=2,
# How many individuals to draw from each population.
numericInput("ns", label = "Samples per population:",
value= 1, min= 1, max=9999),
h5("Total samples selected:"),
textOutput("totsam"),
br(),
# Populations to restrict the sample to (empty = all populations).
selectizeInput("pops", label = "Populations selected:",
choices= pop, selected = NULL,
options= list(maxItems = length(pop)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))),
# Writes the current sample to ./output (see server's Export).
actionButton("goButton", "Save Tables"),
textOutput("value")
),
mainPanel(
tableOutput("SampleTable")
)
)
)
# Server for the population sampler: one reactive subsample plus an
# on-demand export of the current table.
server <- function(input, output) {
  # Current subsample: input$ns random rows per selected population, or per
  # every population when none is selected. Re-runs whenever ns/pops change.
  # (A dead `input$spops` region branch, copied from the 1KG sampler whose UI
  # actually defines that control, was removed: this app's UI has no "spops"
  # input, so input$spops was always NULL and the branch was unreachable.)
  dataset <- reactive({
    popsel <- c()
    if(length(input$pops) > 0){
      for (i in input$pops){popsel <- c(popsel,sort(sample(which(pi$POP==i),input$ns)))}
    } else {
      for (i in pop){popsel <- c(popsel,sort(sample(which(pi$POP==i),input$ns)))}
    }
    # NOTE(review): sample() errors when a population has fewer than
    # input$ns members -- TODO confirm that is the desired failure mode.
    pi[popsel,]
  })
  # Live sample count and table preview.
  output$totsam <- reactive(dim(dataset())[1])
  output$SampleTable <- renderTable({dataset()})
  # Status line; Export() is an eventReactive, so it only fires on the button.
  output$value <- renderText(paste(Export()))
  # Write the sampled popinfo table and the two-column ID list (for plink
  # --keep) into ./output when "Save Tables" is clicked.
  # NOTE(review): eol="\r" writes CR-only line endings here while the 1KG
  # sampler uses "\n" -- TODO confirm this difference is intentional.
  Export <- eventReactive(input$goButton,{
    setwd("output")
    write.table(dataset(),file="Sample.popinfo.txt",quote=F,sep="\t",eol="\r",row.names = F)
    write.table(dataset()[,c(1,2)],file="Sample.ids.txt",quote=F,sep="\t",row.names = F,col.names = F,eol="\r")
    setwd("..")
    return("Success: Tables saved to /output")
  })
}
shinyApp(ui = ui, server = server)<file_sep># ShinyPopGen: Shiny Apps for Population Genomics
*By <NAME> (<EMAIL>)*
*Human Population and Evolutionary Genomics Lab - LANGEBIO*
## About
*Documentation: [Shiny by RStudio](http://shiny.rstudio.com/)*
This document explains how to use R Shiny Apps that perform common plotting tasks in population genetics analyses. All apps are available in their respective directories (titled as *Application name [APP]*).
If you find a bug or are otherwise unable to use these apps, feel free to contact me for support.
## Shiny
Shiny is an R package for building interactive web applications using R, without requiring knowledge of HTML, CSS, or JavaScript. It is very useful for visualizing and manipulating data without tampering with code.
### Installation
To install, simply run the following command on an opened R session:
`install.packages("shiny")`
### Learning Shiny
If you are interested in developing Shiny apps, its webpage has some useful tutorials:
[Welcome to Shiny](http://shiny.rstudio.com/tutorial/lesson1/)
Some example apps with basic and advanced code are also available here:
[Shiny Gallery](http://shiny.rstudio.com/gallery/)
Using Shiny is much better in RStudio, an IDE for R:
[RStudio for Desktop](https://www.rstudio.com/products/rstudio-desktop/)
## Requirements
**All apps require R and its Shiny library.**
Each app requires a specific set of files to be able to run, indicated in its documentation here. Still, a single directory may contain all the different files required, as long as they are derived from the same sample.
Possible directory contents:
- **popinfo file** (*see below for details*), and no other TXT file.
- PCA files: .eval and .evec (only one of each file).
- MDS files: .mds file.
- Admixture files: all .Q files of the same sample, each with different K numbers.
### About the popinfo file
A `popinfo` is a simple text file that contains additional information about the samples, usually provided with the reference panel.
The format is as follows:
* The first row must be the headers of the columns.
* Each following row contains the corresponding information for a sample.
* Each field is separated either by spaces or tabulations.
* The file is saved as a TXT type, and no other .txt files must be present in the directory.
Required columns:
* ID - Unique sample names, used for matching the sample names between the different files.
* POP - Tag for the population of origin, used for grouping together several members from the same population.
Recommended columns:
* POP_SIMPLE - Contains the name of the population. If present, menus will show both tag and name for easier browsing.
* REGION - Geographical region designation that can be used to group many populations together. Useful for coloring and subsetting.
Other columns may be added to provide more information (linguistic family, continents, dataset, etc.), but are optional for this procedure. It is recommended that only those samples that were used in the analyses are contained in this file.
*The more comprehensive a popinfo file is, the more options will become available for the plotting apps.* These options include sorting, labeling, coloring and subsetting. Errors in the popinfo file may hinder or break the apps, so consider checking your popinfo file if an app fails.
### Running an App
To open an app, you have these alternatives:
* Open RStudio, load the `app.R` file and then click the button `Run`. A web browser window must open.
* Copy the entire code of `app.R` as text and paste it in an R console. A web browser window containing the app must open.
If an app is unable to open or breaks during operation, close the app window, use the `STOP` button to end all current processes, and restart it.
## PCA Plotter [PCAPlot]
Creates interactive PCA (principal component analysis) plots, where samples can be subset and colored by category.
Requirements:
- EVAL and EVEC files from the PCA program (e.g. *smartpca*). Only one per directory.
- popinfo file
Pipeline:
1. Reads first .evec and .eval files from a chosen directory.
2. Identifies names, regions and populations from a given popinfo.txt
3. Generates a PCA plot with color-coded regions.
Features:
- Plot types: Can select between points or tags for the plot.
- Select Population: Shows only a particular population.
- Emphasize Population: Highlights points or tags for a chosen population.
- Color by Category: User can choose the criteria for coloring.
- Show/Hide Legend: Displays all values from the selected category.
- (NEW) Interactive Zoom: select an area and double click to zoom in, double click again to zoom out.
Output:
- Plots can be saved by right-clicking the image. Default height: 900px.
## MDS Plotter [MDSPlot]
Creates interactive MDS (multidimensional scaling) plots, where samples can be subset and colored by category.
Requirements:
- MDS file from the plink. Only one per directory.
- popinfo file
Pipeline:
1. Reads first .mds file from a chosen directory.
2. Identifies names, regions and populations from a given popinfo.txt
3. Generates a MDS plot with color-coded regions.
Features:
- Plot types: Can select between points or tags for the plot.
- Select Population: Shows only a particular population.
- Emphasize Population: Highlights points or tags for a chosen population.
- Color by Category: User can choose the criteria for coloring.
- Show/Hide Legend: Displays all values from the selected category.
- Zoom: Expands view to better visualize a population.
Output:
- Plots can be saved by right-clicking the image. Default height: 900px.
## ADMIXTURE Plotter [AdmixPlot]
Creates interactive admixture plots, where samples can be sorted and/or subset, and components can be custom-recolored.
Requirements:
- R library: `shinyjs`
- Q files from the admixture program (all Q files in the directory are read)
- popinfo file
Pipeline:
1. Reads popinfo file from a chosen directory.
2. Reads .Q file of selected K.
3. Generates plot.
Features:
- Changing the 'Ks' parameter changes the plot showed.
- Select two or more populations for display.
- Add borders to individual bars.
- Collapse sample tags in population tags.
- Arrange samples by increasing biggest component.
- Subset by a value from a category.
- Custom re-coloring of components with visual color picker.
Output:
- Plots can be saved by right-clicking the image. Default height: 720px.
## 1000 Genomes Sampler [1KGSample]
Generates a list and a popinfo table for a subset of 1000 Genomes Project samples, useful for plink merging.
Requirements:
- None, all data required is contained in the app directory.
Pipeline:
1. Reads the popinfo file for the 1000 Genomes Project samples.
2. Identifies categories for easier subsetting.
3. Generates a table with random samples.
Features:
- Choose number of samples
- Subset regions/populations
- Preview the table
- Save table as a popinfo file and a list of IDs for the plink command `--keep`
Output:
- Sample ID list and its corresponding popinfo file are both saved within the `1KGSample/output`
## Population Sampler [POPSample]
Generates a list and a popinfo table from a given popfile, useful for plink merging.
Requirements:
- The input popinfo file should contain a column header named "POP"
Pipeline:
1. Reads the popinfo file given.
2. Identifies populations for easier subsetting.
3. Generates a table with random samples.
Features:
- Choose number of samples
- Subset regions/populations
- Preview the table
- Save table as a popinfo file and a list of IDs for the plink command `--keep`
Output:
- Sample ID list and its corresponding popinfo file are both saved within the `POPSample/output` directory. Files are overwritten each time the "Save Tables" button is clicked.<file_sep># Admixture Plotting
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Original Script: <NAME> (Nov 2012)
# Pipeline:
# 1. Reads .Q File of selected K.
# 2. Reads popinfo file.
# 3. Generates plot.
# Features:
# - Changing the 'Ks' parameter changes the plot showed.
#<START>
#<INPUT>
#Set working directory
setwd("PATH")
#Set Base File Name (all characters before ".[K-number].Q"):
basefile <- "File"
#Declare popinfo File:
pifile <-"popinfo.txt"
#Set K number to read:
Ks <- 7
#</INPUT>
#Data Extraction
# Build the "<basefile>.<K>.Q" filename and read the ancestry proportions
# (one row per sample, one column per component).
actfile <- paste(basefile,".",Ks,".Q",sep="")
kData <- read.table(actfile,header=FALSE)
popinfo <- read.csv(pifile,header=TRUE, sep= " ")
# Column 8 of popinfo is used as the population tag here (the sorting loop
# below compares its unique values against popinfo$POP).
barnames <- as.vector(popinfo[,8])
popnames <- unique(barnames)
#Data Sorting
# Within each population, reorder samples by their largest mean component.
# Assumes each population occupies a contiguous block of rows (the sorted
# rows are written back into min(pop):max(pop)).
# NOTE(review): the loop starts at popnames[2], so the first population is
# never sorted; the Shiny version of this app iterates from 1 -- TODO
# confirm whether skipping the first population is intentional.
for(p in popnames[2:length(popnames)]){
pop <- as.numeric(rownames(popinfo[popinfo$POP==p,]))
vsort <- names(sort(apply(kData[c(pop),],2,mean), decreasing = TRUE))[1]
popsort <- order(kData[c(pop),][vsort[1]])
kData[min(pop):max(pop),] <- kData[pop[popsort],]
}
#Data Formatting
# barplot() wants components in rows and samples in columns.
KData <- t(kData)
# Insert a gap of 2 bar-widths wherever the population tag changes.
# NOTE(review): diff() on column 8 works on the integer codes of a factor;
# if the column is read as character (R >= 4.0 default) this errors -- TODO
# confirm the R version / stringsAsFactors setting in use.
spaces <- c(0,diff(popinfo[,8]))
spaces <- replace(spaces, spaces != 0, 2)
# Blank out repeated labels so only the first sample of each population is
# labelled on the axis.
for (i in 2:length(barnames)){if(spaces[i] == 0){barnames[i] <- ""}}
barcolors <- c("red2","blue3","green4","blueviolet","darkorange","cyan2","gold"
,"deeppink1","chartreuse2","dodgerblue","saddlebrown")
#<OUTPUT>
#Plot ancestry proportions
barplot(KData, col=barcolors, border=NA, space=spaces, las=2,
xlab= "Population",ylab= paste("K=",Ks), names.arg = barnames,
cex.names = 0.7, cex.axis= 0.7, cex=0.5)
#</OUTPUT>
#<END>
#<SANDBOX>
# Ad-hoc exploration: re-plot only a manual selection of sample columns.
sel.col = c(1,2,3,30:300)
barplot(KData[,sel.col], col=barcolors, border=NA, space= spaces[sel.col], las=2,
xlab= "Population", ylab= paste("K=",Ks), names.arg= barnames[sel.col],
cex.names = 0.7, cex.axis= 0.7, cex=0.5)
<file_sep># 1000 Genomes Project Sampler (Shiny)
# Shiny: app.R
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Pipeline:
# 1. Reads the popinfo file for the 1000 Genomes Project samples.
# 2. Identifies categories for easier subsetting.
# 3. Generates a table with random samples.
#
# Features:
# - Choose number of samples
# - Subset regions/populations
# - Preview the table
# - Save table as a popinfo file and a list of IDs for plink command --keep
#setwd(dirname(parent.frame(1)$ofile))
# Bundled 1000 Genomes Phase 3 popinfo table; POP holds the population code
# and SPOP the super-population (region) code.
pi <- read.csv("1KGP3.popinfo.txt", sep="\t")
pop <- as.character(unique(pi$POP))
regs <- as.character(unique(pi$SPOP))
# UI layout: header, a sidebar with sampling controls, and a table preview.
ui <- fluidPage(
img(src="MorLabLogo.jpg", style = "float:right"),
titlePanel("1000 Genomes Project Sampler"),
helpText("Author: <NAME> | Developed at: LANGEBIO (MX)"),
hr(),
sidebarLayout(
sidebarPanel(width=2,
# How many individuals to draw per region (or per population).
numericInput("ns", label = "Samples per region/population:",
value= 10, min= 1, max=9999),
h5("Total samples selected:"),
textOutput("totsam"),
br(),
# Region filter (SPOP); takes precedence over the population filter.
selectizeInput("spops", label = "Regions selected:",
choices= regs, selected = NULL,
options= list(maxItems = length(regs)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))),
# Population filter (POP); used when no region is selected.
selectizeInput("pops", label = "Populations selected:",
choices= pop, selected = NULL,
options= list(maxItems = length(pop)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))),
# Writes the current sample to ./output (see server's Export).
actionButton("goButton", "Save Tables"),
textOutput("value")
),
mainPanel(
tableOutput("SampleTable")
)
)
)
# Server for the 1000 Genomes sampler: one reactive subsample plus an
# on-demand export of the current table.
server <- function(input, output) {
  # Draw input$ns sorted random row indices for each key in `keys`, matching
  # against the given popinfo column.
  pick_rows <- function(keys, column) {
    sel <- c()
    for (k in keys) {
      sel <- c(sel, sort(sample(which(column == k), input$ns)))
    }
    sel
  }
  # Current subsample: per selected region (SPOP) when regions are chosen,
  # else per selected population (POP), else per every population.
  dataset <- reactive({
    rows <- if (length(input$spops) > 0) {
      pick_rows(input$spops, pi$SPOP)
    } else if (length(input$pops) > 0) {
      pick_rows(input$pops, pi$POP)
    } else {
      pick_rows(pop, pi$POP)
    }
    pi[rows, ]
  })
  # Live sample count and table preview.
  output$totsam <- reactive(dim(dataset())[1])
  output$SampleTable <- renderTable({dataset()})
  # Status line; Export() is an eventReactive, so it only fires on the button.
  output$value <- renderText(paste(Export()))
  # Write the sampled popinfo table and the FAMID/ID list (for plink --keep)
  # into ./output when "Save Tables" is clicked.
  Export <- eventReactive(input$goButton, {
    setwd("output")
    write.table(dataset(), file="1KG.sample.popinfo.txt", row.names=F, quote=F, sep="\t", eol="\n")
    write.table(dataset()[,c("FAMID","ID")], file="1KG.sample.ids.txt", quote=F, sep="\t", row.names = F, col.names = F, eol="\n")
    setwd("..")
    return("Success: Tables saved to /output")
  })
}
shinyApp(ui = ui, server = server)<file_sep># AUTO ADMIXTURE PLOTTER (Shiny)
# Shiny: app.R
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Last Edit: March 10 2016
# Requirements:
# - R library: shinyjs
# - Q files from the admixture program (all Q files in the directory are read).
# - popinfo file
# Pipeline:
# 1. Reads popinfo file.
# 2. Reads .Q file of selected K.
# 3. Generates plot.
# Features:
# - Changing the 'Ks' parameter changes the plot showed.
# - Select two or more populations for display.
# - Add borders to individual bars.
# - Collapse sample tags in population tags.
# - Arrange samples by increasing biggest component.
# - Subset by a value from a category.
# - Custom re-coloring of components with visual color picker.
#<START>
# Load required libraries
library(shiny)
#library(shinyjs)
require(colourpicker)
#<INPUT>
# Choose data folder
# NOTE(review): choose.dir() is only available in R for Windows -- TODO
# confirm the intended platform before running elsewhere.
setwd(choose.dir(default = "", caption = "Select folder with ADMIXTURE files"))
#</INPUT>
#<PREPARATIONS>
# Read popinfo file (.txt):
# NOTE(review): in the pattern "\\popinfo.txt$" the "\\p" escape is just a
# literal "p", so this matches any file ending in "popinfo.txt"; the intent
# was presumably "\\.popinfo" -- harmless as written, but confirm.
pifile <- dir(pattern = "\\popinfo.txt$")[1]
popinfo <- read.csv(pifile,header=TRUE, sep= "\t")
# Fall back to space-separated when the tab parse yields a single column.
if (ncol(popinfo) == 1) {popinfo <- read.csv(pifile,header=TRUE, sep= " ")}
# Candidate subsetting factors: columns that vary but are not near-unique.
uniquecols <- sapply(popinfo,function(x) length(unique(x)))
fields <- names(uniquecols[uniquecols>1 & uniquecols < dim(popinfo)[1]/2])
# Menu labels: "TAG (Long name)" when POP_SIMPLE exists, bare tags otherwise.
if ("POP_SIMPLE" %in% colnames(popinfo)) {
pops <- unique(subset(popinfo, select = c(POP,POP_SIMPLE)))
pops <- gsub("_"," ",(paste(pops$POP," (",pops$POP_SIMPLE,")",sep="")))
} else {
pops <- as.character(unique(popinfo$POP))
}
barnames <- as.vector(popinfo$ID)
grpnames <- as.vector(popinfo$POP)
popnames <- unique(grpnames)
# Insert a gap of 2 bar-widths wherever the population changes.
# NOTE(review): diff() on POP works on the integer codes of a factor; if the
# column is read as character (R >= 4.0 default) this errors -- TODO confirm.
spaces <- c(0,diff(popinfo$POP))
spaces <- replace(spaces, spaces != 0, 2)
# Compute one centred label position per population block for "POP Tags".
popinfo$NewOrd <- match(popinfo$POP,popnames)
index <- c(1, diff(as.numeric(popinfo$NewOrd)))
index[index !=0] <- 1
borders <- seq(1, length(index))[index==1]
offset <- round(diff(c(borders,length(index) ) )/2)
newnames <- rep("", length(grpnames))
newnames[borders+offset] <- as.character(grpnames[borders+offset])
# List all Q files, keyed by their K (column count); track min/max K for
# the numeric input's bounds.
kmin <- 99
kmax <- 1
qfiles <- list()
for(f in dir(pattern = "\\.Q$")){
key <- count.fields(f)[1]
kmin <- ifelse(key < kmin, key, kmin)
kmax <- ifelse(key > kmax, key, kmax)
qfiles[[key]] <- paste(f)
}
#</PREPARATIONS>
# UI layout: header, a sidebar with plot parameters / subset controls /
# colour pickers, and a wide plotting area.
ui <- fluidPage(
# Page Title
img(src="MorLabLogo.jpg", style = "float:right"),
titlePanel("Auto ADMIXTURE Plotter"),
helpText("Author: <NAME> | Developed at: LANGEBIO (MX)"),
hr(),
# Sidebar
sidebarLayout(
# Input Panels
sidebarPanel(width=2,
h4("Parameters"),
textInput("plottitle", label="Title", value = ""),
# K value to display; bounds come from the .Q files found on disk.
numericInput("Ks", label = "Number of K",
value = kmin,
min=kmin, max=kmax),
hr(),
# Populations to display (empty selection = all populations).
selectizeInput("pops", label = "Population",
choices= pops, selected = NULL,
options= list(maxItems = length(pops)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))
),
# Populations whose bars are drawn wider for emphasis.
selectizeInput("pope", label = "Zoom 5x (All)",
choices= pops, selected = NULL,
options= list(maxItems = length(pops)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))
),
checkboxInput("brdr", label = "Border", value = FALSE),
checkboxInput("ptag", label = "POP Tags", value = TRUE),
checkboxInput("sort", label = "Sorted", value = TRUE),
br(),
h4("Auto-Subset"),
# popinfo column to subset by; its values populate the dynamic "Group"
# control rendered by the server (output$choosegrps).
selectizeInput("fctr", label = "Factor",
choices= fields, selected = NULL,
options= list(maxItems = 1,
placeholder = 'Choose factor',
onInitialize = I('function() { this.setValue(""); }'))),
uiOutput("choosegrps"),
br(),
h4("Coloring"),
# Twelve colour pickers, one per ancestry component (in K order).
div(style = "display:inline-block",
colourInput("col1", label= "Color 1:", value = "#FF0000", palette = "limited"),
colourInput("col2", label= "Color 2:", value = "#0000FF", palette = "limited"),
colourInput("col3", label= "Color 3:", value = "#00FF00", palette = "limited"),
colourInput("col4", label= "Color 4:", value = "#FFFF00", palette = "limited"),
colourInput("col5", label= "Color 5:", value = "#00FFFF", palette = "limited"),
colourInput("col6", label= "Color 6:", value = "#FF00FF", palette = "limited")),
div(style = "display:inline-block",
colourInput("col7", label= "Color 7:", value = "#FF7F00", palette = "limited"),
colourInput("col8", label= "Color 8:", value = "#1E90FF", palette = "limited"),
colourInput("col9", label= "Color 9:", value = "#008B00", palette = "limited"),
colourInput("col10", label= "Color 10:", value = "#9400D3", palette = "limited"),
colourInput("col11", label= "Color 11:", value = "#8B4500", palette = "limited"),
colourInput("col12", label= "Color 12:", value = "#999999", palette = "limited"))
),
# Plotting Area
mainPanel(width=10,
plotOutput("AdmixPlot", height = "480px", width= "1280px")
)
)
)
#</UI>
#<SERVER>
# Server: renders the admixture bar plot for the selected K, population
# subset, emphasis set and colour palette.
server <- function(input, output) {
  #<REACTIVES>
  # Switches
  # TRUE when neither a population subset nor a factor group is selected.
  # NOTE(review): until the renderUI below creates input$grps it is NULL and
  # `input$grps == ""` has length zero -- Shiny re-renders once the control
  # exists; TODO confirm the first render is handled acceptably.
  all.pops <- reactive(ifelse(is.null(input$pops) && input$grps == "",TRUE,FALSE))
  # Menu entries read "TAG (Name)"; the POP tag is the first 3 characters.
  sub.pops <- reactive(substr(input$pops,1,3))
  pop.emph <- reactive(substr(input$pope,1,3))
  # The 12 user-picked component colours, in K order.
  barcolors <- reactive(c(input$col1,input$col2,input$col3,input$col4,
                          input$col5,input$col6,input$col7,input$col8,
                          input$col9,input$col10,input$col11,input$col12))
  #</REACTIVES>
  #<OUTPUT>
  #<UI>
  # Dynamic "Group" dropdown whose choices are the values of the chosen
  # subsetting factor.
  output$choosegrps <- renderUI({
    if(length(input$fctr) == 0) return()
    groups <- unique(as.vector(popinfo[[input$fctr]]))
    selectizeInput("grps", label = "Group",
                   choices= groups, selected = NULL,
                   options= list(maxItems = 1,
                                 placeholder = 'Choose factor value',
                                 onInitialize = I('function() { this.setValue(""); }')))
  })
  #</UI>
  # Plot Rendering
  output$AdmixPlot <- renderPlot({
    #<PROCESSING>
    # Load the Q file matching the requested K.
    kData <- read.table(qfiles[[input$Ks]],header=FALSE)
    # Data Sorting: within each population, order samples by their three
    # largest mean components (assumes each population occupies a
    # contiguous block of rows).
    if(input$sort){
      for(p in popnames[1:length(popnames)]){
        pop <- as.numeric(rownames(popinfo[popinfo$POP==p,]))
        vsort <- names(sort(apply(kData[c(pop),],2,mean), decreasing = TRUE))[c(1,2,3)]
        popsort <- order(kData[c(pop),][vsort[1]],kData[c(pop),][vsort[2]],kData[c(pop),][vsort[3]])
        barnames[pop] <- barnames[pop[popsort]]
        kData[min(pop):max(pop),] <- kData[pop[popsort],]
      }
    }
    # barplot() wants components in rows and samples in columns.
    KData <- t(kData)
    #</PROCESSING>
    par(las=2, font.lab=2, mar=c(4,4,4,0)+0.1)
    wide1 <- rep(1,dim(KData)[2])
    # Bar widths: emphasized populations are drawn 3x wide.
    # BUGFIX: widee used to be computed only inside the all-populations
    # branch, so selecting a population subset while an emphasis population
    # was set crashed with "object 'widee' not found". It is now computed
    # once, up front, and subset where needed.
    widee <- wide1
    if (!is.null(input$pope)) {
      pope.idx <- c()
      for(i in pop.emph()){pope.idx <- c(pope.idx,which(popinfo$POP %in% i))}
      widee <- replace(wide1,pope.idx,3)
    }
    # Check: All populations selected?
    if(all.pops()) {
      barplot(KData, space=spaces, cex.names = 0.8,
              col=barcolors(), border=ifelse(input$brdr,"#AAAAAA",NA),
              names.arg = if(input$ptag) newnames else (barnames),
              width= if(is.null(input$pope)) wide1 else (widee),
              main= input$plottitle, cex.axis = 0.8,
              xlab= "Population",ylab= paste("K= ",input$Ks))
    } else { # Subpopulation Selected
      # Column indices to keep: either the rows matching the chosen factor
      # value, or the rows of the explicitly selected populations.
      sbPs.idx <- c()
      if (input$grps != ""){
        sbPs.idx <- which(popinfo[[input$fctr]] %in% input$grps)
        sbPs.title <- paste(input$fctr, ": ", input$grps, sep="")
      } else {
        for(i in sub.pops()){sbPs.idx <- c(sbPs.idx,which(popinfo$POP %in% i))}
        sbPs.title <- as.character(unique(popinfo$POP[sbPs.idx]))
      }
      # Single-width gap wherever the population changes within the subset.
      # NOTE(review): diff() on POP assumes a factor column (diffs the
      # integer codes); with a character column this errors -- TODO confirm.
      subspaces <- c(0,diff(popinfo$POP[sbPs.idx]))
      subspaces <- replace(subspaces, subspaces != 0, 1)
      barplot(KData[,sbPs.idx], space= subspaces, cex.names = 1,
              col=barcolors(), border=ifelse(input$brdr,"#AAAAAA",NA),
              names.arg= if(input$ptag) newnames[sbPs.idx] else (barnames[sbPs.idx]),
              width= if(is.null(input$pope)) wide1 else (widee[sbPs.idx]),
              main = input$plottitle,
              xlab= "Population",ylab= paste("K= ",input$Ks))
    }
  })
  #</OUTPUT>
}
#</SERVER>
#<APP>
# Construct and launch the Shiny app (blocks while the app is running).
shinyApp(ui = ui, server = server)
#</APP>
#<END><file_sep># AUTO PCA PLOTTER (Shiny) v.2
# Shiny: app.R
# Author: <NAME>
# Developed at: LANGEBIO - Mexico
# Last Edit: May 08 2017
# Requirements:
# - EVAL and EVEC files from the smartpca*. Only one per directory.
# - popinfo file
# Pipeline:
# 1. Reads first .evec and .eval files from a chosen directory.
# 2. Identifies names, regions and populations from a given popinfo.txt
# 3. Generates a color-coded PCA plot.
# Features:
# - Plot types: Can select between points or tags for the plot.
# - Select Population: Displays only selected population(s).
# - Emphasize Population: Highlights points or tags for a chosen population.
# - Color by Category: User can choose the criteria for coloring using the popinfo.
# - Auto-Legend: Shows color codings for the selected category.
# - Interactive Zoom: select an area and double click to zoom in, double click again to zoom out.
# - [TBD] ASH transformation for contextual zoom.
#<START> ####
# Load required libraries
require(shiny)
library(scales)
library(ggplot2)
#<INPUT> ####
# Choose data directory
# NOTE(review): choose.dir() is only available in R for Windows -- TODO
# confirm the intended platform before running elsewhere.
setwd(choose.dir(default = "", caption = "Select folder with PCA files"))
#</INPUT>
#<PREPARATIONS> ####
# FUNCTIONS
refact <- function(x) {
  # Re-level `x` in order of first appearance, keeping NA as an explicit
  # trailing level when present (so missing values can still be plotted).
  fx <- if (is.factor(x)) x else factor(x)
  lev <- as.character(na.omit(unique(fx)))
  if (anyNA(fx)) {
    lev <- c(lev, NA)
  }
  factor(fx, levels = lev, exclude = NULL)
}
# Load PCA files: first .evec (coordinates) and .eval (eigenvalues) found.
pca.data <- read.table(dir(pattern = "\\.evec$")[1], stringsAsFactors = F)
# NOTE: this shadows base::eval; the name is only used below as the numeric
# eigenvalue vector.
eval <- scan(dir(pattern = "\\.eval$")[1])
# Locate the numeric component columns; the ID column sits just before PC1.
ncomps <- table(sapply(pca.data, class))["numeric"]
PC1Col <- match("numeric",sapply(pca.data, class))
IDcol <- PC1Col - 1
colnames(pca.data)[IDcol:(IDcol+ncomps)] <- c("ID",paste0("PC",1:ncomps))
# Strip any "FAM:" prefix from the evec sample names.
popIDs <- gsub(".*:","",pca.data$ID)
# Read popinfo file (.txt):
# NOTE(review): "\\p" in the pattern is a literal "p", so this matches any
# file ending in "popinfo.txt"; presumably "\\.popinfo" was intended.
pifile <- dir(pattern = "\\popinfo.txt$")[1]
popinfo <- read.table(pifile,sep="\t",header=TRUE,comment.char = "")
# Data merging: attach popinfo columns by sample ID, keeping evec row order.
pca.data <- merge(pca.data, popinfo, by.x = "ID", sort = F)
# Re-level every factor column in order of appearance (see refact above).
pca.data <- as.data.frame(lapply(pca.data, function(x) if(class(x) == "factor"){refact(x)} else{x}))
# Data field scan: columns that vary become candidate colour groupings;
# the ID/PC columns themselves are excluded.
pops <- as.character(unique(pca.data$POP))
uniquecols <- sapply(pca.data,function(x) length(unique(x)))
fields <- names(uniquecols[uniquecols>1])[-(IDcol:(IDcol+ncomps))]
# Menu labels "TAG (Long name)" when POP_SIMPLE exists.
if ("POP_SIMPLE" %in% colnames(pca.data)) {
names(pops) <- unique(paste0(pca.data$POP," (",pca.data$POP_SIMPLE,")"))
}
# Default plot title: popinfo filename up to its first dot.
deftitle <- unlist(strsplit(pifile,"[.]"))[1]
#</PREPARATIONS>
#<UI> ####
# UI layout: header, a settings sidebar, and a large plot area wired for
# brush + double-click zooming (see the server's observeEvent).
ui <- fluidPage(
# Page Title
img(src="MorLabLogo.jpg", style = "float:right"),
titlePanel("Auto PCA Plotter"),
helpText("Author: <NAME> | Developed at: LANGEBIO (MX)"),
hr(),
# Sidebar
sidebarLayout(
# Input Panels
sidebarPanel(width=3,
h4("Settings"),
textInput("plottitle", label="Title", value = ""),
# Which principal components to plot on X and Y.
numericInput("PCa", label = "First Component (X)", value = 1,
min = 1, max = ncomps, step = 1),
numericInput("PCb", label = "Second Component (Y)", value = 2,
min = 1, max = ncomps, step = 1),
# Multi-select of populations to display (empty selection = all).
selectizeInput("pops", label = "Populations displayed:",
choices= pops, selected= NULL,
options= list(maxItems = length(pops)-1,
placeholder = 'Select population(s)',
onInitialize = I('function() { this.setValue(""); }'))
),
# Single population to highlight on top of the current view.
selectizeInput("pope", label = "Populations emphasis:",
choices= pops, selected= NULL, multiple= F,
options= list(placeholder = 'None',
onInitialize = I('function() { this.setValue(""); }'))
),
hr(),
# popinfo column used to colour the samples (defaults to POP).
selectizeInput("flds", label = "Group Coloring:",
choices= fields, selected = "POP",
options= list(maxItems = 1,
placeholder = 'Choose color grouping'#,
#onInitialize = I('function() { this.setValue(""); }')
)),
# Draw samples as points or as ID labels.
radioButtons("type", label = "Type:",
choices = list("Points" = 1, "Text" = 2),
selected = 1),
hr(),
downloadButton('dlPlot', 'Save as PDF'),
hr(),
h5("Points Info"),
verbatimTextOutput("brshinfo")
),
# Plotting Area
mainPanel(width=9,
plotOutput("PCAPlot", width = "1080px", height= "960px", dblclick = "dclk", brush = brushOpts(id= "brsh", resetOnNew = TRUE))
)
)
)
#</UI>
#<SERVER> ####
# Shiny server: reactive derivations of the inputs, a zoom observer, and
# the three outputs (plot, PDF download, brushed-points table).
server <- function(input, output) {
  #<REACTIVES>
  # Inputs
  # IDs of the samples belonging to the displayed / emphasized populations.
  sub.pops <- reactive(pca.data[pca.data$POP %in% input$pops,IDcol])
  pope.idn <- reactive(pca.data[pca.data$POP %in% input$pope,IDcol])
  # Legend labels: levels of the chosen grouping field (emphasis excluded).
  groups <- reactive(as.character(levels(pca.data[pca.data$POP != input$pope,input$flds])))
  # Column names of the two selected components.
  PCaCol <- reactive(paste0("PC",input$PCa))
  PCbCol <- reactive(paste0("PC",input$PCb))
  # Axis labels with the explained-variance percentage.
  # NOTE(review): 'eval' is assumed to be an eigenvalue vector loaded
  # earlier in the script (it shadows base::eval) — confirm upstream.
  pct.PCa <- reactive(paste(PCaCol()," (",percent(eval[input$PCa]/sum(eval)),")",sep=""))
  pct.PCb <- reactive(paste(PCbCol()," (",percent(eval[input$PCb]/sum(eval)),")",sep=""))
  # Current zoom window; NULL means "full extent".
  ranges <- reactiveValues(x = NULL, y = NULL)
  #</REACTIVES>
  #<OBSERVERS>
  # Double-click: zoom into the brushed rectangle, or reset when no brush.
  observeEvent(input$dclk, {
    brush <- input$brsh
    if (!is.null(brush)) {
      ranges$x <- c(brush$xmin, brush$xmax)
      ranges$y <- c(brush$ymin, brush$ymax)
    } else {
      ranges$x <- NULL
      ranges$y <- NULL
    }
  })
  #</OBSERVERS>
  #<OUTPUT> ####
  output$PCAPlot <- renderPlot({
    # Data subset
    pca.keep <- pca.data
    if (!is.null(input$pops)){
      pca.keep <- pca.data[ popIDs %in% sub.pops(), , drop= F]
    }
    # Split off the emphasized population so it can be drawn separately.
    if (input$pope %in% pops){
      pca.emph <- pca.data[ popIDs %in% pope.idn(), , drop= F]
      pca.emph$plotColors <- "#000000FF"
      pca.keep <- subset(pca.keep, !(ID %in% pca.emph$ID))
    }
    #Coloring
    if (input$flds == "POP") {
      # Prefer a user-supplied COLOR column; otherwise build a rainbow map.
      if("COLOR" %in% colnames(pca.keep)){
        pca.keep$plotColors <- paste0(pca.keep$COLOR,"FF")
      } else {
        grp.colors <- setNames(rainbow(length(pops), s = 0.9, v = 0.9), pops)
        pca.keep$plotColors <- grp.colors[pca.keep$POP]
      }
    } else {
      # Non-POP grouping: rainbow over defined levels, grey for NA values.
      grp.colors <- setNames(rainbow(length(na.omit(groups())), s = 0.9, v = 0.9), na.omit(groups()))
      if(any(is.na(groups()))){grp.colors <- c(grp.colors,setNames("#777777FF", NA))}
      pca.keep$plotColors <- grp.colors[as.character(pca.keep[,input$flds])]
    }
    # Freeze colors as a factor so the legend follows first appearance.
    pca.keep$plotColors <- factor(pca.keep$plotColors, unique(pca.keep$plotColors))
    if(any(is.na(groups()))){
      levels(pca.keep$plotColors) <- c(levels(pca.keep$plotColors),"#777777FF")
      pca.keep$plotColors[is.na(pca.keep$plotColors)] <- "#777777FF"
    }
    #Plotting
    pca.plot <- ggplot(data= pca.keep, aes_string(x= PCaCol(), y= PCbCol(), color="plotColors")) + theme_light() +
      geom_hline(yintercept= 0, colour= "#333333") + geom_vline(xintercept= 0, colour= "#333333") +
      {if (input$type == 1) { #POINTS
        geom_point(size= 3, alpha= 0.8)
      }} +
      {if (input$type == 2) { #TEXT
        geom_text(aes(label= ID), size = 3, alpha= 0.8)
      }} +
      #Emphasis
      {if (input$pope %in% pops & input$type == 1) {
        geom_point(data= pca.emph, aes_string(x= PCaCol(), y= PCbCol(), color="plotColors"), size= 4, alpha= 0.8, shape= 23, fill= "black", color= "white")}} +
      {if (input$pope %in% pops & input$type == 2) {
        geom_label(data= pca.emph, aes_string(x= PCaCol(), y= PCbCol(), label= "ID"), size = 3, alpha= 0.8, color= "white", fill= pca.emph$plotColors, fontface="bold")}} +
      scale_color_identity(name= input$flds, labels= groups(), guide= "legend") +
      ggtitle(ifelse(input$plottitle == "", deftitle, input$plottitle)) + theme(plot.title = element_text(lineheight=.8, face="bold", hjust = 0.5)) +
      labs(x= pct.PCa(), y= pct.PCb()) + coord_cartesian(xlim= ranges$x, ylim= ranges$y, expand = T)
    #Output
    # Side effect: every re-render also refreshes the downloadable PDF.
    ggsave("PCAplot.pdf",pca.plot, width= 24, height= 21, units="cm")
    pca.plot
  })
  # Serve the PDF written by the renderPlot above.
  output$dlPlot <- downloadHandler(
    filename = "PCAplot.pdf",
    content = function(file) {
      file.copy("PCAplot.pdf", file, overwrite=TRUE)
    }
  )
  # Table of the samples currently inside the brush rectangle.
  output$brshinfo <- renderPrint({pca.data[rownames(brushedPoints(pca.data[,],input$brsh, xvar= PCaCol(), yvar= PCbCol())),c("ID","POP","POP_SIMPLE")]})
  #</OUTPUT>
}
#</SERVER>
#<APP> ####
# Assemble and launch the Shiny application from the UI and server above.
shinyApp(ui = ui, server = server)
#</APP>
#<SANDBOX> ####
#<END> #### | eaeed237ac137fc49b03daab1b707d8d3195f990 | [
"Markdown",
"R"
] | 9 | R | SciWilro/PopGenRShiny | 8ece68be70d13f9d5f0cc0cd0d131bf037aa1d2f | b4a8aa8c00ebd79f628f8cb7ef5a3c0f08dc709a |
refs/heads/master | <file_sep>// ViewController.swift
// SlidingMatrixWeb
//
// !! Important:
// Please add the WebKit framework in target/Build Phases.
//
import UIKit
import WebKit
class ViewController: UIViewController, WKUIDelegate {

    @IBOutlet weak var wv: WKWebView!
    //[!] Connect to a WKWebView on the StoryBoard

    /// Builds the JavaScript that performs one move in the game page: it
    /// resolves the tile addressed by key code `key`, slides it, shows the
    /// "solved" dialog when the board is complete, then redraws.
    /// (This snippet was previously duplicated nine times across the
    /// gesture handlers.)
    private func moveScript(for key: String) -> String {
        return "dir=" + keyCode[key]! + ";cmd=core_location(dir);if (cmd!=null){move(core_position[cmd[0]],cmd[0],cmd[1]);if (is_neat()){if(battle_mod){document.getElementById('solved_window_battle').style.display=\"block\"}else{document.getElementById('more_window_finished').style.display = \"block\"}}}draw();"
    }

    /// Injects the move for `key` into the web view.
    private func sendMove(_ key: String) {
        wv.evaluateJavaScript(moveScript(for: key), completionHandler: nil)
    }

    /// True when the gesture started on the left half of the screen; the
    /// two screen halves map to the WASD and IJKL key groups respectively.
    private func startedOnLeftHalf(_ recognizer: UISwipeGestureRecognizer) -> Bool {
        return recognizer.location(in: self.view).x < UIScreen.main.bounds.width/2
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.backgroundColor = UIColor(red: 4/255, green: 68/255, blue: 60/255, alpha: 1)
        wv.uiDelegate = self
        // Load the hosted game page.
        let smURL = URL(string: "https://eEhyQx.github.io/SlidingMatrix/SlidingMatrix.html")
        let request = URLRequest(url: smURL!)
        wv.load(request)
        // One recognizer per swipe direction, all attached to the root view.
        let swipeLeft = UISwipeGestureRecognizer(target:self, action:#selector(swipeLeft(_:)))
        swipeLeft.direction = .left
        let swipeRight = UISwipeGestureRecognizer(target:self, action:#selector(swipeRight(_:)))
        swipeRight.direction = .right
        let swipeUp = UISwipeGestureRecognizer(target:self, action:#selector(swipeUp(_:)))
        swipeUp.direction = .up
        let swipeDown = UISwipeGestureRecognizer(target:self, action:#selector(swipeDown(_:)))
        swipeDown.direction = .down
        self.view.addGestureRecognizer(swipeLeft)
        self.view.addGestureRecognizer(swipeRight)
        self.view.addGestureRecognizer(swipeUp)
        self.view.addGestureRecognizer(swipeDown)
    }

    @objc func swipeLeft(_ recognizer:UISwipeGestureRecognizer){
        print("swipe left")
        sendMove(startedOnLeftHalf(recognizer) ? "A" : "J")
    }

    @objc func swipeRight(_ recognizer:UISwipeGestureRecognizer){
        print("swipe right")
        sendMove(startedOnLeftHalf(recognizer) ? "D" : "L")
    }

    @objc func swipeUp(_ recognizer:UISwipeGestureRecognizer){
        print("swipe up")
        sendMove(startedOnLeftHalf(recognizer) ? "W" : "I")
    }

    @objc func swipeDown(_ recognizer:UISwipeGestureRecognizer){
        print("swipe down")
        sendMove(startedOnLeftHalf(recognizer) ? "S" : "K")
    }

    /// Shake / motion gesture is treated as the "O" command.
    override func motionEnded(_ motion: UIEvent.EventSubtype, with event: UIEvent?) {
        sendMove("O")
    }
}
/// Maps the WASD / IJKL / O characters to the JavaScript key codes the
/// game page expects as its `dir` value.
let keyCode: [String: String] = [
    "W": "87", "A": "65", "S": "83", "D": "68",
    "I": "73", "J": "74", "K": "75", "L": "76",
    "O": "79"
]
| 1435ac25325568a875f8b6a0bbcb6a0f028501a8 | [
"Swift"
] | 1 | Swift | wangyiwei2015/SlidingMatrix | b95a8e736cf2b80dfb7dddf2667c61a0d1a1e0ae | 355efd7d4259fa8c7799e5192fa91d4dcd4f5859 |
refs/heads/master | <repo_name>DarrylD/react-native-sidemenu-examples<file_sep>/README.md
# react native sidemenu shrink example
A small project for experimenting with side-menu and FAB animations on Android.
<img src="https://raw.githubusercontent.com/DarrylD/react-native-sidemenu-shrink/master/animation.gif" width="275" height="auto" alt="Buttons">
<img src="https://raw.githubusercontent.com/DarrylD/react-native-sidemenu-shrink/master/animation-fab.gif" width="275" height="auto" alt="Buttons">
## Setup
```
$ npm install
```
## Running on emulator
Start the emulator and run:
```
$ npm start
```
## Running on Device
Connect device (with usb debugging):
```
$ npm run android
```
## Todo
- [x] fix the scale animation (it's not animating...)
- [x] abstract components
- [ ] animate each menu item sequentially in side menu
- [ ] make proper components and use this as kitchen sink
# License
MIT
<file_sep>/components/Sidemenu.js
/**
* Sample React Native App
* https://github.com/facebook/react-native
* @flow
*/
import React, { Component } from 'react';
import {
AppRegistry,
StyleSheet,
Text,
View,
ScrollView,
LayoutAnimation,
UIManager,
Platform,
TouchableOpacity,
Animated
} from 'react-native';
/**
 * Shrinking side menu: renders a fixed list of links underneath the main
 * content (passed as `children`) and scales/offsets the content layer to
 * reveal the menu when `props.active` is true.
 *
 * Expected props: active, afterMenuClick, handleToggleMenu,
 * handleChangeMenu, children.
 */
export default class Sidemenu extends Component {

  constructor(props){
    super(props)
    this.state = {
      active:false,                      // mirrors props.active
      fadeAnim: new Animated.Value(0),   // opacity of the menu links (0..1)
      shrink: new Animated.Value(0),     // drives the content scale spring
    }
    this.setAnimation();
    // Menu entries; the tapped name is forwarded to props.handleChangeMenu.
    this.links = [
      'fab',
      'slide',
      'side',
      'Entries',
      'Search',
      'Logout',
    ]
    // Content-layer styles while the menu is OPEN: scaled up via the
    // shrink interpolation and pushed off to the right.
    this.animatedInStyles = {
      transform: [
        {
          scale: this.state.shrink.interpolate({
            inputRange: [0, 1],
            outputRange: [1, 2],
          })
        },
      ],
      flex: 5,
      right: -180,
    }
    // Content-layer styles while the menu is CLOSED.
    this.animatedOutStyles = {
      transform: [
        {
          scale: this.state.shrink.interpolate({
            inputRange: [0, 1],
            outputRange: [1.5, 4],
          })
        },
      ],
      flex: 1,
      right: 0,
    }
  }

  // Re-arm the layout transition and the shrink spring before each render.
  componentWillUpdate() {
    LayoutAnimation.easeInEaseOut();
    this.setAnimation();
  }

  // Mirror the parent's `active` flag, then fade the menu links in or out.
  componentWillReceiveProps(nextProps){
    this.setState({active:nextProps.active}, this.handleActivateMenuComplete)
  }

  // Spring animation on `shrink`; the negative toValue overshoots slightly.
  setAnimation(){
    const animationOptions ={
      toValue: -.2, // Returns to the start
      velocity: 3, // Velocity makes it move
      tension: -4, // Slow
      friction: 4, // Oscillate a lot
    }
    Animated
      .spring(this.state.shrink, animationOptions)
      .start();
  }

  // NOTE(review): not referenced anywhere inside this class.
  handleActivateMenu(){
    this.props.afterMenuClick()
  }

  // Fade the link layer to full opacity when active, to zero otherwise.
  handleActivateMenuComplete(){
    const animationOptions = {
      toValue: this.state.active ? 1 : 0,
      duration: 550,
    }
    Animated
      .timing(this.state.fadeAnim, animationOptions )
      .start()
  }

  // A link was tapped: ask the parent to toggle the menu and navigate.
  handleToggleMenu(selectedMenu){
    //sending state back to parent
    this.props.handleToggleMenu()
    this.props.handleChangeMenu(selectedMenu)
  }

  // The link list wrapped in the opacity-animated layer; the last link
  // gets the divider-less style.
  renderMenu(){
    const menuState = this.props.active
      ? styles.menuActive
      : styles.menu
    const last = this.links.length - 1
    return (
      <View style={menuState}>
        <Animated.View style={{opacity: this.state.fadeAnim}}>
          {this.links.map( (name, i) =>
            <TouchableOpacity key={name} style={[styles.linkWrapper, i === last ? styles.last : null]} onPress={()=> this.handleToggleMenu(name)} >
              <Text style={styles.link}>{name}</Text>
            </TouchableOpacity>
          )}
        </Animated.View>
      </View>
    )
  }

  render() {
    // Choose the content-layer styling for the current open/closed state.
    const activeMenu = this.props.active
      ? this.animatedInStyles
      : this.animatedOutStyles
    return (
      <View style={styles.outer}>
        {this.renderMenu()}
        <Animated.View style={[styles.outer, activeMenu]}>
          {this.props.children}
        </Animated.View>
      </View>
    );
  }
}
const styles = StyleSheet.create({
  // Full-screen wrapper; also reused for the animated content layer.
  outer: {
    backgroundColor: '#ccc',
    flex: 1,
  },
  // Menu hidden: parked partly off-screen to the left.
  menu:{
    position:'absolute',
    margin: 20,
    left: -50
  },
  // Menu shown: flush with the left edge.
  menuActive:{
    position:'absolute',
    margin: 20,
    left: 0
  },
  // One tappable menu row with a bottom divider.
  linkWrapper:{
    borderBottomWidth: 1,
    borderBottomColor: '#bbb',
    marginBottom: 20,
    width: 100,
    paddingLeft:10
  },
  // Applied to the last row: suppresses the divider.
  last:{
    borderBottomWidth: 0,
  },
  link:{
    paddingTop:10,
    paddingBottom:10,
    fontSize:18
  }
});
<file_sep>/android/settings.gradle
// Gradle project name for this React Native app.
rootProject.name = 'NativeSidemenu'
// The single Android application module.
include ':app'
<file_sep>/components/Fabmenu.js
/**
* Sample React Native App
* https://github.com/facebook/react-native
* @flow
*/
import React, { Component } from 'react';
import {
AppRegistry,
StyleSheet,
Text,
View,
ScrollView,
LayoutAnimation,
UIManager,
Platform,
TouchableOpacity,
Animated,
Dimensions,
ListView,
Easing
} from 'react-native';
const _ = require('lodash');

// Device viewport, captured once at module load (does not track rotation).
const {height, width} = Dimensions.get('window');
// Number of menu buttons rendered per row in the fab-menu grid.
const ITEMS_PER_ROW = 3
export default class Fabmenu extends Component {
constructor(props){
super(props)
this.state = {
active:false,
cardAnimation: new Animated.Value(0),
menuAnimation: new Animated.Value(0),
}
//our fake links to the app
this.links = [
'fab',
'slide',
'side',
'Entries',
'Search',
'Logout',
]
}
componentDidMount(){
this.handleMoveCard()
}
componentWillUpdate() {
//want to keep this here to keep any animatons on state change
//not needed now, maybe later
LayoutAnimation.easeInEaseOut();
}
componentWillReceiveProps(nextProps){
this.setState({active:nextProps.active}, this.handleMoveCardStart)
}
handleToggleMenu(selectedMenu){
//sending state back to parent
this.props.handleToggleMenu()
if(selectedMenu) this.props.handleChangeMenu(selectedMenu)
}
handleMoveCardStart(){
this.setState({active:this.state.active}, ()=> this.handleMoveCard() )
}
handleMoveCard(){
const menuAnimationOptions = {
toValue: this.props.active ? 1 : 0,
duration: this.props.active ? 400 : 100,
}
const cardAnimationOptions = {
toValue: this.props.active ? -150 : 0,
easing: Easing.elastic(1.3),
duration: 350,
}
Animated.parallel([
Animated.timing(this.state.menuAnimation, menuAnimationOptions),
Animated.timing(this.state.cardAnimation, cardAnimationOptions),
]).start( ()=> this.handleMoveCardEnd() );
}
handleMoveCardEnd(){
console.log('Done with animation')
this.props.afterAnimation()
}
renderRow(links) {
return links.map((name, i) => {
return (
<TouchableOpacity key={i} style={[styles.linkBlock]} onPress={()=> this.handleToggleMenu(name)} >
<Text style={styles.link}>{name}</Text>
</TouchableOpacity>
)
})
}
renderItemsInGroupsOf(count) {
return _.chunk(this.links, ITEMS_PER_ROW).map( (itemsForRow, i) => {
return (
<View style={styles.linkRow} key={i}>
{this.renderRow(itemsForRow)}
</View>
)
})
}
renderMenu(){
const menuStateStyles = {
position:'absolute',
opacity: this.state.menuAnimation,
bottom: 80,
left: 20,
width: width - 40
}
return (
<Animated.View style={menuStateStyles}>
<ScrollView onLayout={this.handleRotation} contentContainerStyle={styles.scrollView}>
{this.renderItemsInGroupsOf(ITEMS_PER_ROW)}
</ScrollView>
</Animated.View>
)
}
render() {
const cardAnimationStyle = {
top: this.state.cardAnimation
}
return (
<View style={styles.outer}>
{this.renderMenu()}
<Animated.View style={[styles.inner, cardAnimationStyle]}>
{this.props.children}
</Animated.View>
<View style={styles.btn}>
<TouchableOpacity style={{width:50, height:50}} onPress={()=> this.handleToggleMenu() } />
</View>
</View>
);
}
}
// NOTE(review): renderMenu references styles.scrollView, but no such entry
// exists here — the prop silently resolves to undefined.
const styles = StyleSheet.create({
  // Full-screen background that centers the card.
  outer: {
    backgroundColor: '#F6F6F6',
    flex: 1,
    padding:20,
    justifyContent: 'center',
    alignItems: 'center',
  },
  // The main content card.
  inner:{
    flex: 1,
    elevation: 2,
    width: width-40, //setting the width of our main card
  },
  // The round FAB button under the card.
  btn: {
    padding: 12,
    backgroundColor: 'white',
    margin:10,
    marginBottom:0,
    width:50,
    height:50,
    borderRadius: 40,
    elevation: 2
  },
  btnInner:{
    width:50,
    height:50
  },
  // One row of the menu grid.
  linkRow: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'flex-start',
  },
  // One bordered, tappable cell inside a row.
  linkBlock:{
    flex:1,
    padding:10,
    borderWidth: 1,
    borderColor: '#ccc',
    borderRadius: 4,
    margin:5,
    alignItems: 'center',
  },
  link:{
    fontSize:14,
    justifyContent: 'center',
  }
});
| a3fac61b02b6d79e7729f7c725e07854dc768df8 | [
"Markdown",
"JavaScript",
"Gradle"
] | 4 | Markdown | DarrylD/react-native-sidemenu-examples | de7bd784f1da519f43ec24d489ef1fdb664c02d5 | 7cfb5f1712dba5cab7768e1a37bbec6b8d67aa5b |
refs/heads/master | <repo_name>rionik92/HW7.0my<file_sep>/src/com/company/Main.java
package com.company;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
public class Main {

    /**
     * Reads one word from stdin and resolves it against a small color
     * "dictionary": if the word is one of a color's synonyms, the color and
     * all of its synonyms are printed; if the word is a color itself, its
     * synonyms are printed.  Output is byte-identical to the original.
     */
    public static void main(String[] args) {
        // color -> words associated with that color
        Map<String, String[]> dict = new HashMap<>();
        dict.put("green", new String[]{"apple", "grass"});
        dict.put("yellow", new String[]{"chicken", "sun"});
        dict.put("black", new String[]{"space", "night", "босс"});
        dict.put("blue", new String[]{"ocean", "sky"});

        Scanner scanner = new Scanner(System.in);
        System.out.println("Color?");
        String word = scanner.nextLine();

        Set<Map.Entry<String, String[]>> entries = dict.entrySet();

        // Case 1: the input is a synonym -> print its color and all siblings.
        for (Map.Entry<String, String[]> entry : entries) {
            for (String synonym : entry.getValue()) {
                if (word.equals(synonym)) {
                    System.out.println(entry.getKey());
                    for (String sibling : entry.getValue()) {
                        System.out.println(sibling);
                    }
                }
            }
        }

        // Case 2: the input is a color -> print its synonyms.
        for (Map.Entry<String, String[]> entry : entries) {
            if (word.equals(entry.getKey())) {
                for (String synonym : entry.getValue()) {
                    System.out.println(synonym);
                }
            }
        }

        // Inverse index: synonym -> its sibling array with the synonym
        // itself replaced by the color.  Built exactly as before.
        // NOTE(review): the result is never used or printed.
        Map<String, String[]> dictionaryMap = new HashMap<>();
        for (Map.Entry<String, String[]> entry : entries) {
            String color = entry.getKey();
            String[] synonyms = entry.getValue();
            for (String synonym : synonyms) {
                String[] mapped = new String[synonyms.length];
                for (int i = 0; i < mapped.length; i++) {
                    mapped[i] = synonyms[i].equals(synonym) ? color : synonyms[i];
                }
                dictionaryMap.put(synonym, mapped);
            }
        }

        // Release the stdin wrapper (was leaked before).
        scanner.close();
    }
}
| 399d0c29b78c57ab7fabbd86e981cd2fe84914e4 | [
"Java"
] | 1 | Java | rionik92/HW7.0my | 632c9544d79d84ff4fb7a9ef48d07b03b8300038 | a1609f7be4723d325201a7cb938b450e7741dae7 |
refs/heads/master | <file_sep>#encoding=utf-8
class IniFile():
    """Minimal INI-style configuration reader/writer.

    Parses ``[section]`` headers and ``key = value`` pairs; ``#`` starts a
    comment.  Parsed values live in ``self.data`` (section -> {key: value});
    values staged for writing live in ``self.outData``.
    Works under both Python 2 and 3 (the original used ``has_key`` and
    print statements).
    """

    def __init__(self, filename):
        """Load and parse *filename*.

        On open failure a message is printed and the object is left without
        ``data``/``outData`` attributes (legacy behavior kept).
        """
        try:
            self.iniFile = open(filename, 'r')
        except IOError as e:
            print('open ini file %s failed : %s' % (filename, e))
            return
        # The "" bucket collects pairs seen before any [section] header;
        # previously such lines raised KeyError.  It is dropped below.
        self.data = {"": {}}
        modelName = ""
        for line in self.iniFile.readlines():
            # Strip trailing comments, then surrounding whitespace.
            pos = line.find("#")
            if pos != -1:
                line = line[:pos]
            line = line.strip()
            # Shortest meaningful line is 3 chars ("a=b" or "[x]").
            if len(line) < 3:
                continue
            if line[0] == '[' and line[-1] == ']':
                modelName = line[1:-1]
                self.data[modelName] = {}
                continue
            pos = line.find("=")
            if pos != -1:
                self.data[modelName][line[:pos].strip()] = line[pos + 1:].strip()
        # Fix: the file handle was previously leaked.
        self.iniFile.close()
        if '' in self.data:
            del self.data['']
        self.outData = {}

    def GetData(self, modelName, para, default=''):
        """Return data[modelName][para], or *default* when absent."""
        if modelName in self.data and para in self.data[modelName]:
            return self.data[modelName][para]
        return default

    def PutData(self, modelName, para, value):
        """Stage *value* for writing; creates the section on demand
        (previously raised KeyError for any section not pre-created)."""
        self.outData.setdefault(modelName, {})[para] = value

    def GetMode(self, modelName):
        """Return the whole section dict, or {} when unknown."""
        if modelName in self.data:
            return self.data[modelName]
        return {}

    def FlushData(self, fileName):
        """Serialize ``outData`` to *fileName* in INI format."""
        try:
            outFile = open(fileName, "w+")  # renamed: 'file' shadowed a builtin
        except IOError:
            print('FlushData: open %s failed' % fileName)
            return
        outStr = ""
        # Distinct loop variables; the old code reused key/val in both loops.
        for section, pairs in self.outData.items():
            outStr += "\n[%s]\n" % section
            for key, val in pairs.items():
                outStr += "%s=%s\n" % (key, val)
        outFile.write(outStr)
        outFile.close()
<file_sep>[redis]
#----redis ip
redis = 127.0.0.1
[spider]
#----specify the directory to read url
DataSrc = ./dataset
#----specify the format of picture extracted from the web page
PicFormat = jpg|png|jpeg|bmp|gif
#----log file name
LogFile = ./DisCrawler.log
#----threshold rate of harmful pictures allowed in a web page
unhealthrate = 0.1
[caffe]
DetectorFile = ./common/classify_nsfw.py
ModelDef = ./common/nsfw_model/deploy.prototxt
PreTrainedModel = ./common/nsfw_model/resnet_50_1by2_nsfw.caffemodel
#---threshold score for classifying a picture as harmful
Threshold=0.8
#---threshold rate of harmful words in a web page
InvalidWordThread=0.15
<file_sep>#encoding=utf-8
import pickle
import sys
from master_LogCmd import master_LogCmd
from slaver_PicAnalysisCmd import slaver_PicAnalysisCmd
import random
import os
sys.path.append('../common')
from DataStore import RedisList, RedisHash
from Parase import Parser
'''
基于文本分析不良网页的任务
'''
class slaver_WordAnalysisCmd():
    """Slave task: text-based screening of crawled pages.

    Pages whose text trips ``Parser.IsInvalidPage`` are snapshotted with
    phantomjs and reported to the master log; clean pages are handed to the
    picture-analysis task.  Payloads travel through this task's Redis FIFO
    as pickled ``(content, url)`` tuples.
    """
    Parser = None      # shared Parser instance, set by Init()
    redisList = None   # Redis FIFO backing this task's queue

    @classmethod
    def Init(cls, config):
        """Wire up the parser, this task's Redis queue, and the downstream
        picture-analysis task."""
        cls.Parser = Parser(config)
        cls.redisList = RedisList(cls.Name(), redishost=config.get('redis', 'redis'))
        slaver_PicAnalysisCmd.Init(config)
        print('finish init %s' % cls.Name())

    @classmethod
    def Name(cls):
        """Unique task name; doubles as the Redis list name."""
        return cls.__name__

    # Priority: lower value = higher priority.
    @staticmethod
    def Priority():
        return 9

    @classmethod
    def WriteTaskData(cls, content, url):
        """Enqueue a page for analysis (this is what triggers the task)."""
        cls.redisList.rpush(pickle.dumps((content, url)))

    @classmethod
    def ReadTaskData(cls):
        """Dequeue the next (content, url) tuple, or None when empty.

        NOTE(review): pickle.loads on queue payloads is only safe while the
        Redis instance is private/trusted.
        """
        data = cls.redisList.lpop()
        if data is not None:
            data = pickle.loads(data)
        return data

    @classmethod
    def IOStage(cls, data):
        """No network work needed here; pass the payload straight through."""
        return data

    @classmethod
    def CPUStage(cls, data):
        """Classify the page text; snapshot and report bad pages, otherwise
        forward the page to the picture-analysis task."""
        content = data[0]
        url = data[1]
        print("%s get url : %s" % (cls.Name(), url))
        if cls.Parser.IsInvalidPage(content):  # page text flagged as harmful
            picName = str(random.randint(0, 10000000)) + '.jpg'
            # Snapshot the page with phantomjs.  Security fix: invoke with
            # an argument list instead of a shell string — the URL comes
            # from crawled pages and could inject shell commands.
            import subprocess
            subprocess.call(['phantomjs', 'snapshot.js', url, picName])
            master_LogCmd.WriteTaskData("!!!! found invalid html by %s: url: %s, picname:%s" % (cls.Name(), url, picName))
        else:
            # Trigger the picture-analysis task on clean-looking text.
            slaver_PicAnalysisCmd.WriteTaskData(content, url)
<file_sep>#encoding=utf-8
import sys
import time
import signal
import os
import socket
import ConfigParser
sys.path.append('common')
from ScheTaskManager import TaskLoad
from IniFile import IniFile
from DataStore import RedisList, RedisHash, FlushRedis
from Framework import StartCrawlerFramework
SOCKET = None
#接收ctrl+c信号以优雅的退出进程
bRunning = True
def CtrlCHandle(sig, frame):
print 'exit'
global bRunning
bRunning = False
#入参解析
def ParaseArgs():
if len(sys.argv) != 5:
print "Usage: %s [master|slaver] scripyProcCnt scripythreadCnt parseStorePoc" % sys.argv[0]
sys.exit()
if sys.argv[1] != 'master' and sys.argv[1] != 'slaver':
print 'parameter 1 should be [master] or [slaver]'
sys.exit()
if not str(sys.argv[2]).isdigit():
print 'parameter scripyProcCnt should be digital'
sys.exit()
if not str(sys.argv[3]).isdigit():
print 'parameter scripythreadCnt should be digital'
sys.exit()
if not str(sys.argv[4]).isdigit():
print 'parameter parsePoc should be digital'
sys.exit()
return sys.argv[1], int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4])
def InitMaster(config):
    """Master-only startup: enforce a single instance and reset Redis state.

    A bound TCP port (kept open in the module-level SOCKET) acts as the
    single-instance lock; when binding fails another master is assumed to
    be running and we exit.
    """
    try:
        global SOCKET
        SOCKET = socket.socket()
        SOCKET.bind((socket.gethostname(), 60123))
    except socket.error:
        # Fix: was a bare `except:`, which also swallowed SystemExit,
        # KeyboardInterrupt and programming errors.
        print("instance is running...")
        sys.exit(0)
    redisIp = config.get('redis', 'redis')
    FlushRedis(redisIp)
    # Reset the crawler-status hash in Redis.
    FrameInfo = RedisHash('FrameInfo', redishost=redisIp)
    FrameInfo.hset('handleCnt', '0')
if __name__ == '__main__':
    # Py2-era hack: force utf-8 as the default codec for implicit
    # str/unicode conversions throughout the process.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    # Handle Ctrl+C / kill so the idle loop below can exit gracefully.
    signal.signal(signal.SIGTERM, CtrlCHandle)
    signal.signal(signal.SIGINT, CtrlCHandle)
    # Parse command-line arguments.
    role, IOProcCnt, IOThreadCnt, CPUProcCnt = ParaseArgs()
    # Spawn the crawler framework's worker processes.
    childProcPid = StartCrawlerFramework(IOProcCnt, IOThreadCnt, CPUProcCnt)
    # Load configuration.
    config = ConfigParser.ConfigParser()
    config.read('./spider.ini')
    # Load the task plugins matching the requested role.
    print('----Start %s----' % role)
    if role == 'master':
        InitMaster(config)
    TaskLoad(role, config)
    # Idle until a termination signal clears bRunning (checked every 10s).
    while bRunning:
        time.sleep(10)
    # Force-kill the worker processes, then exit.
    for pid in childProcPid:
        # Fix: direct os.kill instead of shelling out via os.popen('kill -9').
        os.kill(pid, signal.SIGKILL)
    print('exit')
<file_sep>#encoding=utf-8
import sys
import time
import os
from slaver_ScripyCmd import slaver_ScripyCmd
sys.path.append('../common')
from DataStore import RedisHash, RedisList
'''
管理待抓取url任务:从本地文件读入待抓取url,写入redis中“url抓取任务”的队列
'''
class master_ReadUrlCmd():
    """Master task: feed seed URLs from local files into the crawl queue.

    Reads every file under DataSrc, normalizes each line into an
    ``http://`` URL and pushes it to the slaver_ScripyCmd queue, throttling
    so the pending queue never exceeds ~1000 URLs.
    """
    FrameInfo = None         # Redis hash holding crawler runtime stats
    RunTime = None           # field name for the elapsed-minutes stat
    DataPath = None          # directory containing the URL seed files
    redisList = None         # this task's own Redis FIFO
    StartTime = time.time()  # captured at class-definition (import) time
    slaverScripyCmd = None   # NOTE(review): never used anywhere

    @classmethod
    def Init(cls, config):
        """Read config, create Redis handles, init the downstream crawl
        task, and push one dummy item so the scheduler runs IOStage once.

        Consistency fix: methods now use ``cls`` instead of repeating the
        hard-coded class name.
        """
        redisIp = config.get('redis', 'redis')
        cls.FrameInfo = RedisHash('FrameInfo', redishost=redisIp)
        cls.RunTime = 'RunTime'
        cls.DataPath = config.get("spider", "DataSrc").strip()
        cls.redisList = RedisList(cls.Name(), redishost=redisIp)
        slaver_ScripyCmd.Init(config)
        # Self-trigger the task once.
        cls.WriteTaskData(' ')
        print('finish init %s' % cls.Name())

    @classmethod
    def Name(cls):
        """Task name; doubles as the Redis list name."""
        return cls.__name__

    # Priority: lower value = higher priority.
    @staticmethod
    def Priority():
        return 10

    @classmethod
    def IOStage(cls, data):
        """Stream every URL found under DataPath into the crawl queue."""
        for seedFile in os.listdir(cls.DataPath):  # renamed: 'file' shadowed a builtin
            path = os.path.join(cls.DataPath, seedFile)
            with open(path, 'r') as f:
                for url in f.readlines():
                    # Fix: trim BEFORE prefixing.  The old code prefixed
                    # first, so indented lines produced
                    # "http:// http://..." and blank lines became "http://".
                    url = url.strip()
                    if not url:
                        continue
                    if not url.startswith('http://'):
                        url = 'http://' + url
                    print(url)
                    slaver_ScripyCmd.WriteTaskData(url)
                    # Back-pressure: keep at most ~1000 pending URLs so the
                    # Redis memory footprint stays bounded.
                    while slaver_ScripyCmd.redisList.llen() >= 1000:
                        cls.FrameInfo.hset(cls.RunTime, (time.time() - cls.StartTime) / 60)
                        time.sleep(2)
        cls.FrameInfo.hset(cls.RunTime, (time.time() - cls.StartTime) / 60)
        # Re-trigger intentionally disabled:
        #cls.WriteTaskData(' ')
        return None

    @classmethod
    def CPUStage(cls, data):
        """No CPU-bound work for this task."""
        return ''

    @classmethod
    def WriteTaskData(cls, data):
        cls.redisList.rpush(data)

    @classmethod
    def ReadTaskData(cls):
        return cls.redisList.lpop()
import pickle
from bs4 import BeautifulSoup
import re
import socket, fcntl, struct
import threading
import sys
import uuid
sys.path.append('../common')
from scripy import CrawlerHandler
from Common import Common
from master_LogCmd import master_LogCmd
from slaver_WordAnalysisCmd import slaver_WordAnalysisCmd
from DataStore import RedisList, RedisHash
from Parase import Parser
'''
网页抓任务
'''
class slaver_ScripyCmd():
para = {'phone' : 0, 'UA' : 1, 'url' : 2}
Parser = None
handleCnt = None
FrameInfo = None
redisList = None
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
LocalIp = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', 'eth0'[:15]))[20:24])
@classmethod
def Init(cls, config):
cls.Parser = Parser(config)
cls.handleCnt = 'handleCnt'
cls.FrameInfo = RedisHash('FrameInfo', redishost=config.get('redis', 'redis')) #记录爬虫状态,hash结构,如正在抓取url的redis
cls.redisList = RedisList(cls.Name(), redishost=config.get('redis', 'redis'))
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 80))
cls.mac = uuid.UUID(int = uuid.getnode()).hex[-12:].upper()
master_LogCmd.Init(config)
slaver_WordAnalysisCmd.Init(config)
@classmethod
def Name(cls):
return cls.__name__
#优先级,值越低优先级越大
@staticmethod
def Priority():
return 10
@classmethod
def WriteTaskData(cls, url):
cls.redisList.rpush(pickle.dumps(url))
@classmethod
def ReadTaskData(cls):
data = cls.redisList.lpop()
if data != None:
data = pickle.loads(data)
return data
@classmethod
def IOStage(cls, url):
#在redis中记录当前抓取的线程id
key = "%s_%d" % (cls.mac, threading.currentThread().ident)
cls.FrameInfo.hset(key, url)
# 抓取数据
contenttype, content = Common.GetContentByUrl(url)
#在redis更新抓取网页统计值
urlHandleCnt = int(cls.FrameInfo.hget(cls.handleCnt)) + 1
cls.FrameInfo.hset(cls.handleCnt, urlHandleCnt)
cls.FrameInfo.hdel(key)
#抓取异常判断
if content == None or contenttype == None or content == '':
master_LogCmd.WriteTaskData("Failed : %s" % url)
return None
#对于网页内容,则触发“基于文本分析不良网页的任务”
if contenttype.find("text/html") != -1: #是网页资源
slaver_WordAnalysisCmd.WriteTaskData(content, url)
return None
@classmethod
def CPUStage(cls, data):
pass<file_sep>#encoding=utf-8
import os
import pyinotify
from multiprocessing import Manager
import sys
import threading
import json
class TaskManager(object):
'''
任务调度管理器,字典数据结构,value: List(Task),任务的List; key: 任务队列的优先级
'''
ScheduleTaskDict = dict()
'''
任务必须定义的方法,缺一不可,各接口说明如下:
'''
TaskMethod = ['Init', #类方法,入参:配置文件对象,功能:初始化任务;在redis中为本任务创建FIFO队列
'Name', #类方法,入参:无; 出参:任务名称字符串; 功能:唯一标识任务名称,以及任务在redis中的List名称
'Priority', #类静态方法,入参:无; 出参:整型优先级值; 功能:设定任务优先级,值越低优先级越大
'IOStage', #类方法,入参:任务框架第一阶段执行必要的数据,各任务自定义; 出参:失败返回None,成功返回第二阶段必要的数据;
#功能:处理网络IO密集型任务,在爬虫场景主要实现网络抓取
'CPUStage', #类方法,入参:任务框架第二阶段执行必要的数据,各任务自定义; 出参:无;
#功能:处理CPU密集型任务,目前主要基于caffe分析不良图片
'WriteTaskData', #类方法,入参:任务执行必要的数据; 出参:无; 功能:通过往任务队列放入数据来触发任务
'ReadTaskData' #类方法,入参:无; 出参:任务执行必须的数据; 功能:从任务队列读出数据,为后续任务调度提供数据
]
def __getitem__(self, method):
if method in self.TaskMethod:
return self.__getattribute__(method)
print 'Not Support Method %s' % method
return None
'''
功能:在任务调度管理器中移除任务task
'''
@classmethod
def RemoveTask(cls, task):
for (priority, taskList) in cls.ScheduleTaskDict.items():
if not task in taskList:
continue
taskList.remove(task)
if len(cls.ScheduleTaskDict[task.Priority()]) == 0:
del cls.ScheduleTaskDict[task.Priority()]
print 'remove Task %s' % task.Name()
return
'''
功能:在任务调度管理器中添加任务task
'''
@classmethod
def AddTask(cls, task):
cls.RemoveTask(task)
if not cls.ScheduleTaskDict.has_key(task.Priority()):
cls.ScheduleTaskDict[task.Priority()] = []
cls.ScheduleTaskDict[task.Priority()].append(task)
print 'Add Task %s\n' % task.Name()
'''
功能:解析任务文件,加入任务调度管理器
参数:
role:角色(master/slaver)
TaskFile:任务文件名
config:配置文件对象
'''
@classmethod
def TaskFile2Task(cls, role, TaskFile, config):
if not TaskFile.endswith('Cmd.py') or not TaskFile.startswith(role):
return
pluginName = os.path.splitext(TaskFile)[0]
print 'pluginName:%s' % pluginName
plugin = __import__("%s" % pluginName, fromlist=[pluginName]) # 相当于import dir.modle
cmdCls = getattr(plugin, pluginName) # getattr 用于从plugin模块下获得pluginName成员
for method in cls.TaskMethod:
if not cmdCls.__dict__.has_key(method):
print 'laod failed: %s should compele %s' % (cmdCls.__name__, method)
return
cmdCls.Init(config)
cls.AddTask(cmdCls)
'''
功能:按照任务优先级从高到低,在redis中遍历队列,提取队列数据,返回任务和数据的元祖
'''
@classmethod
def GetTask(cls):
tasts = []
for (priority, tastList) in cls.ScheduleTaskDict.items():
for task in tastList:
data = task.ReadTaskData()
if data != None:
tasts.append((data, task))
if len(tasts) != 0:
break
return tasts
'''
实现对任务文件的增、删、改进行监控,一旦任务文件有变动则重新加载任务
'''
class TaskNotify(pyinotify.ProcessEvent):
    """inotify handler: (re)loads a task plugin whenever its file under the
    watched directory is created, deleted or modified."""

    def __init__(self, role, config):
        # Forwarded to TaskManager.TaskFile2Task on every event.
        self.role = role
        self.config = config

    # Plugin file created.
    def process_IN_CREATE(self, event):
        print("CREATE Task: %s" % event.pathname)
        TaskManager.TaskFile2Task(self.role, event.pathname.split('/')[-1], self.config)

    # Plugin file deleted.
    def process_IN_DELETE(self, event):
        print("DELETE Task: %s" % event.pathname)
        TaskManager.TaskFile2Task(self.role, event.pathname.split('/')[-1], self.config)

    # Plugin file modified.
    def process_IN_MODIFY(self, event):
        print("MODIFY Task: %s" % event.pathname)
        TaskManager.TaskFile2Task(self.role, event.pathname.split('/')[-1], self.config)

    @staticmethod
    def RegisterNotify(dir, role, config):
        """Watch *dir* recursively and dispatch file events to a TaskNotify
        instance until interrupted.

        NOTE: the parameter name ``dir`` shadows the builtin; kept for
        interface compatibility with existing callers.
        """
        # watch manager
        print(dir)
        wm = pyinotify.WatchManager()
        wm.add_watch(dir, pyinotify.IN_DELETE | pyinotify.IN_CREATE | pyinotify.IN_MODIFY, auto_add=True, rec=True)
        # notifier
        notifier = pyinotify.Notifier(wm, TaskNotify(role, config))
        #notifier.loop()
        # Manual event pump so Ctrl+C stops the notifier cleanly.
        while True:
            try:
                notifier.process_events()
                if notifier.check_events():
                    notifier.read_events()
            except KeyboardInterrupt:
                notifier.stop()
                break
'''
功能:根绝启动的角色(mater/slaver)在cmd目录下加载相应的任务
参数:
role:角色(master/slaver)
config:配置文件对象,供任务初始化使用
'''
def TaskLoad(role, config):
    """Load every task plugin under ./cmd matching *role* and start a
    watcher thread that hot-reloads plugins on file changes.

    Returns:
        The watcher thread.
    """
    cmdPath = './cmd'  # unified: the old code mixed 'cmd' and './cmd'
    sys.path.append(cmdPath)
    for fileName in os.listdir(cmdPath):
        TaskManager.TaskFile2Task(role, fileName, config)
    t = threading.Thread(target=TaskNotify.RegisterNotify, args=(cmdPath, role, config, ))
    # Fix: daemonize so this thread no longer blocks interpreter shutdown
    # once the main loop decides to exit.
    t.setDaemon(True)
    t.start()
    return t
<file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ConfigParser
# Ad-hoc smoke test (Python 2): dump a few values from ./spider.ini to
# verify that the config file parses and the expected sections exist.
config = ConfigParser.ConfigParser()
config.read('./spider.ini')
print config.sections()
print config.get("redis", "redis")
# `vars` supplies fallback interpolation values for missing options.
print config.get("spider", "DataSrc1", vars = {"DataSrc1":'ahah'})
print config.get("caffe", "PreTrainedModel", vars = {"DataSrc1":'ahah'})<file_sep># coding: utf-8
import os
import shutil
from urlparse import urljoin
from urlparse import urlunparse
from posixpath import normpath
from urlparse import urlparse
import requests
import re
import StringIO
import random
#from requests.packages.urllib3.exceptions import InsecureRequestWarning
import subprocess
from multiprocessing import Process,Lock
import multiprocessing
import sys
import chardet
# 禁用安全请求警告
#requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
class Singleton(object):
    """Base class that hands out one shared instance per subclass."""
    def __new__(cls, *args, **kwargs):
        # Return the cached instance; create it on the very first call.
        try:
            return cls._inst
        except AttributeError:
            cls._inst = super(Singleton, cls).__new__(cls, *args, **kwargs)
            return cls._inst
class Common(Singleton):
    """Shared crawler utilities: HTTP helpers, filesystem helpers, URL
    joining, webp conversion and a classic double-fork daemonizer.

    NOTE(review): Python 2 code throughout (print statements, byte-string
    oriented encode/decode round-trips).
    """
    @staticmethod
    def Post(url, data, headers):
        """POST form `data` to `url`; return the body, or "" on failure.

        HTML bodies are re-encoded to utf-8 for uniform downstream use.
        """
        contenttype = None
        # Hand-compute Content-Length: key/value bytes plus the '=' and
        # '&' separators (2 * len(data) - 1).
        dwLen = 0
        for key, val in data.items():
            dwLen += len(key) + len(val)
        dwLen += 2 * len(data) - 1
        headers['Content-Length'] = str(dwLen)
        try:
            r = requests.post(url, data=data, headers=headers)
            r.raise_for_status()  # raise for any non-2xx response status
        except requests.RequestException as e:
            print e
            return ""
        if "Content-Type" in r.headers.keys():
            contenttype = r.headers["Content-Type"]
        elif "content-type" in r.headers.keys():
            contenttype = r.headers["content-type"]
        content = r.content
        # NOTE(review): contenttype can still be None here if the server
        # sent no Content-Type header -- .find would then raise.
        if contenttype.find("text/html") != -1:
            content = r.content.decode(r.encoding, 'ignore').encode('utf8', 'ignore')
        return content
    @staticmethod
    def is_uchar(text):
        """Return True if `text` contains any CJK char, digit, ASCII
        letter or common punctuation; used to keep meaningful page text."""
        for uchar in text:
            # is chinese
            if uchar >= u'\u4e00' and uchar <= u'\u9fa5':
                return True
            # is digit
            if uchar >= u'\u0030' and uchar <= u'\u0039':
                return True
            # is alphabet
            if (uchar >= u'\u0041' and uchar <= u'\u005a') or (uchar >= u'\u0061' and uchar <= u'\u007a'):
                return True
            # is punctuation
            if uchar in ('-', ',', '.', '>', '?', '<', '/', ':', ';', '\'', '\"', ']', '[', '}', '{', '+', '=', '-', '_',')', '(', '*','&', '^', '%', '$', '#', '@', '!', '~', '`'):
                return True
        return False
    @staticmethod
    def GetContentByUrl(url, useragent = 'Mozilla / 5.0(Windows NT 6.1; WOW64) AppleWebKit / 537.36(KHTML, like Gecko) Chrome / 53.0.2785.104 Safari / 537.36 Core / 1.53.3226 .400 QQBrowser / 9.6 .11681.400', headers = None):
        """GET `url`; return (content_type, body) or (None, None) on error.

        HTML bodies are transcoded to utf-8, preferring the charset
        declared inside the document over the transport-level encoding.
        """
        if headers == None:
            headers = {'User-Agent': useragent}
        contenttype = None
        try:
            # verify=False: certificate checking deliberately disabled for
            # crawling; 90s timeout guards against hung servers.
            r = requests.get(url, headers=headers, verify=False, timeout=90)
            r.raise_for_status()  # raise for any non-2xx response status
        except requests.RequestException as e:
            print "GetUrlFaild: %s\n" % str(e)
            return None, None
        if "Content-Type" in r.headers.keys():
            contenttype = r.headers["Content-Type"]
        elif "content-type" in r.headers.keys():
            contenttype = r.headers["content-type"]
        content = r.content
        if contenttype and contenttype.find("text/html") != -1:
            # Prefer the charset declared inside the document body.
            if len(requests.utils.get_encodings_from_content(r.content)) != 0:
                encoding = requests.utils.get_encodings_from_content(r.content)[0]
            else:
                encoding = r.encoding
            content = r.content.decode(encoding, 'ignore').encode('utf8', 'ignore')
        r.close()
        return contenttype, content
    @staticmethod
    def RmDir(Path):
        """Recursively delete `Path` if it exists."""
        if os.path.exists(Path):
            shutil.rmtree(Path)
    @staticmethod
    def MkDir(Path):
        """Create directory `Path` (and parents) if missing."""
        if not os.path.exists(Path):
            os.makedirs(Path)
    @staticmethod
    def UrlJoin(base, url):
        """Join `url` against `base` and normalise the path component."""
        url1 = urljoin(base, url)
        arr = urlparse(url1)
        path = normpath(arr[2])
        return urlunparse((arr.scheme, arr.netloc, path, arr.params, arr.query, arr.fragment))
    @staticmethod
    def Copy(SrcPath, DstPath):
        """Copy file `SrcPath` into directory `DstPath`, creating it."""
        if not os.path.exists(SrcPath):
            return
        if not os.path.exists(DstPath):
            os.makedirs(DstPath)
        shutil.copy(SrcPath, DstPath)
    @staticmethod
    def SaveContent(Content, Path, mode = 'w+'):
        """Write `Content` to `Path` (default mode truncates)."""
        FileObj = open(Path, mode)
        try:
            FileObj.write(Content)
        finally:
            FileObj.close()
    @staticmethod
    def ConvWebp2JPEG(raw_img):
        """Convert webp bytes to PNG bytes via the external `dwebp` tool.

        Returns '' when dwebp reports failure. Uses a random temp file
        name in the current working directory.
        """
        name = random.randint(0, 10000000)
        SrcName = str(name) + '.webp'
        DstName = str(name) + '.png'
        FileObj = open(SrcName, 'w+')
        FileObj.write(raw_img)
        FileObj.close()
        cmd = 'dwebp %s -o %s' % (SrcName, DstName)
        out = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
        # dwebp writes 'failed' to stderr when the input is not valid webp.
        if 'failed' in out.stderr.read():
            os.remove(SrcName)
            return ''
        FileObj = open(DstName, 'r')
        raw_img = FileObj.read()
        FileObj.close()
        os.remove(DstName)
        os.remove(SrcName)
        return raw_img
    '''将当前进程fork为一个守护进程
    注意:如果你的守护进程是由inetd启动的,不要这样做!inetd完成了
    所有需要做的事情,包括重定向标准文件描述符,需要做的事情只有chdir()和umask()了
    '''
    # Fork the current process into a daemon (double-fork technique).
    # NOTE: do NOT use this when started by inetd -- inetd already handles
    # stdio redirection; only chdir() and umask() would remain.
    @staticmethod
    def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
        # First fork: the parent (session group leader) exits so the child
        # can never re-acquire the original controlling terminal.
        try:
            pid = os.fork()
            if pid > 0:
                print 'parent'
                sys.exit(0)  # first parent exits
        except OSError, e:
            sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
            sys.exit(1)
        # Detach from the parent environment.
        #os.chdir("/")  # optional: chdir so the daemon doesn't pin a mount point
        os.umask(0)  # full control over modes of files we create
        os.setsid()  # become session and process-group leader, drop the tty
        # Second fork: guarantee the daemon is not a session leader.
        try:
            pid = os.fork()
            if pid > 0:
                sys.exit(0)  # second parent exits
        except OSError, e:
            sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
            sys.exit(1)
        # Now a daemon: prepare to redirect the standard file descriptors
        # (defaults point at /dev/null).
        for f in sys.stdout, sys.stderr: f.flush()
        si = open(stdin, 'r')
        so = open(stdout, 'a+')
        se = open(stderr, 'a+', 0)
'''
os.dup2(si.fileno(), sys.stdin.fileno()) # dup2函数原子化关闭和复制文件描述符
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
'''<file_sep>#encoding=utf-8
import random
import array
class BitMap(object):
    """Fixed-size bit set backed by an array of unsigned bytes."""
    # Mask for each bit position within one byte (LSB first).
    BITMASK = [1 << i for i in range(8)]
    # Popcount lookup table for every possible byte value.
    BIT_CNT = [bin(i).count("1") for i in range(256)]

    def __init__(self, maxnum=0):
        """Allocate enough bytes to hold `maxnum` bits, all cleared."""
        nbytes = int((maxnum + 7) / 8)
        self.bitmap = array.array('B', [0] * nbytes)

    def set(self, pos):
        """Turn on the bit at index `pos`."""
        byte_idx, bit_idx = int(pos / 8), pos % 8
        self.bitmap[byte_idx] |= self.BITMASK[bit_idx]

    def test(self, pos):
        """Return True if the bit at index `pos` is set."""
        return (self.bitmap[int(pos / 8)] & self.BITMASK[pos % 8]) != 0


# Optimal hash count: k = ln2 * m / n, where k = number of hashes,
# m = number of bits, n = expected number of inserted elements.
class BloomFilter:
    """Simple Bloom filter over a BitMap using seeded PRNG draws as hashes."""

    def __init__(self, mapsize=160000, max_node_size=10000, random_num=8):
        self.m = mapsize        # total number of bits
        self.n = max_node_size  # expected element count
        self.k = random_num     # hash positions derived per element
        self.bitmap = BitMap(maxnum=self.m)
        self.count = 0

    def set(self, string):
        """Mark `string` as present."""
        for bit in self.calcMap(string):
            self.bitmap.set(bit)

    def test(self, string):
        """True if `string` is possibly present; False means definitely absent."""
        return all(self.bitmap.test(bit) for bit in self.calcMap(string))

    def calcMap(self, string):
        """Derive `k` deterministic bit positions for `string`.

        Seeds one PRNG with the string, then seeds a second-level PRNG
        with each first-level draw to spread positions over [0, m).
        """
        seed_rng = random.Random(string)
        first_level = [seed_rng.random() for _ in range(self.k)]
        return [int(random.Random(v).random() * self.m) for v in first_level]

    def TestAndSet(self, string):
        """Set all bits for `string`; return True iff all were already set."""
        already_present = True
        for bit in self.calcMap(string):
            if not self.bitmap.test(bit):
                already_present = False
            self.bitmap.set(bit)
        return already_present
'''
fb = BloomFilter()
print fb.test("agg")
print fb.set("agg")
print fb.test("agg")
'''<file_sep>#encoding=utf-8
import sqlite3
import multiprocessing
import redis
import pg
class Singleton(object):
    """Mixin: every instantiation returns the same cached object."""
    def __new__(cls, *args, **kwargs):
        # Lazily build the shared instance the first time only.
        inst = getattr(cls, '_inst', None)
        if inst is None:
            inst = super(Singleton, cls).__new__(cls, *args, **kwargs)
            cls._inst = inst
        return inst
def FlushRedis(redishost='localhost', redisport=6379, redisdb=0):
    """Erase every key in the selected redis database."""
    client = redis.Redis(host=redishost, port=redisport, db=redisdb)
    client.flushdb()
# 封装redis的list数据结构
class RedisList():
    """Thin wrapper around one named redis list (queue semantics)."""
    def __init__(self, redisname, redishost='localhost', redisport=6379, redisdb=0):
        # One dedicated connection; `redisname` is the redis list key.
        self.r = redis.Redis(host=redishost, port=redisport, db=redisdb)
        self.name = redisname
    def lpush(self, data):
        # Push onto the head of the list.
        self.r.lpush(self.name, data)
    def rpush(self, data):
        # Push onto the tail of the list.
        self.r.rpush(self.name, data)
    def rpop(self):
        # Pop from the tail (None when the list is empty).
        return self.r.rpop(self.name)
    def lpop(self):
        # Pop from the head (None when the list is empty).
        return self.r.lpop(self.name)
    def llen(self):
        # Current length of the list.
        return self.r.llen(self.name)
    def delete(self):
        # Remove the whole key from redis.
        self.r.delete(self.name)
#封装redis的hash数据结构
class RedisHash():
    """Thin wrapper around one named redis hash."""
    def __init__(self, redisname, redishost = 'localhost', redisport = 6379, redisdb = 0):
        # One dedicated connection; `redisname` is the redis hash key.
        self.r = redis.Redis(host = redishost, port = redisport, db = redisdb)
        self.name = redisname
    def hset(self, key, val):
        self.r.hset(self.name, key, val)
    def hmset(self, dicts): # bulk-write the mapping `dicts` into the hash
        self.r.hmset(self.name, dicts)
    def hget(self, key):
        return self.r.hget(self.name, key)
    def hkeys(self): # all keys of the hash, returned as a list
        return self.r.hkeys(self.name)
    def hvals(self): # all values of the hash, returned as a list
        return self.r.hvals(self.name)
    def hgetall(self): # the whole hash returned as a dict of (key, val)
        return self.r.hgetall(self.name)
    def hexists(self, key):
        return self.r.hexists(self.name, key)
    def hlen(self):
        return self.r.hlen(self.name)
    def hdel(self, key):
        self.r.hdel(self.name, key)
    def delete(self):
        self.r.delete(self.name)
#封装redis的set数据结构
class RedisSet():
    """Thin wrapper around one named redis set."""
    def __init__(self, redisname, redishost = 'localhost', redisport = 6379, redisdb = 0):
        # One dedicated connection; `redisname` is the redis set key.
        self.r = redis.Redis(host = redishost, port = redisport, db = redisdb)
        self.name = redisname
    def sadd(self, data): # returns the number of newly added members
        return self.r.sadd(self.name, data)
    def scard(self): # number of members in the set
        return self.r.scard(self.name)
    def sismember(self, data): # True if `data` is a member of the set
        return self.r.sismember(self.name, data)
    def srem(self, data): # remove the given member
        self.r.srem(self.name, data)
    def spop(self): # remove and return a random member
        return self.r.spop(self.name)
    def delete(self):
        self.r.delete(self.name)
class Sqlite():
    """Serialised access to one sqlite3 database guarded by a lock.

    NOTE(review): the `lock` default is evaluated once at definition time,
    so every Sqlite() created without an explicit lock shares the same
    multiprocessing.Lock -- confirm this sharing is intended.
    """
    def __init__(self, dbname, lock = multiprocessing.Lock()):
        # check_same_thread=False allows use from threads other than the
        # creator; serialisation is done via self.Lock instead.
        self.conn = sqlite3.connect(dbname, check_same_thread=False)
        self.conn
        self.conn.text_factory = str
        self.sqlite3Cmd = self.conn.cursor()
        self.Lock = lock
    def __del__(self):
        self.conn.close()
    def RunCmd(self, CmdList):
        # Execute a batch of statements inside one lock/commit cycle.
        # NOTE(review): per-statement failures are swallowed silently.
        if self.Lock.acquire():
            for Cmd in CmdList:
                try:
                    self.sqlite3Cmd.execute(str(Cmd).encode('utf8'))
                except BaseException, e:
                    pass
            self.conn.commit()
            self.Lock.release()
    def RunSelect(self, Cmd):
        # Run a SELECT under the lock and return all rows as a list.
        RetList = []
        if self.Lock.acquire():
            for Ret in self.sqlite3Cmd.execute(Cmd):
                RetList.append(Ret)
            self.Lock.release()
        return RetList
class GP():
    """Minimal wrapper around a PyGreSQL (pg) Greenplum/Postgres connection."""
    def __init__(self, db, host, user, pwd):
        # Connect eagerly; on failure just report and return -- subsequent
        # calls will then fail on the missing attribute (original behaviour).
        try:
            self.pgdb_conn = pg.connect(dbname=db, host=host, user=user, passwd=pwd)
        except Exception as e:
            print("conntect postgre database failed, ret = %s" % e.args[0])
            return
    def WriteCmd(self, sql_desc):
        """Execute a statement; return True on success, False on error."""
        try:
            self.pgdb_conn.query(sql_desc)
        except Exception as e:
            print("run %s failed, ret = %s" % (sql_desc, e.args[0]))
            return False
        return True
    def ReadCmd(self, sql_desc):
        """Run a query; return its rows as a list of dicts, or None on error."""
        try:
            # Fix: the original referenced the bare name `pgdb_conn`, which
            # is undefined here (NameError was caught and every call
            # returned None); it must go through self.
            dictresult = self.pgdb_conn.query(sql_desc).dictresult()
        except Exception as e:
            print("run %s failed, ret = %s" % (sql_desc, e.args[0]))
            dictresult = None
        return dictresult
from GBaseConnector import connect, GBaseError
class GBASE():
    """Wrapper around a GBase connection (via GBaseConnector)."""
    def GBaseConnect(self, ip, db='zxvmax', user='gbase', pwd='<PASSWORD>', port=5258, connect_timeout=600):
        """Open a connection and cursor; reset the row/col counters."""
        cfg = {'host': ip, 'port': port, 'database': db, 'user': user, 'passwd': pwd, 'connect_timeout': connect_timeout}
        self.conn = connect()
        self.conn.connect(**cfg)
        self.cur = self.conn.cursor()
        self.row = 0
        self.col = 0
    def GBaseCmdFromeFile(self, filePath):
        """Read a .sql file and execute each ';'-terminated statement."""
        with open(filePath, 'r') as f:
            content = f.read()
        for sql in content.split(';')[:-1]:
            self.GBaseCmd(sql + ';')
    def _FetchAll(self):
        """Drain the cursor, update self.row/self.col, return all rows.

        Fix: the original called len(row) on the very first fetchone()
        result unconditionally, crashing with TypeError whenever the
        result set was empty; an empty result now yields [] cleanly.
        """
        rows = []
        row = self.cur.fetchone()
        self.col = len(row) if row is not None else 0
        while row is not None:
            rows.append(row)
            row = self.cur.fetchone()
        self.row = len(rows)
        return rows
    def GBaseSelect(self, cmd, catchErr=0):
        """Execute a SELECT and return all rows; with catchErr, errors are
        printed and [] is returned instead of propagating."""
        cmd = cmd.encode('utf-8')
        if catchErr:
            try:
                self.cur.execute(cmd)
                return self._FetchAll()
            except BaseException as e:
                print(e)
                return []
        self.cur.execute(cmd)
        return self._FetchAll()
    def GBaseCmd(self, cmd, catchErr=0):
        """Execute a non-SELECT statement; with catchErr, errors are
        printed and swallowed."""
        cmd = cmd.encode('utf-8')
        if catchErr:
            try:
                self.cur.execute(cmd)
            except BaseException as e:
                print(e)
        else:
            self.cur.execute(cmd)
    def GBaseGetRowCnt(self):
        """Row count of the last select (0 before any query ran)."""
        return self.row if hasattr(self, 'row') else 0
    def GBaseGetColCnt(self):
        """Column count of the last select (0 before any query ran)."""
        return self.col if hasattr(self, 'col') else 0
    def GBaseClose(self):
        """Close the cursor, then the connection."""
        self.cur.close()
        self.conn.close()
'''
#测试代码
test = Sqlite('test.db', threading.Lock())
cmd = ['drop table if exists COMPANY',
'CREATE TABLE if not exists COMPANY (ID INT PRIMARY KEY NOT NULL, NAME TEXT NOT NULL, AGE INT NOT NULL, ADDRESS CHAR(50), SALARY REAL);',
"INSERT INTO COMPANY (ID,NAME,AGE,ADDRESS,SALARY) VALUES (1, '%s', 32, 'California', 20000.00 )" % ('你好'),
"INSERT INTO COMPANY (ID,NAME,AGE,ADDRESS,SALARY) VALUES (2, '好', 25, 'Texas', 15000.00 )"]
print type('你好'.decode('utf8'))
print cmd[2]
test.RunCmd(cmd)
cursor = test.RunSelect("SELECT id, name, address, salary from COMPANY")
for row in cursor:
print "ID = ", row[0]
print "NAME = ", row[1]
print "ADDRESS = ", row[2]
print "SALARY = ", row[3], "\n"
test.Close()
'''<file_sep>#encoding=utf-8
import multiprocessing
import threading
import os
import time
from ScheTaskManager import TaskManager
'''
处理IO密集型任务的线程
'''
class ScripThread(threading.Thread):
    """Worker thread for the IO-bound stage of tasks."""
    def __init__(self, OutPutProcQue, Name):
        threading.Thread.__init__(self)
        self.OutputProcQue = OutPutProcQue  # queue feeding the CPU-stage processes
        self.Scripying = False              # True while a task is being handled
        self.Name = Name
    '''
    从任务调度管理器中提取任务和数据并执行,如果成功则将任务和数据放入到CPU密集型进程中执行
    '''
    # Pull (data, task) pairs from the scheduler, run the task's IO stage,
    # and forward non-None results to the CPU-stage queue. Loops forever.
    def run(self):
        #print 'start ScripyThread %s' % (self.Name)
        while True:
            for (data, task) in TaskManager.GetTask():
                self.Scripying = True
                data = task.IOStage(data)
                if data != None:
                    self.OutputProcQue.put((data, task))
                self.Scripying = False
            time.sleep(0.08)  # avoid busy-spinning when no tasks are queued
    def IsScripying(self):
        # Whether the thread is currently inside a task.
        return self.Scripying
'''
处理CPU密集型任务的进程
'''
class ParaseStoreProc(multiprocessing.Process):
    """Worker process for the CPU-bound stage of tasks."""
    def __init__(self, OutPutProcQue, Name):
        multiprocessing.Process.__init__(self)
        self.OutPutProcQue = OutPutProcQue  # queue fed by the IO-stage threads
        self.Name = Name
        self.Status = False  # True while a task's CPU stage is running
    def run(self):
        print 'start ParaseStoreProc %s, pid %d' % (self.Name, os.getpid())
        # Block on the queue and run each task's CPU stage; loops forever.
        while True:
            (data, task) = self.OutPutProcQue.get()
            self.Status = True
            task.CPUStage(data)
            self.Status = False
    def IsRuning(self):
        # Whether a CPU stage is currently executing.
        return self.Status
def ScrayWorker(scrapyThreadCnt, que):
    # Entry point of an IO-stage process: spawn the requested number of
    # ScripThread workers, then idle forever to keep the process alive.
    for i in range(scrapyThreadCnt):
        thread = ScripThread(que, str(i))
        thread.start()
    while True:
        time.sleep(10)
'''
功能:启动主框架
参数:
IOProcCnt:指定处理IO密集型任务的进程数
IOThreadCnt:指定每个IO密集型进程下的线程数量
CPUProcCnt:指定处理CPU密集型任务的进程数
'''
def StartCrawlerFramework(IOProcCnt = 1, IOThreadCnt = 1, CPUProcCnt = 1):
    """Boot the crawler framework.

    Parameters:
        IOProcCnt: number of processes running the IO-bound stage
        IOThreadCnt: threads per IO-stage process
        CPUProcCnt: number of processes running the CPU-bound stage
    Returns the list of child pids plus this process's own pid.
    """
    que = multiprocessing.Queue()  # bridges IO-stage output to the CPU stage
    childProcPid = []
    for i in range(IOProcCnt):
        p = multiprocessing.Process(target=ScrayWorker, args=(IOThreadCnt,que))
        p.start()
        childProcPid.append(p.pid)
    for i in range(CPUProcCnt):
        p = ParaseStoreProc(que, str(i))
        p.start()
        childProcPid.append(p.pid)
    childProcPid.append(os.getpid())
    return childProcPid
#Scripyer('a')
'''
if __name__ == '__main__':
multiprocessing.freeze_support()
redisname = 'test'
#CrawlerHandler.RegistScrip('300', Domainsrc.Scripy, Domainsrc.Parse, Domainsrc.Store)
r, scripyThreads, parseStoreProcs = Scripyer(redisname)
r.lpush('300|http://beian.links.cn/beiansitemap/0_1.html')
while True:
time.sleep(2)
'''<file_sep>#encoding=utf-8
import pickle
from bs4 import BeautifulSoup
import re
import socket, fcntl, struct
import threading
import sys
import os
import random
sys.path.append('../common')
from Common import Common
from master_LogCmd import master_LogCmd
from DataStore import RedisList
from Parase import Parser
import pickle
'''
基于图片分析不良网页的任务
'''
class slaver_PicAnalysisCmd():
    """Slave task: detect unhealthy web pages from their images.

    CPUStage downloads every image referenced by a page and scores each
    with the NSFW model (see Parase.Parser); once the bad-image count
    reaches len(images) * unhealthrate the page is logged and a
    phantomjs screenshot is taken.
    """
    Parser = None        # Parser instance (caffe NSFW scorer), set in Init
    picFmt = None        # image-extension pattern fragment from config
    unhealthrate = None  # fraction of bad images that condemns a page
    redisList = None     # redis queue named after this task
    @classmethod
    def Init(cls, config):
        # Build the caffe-backed image classifier and read the thresholds.
        cls.Parser = Parser(config)
        cls.picFmt = config.get('spider', 'PicFormat')
        cls.unhealthrate = config.get("spider", "unhealthrate")
        cls.redisList = RedisList(cls.Name(), redishost=config.get('redis', 'redis'))
        master_LogCmd.Init(config)
        print 'finish init %s' % cls.Name()
    @classmethod
    def Name(cls):
        # Task name doubles as the redis queue key.
        return cls.__name__
    #优先级,值越低优先级越大
    # Priority: lower value = higher priority.
    @staticmethod
    def Priority():
        return 8
    @classmethod
    def WriteTaskData(cls, content, url):
        # Enqueue a pickled (page content, url) pair for analysis.
        cls.redisList.rpush(pickle.dumps((content, url)))
    @classmethod
    def ReadTaskData(cls):
        # Dequeue and unpickle one pending pair (None when queue empty).
        data = cls.redisList.lpop()
        if data != None:
            data = pickle.loads(data)
        return data
    @classmethod
    def IOStage(cls, data):
        # Nothing IO-bound to do up front; pass straight through to CPUStage.
        return data
    @classmethod
    def CPUStage(cls, data):
        # data is the (page content, url) pair produced by WriteTaskData.
        content = data[0]
        url = data[1]
        print "%s get url : %s" % (cls.Name(), url)
        imgSet = cls.GetImgUrls(url, content, cls.picFmt) # collect image urls referenced by the page
        maxUnhealImgCnt = len(imgSet) * float(cls.unhealthrate) # bad-image threshold for this page
        unHealthImgCnt = 0
        for imgurl in imgSet:
            imgtype, content = Common.GetContentByUrl(imgurl) # download the image
            if content != None and cls.Parser.IsInvalidImg(content, imgtype, imgurl): # NSFW-score it
                unHealthImgCnt += 1
                if unHealthImgCnt >= maxUnhealImgCnt: # threshold exceeded: page judged unhealthy
                    picName = str(random.randint(0, 10000000)) + '.jpg'
                    master_LogCmd.WriteTaskData("!!!! found invalid html by %s: url: %s, picname:%s" % (cls.Name(), url, picName))
                    os.popen('phantomjs snapshot.js %s %s' % (url, picName)) # screenshot the page
                    break
    '''
    通过正则表达式从网页中提取指定格式的图片
    '''
    # Extract absolute image urls of the configured format via a regex.
    @classmethod
    def GetImgUrls(cls, url, content, picFormat):
        imgurls = set()
        #soup = BeautifulSoup(content, "html.parser", from_encoding="utf-8")
        #soup.encode('utf-8')
        #[imgurls.add(Common.UrlJoin(url, img['src'])) for img in soup.findAll("img", attrs={"src": True}) if img['src'] != '']
        pattern = re.compile(r'https?://.+?\.%s' % picFormat)
        for imgurl in re.findall(pattern, content):
            # A match may contain a nested absolute url (e.g. proxied
            # links); keep only the innermost http(s) portion.
            pos = imgurl.rfind('http://')
            if pos != -1:
                imgurl = imgurl[pos:]
            else:
                pos = imgurl.rfind('https://')
                if pos not in [-1, 0]:
                    imgurl = imgurl[pos:]
            try:
                absUrl = Common.UrlJoin(url, imgurl)
            except Exception as e:
                master_LogCmd.WriteTaskData('UrlJoin failed, url ; %s, imgurl : %s' % (url, imgurl))
                continue
            imgurls.add(absUrl)
        return imgurls
<file_sep>#encoding=utf-8
import sys
import os
import shutil
DIR = 'dir_'  # prefix of the chunk directory that Cut() writes into


def Combine(dir):
    """Reassemble the chunks in `dir` (as created by Cut) into one file.

    The output name is `dir` minus the DIR prefix plus the chunks' suffix,
    written into the current working directory. Exits if chunk names have
    no suffix.
    """
    entries = os.listdir(dir)
    file_list = [os.path.join(dir, name) for name in entries]
    # Fix: a plain lexicographic sort puts 'x10.ext' before 'x2.ext',
    # scrambling chunk order once there are more than 10 pieces. Sorting
    # by (length, name) restores numeric order for Cut's naming scheme
    # (<name><idx>.<suffix> with a shared prefix and suffix).
    file_list.sort(key=lambda p: (len(p), p))
    if len(file_list) == 0:
        return
    pos = file_list[0].rfind('.')  # boundary between chunk name and suffix
    if pos == -1:
        print('files name should has suffix')
        sys.exit()
    global DIR
    file_name = "%s.%s" % (dir[len(DIR):], file_list[0][pos + 1 : ])
    print(file_name)
    with open(file_name, 'w') as f:
        for filename in file_list:
            f.write(open(filename).read())
#file 要分裂的文件
#size 每次分裂的大小,单位为byte
def Cut(file, size):
    """Split `file` into `size`-byte chunks under directory DIR + <name>.

    Chunks are named <name><idx>.<suffix> in increasing order; any
    existing chunk directory is wiped first. Exits if `file` has no
    suffix.
    """
    dot = file.rfind('.')  # boundary between base name and suffix
    if dot == -1:
        print('file name should has suffix')
        sys.exit()
    global DIR
    base = file[:dot]
    suffix = file[dot + 1:]
    out_dir = DIR + base
    if os.path.exists(out_dir):
        shutil.rmtree(out_dir)
    os.mkdir(out_dir)
    chunk_no = 0
    with open(file, 'r') as src:
        piece = src.read(size)
        while len(piece) > 0:
            target = '%s/%s%d.%s' % (out_dir, base, chunk_no, suffix)
            with open(target, 'w') as dst:
                dst.write(piece)
            piece = src.read(size)
            chunk_no += 1
if __name__ == '__main__':
    # CLI: `cut <file> <MB-per-chunk>` splits a file into chunks;
    # `combine <dir>` reassembles a chunk directory produced by cut.
    if len(sys.argv) < 2:
        print 'python %s [cut|combine] [file|dir] [size(M) per unit]' % sys.argv[0]
        sys.exit()
    if sys.argv[1] == 'cut':
        if len(sys.argv) != 4:
            print 'python %s cut [file] [size(M) per unit]' % sys.argv[0]
            sys.exit()
        # Size argument is given in MiB; the shift converts it to bytes.
        Cut(sys.argv[2], int(sys.argv[3]) << 20)
    elif sys.argv[1] == 'combine':
        if len(sys.argv) != 3:
            print 'python %s combine [dir]' % sys.argv[0]
            sys.exit()
        Combine(sys.argv[2])
    else:
        print 'unkonw cmd %s' % str(sys.argv)
<file_sep>一、功能:
python实现的分布式任务调度系统,以redis作为broker,以redis中的任务数据驱动任务调度。
本系统将任务分为IO密集型任务和CPU密集型任务,对于IO密集型任务放在多线程执行,对于CPU密集型任务放在多进程中执行。以规避python的GIL锁带来的性能限制
二、目录:
cmd: 实现任务(cmd/example 为任务样文件)
common:框架文件
DisCrawler.py : 启动文件
三、任务类实现:
在cmd下实现,任务分为两类,master类和slaver类
文件名:master类为master_xxx.py,slaver类为slaver_xxx.py
任务接口:
from DataStore import RedisList
class master_xxx():
'''
说明:任务初始化接口
入参:
config : 解析完spider.ini配置文件后的ConfigParser对象
'''
@classmethod
def Init(cls, config):
cls.redisList = RedisList(cls.Name(), redishost=redisIp) # redis队列
pass
'''
说明:配置任务名称接口
返回值:任务名称
'''
@classmethod
def Name(cls):
return cls.__name__
'''
说明:配置优先级接口,值越低优先级越高。对于redis中存在多种任务的数据中,根据优先级高低取数据执行。
返回值:任务优先级,整形
'''
@staticmethod
def Priority():
pass
'''
说明:IO接口,在多线程中执行
入参:
data:从redis中取出的数据
返回值:
None表示处理完成
非None表示进一步到CPU接口中执行的数据
'''
@classmethod
def IOStage(cls, data):
pass
'''
说明:CPU接口,在多进程中执行
入参;
data:从IO接口中得到的数据
'''
@classmethod
def CPUStage(cls, data):
pass
'''
说明:将data写入到redis队列中,触发其他任务的执行
'''
@classmethod
def WriteTaskData(cls, data):
cls.redisList.rpush(data)
'''
说明:从redis中读取本任务的数据,接下里准备调IOStage
'''
@classmethod
def ReadTaskData(cls):
return cls.redisList.lpop()
四、启动:
启动master类任务: python DisCrawler.py master 执行IO接口的进程数 IO进程下的线程数 执行CPU接口的进程数
启动slaver类任务: python DisCrawler.py slaver 执行IO接口的进程数 IO进程下的线程数 执行CPU接口的进程数
slaver可在多个逻辑节点上执行<file_sep># encoding=utf-8
import sys
import logging
import logging.handlers
sys.path.append('../common')
from DataStore import RedisList
'''
日志管理任务:对其他节点上的任务提供写日志接口,本任务接收其他任务的日志内容并写文件
'''
class master_LogCmd():
    """Master task: central logger.

    Other tasks push log lines onto this task's redis queue via
    WriteTaskData; the IO stage writes them to a rotating log file.
    """
    logger = None     # logging.Logger backing the log file, set in Init
    redisList = None  # redis queue named after this task
    @classmethod
    def Init(cls, config):
        redisIp = config.get('redis', 'redis')
        logfileName = config.get('spider', 'logfile')
        handler = logging.handlers.RotatingFileHandler(logfileName)  # file handler
        formatter = logging.Formatter('%(asctime)s - %(message)s')  # line format
        handler.setFormatter(formatter)  # attach the formatter to the handler
        # NOTE(review): calling Init twice attaches a second handler to the
        # same 'tst' logger and duplicates every line -- confirm Init is
        # only invoked once per process.
        cls.logger = logging.getLogger('tst')  # named logger for the log file
        cls.logger.addHandler(handler)  # attach the handler to the logger
        cls.logger.setLevel(logging.DEBUG)
        cls.redisList = RedisList(cls.Name(), redishost=redisIp)  # redis queue
        print 'finish init %s' % cls.Name()
    @classmethod
    def Name(cls):
        # Task name doubles as the redis queue key.
        return cls.__name__
    @staticmethod
    def Priority():
        # Lower value = higher priority (see the scheduler).
        return 10
    '''
    写日志文件
    '''
    # IO stage: append the received line to the log file.
    @classmethod
    def IOStage(cls, data):
        #print "%s: %s" % (cls, data)
        master_LogCmd.logger.debug(data)
    @classmethod
    def CPUStage(cls, data):
        # No CPU-bound work for logging.
        pass
    @classmethod
    def WriteTaskData(cls, data):
        # Called by other tasks (possibly on other hosts) to enqueue a line.
        cls.redisList.rpush(data)
    @classmethod
    def ReadTaskData(cls):
        # Dequeue one pending log line (None when the queue is empty).
        return cls.redisList.lpop()
from bs4 import BeautifulSoup
import urllib
import re
import os
import Common
import numpy as np
from PIL import Image
from StringIO import StringIO
import os
os.environ['GLOG_minloglevel'] = '2'
import caffe
def resize_image(data, sz=(256, 256)):
    """
    Resize image. Please use this resize logic for best results instead of the
    caffe, since it was used to generate training dataset
    :param str data:
        The image data
    :param sz tuple:
        The resized image dimensions
    :returns bytearray:
        A byte array with the resized image
    """
    img_data = str(data)
    im = Image.open(StringIO(img_data))
    # Convert palette/grayscale images to 3-channel RGB before resizing.
    if im.mode != "RGB":
        im = im.convert('RGB')
    imr = im.resize(sz, resample=Image.BILINEAR)
    # Re-encode as JPEG into an in-memory buffer and hand back the bytes.
    fh_im = StringIO()
    imr.save(fh_im, format='JPEG')
    fh_im.seek(0)
    return bytearray(fh_im.read())
def caffe_preprocess_and_compute(pimg, caffe_transformer=None, caffe_net=None,
                                 output_layers=None):
    """
    Run a Caffe network on an input image after preprocessing it to prepare
    it for Caffe.
    :param PIL.Image pimg:
        PIL image to be input into Caffe.
    :param caffe.Net caffe_net:
        A Caffe network with which to process pimg afrer preprocessing.
    :param list output_layers:
        A list of the names of the layers from caffe_net whose outputs are to
        to be returned.  If this is None, the default outputs for the network
        are returned.
    :return:
        Returns the requested outputs from the Caffe net.
    """
    if caffe_net is not None:
        # Grab the default output names if none were requested specifically.
        if output_layers is None:
            output_layers = caffe_net.outputs
        # Resize to 256x256 first, then center-crop to the network's own
        # input size taken from the 'data' blob.
        img_data_rs = resize_image(pimg, sz=(256, 256))
        image = caffe.io.load_image(StringIO(img_data_rs))
        H, W, _ = image.shape
        _, _, h, w = caffe_net.blobs['data'].data.shape
        # Center-crop offsets. NOTE(review): the `/ 2` relies on Python 2
        # integer division; under Python 3 these offsets become floats and
        # the slice below would fail.
        h_off = max((H - h) / 2, 0)
        w_off = max((W - w) / 2, 0)
        crop = image[h_off:h_off + h, w_off:w_off + w, :]
        transformed_image = caffe_transformer.preprocess('data', crop)
        # Add the leading batch dimension expected by forward_all.
        transformed_image.shape = (1,) + transformed_image.shape
        input_name = caffe_net.inputs[0]
        all_outputs = caffe_net.forward_all(blobs=output_layers,
                                            **{input_name: transformed_image})
        # Return the first requested layer's output for the single image.
        outputs = all_outputs[output_layers[0]][0].astype(float)
        return outputs
    else:
        return []
class Parser():
    """Bad-content detector combining a word blacklist with the open-nsfw
    caffe model for images."""
    invalidword = []  # blacklist of unhealthy words (empty by default)
    alllen = float(len(invalidword))
    threshold = None
    InvalidWordThread = None
    def __init__(self, config):
        # Thresholds and model file locations come from the [caffe] section.
        self.ThresHold = float(config.get('caffe', 'Threshold'))
        self.DetectorFile = config.get('caffe', 'DetectorFile')
        self.ModelDef = config.get('caffe', 'ModelDef')
        self.PreTrainedModel = config.get('caffe', 'PreTrainedModel')
        self.ValHead = 'NSFW score:'
        #image_data = open(sys.argv[1]).read()
        # Pre-load caffe model.
        self.nsfw_net = caffe.Net(self.ModelDef,  # pylint: disable=invalid-name
            self.PreTrainedModel, caffe.TEST)
        # Load transformer
        # Note that the parameters are hard-coded for best results
        self.caffe_transformer = caffe.io.Transformer({'data': self.nsfw_net.blobs['data'].data.shape})
        self.caffe_transformer.set_transpose('data', (2, 0, 1))  # move image channels to outermost
        self.caffe_transformer.set_mean('data', np.array([104, 117, 123]))  # subtract the dataset-mean value in each channel
        self.caffe_transformer.set_raw_scale('data', 255)  # rescale from [0, 1] to [0, 255]
        self.caffe_transformer.set_channel_swap('data', (2, 1, 0))  # swap channels from RGB to BGR
        self.common = Common.Common()
        # NOTE(review): this sets an *instance* attribute, but IsInvalidPage
        # (a classmethod) reads the *class* attribute, which stays None --
        # confirm which one is intended.
        self.InvalidWordThread = float(config.get('caffe', 'InvalidWordThread')) * float(len(Parser.invalidword))
    '''
    在网页文本中匹配不良词汇,数量超过设定的阈值则认为是不良网页
    '''
    # Count blacklist hits in page text; True once the count reaches the
    # configured threshold.
    @classmethod
    def IsInvalidPage(cls, content):
        cnt = 0
        for word in Parser.invalidword:
            if word in content:
                cnt += 1
                if cnt >= cls.InvalidWordThread:
                    return True
        return False
    '''
    基于caffe框架,利用NSWF模型判断图片的不良程度,超过设定的阈值则认为是不良图片
    '''
    # Score an image with the NSFW caffe model; True when the NSFW
    # probability exceeds the configured threshold.
    def IsInvalidImg(self, content, contenttype, url, recurse = True):
        if contenttype == 'image/webp':
            content = Common.Common.ConvWebp2JPEG(content)
            if content == '':
                return False
        try:
            scores = caffe_preprocess_and_compute(content, caffe_transformer=self.caffe_transformer, caffe_net=self.nsfw_net, output_layers=['prob'])
        except Exception as e:
            print 'url: ' + url
            print 'contenttype: ' + contenttype
            # Fallback: the payload may be mislabelled webp -- convert once
            # and retry; recurse=False prevents a second retry.
            if recurse:
                content = Common.Common.ConvWebp2JPEG(content)
                return (content != '' and self.IsInvalidImg(content, contenttype, url, False))
        # Scores is the array containing SFW / NSFW image probabilities, scores[1] indicates the NSFW probability
        # print 'caffe img:',image_data
        # The 'scores' in dir() check guards the exception path above where
        # scores was never bound.
        if 'scores' in dir() and len(scores) > 1 and scores[1] >= self.ThresHold:
            print str(scores[1])
            return True
        return False
    def ParaseContent(self, content):
        """Extract (title, main text) from an HTML page.

        Paragraphs inside display:none divs are renamed so they drop out
        of the 'div p' selection; only lines containing meaningful chars
        (see Common.is_uchar) are kept.
        """
        soup = BeautifulSoup(content, "html.parser", from_encoding="utf-8")
        soup.encode('utf-8')
        # get title
        titletext = soup.select('title')
        if len(titletext):
            titletext = titletext[0].text.strip()
        else:
            titletext = ''
        titletext = titletext.replace('\n', '')
        # Rename <p> tags inside hidden (display:none) divs so the main
        # text selection below skips them.
        for div in soup.select('div'):
            if div.has_attr("style") and 'display:none' in div['style']:
                for p in div.select('p'):
                    p.name = 'aaaaa'
        # print soup.prettify()
        # get main text
        maintext = ''
        for res in soup.select('div p'):
            text = res.text.replace('\n', '').replace('\r', '').replace(' ', '')
            if len(text) > 0 and Common.Common.is_uchar(text):
                maintext = maintext + text + '\n'
        return titletext, maintext
| 5a1b47d65817708150688c3a3866ff30dc065704 | [
"Python",
"Text",
"INI"
] | 17 | Python | 2451086495/DisCrawler | 3c7e17b0e09e72c39b38be086e1569a41a60674a | aa81f3d3167892f8f53ba473058edafd5c39c038 |
refs/heads/master | <file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# April 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# helper.py oN88888UU[[[/;::-. dP^
# various helper functions dNMMNN888UU[[[/;:--. .o@P^
# to manage project parameters ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import torch
import numpy as np
import math
import csv
import os
import sys
import errno
import importlib
# custom functions
# -----
import utilities.dataset_handler as dataset_handler
# import visualizer as visualizer
# helper functions
# -----
def mkdir_p(path):
    """
    mkdir_p takes a string path and creates a directory at this path if it
    does not already exist.

    Mirrors `mkdir -p`: intermediate directories are created as needed and
    an already-existing directory is not an error. A pre-existing
    non-directory at `path` still raises OSError, as before.
    """
    # exist_ok=True reproduces the old EEXIST-and-isdir dance in one call.
    os.makedirs(path, exist_ok=True)
def largest_indices(arr, n):
    """
    Returns the n largest indices from a numpy array, ordered from the
    largest value down.
    """
    flat_arr = arr.flatten()
    # argpartition finds the top-n cheaply; argsort then orders them.
    indices = np.argpartition(flat_arr, -n)[-n:]
    indices = indices[np.argsort(-flat_arr[indices])]
    return np.unravel_index(indices, arr.shape)


def print_misclassified_objects(cm, encoding, n_obj=5):
    """
    prints out the n_obj misclassified objects given a
    confusion matrix array cm.

    Fix: works on a copy of cm -- the original zeroed the caller's
    confusion-matrix diagonal in place as a hidden side effect.
    """
    off_diag = cm.copy()
    # Zero the diagonal so only misclassifications are ranked.
    np.fill_diagonal(off_diag, 0)
    maxind = largest_indices(off_diag, n_obj)
    most_misclassified = encoding[maxind[0]]
    classified_as = encoding[maxind[1]]
    print('most misclassified:', most_misclassified)
    print('classified as:', classified_as)
def print_tensor_info(tensor, name=None, writer=None, global_step=None):
    """
    Takes a torch.tensor and reports name, shape, dtype, device and basic
    statistics (min/max/std/mean) for debugging purposes.

    If `writer` (a tensorboard SummaryWriter-like object) is given, the
    statistics are written as scalars at `global_step`; otherwise they
    are printed to stdout.
    """
    if writer:
        # Fix: global_step used to be an undefined global, raising
        # NameError as soon as a writer was passed; it is now an explicit
        # keyword parameter (default None keeps the signature compatible).
        writer.add_scalar(name + '/min', tensor.min(), global_step=global_step)
        writer.add_scalar(name + '/max', tensor.max(), global_step=global_step)
        writer.add_scalar(name + '/std', tensor.type(torch.float).std(), global_step=global_step)
        writer.add_scalar(name + '/mean', tensor.type(torch.float).mean(), global_step=global_step)
    else:
        name = name if name else tensor.names
        text = "[DEBUG] name = {}, shape = {}, dtype = {}, device = {} \n" + \
               "\t min = {}, max = {}, std = {}, mean = {}"
        print(text.format(name, list(tensor.shape), tensor.dtype, tensor.device.type,
                          tensor.min(), tensor.max(), tensor.type(torch.float).std(), tensor.type(torch.float).mean()))
def infer_additional_parameters(configuration_dict):
    """
    infer_additional_parameters takes a dict configuration_dict and infers
    additional parameters on the grounds of dataset etc.

    Fills in image geometry, channel count, class count and encoding from
    'dataset'/'color'/'stereo', derives architecture hyperparameters
    (kernel_size, n_features, network_depth) from the 'connectivity' code,
    and zeroes the recurrent time depth for purely feed-forward
    connectivities. Returns the same dict, mutated in place.
    """
    # define correct network parameters
    # -----
    # read the number of layers from the network file
    if ('ycb' in configuration_dict['dataset']):
        configuration_dict['image_height'] = 32
        configuration_dict['image_width'] = 32
        configuration_dict['image_channels'] = 3
        configuration_dict['classes'] = 79
        configuration_dict['class_encoding'] = np.array([
            '005_tomato_soup_can',
            '072-a_toy_airplane',
            '065-g_cups',
            '063-b_marbles',
            '027_skillet',
            '036_wood_block',
            '013_apple',
            '073-e_lego_duplo',
            '028_skillet_lid',
            '017_orange',
            '070-b_colored_wood_blocks',
            '015_peach',
            '048_hammer',
            '063-a_marbles',
            '073-b_lego_duplo',
            '035_power_drill',
            '054_softball',
            '012_strawberry',
            '065-b_cups',
            '072-c_toy_airplane',
            '062_dice',
            '040_large_marker',
            '044_flat_screwdriver',
            '037_scissors',
            '011_banana',
            '009_gelatin_box',
            '014_lemon',
            '016_pear',
            '022_windex_bottle',
            '065-c_cups',
            '072-d_toy_airplane',
            '073-a_lego_duplo',
            '065-e_cups',
            '003_cracker_box',
            '065-f_cups',
            '070-a_colored_wood_blocks',
            '073-g_lego_duplo',
            '033_spatula',
            '043_phillips_screwdriver',
            '055_baseball',
            '073-d_lego_duplo',
            '029_plate',
            '052_extra_large_clamp',
            '021_bleach_cleanser',
            '065-a_cups',
            '019_pitcher_base',
            '018_plum',
            '065-h_cups',
            '065-j_cups',
            '065-d_cups',
            '025_mug',
            '032_knife',
            '065-i_cups',
            '026_sponge',
            '071_nine_hole_peg_test',
            '004_sugar_box',
            '056_tennis_ball',
            '038_padlock',
            '053_mini_soccer_ball',
            '059_chain',
            '061_foam_brick',
            '058_golf_ball',
            '006_mustard_bottle',
            '073-f_lego_duplo',
            '031_spoon',
            '051_large_clamp',
            '072-b_toy_airplane',
            '050_medium_clamp',
            '072-e_toy_airplane',
            '042_adjustable_wrench',
            '010_potted_meat_can',
            '024_bowl',
            '073-c_lego_duplo',
            '007_tuna_fish_can',
            '008_pudding_box',
            '057_racquetball',
            '030_fork',
            '002_master_chef_can',
            '077_rubiks_cube'
        ])
    elif (('mnist' in configuration_dict['dataset']) and
            not('os' in configuration_dict['dataset'])):
        configuration_dict['image_height'] = 28
        configuration_dict['image_width'] = 28
        configuration_dict['image_channels'] = 1
        configuration_dict['classes'] = 10
        configuration_dict['class_encoding'] = np.array(
            ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
    elif 'cifar10' in configuration_dict['dataset']:
        configuration_dict['image_height'] = 32
        configuration_dict['image_width'] = 32
        configuration_dict['image_channels'] = 3
        configuration_dict['classes'] = 10
        configuration_dict['class_encoding'] = np.array(
            ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog',
             'frog', 'horse', 'ship', 'truck'])
    else:
        configuration_dict['image_height'] = 32
        configuration_dict['image_width'] = 32
        configuration_dict['image_channels'] = 1
        configuration_dict['classes'] = 10
        configuration_dict['class_encoding'] = np.array(
            ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])
    if 'fashion' in configuration_dict['dataset']:
        configuration_dict['class_encoding'] = np.array(
            ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
             'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot'])
    elif 'kuzushiji' in configuration_dict['dataset']:
        pass  # unicode characters not supported
    if configuration_dict['color'] == 'grayscale':
        configuration_dict['image_channels'] = 1
    if configuration_dict['stereo']:
        # Stereo input stacks two views channel-wise.
        configuration_dict['image_channels'] *= 2
    # to crop the images
    # store the original values
    configuration_dict['image_height_input'] = \
        configuration_dict['image_height']
    configuration_dict['image_width_input'] = \
        configuration_dict['image_width']
    # change the image height and image width if the network is supposed
    # if configuration_dict['cropped'] or configuration_dict['augmented']:
    #     configuration_dict['image_height'] = \
    #         configuration_dict['image_width']\
    #         // 10 * 4
    #     configuration_dict['image_width'] = \
    #         configuration_dict['image_width']\
    #         // 10 * 4
    # else:
    #     pass
    # Architecture hyperparameters keyed on the connectivity code.
    # Fix: 'Kx' must be tested before 'K' -- since 'Kx' contains 'K', the
    # original ordering made the 'Kx' branch unreachable.
    if 'F' in configuration_dict['connectivity']:
        configuration_dict['kernel_size'] = (3,3)
        configuration_dict['n_features'] = 64
        configuration_dict['network_depth'] = 2
    elif 'Kx' in configuration_dict['connectivity']:
        configuration_dict['kernel_size'] = (6,6)
        configuration_dict['n_features'] = 32
        configuration_dict['network_depth'] = 2
    elif 'K' in configuration_dict['connectivity']:
        configuration_dict['kernel_size'] = (5,5)
        configuration_dict['n_features'] = 32
        configuration_dict['network_depth'] = 2
    elif 'D' in configuration_dict['connectivity']:
        configuration_dict['kernel_size'] = (3,3)
        configuration_dict['n_features'] = 32
        configuration_dict['network_depth'] = 4
    elif 'GLM' in configuration_dict['connectivity']:
        configuration_dict['kernel_size'] = (3,3)
        configuration_dict['n_features'] = 32
        configuration_dict['network_depth'] = 1
    else:
        configuration_dict['kernel_size'] = (3,3)
        configuration_dict['n_features'] = 32
        configuration_dict['network_depth'] = 2
    # overwrite the default time_depth if network is not recurrent
    if configuration_dict['connectivity'] in ['B', 'BK', 'BKx', 'BF', 'BD', 'GLM']:
        configuration_dict['time_depth'] = 0
        configuration_dict['time_depth_beyond'] = 0
    return configuration_dict
def read_config_file(path_to_config_file):
    """
    Parse the two-column csv file at path_to_config_file into a dict,
    record the file's own path under the key 'config_file', and return
    the result with its string values cast via convert_config_types.
    """
    with open(path_to_config_file) as config_file:
        configuration = {key: value
                         for key, value in csv.reader(config_file)}
    configuration['config_file'] = path_to_config_file
    return convert_config_types(configuration)
def convert_config_types(config_dictionary):
    """
    Cast the string values of config_dictionary to richer types in place.

    Heuristics, checked in order: values containing '.' become floats,
    values containing 'True'/'False' become booleans, 'None' becomes
    None, comma-separated values become lists of ints, everything else
    is tried as an int. Values that fit none of these (or are not
    strings) are left untouched.

    Returns the same (mutated) dict.
    """
    for key, value in config_dictionary.items():
        try:
            if '.' in value:
                config_dictionary[key] = float(value)
            elif ('True' in value) or ('False' in value):
                config_dictionary[key] = value.lower() in \
                    ("yes", "true", "t", "1")
            elif 'None' in value:
                config_dictionary[key] = None
            elif ',' in value:
                # build the full list BEFORE assigning: the original
                # appended into config_dictionary[key] one element at a
                # time, so a malformed entry (e.g. '1,a') raised mid-loop
                # and left a truncated list behind instead of the string
                config_dictionary[key] = [int(v) for v in value.split(',')]
            else:
                config_dictionary[key] = int(value)
        except (ValueError, TypeError):
            pass
    return config_dictionary
def get_output_directory(configuration_dict, flags):
    """
    Build the experiment's output directory tree from configuration_dict
    and the command-line flags, create the directories if necessary, and
    return (writer_directory, checkpoint_directory).
    """
    config_name = flags.config_file.split('/')[-1].split('.')[0]
    writer_directory = '{}{}/{}/'.format(
        configuration_dict['output_dir'], config_name, flags.name)

    # encode the network architecture in one folder name
    arch = '{}{}_{}l_fm{}_d{}_l2{}'.format(
        configuration_dict['connectivity'],
        configuration_dict['time_depth'],
        configuration_dict['network_depth'],
        configuration_dict['feature_multiplier'],
        configuration_dict['keep_prob'],
        configuration_dict['l2_lambda'])
    arch += '_bn1' if configuration_dict['batchnorm'] else '_bn0'
    arch += '_bs{}'.format(configuration_dict['batchsize'])
    if not configuration_dict['lr_decay']:
        arch += '_lr{}'.format(configuration_dict['learning_rate'])
    elif configuration_dict['lr_cosine']:
        arch += '_lr{}-{}-{}'.format(
            configuration_dict['learning_rate'],
            configuration_dict['lr_decay_rate'],
            'cos')
    else:
        # encode the decay milestones as e.g. "30,60,90"
        arch += '_lr{}-{}-{}'.format(
            configuration_dict['learning_rate'],
            configuration_dict['lr_decay_rate'],
            str(configuration_dict['lr_decay_epochs']).strip(
                '[]').replace(', ', ','))

    # encode the dataset / occlusion configuration
    if 'ycb' in configuration_dict['dataset']:
        data = "{}_{}occ_{}p".format(
            configuration_dict['dataset'],
            configuration_dict['n_occluders'],
            configuration_dict['occlusion_percentage'])
    else:
        data = "{}_{}occ_Xp".format(
            configuration_dict['dataset'],
            configuration_dict['n_occluders'])

    # encode the input image format
    fmt = '{}x{}x{}'.format(
        configuration_dict['image_height'],
        configuration_dict['image_width'],
        configuration_dict['image_channels'])
    fmt += "_{}_{}".format(
        configuration_dict['color'],
        configuration_dict['label_type'])

    writer_directory += "{}/{}/{}/".format(arch, data, fmt)
    checkpoint_directory = writer_directory + 'checkpoints/'
    # make sure the directories exist, otherwise create them
    mkdir_p(checkpoint_directory)
    mkdir_p(checkpoint_directory + 'evaluation/')
    return writer_directory, checkpoint_directory
def adjust_learning_rate(learning_rate, cosine, lr_decay_rate, epochs, lr_decay_epochs, optimizer, epoch):
    """
    Set the learning rate on every parameter group of optimizer for the
    given epoch. With cosine truthy, anneal from learning_rate down to
    learning_rate * lr_decay_rate**3 over `epochs` epochs; otherwise use
    a step schedule that multiplies by lr_decay_rate once per milestone
    in lr_decay_epochs that the epoch has passed.
    """
    if cosine:
        floor = learning_rate * (lr_decay_rate ** 3)
        cos_term = (1 + math.cos(math.pi * epoch / epochs)) / 2
        new_lr = floor + (learning_rate - floor) * cos_term
    else:
        passed = np.sum(epoch > np.asarray(lr_decay_epochs))
        new_lr = learning_rate * (lr_decay_rate ** passed) \
            if passed > 0 else learning_rate
    for group in optimizer.param_groups:
        group['lr'] = new_lr
def warmup_learning_rate(warm, warm_epochs, warmup_from, warmup_to, epoch, batch_id, total_batches, optimizer):
    """
    Linearly ramp the learning rate from warmup_from to warmup_to over
    the first warm_epochs epochs, at batch granularity. Leaves the
    optimizer untouched once warmup is over or when warm is falsy.
    """
    if not warm or epoch > warm_epochs:
        return
    batches_done = batch_id + (epoch - 1) * total_batches
    progress = batches_done / (warm_epochs * total_batches)
    ramped = warmup_from + progress * (warmup_to - warmup_from)
    for group in optimizer.param_groups:
        group['lr'] = ramped
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# June 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# visualizer.py oN88888UU[[[/;::-. dP^
# visualization functions dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import torch
import torch.nn.functional as F
import numpy as np
import sys, os, re
import itertools
import string
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.ticker import FormatStrFormatter
from mpl_toolkits.mplot3d import Axes3D, proj3d
from matplotlib import offsetbox, patches
from matplotlib.markers import MarkerStyle
import seaborn as sns
import pandas as pd
import scipy.optimize as opt
import scipy.stats as st
from PIL import Image
from textwrap import wrap
from math import sqrt
import utilities.metrics as metrics
# van der Maaten TSNE implementations
try:
import utilities.tsne.bhtsne as bhtsne
import utilities.tsne.tsne as tsne
except ImportError:
pass
# ----------------
# create anaglyphs
# ----------------
# anaglyph configurations
# -----
# Rec. 601 luma weights: collapse an RGB triplet to a single gray value
_magic = [0.299, 0.587, 0.114]
# zero row: the corresponding output channel receives nothing from this eye
_zero = [0, 0, 0]
# identity rows: pass one color channel through unchanged
_ident = [[1, 0, 0],
          [0, 1, 0],
          [0, 0, 1]]
# each anaglyph method is a pair (left-eye matrix, right-eye matrix) of
# 3x3 channel-mixing matrices, consumed by anaglyph() below
true_anaglyph = ([_magic, _zero, _zero], [_zero, _zero, _magic])
gray_anaglyph = ([_magic, _zero, _zero], [_zero, _magic, _magic])
color_anaglyph = ([_ident[0], _zero, _zero],
                  [_zero, _ident[1], _ident[2]])
half_color_anaglyph = ([_magic, _zero, _zero],
                       [_zero, _ident[1], _ident[2]])
optimized_anaglyph = ([[0, 0.7, 0.3], _zero, _zero],
                      [_zero, _ident[1], _ident[2]])
# all available methods, e.g. for iterating over them in experiments
methods = [true_anaglyph, gray_anaglyph, color_anaglyph, half_color_anaglyph,
           optimized_anaglyph]
def anaglyph(npimage1, npimage2, method=half_color_anaglyph):
    """
    Combine two numpy images of shape [H, W, C] into a single anaglyph.

    method is a pair of 3x3 channel-mixing matrices (one per eye);
    single-channel inputs are repeated to three channels first. Returns
    a tuple of the resulting PIL Image and the raw numpy composite.

    Example usage:
        im1, im2 = Image.open("left-eye.jpg"), Image.open("right-eye.jpg")
        ana, _ = anaglyph(im1, im2, half_color_anaglyph)
        ana.save('output.jpg', quality=98)
    """
    left_matrix, right_matrix = (np.array(m).transpose() for m in method)
    single_channel = npimage1.shape[-1] == 1 and npimage2.shape[-1] == 1
    if single_channel:
        left = np.repeat(npimage1, 3, -1)
        right = np.repeat(npimage2, 3, -1)
    else:
        left, right = npimage1, npimage2
    composite = np.matmul(left, left_matrix) + np.matmul(right, right_matrix)
    return Image.fromarray(composite.astype('uint8')), composite
# ---------------------
# make custom colormaps
# ---------------------
def make_cmap(colors, position=None, bit=False):
    '''
    make_cmap takes a list of tuples which contain RGB values. The RGB
    values may either be in 8-bit [0 to 255] (in which case bit must be
    set to True when called) or arithmetic [0 to 1] (default). make_cmap
    returns a cmap with equally spaced colors.
    Arrange your tuples so that the first color is the lowest value for the
    colorbar and the last is the highest.
    position contains values from 0 to 1 to dictate the location of each color.
    '''
    bit_rgb = np.linspace(0, 1, 256)
    if position is None:
        position = np.linspace(0, 1, len(colors))
    else:
        if len(position) != len(colors):
            sys.exit("position length must be the same as colors")
        elif position[0] != 0 or position[-1] != 1:
            sys.exit("position must start with 0 and end with 1")
    if bit:
        # bug fix: convert 8-bit values into a NEW list instead of
        # overwriting the caller's `colors` entries in place
        colors = [(bit_rgb[c[0]], bit_rgb[c[1]], bit_rgb[c[2]])
                  for c in colors]
    # cdict maps each channel to (position, value-below, value-above) tuples,
    # the format LinearSegmentedColormap expects
    cdict = {'red': [], 'green': [], 'blue': []}
    for pos, color in zip(position, colors):
        cdict['red'].append((pos, color[0], color[0]))
        cdict['green'].append((pos, color[1], color[1]))
        cdict['blue'].append((pos, color[2], color[2]))
    return mpl.colors.LinearSegmentedColormap('my_colormap', cdict, 256)
def lighten_color(color, amount=0.5):
    """
    Lightens the given color by multiplying (1-luminosity) by the given amount.
    Input can be matplotlib color string, hex string, or RGB tuple.
    Examples:
    >> lighten_color('g', 0.3)
    >> lighten_color('#F034A3', 0.6)
    >> lighten_color((.3,.55,.1), 0.5)
    """
    import matplotlib.colors as mc
    import colorsys
    try:
        # resolve a matplotlib color name to its hex value
        c = mc.cnames[color]
    except (KeyError, TypeError):
        # bug fix: was a bare `except:`; hex strings / RGB tuples simply
        # miss the name table (KeyError) or are unhashable (TypeError)
        c = color
    hue, lum, sat = colorsys.rgb_to_hls(*mc.to_rgb(c))
    # move the luminosity toward 1 (white) by the requested amount
    return colorsys.hls_to_rgb(hue, 1 - amount * (1 - lum), sat)
# ---------------------
# define a 2D gaussian distribution
# ---------------------
def full_twoD_Gaussian(pos, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
    """
    Evaluate a rotated 2D Gaussian at the coordinate arrays pos = (x, y)
    and return the flattened result (shape suitable for curve fitting).
    """
    x, y = pos
    xo, yo = float(xo), float(yo)
    cos_t, sin_t = np.cos(theta), np.sin(theta)
    # coefficients of the rotated quadratic form
    a = cos_t ** 2 / (2 * sigma_x ** 2) + sin_t ** 2 / (2 * sigma_y ** 2)
    b = np.sin(2 * theta) / (4 * sigma_y ** 2) \
        - np.sin(2 * theta) / (4 * sigma_x ** 2)
    c = sin_t ** 2 / (2 * sigma_x ** 2) + cos_t ** 2 / (2 * sigma_y ** 2)
    exponent = (a * (x - xo) ** 2
                + 2 * b * (x - xo) * (y - yo)
                + c * (y - yo) ** 2)
    return (offset + amplitude * np.exp(-exponent)).ravel()
def twoD_Gaussian(pos, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
    """
    Same rotated 2D Gaussian as full_twoD_Gaussian, but the amplitude is
    forced positive via abs() so fits cannot invert the peak.
    """
    x, y = pos
    xo, yo = float(xo), float(yo)
    cos_t, sin_t = np.cos(theta), np.sin(theta)
    a = cos_t ** 2 / (2 * sigma_x ** 2) + sin_t ** 2 / (2 * sigma_y ** 2)
    b = np.sin(2 * theta) / (4 * sigma_y ** 2) \
        - np.sin(2 * theta) / (4 * sigma_x ** 2)
    c = sin_t ** 2 / (2 * sigma_x ** 2) + cos_t ** 2 / (2 * sigma_y ** 2)
    exponent = (a * (x - xo) ** 2
                + 2 * b * (x - xo) * (y - yo)
                + c * (y - yo) ** 2)
    return (offset + np.abs(amplitude) * np.exp(-exponent)).ravel()
def plot_distribution(measure, ax, lab=None, xlabel=''):
    """
    Draw a histogram of `measure` with a fitted normal curve onto ax and
    mark the mean (solid line) and mean +/- one std (dashed lines).
    """
    sns.distplot(measure, fit=st.norm, kde=False, label=lab, ax=ax)
    ax.set_xlabel(xlabel)
    ax.set_ylabel('Probability density')
    mu, sigma = measure.mean(), measure.std()
    for position, style in ((mu, '-'), (mu - sigma, '--'), (mu + sigma, '--')):
        ax.axvline(x=position, ymin=0.0, ymax=5, linewidth=1,
                   color='gray', linestyle=style)
# ---------------------
# make custom image annotations
# ---------------------
# helper function to make markers
def makeMarker(image, zoom=.65):
    """
    Wrap a (height, width) image array in a matplotlib OffsetImage with a
    gray colormap so it can be placed on a plot; zoom scales the marker.
    """
    return offsetbox.OffsetImage(image, cmap='Greys', zoom=zoom)
class ImageAnnotations3D():
    """
    Attaches 2D image annotations to 3D data points: every point in xyz
    (living in the 3D axes ax3d) gets the corresponding image from imgs
    drawn as an AnnotationBbox on the overlay 2D axes ax2d, and the 2D
    positions are recomputed whenever the 3D view changes.
    """
    def __init__(self, xyz, imgs, ax3d,ax2d):
        self.xyz = xyz
        self.imgs = imgs
        self.ax3d = ax3d
        self.ax2d = ax2d
        self.annot = []
        # place one image annotation per 3D point at its projected 2D position
        for s,im in zip(self.xyz, self.imgs):
            x,y = self.proj(s)
            self.annot.append(self.image(im,[x,y]))
        # remember view limits / rotation so update() can detect view changes
        self.lim = self.ax3d.get_w_lims()
        self.rot = self.ax3d.get_proj()
        self.cid = self.ax3d.figure.canvas.mpl_connect("draw_event",self.update)
        # forward mouse events hitting the 2D overlay on to the 3D axes so
        # the 3D plot stays rotatable/zoomable
        # NOTE(review): relies on private Axes3D handlers (_button_press,
        # _on_move, _button_release) — may break across matplotlib versions
        self.funcmap = {"button_press_event" : self.ax3d._button_press,
                        "motion_notify_event" : self.ax3d._on_move,
                        "button_release_event" : self.ax3d._button_release}
        self.cfs = [self.ax3d.figure.canvas.mpl_connect(kind, self.cb) \
                    for kind in self.funcmap.keys()]

    def cb(self, event):
        # re-target the event at the 3D axes and dispatch to its handler
        event.inaxes = self.ax3d
        self.funcmap[event.name](event)

    def proj(self, X):
        """
        From a 3D point in axes ax1,
        calculate position in 2D in ax2
        """
        x,y,z = X
        x2, y2, _ = proj3d.proj_transform(x,y,z, self.ax3d.get_proj())
        tr = self.ax3d.transData.transform((x2, y2))
        return self.ax2d.transData.inverted().transform(tr)

    def image(self,arr,xy):
        """
        Place an image (arr) as annotation at position xy
        """
        im = offsetbox.OffsetImage(arr, zoom=0.5)
        im.image.axes = self.ax3d
        ab = offsetbox.AnnotationBbox(im, xy, xybox=(-30., 30.),
                                      xycoords='data', boxcoords="offset points",
                                      pad=0.3, arrowprops=dict(arrowstyle="->"), frameon=True)
        self.ax2d.add_artist(ab)
        return ab

    def update(self,event):
        # on redraw: if the 3D view moved, re-project every annotation
        if np.any(self.ax3d.get_w_lims() != self.lim) or \
                np.any(self.ax3d.get_proj() != self.rot):
            self.lim = self.ax3d.get_w_lims()
            self.rot = self.ax3d.get_proj()
            for s,ab in zip(self.xyz, self.annot):
                ab.xy = self.proj(s)
class ConfusionMatrix(object):
    """
    Holds and updates a confusion matrix given the network's outputs.

    self.val[i, j] counts samples whose true label is i and whose top-1
    prediction is j.
    """
    def __init__(self, n_cls):
        # n_cls: number of classes (the matrix is n_cls x n_cls)
        self.n_cls = n_cls
        self.reset()

    def reset(self):
        """Zero out the accumulated confusion matrix."""
        self.val = torch.zeros(self.n_cls, self.n_cls, dtype=torch.float32)

    def update(self, batch_output, batch_labels):
        """
        Accumulate one batch. batch_output: raw class scores of shape
        (batch, n_cls); batch_labels: integer targets of shape (batch,).
        """
        _, topi = batch_output.topk(1)
        oh_labels = torch.nn.functional.one_hot(batch_labels, self.n_cls)
        oh_outputs = torch.nn.functional.one_hot(topi, self.n_cls).view(-1, self.n_cls)
        # labels^T @ outputs adds one count at (true, predicted) per sample
        self.val += torch.matmul(torch.transpose(oh_labels, 0, 1), oh_outputs)

    def print_misclassified_objects(self, encoding, n_obj=5):
        """
        prints out the n_obj misclassified objects given a
        confusion matrix array cm.
        """
        # bug fix: Tensor.numpy() shares memory with self.val, so the
        # np.fill_diagonal below used to wipe the stored matrix's
        # diagonal; work on an explicit copy instead
        cm = self.val.numpy().copy()
        encoding = np.array(encoding)
        # zero the diagonal so only misclassifications are ranked
        np.fill_diagonal(cm, 0)
        maxind = self.largest_indices(cm, n_obj)
        most_misclassified = encoding[maxind[0]]
        classified_as = encoding[maxind[1]]
        print('most misclassified:', most_misclassified)
        print('classified as:', classified_as)

    def largest_indices(self, arr, n):
        """
        Returns the n largest indices from a numpy array.
        """
        flat_arr = arr.flatten()
        indices = np.argpartition(flat_arr, -n)[-n:]
        indices = indices[np.argsort(-flat_arr[indices])]
        return np.unravel_index(indices, arr.shape)

    def to_figure(self, labels, title='Confusion matrix',
                  normalize=False,
                  colormap='Oranges'):
        """
        Render the confusion matrix as a matplotlib Figure.

        Parameters:
            labels : list of class names used for both axes
            title='Confusion matrix' : title for the matrix (currently unused)
            normalize = False : renormalize each row of the matrix
            colormap = 'Oranges' : colormap of the plot; Oranges fits
                                   with tensorboard visualization
        Returns:
            fig : matplotlib Figure
        Other items to note:
            - Depending on the number of categories and the data, you may
              have to modify the figsize, font sizes etc.
            - Currently, some of the ticks dont line up due to rotations.
        """
        # bug fix: self.val is a torch tensor, but the normalize branch
        # used numpy-only calls (.astype, row broadcasting) on it and
        # crashed; convert to a numpy copy up front
        cm = self.val.numpy().copy()
        if normalize:
            # NOTE(review): scaled by 10 rather than 100 — kept as in the
            # original; confirm the intended scale
            cm = cm.astype('float') * 10 / cm.sum(axis=1)[:, np.newaxis]
            cm = np.nan_to_num(cm, copy=True)
            cm = cm.astype('int')
        np.set_printoptions(precision=2)
        fig = mpl.figure.Figure(
            figsize=(14, 10), dpi=90, facecolor='w', edgecolor='k')
        ax = fig.add_subplot(1, 1, 1)
        im = ax.imshow(cm, cmap=colormap)
        fig.colorbar(im)
        # split CamelCase class names into words, then wrap long names
        classes = [re.sub(r'([a-z](?=[A-Z])|[A-Z](?=[A-Z][a-z]))', r'\1 ', x)
                   for x in labels]
        classes = ['\n'.join(wrap(l, 40)) for l in classes]
        tick_marks = np.arange(len(classes))
        ax.set_xlabel('Predicted', fontsize=7)
        ax.set_xticks(tick_marks)
        c = ax.set_xticklabels(classes, fontsize=4, rotation=-90, ha='center')
        ax.xaxis.set_label_position('bottom')
        ax.xaxis.tick_bottom()
        ax.set_ylabel('True Label', fontsize=7)
        ax.set_yticks(tick_marks)
        ax.set_yticklabels(classes, fontsize=4, va='center')
        ax.yaxis.set_label_position('left')
        ax.yaxis.tick_left()
        # annotate non-zero cells with their count, zero cells with '.'
        for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
            ax.text(j, i, format(cm[i, j], '.0f') if cm[i, j] != 0 else '.',
                    horizontalalignment="center", fontsize=6,
                    verticalalignment='center', color="black")
        fig.set_tight_layout(True)
        return fig

    def to_tensorboard(self, writer, class_encoding, global_step):
        """Render via to_figure and log the figure to tensorboard."""
        writer.add_figure('confusionmatrix', self.to_figure(class_encoding), global_step=global_step)
        writer.close()
class PrecisionRecall(object):
    """
    Accumulates per-sample class probabilities and top-1 predictions over
    batches, for plotting precision-recall curves to tensorboard.
    """
    def __init__(self, n_cls):
        self.n_cls = n_cls
        self.reset()

    def reset(self):
        """Drop everything accumulated so far."""
        self.probabilities = []
        self.predictions = []

    def update(self, batch_output, batch_labels):
        """Record softmax probabilities and top-1 predictions of a batch."""
        _, topi = batch_output.topk(1)
        self.probabilities.append(
            [torch.nn.functional.softmax(scores, dim=0)
             for scores in batch_output])
        self.predictions.append(torch.flatten(topi))

    def to_tensorboard(self, writer, class_encoding, global_step):
        '''
        Takes in a "the class_encoding" i.e. from 0 to 9 and plots the corresponding precision-recall curves to tensorboard
        '''
        probs = torch.cat(
            [torch.stack(batch) for batch in self.probabilities]
        ).view(-1, self.n_cls)
        preds = torch.cat(self.predictions).view(-1)
        for class_index, class_name in enumerate(class_encoding):
            # treat "predicted this class" as ground truth against the
            # class's probability column
            writer.add_pr_curve(class_encoding[class_index],
                                preds == class_index,
                                probs[:, class_index],
                                global_step=global_step)
        writer.close()
def images_to_probs(output, images):
    '''
    Given the network's raw class scores for a batch, return the
    predicted class indices and the softmax probability assigned to each
    prediction. (`images` is kept for interface compatibility; it is not
    used here.)
    '''
    _, preds_tensor = torch.max(output, 1)
    preds = np.squeeze(preds_tensor.numpy())
    pred_probs = [
        torch.nn.functional.softmax(scores, dim=0)[cls].item()
        for cls, scores in zip(preds, output)
    ]
    return preds, pred_probs
def plot_classes_preds(output, images, labels, classes):
    '''
    Build a matplotlib Figure showing up to ten images of a batch, each
    titled with the network's top prediction, its probability, and the
    true label — green when correct, red when wrong. Uses the
    "images_to_probs" function.
    '''
    _, channels, height, width = images.shape
    one_channel = channels in [1, 2]
    stereo = (channels % 2) == 0
    preds, probs = images_to_probs(output, images)
    fig = mpl.figure.Figure(
        figsize=(12, 12), dpi=90, facecolor='w', edgecolor='k')
    for idx in np.arange(min(len(images), 10)):
        ax = fig.add_subplot(5, 5, idx + 1, xticks=[], yticks=[])
        img = images[idx]
        if stereo:
            # stack the two stereo halves vertically for display
            first_half, second_half = torch.split(img, channels // 2)
            img = torch.cat([first_half, second_half], dim=1)
        elif one_channel:
            img = img.mean(dim=0)
        img = img / 2 + 0.5  # unnormalize
        npimg = img.numpy()
        if one_channel:
            if len(npimg.shape) > 2:
                npimg = np.transpose(npimg, (1, 2, 0))[:, :, 0]
            ax.imshow(npimg, cmap="Greys")
        else:
            ax.imshow(np.transpose(npimg, (1, 2, 0)))
        ax.set_title("{0}, {1:.1f}%\n(label: {2})".format(
            classes[preds[idx]],
            probs[idx] * 100.0,
            classes[labels[idx]]),
            color=("green" if preds[idx] == labels[idx].item() else "red"),
            fontsize=6)
    return fig
# -----------------
# sensitivity of concentration
# -----------------
def plot_concentration_mass(target_percentage, occluder_percentage, overlap_percentage, background_percentage, filename):
    """
    Visualize how activation mass is distributed over pixel types
    (target, occluder, overlap, background) across four timesteps.

    Each *_percentage argument holds per-sample fractions with one column
    per timestep (indexed [:, 0..3] below) — presumably shape
    (n_samples, 4); TODO confirm. They are scaled to percent in place.
    Saves an errorbar summary plot to `filename`, prints per-timestep
    means/stds, shows a seaborn boxplot, and finally runs pairwise KS
    tests between pixel types at every timestep with a
    Benjamini-Hochberg-style threshold (q* = 0.05).
    """
    # scale fractions to percent (note: *= modifies the caller's arrays)
    target_percentage *= 100
    occluder_percentage *= 100
    overlap_percentage *= 100
    background_percentage *= 100
    # errorbar overview (mean +/- std per timestep), saved to `filename`
    fig, ax = plt.subplots(figsize=(4,4))
    ax.errorbar(np.arange(0,4), target_percentage.mean(axis=0), yerr=target_percentage.std(axis=0), xerr=None, fmt='o-', label='target')
    ax.errorbar(np.arange(0,4), occluder_percentage.mean(axis=0), yerr=occluder_percentage.std(axis=0), xerr=None, fmt='o-', label='occluder')
    ax.errorbar(np.arange(0,4), overlap_percentage.mean(axis=0), yerr=overlap_percentage.std(axis=0), xerr=None, fmt='o-', label='overlap')
    ax.errorbar(np.arange(0,4), background_percentage.mean(axis=0), yerr=background_percentage.std(axis=0), xerr=None, fmt='o-', label='background')
    ax.set_xlabel("timesteps")
    ax.set_ylabel("percentage")
    ax.legend()
    ax.set_xticks([0,1,2,3])
    ax.set_xticklabels(['$t_0$','$t_1$','$t_2$','$t_3$'])
    plt.savefig(filename)
    plt.close()
    print('[INFO] background percentage')
    print(background_percentage.mean(axis=0), background_percentage.std(axis=0))
    print('[INFO] occluder percentage')
    print(occluder_percentage.mean(axis=0), occluder_percentage.std(axis=0))
    print('[INFO] overlap percentage')
    print(overlap_percentage.mean(axis=0), overlap_percentage.std(axis=0))
    print('[INFO] target percentage')
    print(target_percentage.mean(axis=0), target_percentage.std(axis=0))
    # reform into a long-format pandas dataframe: (data, timestep, type)
    points, _ = target_percentage.shape
    concentration_df = pd.DataFrame(
        np.hstack([
            np.vstack([background_percentage[:,0], np.repeat(0, points), np.repeat('background', points)]),
            np.vstack([background_percentage[:,1], np.repeat(1, points), np.repeat('background', points)]),
            np.vstack([background_percentage[:,2], np.repeat(2, points), np.repeat('background', points)]),
            np.vstack([background_percentage[:,3], np.repeat(3, points), np.repeat('background', points)])
            ,
            np.vstack([occluder_percentage[:,0], np.repeat(0, points), np.repeat('occluder', points)]),
            np.vstack([occluder_percentage[:,1], np.repeat(1, points), np.repeat('occluder', points)]),
            np.vstack([occluder_percentage[:,2], np.repeat(2, points), np.repeat('occluder', points)]),
            np.vstack([occluder_percentage[:,3], np.repeat(3, points), np.repeat('occluder', points)])
            ,
            np.vstack([overlap_percentage[:,0], np.repeat(0, points), np.repeat('overlap', points)]),
            np.vstack([overlap_percentage[:,1], np.repeat(1, points), np.repeat('overlap', points)]),
            np.vstack([overlap_percentage[:,2], np.repeat(2, points), np.repeat('overlap', points)]),
            np.vstack([overlap_percentage[:,3], np.repeat(3, points), np.repeat('overlap', points)])
            ,
            np.vstack([target_percentage[:,0], np.repeat(0, points), np.repeat('target', points)]),
            np.vstack([target_percentage[:,1], np.repeat(1, points), np.repeat('target', points)]),
            np.vstack([target_percentage[:,2], np.repeat(2, points), np.repeat('target', points)]),
            np.vstack([target_percentage[:,3], np.repeat(3, points), np.repeat('target', points)])
        ]).T, columns=['data', 'timestep', 'type'])
    concentration_df = concentration_df.explode('data')
    concentration_df['data'] = concentration_df['data'].astype('float')
    concentration_df = concentration_df.explode('timestep')
    concentration_df['timestep'] = concentration_df['timestep'].astype('int')
    # boxplot of the same data in long form (shown interactively)
    with sns.axes_style("ticks"):
        sns.set_context("paper", font_scale=1.0, )
        fig, ax = plt.subplots(figsize=(4,5.25), gridspec_kw=dict(wspace=0.0, hspace=0.0,
                               top=0.95,
                               bottom=0.07,
                               left=0.20,
                               right=0.90))
        palette = sns.color_palette("colorblind")
        # prepend gray so the 'background' hue gets a neutral color
        palette = [sns.color_palette("colorblind")[7]] + sns.color_palette("colorblind")
        sns.set_palette(palette)
        sns.boxplot(data=concentration_df, x='timestep', y='data',
                    hue='type', showfliers = False, ax=ax
                    )
        sns.despine(offset=10, trim=True)
        ax.set_xticklabels(['$t_0$','$t_1$','$t_2$','$t_3$'])
        ax.set_ylabel('Percentage')
        ax.set_xlabel('Time step')
        ax.legend(title="pixel type", loc="upper left", bbox_to_anchor=(0,1), ncol=1, fontsize=10, title_fontsize=10, framealpha=0.0, facecolor='white', edgecolor='white')
        plt.show()
    # pairwise KS tests between pixel types at each timestep
    # (T-test/KS-test with <NAME> following <NAME>)
    pixel_types = ['background', 'occluder', 'overlap', 'target']
    # NOTE(review): t = 4 serves as both the number of timesteps (outer
    # loop) and the number of pixel types (matrix size); the two happen
    # to coincide here
    t = 4
    for ti in range(t):
        df = concentration_df[concentration_df.timestep == ti]
        qstar = 0.05  # false-discovery threshold q*
        pval = np.ones([t, t])
        stval = np.ones([t, t])
        significance_table = np.zeros([t, t])
        # fill the lower triangle with KS statistics / p-values
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    stval[k, j], pval[k, j] = st.ks_2samp(
                        df[df.type == pixel_types[k]].data.values,
                        df[df.type == pixel_types[j]].data.values,
                        alternative='two-sided',
                        mode='auto')
        print(np.round(pval, 4))
        print(np.round(stval, 4))
        # Benjamini-Hochberg style: compare sorted p-values against the
        # linearly increasing threshold b_j = j/m * q*
        sorted_pvals = np.sort(pval[pval < 1])
        bjq = np.arange(1, len(sorted_pvals) + 1) / \
            len(sorted_pvals) * qstar
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    if pval[k, j] in sorted_pvals[sorted_pvals - bjq <= 0]:
                        significance_table[k, j] = 1
                    else:
                        significance_table[k, j] = 0
        print(significance_table)
# -----------------
# class activation mapping
# -----------------
def quantify_pixel_importance(cams, preds, percentage=0.25):
    """
    Estimate how concentrated the class activation maps are: per sample
    and timestep, count how many of the highest-valued pixels are needed
    to accumulate `percentage` of the predicted class's activation mass,
    then plot the distribution of that count for each timestep.

    cams: (batch, time, n_classes, h, w) activation maps — normalized
    in place below, so the caller's tensor is modified.
    preds: (batch, time, topk) predicted class indices.
    """
    b,t,n_classes,h,w = cams.shape
    # normalize cams to a probability distribution over pixels
    # -----
    # offset by minimum of each upsampled activation map
    min_val, min_args = torch.min(cams.view(b,t,n_classes,h*w), dim=-1, keepdim=True)
    cams -= torch.unsqueeze(min_val, dim=-1)
    ## or take the absolute value?
    #cams = torch.abs(cams)
    # normalize by the sum of each upsampled activation map
    sum_val = torch.sum(cams, dim=[-2,-1], keepdim=True)
    cams /= sum_val
    pixel_array = []
    for timestep in range(t):
        pixels_per_timestep = []
        for batch in range(b):
            # cumulative mass of the predicted class's pixels, largest first
            pixels = cams[batch, timestep, preds[batch, timestep, 0],:,:].view(h*w)
            pixels = torch.cumsum(torch.sort(pixels, 0, descending=True)[0], 0)
            # index of the first pixel where cumulative mass exceeds the
            # threshold
            # NOTE(review): np.argmax on a torch tensor returns a numpy
            # scalar, yet torch.stack below expects tensors — confirm
            # this code path actually runs
            pixels = np.argmax(pixels>percentage)
            pixels_per_timestep.append(pixels)
        pixels_per_timestep = torch.stack(pixels_per_timestep, 0)
        pixel_array.append(pixels_per_timestep)
    pixel_array = np.array(torch.stack(pixel_array, dim=1))
    # one fitted distribution per timestep, overlaid on shared axes
    fig, ax = plt.subplots()
    for timestep in range(t):
        plot_distribution(pixel_array[:,timestep], ax, lab='$t={}$'.format(timestep))
    ax.legend()
    ax.set_title('pixels accounting for {}% class output mass'.format(int(percentage*100)))
    plt.show()
    pass
def plot_cam_fourier_space(predicted_cams, imnr=None):
    """
    Visualize class activation maps in Fourier space: per timestep, plot
    histograms of the magnitude spectrum and its log, the spectrum
    (raw and fft-shifted), its log, and the underlying map itself.

    predicted_cams: (batch, time, height, width) maps.
    imnr: index of a single sample to plot; when None, the batch mean is
    plotted instead. (The axes grid has 4 columns, so this assumes at
    most 4 timesteps — TODO confirm.)
    """
    b, t, h, w = predicted_cams.shape
    fig, ax = plt.subplots(nrows=6, ncols=4, figsize=(9, 11))
    for timestep in range(t):
        # bug fix: `if imnr:` treated sample index 0 as "not given" and
        # silently plotted the batch mean instead of sample 0
        if imnr is not None:
            image = predicted_cams[imnr, timestep, :, :]
        else:
            image = torch.mean(predicted_cams, dim=0)[timestep, :, :]
        freq = np.abs(np.fft.fft2(image))
        # suppress the DC component so it does not dominate the plots
        freq[0, 0] = np.min(freq)
        ax[0, timestep].hist(freq.ravel(), bins=100)
        ax[0, timestep].set_title('hist(freq)', fontsize=7)
        ax[1, timestep].hist(np.log(freq).ravel(), bins=100)
        ax[1, timestep].set_title('hist(log(freq))', fontsize=7)
        ax[2, timestep].imshow(freq, interpolation="none")
        ax[2, timestep].set_title('freq', fontsize=7)
        ax[3, timestep].imshow(np.fft.fftshift(freq), interpolation="none")
        ax[3, timestep].set_title('freq', fontsize=7)
        ax[4, timestep].imshow(np.log(freq), interpolation="none")
        ax[4, timestep].set_title('log(freq)', fontsize=7)
        ax[5, timestep].imshow(image, interpolation="none")
        ax[5, timestep].set_title('image', fontsize=7)
    plt.show()
def saliencymap_to_figure(smap, pic, alpha=0.5):
    """
    Overlay each saliency map in the grid smap (indexed [row, col, h, w])
    on the picture pic with transparency alpha, and return the resulting
    matplotlib figure and axes.
    """
    n_rows, n_cols = smap.shape[0], smap.shape[1]
    fig, axes = plt.subplots(n_rows, n_cols)
    for row in range(n_rows):
        for col in range(n_cols):
            axes[row, col].imshow(pic, cmap="Greys")
            axes[row, col].imshow(smap[row, col, :, :], cmap=mpl.cm.jet,
                                  alpha=alpha, interpolation='nearest',
                                  vmin=0, vmax=1)
            axes[row, col].axis('off')
    return fig, axes
def show_cam_samples(cams, pics, targets, probs, preds, alpha=0.5, n_samples=5):
    """
    For each of the first n_samples samples, show a 3x(t+1) grid of class
    activation maps overlaid on the input images:
      row 0: CAM of the class predicted at each timestep
      row 1: CAM of the FINAL timestep's predicted class, at each timestep
      row 2: CAM of the true target class, at each timestep
    The last column of every row shows last-minus-first timestep on a
    diverging (seismic) colormap.

    cams    (b,t,n_classes,h,w) Class Activation Maps
    pics    (b,t,n_channels,h,w) Input Images for Recurrent Network
    targets (b) Target Vectors
    probs   (b,t,topk) Probabilities for Each output
    preds   (b,t,topk) Predictions of the Network
    alpha   Transparency of the heatmap overlay
    """
    b,t,c,h,w = pics.shape
    # keep the full batch around; the loop re-binds the original names
    # to per-sample slices
    all_cams = cams
    all_pics = pics
    all_targets = targets
    all_probs = probs
    all_preds = preds
    for n in range(n_samples):
        fig, axes = plt.subplots(3,t+1, figsize=(12,12))
        cams = all_cams[n]
        pics = all_pics[n]
        targets = all_targets[n]
        probs = all_probs[n]
        preds = all_preds[n]
        # row 0: map of the class predicted at the respective timestep
        for i in range(t):
            axes[0,i].imshow(pics[i,0,:,:], cmap="Greys")
            im = axes[0,i].imshow(cams[i,preds[i,0],:,:], cmap=mpl.cm.jet, alpha=alpha,
                                  interpolation='nearest')
            axes[0,i].set_title('t{}: tar/pred ({}/{})'.format(i, targets, preds[i,0]))
            axes[0,i].axis('off')
            divider = make_axes_locatable(axes[0,i])
            cax = divider.append_axes("right", size="5%", pad=0.05)
            plt.colorbar(im, cax=cax)
        axes[0, -1].imshow(pics[-1,0,:,:], cmap="Greys")
        # NOTE(review): shadows the builtins min/max inside this function
        min, max = (cams[-1,preds[-1,0],:,:] - cams[0,preds[0,0],:,:]).min(), (cams[-1,preds[-1,0],:,:] - cams[0,preds[0,0],:,:]).max()
        im = axes[0, -1].imshow(cams[-1,preds[-1,0],:,:] - cams[0,preds[0,0],:,:], cmap=mpl.cm.seismic, alpha=alpha, interpolation='nearest', vmin=-max, vmax=max)
        axes[0, -1].set_title('Delta t')
        axes[0, -1].axis('off')
        divider = make_axes_locatable(axes[0,-1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        plt.colorbar(im, cax=cax)
        # row 1: map of the final timestep's prediction, traced back in time
        for i in range(t):
            axes[1,i].imshow(pics[i,0,:,:], cmap="Greys")
            im = axes[1,i].imshow(cams[i,preds[-1,0],:,:], cmap=mpl.cm.jet, alpha=alpha,
                                  interpolation='nearest')
            axes[1,i].set_title('t{}: tar/pred ({}/{})'.format(i, targets, preds[i,0]))
            axes[1,i].axis('off')
            divider = make_axes_locatable(axes[1,i])
            cax = divider.append_axes("right", size="5%", pad=0.05)
            plt.colorbar(im, cax=cax)
        axes[1, -1].imshow(pics[-1,0,:,:], cmap="Greys")
        min, max = (cams[-1,preds[-1,0],:,:] - cams[0,preds[-1,0],:,:]).min(), (cams[-1,preds[-1,0],:,:] - cams[0,preds[-1,0],:,:]).max()
        im = axes[1, -1].imshow(cams[-1,preds[-1,0],:,:] - cams[0,preds[-1,0],:,:], cmap=mpl.cm.seismic, alpha=alpha, interpolation='nearest', vmin=-max, vmax=max)
        axes[1, -1].set_title('Delta t')
        axes[1, -1].axis('off')
        divider = make_axes_locatable(axes[1,-1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cbar = plt.colorbar(im, cax=cax)
        # row 2: map of the true target class at every timestep
        for i in range(t):
            axes[2,i].imshow(pics[i,0,:,:], cmap="Greys")
            im = axes[2,i].imshow(cams[i,targets,:,:], cmap=mpl.cm.jet, alpha=alpha,
                                  interpolation='nearest')
            axes[2,i].set_title('t{}: tar/pred ({}/{})'.format(i, targets, preds[i,0]))
            axes[2,i].axis('off')
            divider = make_axes_locatable(axes[2,i])
            cax = divider.append_axes("right", size="5%", pad=0.05)
            plt.colorbar(im, cax=cax)
        axes[2, -1].imshow(pics[-1,0,:,:], cmap="Greys")
        min, max = (cams[-1,targets,:,:] - cams[0,targets,:,:]).min(), (cams[-1,targets,:,:] - cams[0,targets,:,:]).max()
        im = axes[2, -1].imshow(cams[-1,targets,:,:] - cams[0,targets,:,:], cmap=mpl.cm.seismic, alpha=alpha, interpolation='nearest', vmin=-max, vmax=max)
        axes[2, -1].set_title('Delta t')
        axes[2, -1].axis('off')
        divider = make_axes_locatable(axes[2,-1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        plt.colorbar(im, cax=cax)
        plt.show()
    pass
def show_cam_means(cams, targets, probs, preds):
    """
    Show batch-averaged Class Activation Maps (CAMs) over time.

    Draws a 3 x (t+1) grid:
      row 0: mean CAM of the class predicted at each timestep ("current")
      row 1: mean CAM of the class predicted at the final timestep ("final")
      row 2: mean CAM of the target class ("target")
    Each row has one panel per timestep plus a final "Delta t" panel showing
    the last-minus-first timestep difference on a zero-centered diverging map.

    cams    (b,t,n_classes,h,w) Class Activation Maps
    targets (b)                 Target labels
    probs   (b,t,topk)          Probabilities for each output (unused here)
    preds   (b,t,topk)          Predictions of the network
    """
    b, t, n_classes, h, w = cams.shape

    def _batch_mean_cam(class_of):
        # Average CAMs over the batch, selecting per (batch, timestep) the
        # class index returned by class_of(batch, timestep). Returns (t,h,w).
        per_timestep = []
        for ts in range(t):
            selected = torch.stack(
                [cams[bi, ts, class_of(bi, ts), :, :] for bi in range(b)], 0)
            per_timestep.append(selected)
        return torch.mean(torch.stack(per_timestep, dim=1), dim=0)

    # Three row variants: current prediction, final prediction, target class.
    mean_cams = [
        _batch_mean_cam(lambda bi, ts: preds[bi, ts, 0]),
        _batch_mean_cam(lambda bi, ts: preds[bi, -1, 0]),
        _batch_mean_cam(lambda bi, ts: targets[bi]),
    ]

    fig, axes = plt.subplots(3, t + 1, figsize=(12, 12))
    # Panel titles are annotated with the first sample of the batch only.
    targets = targets[0]
    preds = preds[0]
    alpha = 1.0
    for row, row_cams in enumerate(mean_cams):
        for i in range(t):
            # Symmetric limits so the colormap is centered on zero.
            im = axes[row, i].imshow(row_cams[i], cmap=mpl.cm.jet, alpha=alpha,
                                     interpolation='nearest',
                                     vmin=-row_cams[i].max(), vmax=row_cams[i].max())
            axes[row, i].set_title('t{}: tar/pred ({}/{})'.format(i, targets, preds[i, 0]))
            axes[row, i].axis('off')
            divider = make_axes_locatable(axes[row, i])
            cax = divider.append_axes("right", size="5%", pad=0.05)
            plt.colorbar(im, cax=cax)
        # Delta panel: change between the last and the first timestep.
        delta = row_cams[-1] - row_cams[0]
        hi = delta.max()  # symmetric range around zero; the minimum is unused
        im = axes[row, -1].imshow(delta, cmap=mpl.cm.seismic, alpha=alpha,
                                  interpolation='nearest', vmin=-hi, vmax=hi)
        axes[row, -1].set_title('Delta t')
        axes[row, -1].axis('off')
        divider = make_axes_locatable(axes[row, -1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        plt.colorbar(im, cax=cax)
    plt.show()
def plot_cam_samples(cams, pics, targets, probs, preds, filename, list_of_indices=[948,614,541], alpha=0.5):
    """
    Plot CAM overlays for hand-picked samples, one row per sample.

    For every timestep the input image is faded by a thresholded CAM and the
    binarized CAM (values > 0.2) is drawn on top with a transparency-faded
    'rocket' colormap; the last column shows the final-minus-first timestep
    CAM difference over the last input frame. The figure is saved as PDF.

    cams      (b,t,n_classes,h,w) Class Activation Maps (normalized in place!)
    pics      (b,t,n_channels,h,w) Input Images for Recurrent Network
    targets   (b,n_occ) NHot Target Vectors, or (b) plain labels
    probs     (b,t,topk) Probabilities for Each output (unused)
    preds     (b,t,topk) Predictions of the Network
    filename  output path, written as PDF
    list_of_indices  batch indices to plot (read-only; default never mutated)
    alpha     Transparency of the heatmap overlay
    """
    # Build a 'rocket' colormap whose alpha channel fades from opaque (low
    # values) to fully transparent (high values) and register it globally.
    from matplotlib.colors import LinearSegmentedColormap
    ncolors = 256
    color_array = plt.get_cmap('rocket')(range(ncolors))
    color_array[:, -1] = np.linspace(1.0, 0.0, ncolors)
    map_object = LinearSegmentedColormap.from_list(name='rocket_alpha', colors=color_array)
    # NOTE(review): plt.register_cmap is deprecated in newer matplotlib
    # (matplotlib.colormaps.register is the replacement) -- kept for
    # compatibility with the version this project pins.
    plt.register_cmap(cmap=map_object)
    b, t, n_classes, h, w = cams.shape
    n_rows = len(list_of_indices)
    # Targets may be n-hot (b,n_occ) or plain (b); keep both views around.
    n_hot_targets = targets if len(targets.shape) > 1 else torch.stack([targets, targets, targets], -1)
    targets = targets[:, 0] if len(targets.shape) > 1 else targets
    print("[INFO] showing CAMS for indices {}".format(list_of_indices))
    # Normalize every CAM to [0, 1]: subtract its min, divide by its max.
    min_val, min_args = torch.min(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams -= torch.unsqueeze(min_val, dim=-1)
    max_val, max_args = torch.max(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams /= torch.unsqueeze(max_val, dim=-1)
    if n_rows > 3:
        grid_dict = dict(left=0.05, right=0.875, bottom=0.04, top=0.973)
    else:
        grid_dict = dict(left=0.05, right=0.875)
    fig, ax = plt.subplots(n_rows, t + 1, gridspec_kw=grid_dict, figsize=(6.4, 4.8 / 3.0 * n_rows))
    for row, ind in enumerate(list_of_indices):
        for ti in range(t):
            current_image = pics[ind, ti, 0, :, :]
            # BUGFIX: np.float was removed in NumPy >= 1.24; the builtin
            # float is the documented replacement and behaves identically.
            threshold_map = np.array(cams[ind, ti, preds[ind, ti, 0], :, :] > 0.2, dtype=float)
            # Threshold the CAM: saturate everything > 0.2 to 1.0, then
            # rescale so sub-threshold values are amplified by a factor of 5.
            threshold_cam = cams[ind, ti, preds[ind, ti, 0], :, :].numpy().copy()
            threshold_cam[threshold_cam > 0.2] = 1.0
            threshold_cam /= 0.2
            faded_image = current_image * threshold_cam
            ax[row, ti].imshow(faded_image, cmap="Greys", interpolation="none")
            lo, hi = 0.0, 1.0  # fixed color range for the binary overlay
            im = ax[row, ti].imshow(threshold_map, cmap="rocket_alpha", alpha=alpha, interpolation='nearest', vmin=lo, vmax=hi)
            # Gini coefficient of the CAM as a sparsity measure.
            g = metrics.gini(np.array(cams[ind, ti, preds[ind, ti, 0], :, :]))
            ax[row, ti].set_xlabel('{}|{} [{},{}]\n g={:0.3f}'.format(preds[ind, ti, 0], n_hot_targets[ind, 0], n_hot_targets[ind, 1], n_hot_targets[ind, 2], g), fontsize=12)
            ax[row, ti].set_yticks([])
            ax[row, ti].set_xticks([])
        # Delta-t panel: final-timestep CAM minus first-timestep CAM on top
        # of the last input frame, with a fixed symmetric color range.
        ax[row, -1].imshow(pics[ind, -1, 0, :, :], cmap="Greys")
        lo, hi = -0.5, +0.5
        im = ax[row, -1].imshow(cams[ind, -1, preds[ind, -1, 0], :, :] - cams[ind, 0, preds[ind, 0, 0], :, :], cmap="icefire", alpha=alpha if alpha > 0.75 else 0.75, interpolation='nearest', vmin=-hi, vmax=hi)
        divider = make_axes_locatable(ax[row, -1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cbar = fig.colorbar(im, cax=cax, ticks=[lo, (lo + hi) / 2., hi])
        cax.tick_params(axis='both', which='major', labelsize=10)
        # Enlarge the delta axis slightly and nudge it so the added
        # colorbar does not squeeze the panel.
        box = ax[row, -1].get_position()
        if n_rows > 3:
            y_offset = 0.0045
        else:
            y_offset = 0.014
        ax[row, -1].set_position([box.x0 + 0.025, box.y0 - y_offset, box.width * 1.12, box.height * 1.12])
        ax[row, -1].set_yticks([])
        ax[row, -1].set_xticks([])
    # Column headers: one per timestep plus the delta column.
    for ti in range(t):
        ax[0, ti].annotate('$t_{}$'.format(ti), xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                           fontsize=12, ha='center', va='bottom',
                           bbox=dict(boxstyle='square', fc='white', ec='white'),
                           )
    ax[0, -1].annotate(r'$\Delta t$', xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                       fontsize=12, ha='center', va='bottom',
                       bbox=dict(boxstyle='square', fc='white', ec='white'),
                       )
    # Panel label 'A' plus a legend explaining the x-label notation.
    ax_in = ax[0, 1]
    ax[0, 0].annotate('A', xy=(ax_in.get_xlim()[0], ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0], ax_in.get_ylim()[1]]) + np.array([-10, -12]), weight='bold', fontsize=24)
    ax[-1, 0].text(18, 48, '3|3 [8,5] = output:3 | target:3 [occluder1:8, occluder2:5]',
                   fontsize=12, horizontalalignment='left',
                   verticalalignment='center')
    plt.savefig(filename, dpi=300, format='pdf')
    plt.close()
def plot_cam_samples_alt(cams, pics, targets, probs, preds, filename, list_of_indices=[948,614,541], alpha=0.5):
    """
    Alternative per-sample CAM figure, one row per sample.

    Columns: raw stimulus, CAM of the predicted class at t0, CAM at t3, and
    the final-minus-first timestep CAM difference over the last input frame.
    The figure is saved as PDF.

    cams      (b,t,n_classes,h,w) Class Activation Maps (normalized in place!)
    pics      (b,t,n_channels,h,w) Input Images for Recurrent Network
    targets   (b,n_occ) NHot Target Vectors, or (b) plain labels
    probs     (b,t,topk) Probabilities for Each output (unused)
    preds     (b,t,topk) Predictions of the Network
    filename  output path, written as PDF
    list_of_indices  batch indices to plot (read-only; default never mutated)
    alpha     Transparency of the heatmap overlay (delta panel only)
    """
    # Build and register a 'rocket' colormap with a linear alpha fade.
    from matplotlib.colors import LinearSegmentedColormap
    ncolors = 256
    color_array = plt.get_cmap('rocket')(range(ncolors))
    color_array[:, -1] = np.linspace(1.0, 0.0, ncolors)
    map_object = LinearSegmentedColormap.from_list(name='rocket_alpha', colors=color_array)
    # NOTE(review): plt.register_cmap is deprecated in newer matplotlib.
    plt.register_cmap(cmap=map_object)
    b, t, n_classes, h, w = cams.shape
    n_rows = len(list_of_indices)
    # Targets may be n-hot (b,n_occ) or plain (b); keep both views around.
    n_hot_targets = targets if len(targets.shape) > 1 else torch.stack([targets, targets, targets], -1)
    targets = targets[:, 0] if len(targets.shape) > 1 else targets
    print("[INFO] showing CAMS for indices {}".format(list_of_indices))
    # Normalize every CAM to [0, 1].
    min_val, min_args = torch.min(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams -= torch.unsqueeze(min_val, dim=-1)
    max_val, max_args = torch.max(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams /= torch.unsqueeze(max_val, dim=-1)
    if n_rows > 3:
        grid_dict = dict(left=0.05, right=0.875, bottom=0.04, top=0.973)
    else:
        grid_dict = dict(left=0.05, right=0.875, bottom=0.12)
    fig, ax = plt.subplots(n_rows, t, gridspec_kw=grid_dict)
    for row, ind in enumerate(list_of_indices):
        # Column 0: raw stimulus labelled with target and occluders.
        current_image = pics[ind, 0, 0, :, :]
        ax[row, 0].imshow(current_image, cmap="Greys", interpolation="none")
        ax[row, 0].set_xlabel('{} [{},{}]'.format(n_hot_targets[ind, 0], n_hot_targets[ind, 1], n_hot_targets[ind, 2]), fontsize=12)
        ax[0, 0].annotate('stimulus', xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                          fontsize=12, ha='center', va='bottom',
                          bbox=dict(boxstyle='square', fc='white', ec='white'),
                          )
        # Column 1: CAM of the predicted class at the first timestep.
        ti = 0
        lo, hi = (cams[ind, ti, preds[ind, ti, 0], :, :]).min(), (cams[ind, ti, preds[ind, ti, 0], :, :]).max()
        im = ax[row, 1].imshow(cams[ind, ti, preds[ind, ti, 0], :, :], cmap="rocket", alpha=1.0, interpolation='nearest', vmin=lo, vmax=hi)
        ax[0, 1].annotate('$t_{}$'.format(0), xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                          fontsize=12, ha='center', va='bottom',
                          bbox=dict(boxstyle='square', fc='white', ec='white'),
                          )
        g = metrics.gini(np.array(cams[ind, ti, preds[ind, ti, 0], :, :]))
        ax[row, 1].set_xlabel('pred.: {}, g={:0.3f}'.format(preds[ind, ti, 0], g), fontsize=12)
        # Column 2: CAM of the predicted class at the last timestep (t3).
        ti = 3
        lo, hi = (cams[ind, ti, preds[ind, ti, 0], :, :]).min(), (cams[ind, ti, preds[ind, ti, 0], :, :]).max()
        im = ax[row, 2].imshow(cams[ind, ti, preds[ind, ti, 0], :, :], cmap="rocket", alpha=1.0, interpolation='nearest', vmin=lo, vmax=hi)
        ax[0, 2].annotate('$t_{}$'.format(3), xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                          fontsize=12, ha='center', va='bottom',
                          bbox=dict(boxstyle='square', fc='white', ec='white'),
                          )
        g = metrics.gini(np.array(cams[ind, ti, preds[ind, ti, 0], :, :]))
        ax[row, 2].set_xlabel('pred.: {}, g={:0.3f}'.format(preds[ind, ti, 0], g), fontsize=12)
        for ti in range(t):
            ax[row, ti].set_yticks([])
            ax[row, ti].set_xticks([])
        # Colorbar for the CAM column, fixed [0, 1] ticks.
        lo, hi = 0.0, 1.0
        divider = make_axes_locatable(ax[row, 2])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cbar = fig.colorbar(im, cax=cax, ticks=[lo, hi])
        cax.tick_params(axis='both', which='major', labelsize=10)
        # Enlarge the axis slightly so the colorbar does not squeeze it.
        box = ax[row, 2].get_position()
        if n_rows > 3:
            y_offset = 0.0040
        else:
            y_offset = 0.0055
        ax[row, 2].set_position([box.x0, box.y0 - y_offset, box.width * 1.07, box.height * 1.07])
        # Delta-t panel with a fixed symmetric color range.
        ax[row, -1].imshow(pics[ind, -1, 0, :, :], cmap="Greys")
        lo, hi = -0.5, +0.5
        im = ax[row, -1].imshow(cams[ind, -1, preds[ind, -1, 0], :, :] - cams[ind, 0, preds[ind, 0, 0], :, :], cmap="icefire", alpha=alpha if alpha > 0.75 else 0.75, interpolation='nearest', vmin=-hi, vmax=hi)
        divider = make_axes_locatable(ax[row, -1])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cbar = fig.colorbar(im, cax=cax, ticks=[lo, (lo + hi) / 2., hi])
        cax.tick_params(axis='both', which='major', labelsize=10)
        box = ax[row, -1].get_position()
        if n_rows > 3:
            y_offset = 0.0040
        else:
            y_offset = 0.0055
        ax[row, -1].set_position([box.x0 + 0.025, box.y0 - y_offset, box.width * 1.07, box.height * 1.07])
        ax[row, -1].set_yticks([])
        ax[row, -1].set_xticks([])
        ax[0, -1].annotate(r'$\Delta t$', xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                           fontsize=12, ha='center', va='bottom',
                           bbox=dict(boxstyle='square', fc='white', ec='white'),
                           )
    # Panel label 'A' plus a legend explaining the x-label notation.
    ax_in = ax[0, 1]
    ax[0, 0].annotate('A', xy=(ax_in.get_xlim()[0], ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0], ax_in.get_ylim()[1]]) + np.array([-8, -8]), weight='bold', fontsize=24)
    ax[-1, 0].text(20, 44, '3 [8,5] = target:3 [occluder1:8, occluder2:5]',
                   fontsize=12, horizontalalignment='left',
                   verticalalignment='center')
    plt.savefig(filename, dpi=300, format='pdf')
    plt.close()
def plot_cam_means(cams, targets, probs, preds):
    """
    Plot batch-mean CAMs per timestep for three class selections and test
    the per-timestep Gaussian-width metric for significant differences.

    Rows: CAM of the currently predicted class, of the finally predicted
    class, and of the target class. Each per-sample CAM is fitted with a 2D
    Gaussian; the euclidean norm of the two fitted sigmas serves as a width
    metric which is compared between timesteps with a Welch t-test under
    Benjamini-Hochberg FDR control (shown as a binary matrix per row).

    cams    (b,t,n_classes,h,w) Class Activation Maps (normalized in place!)
    targets (b)                 Target labels
    probs   (b,t,topk)          Probabilities for each output (unused)
    preds   (b,t,topk)          Predictions of the network
    """
    b, t, n_classes, h, w = cams.shape
    n_rows = 3
    # Pixel grid used by the 2D Gaussian fits.
    x = np.linspace(0, 31, 32)
    y = np.linspace(0, 31, 32)
    x, y = np.meshgrid(x, y)
    quantify_pixel_importance(cams, preds, percentage=0.25)
    quantify_pixel_importance(cams, preds, percentage=0.5)
    # Normalize every CAM to [0, 1]: subtract its min, divide by its max.
    min_val, min_args = torch.min(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams -= torch.unsqueeze(min_val, dim=-1)
    max_val, max_args = torch.max(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
    cams /= torch.unsqueeze(max_val, dim=-1)

    def _gather(class_of):
        # (b, t, h, w) stack of CAMs, class chosen per (batch, timestep).
        per_timestep = []
        for ts in range(t):
            per_timestep.append(torch.stack(
                [cams[bi, ts, class_of(bi, ts), :, :] for bi in range(b)], 0))
        return torch.stack(per_timestep, dim=1)

    cams_by_row = [
        _gather(lambda bi, ts: preds[bi, ts, 0]),   # current prediction
        _gather(lambda bi, ts: preds[bi, -1, 0]),   # final prediction
        _gather(lambda bi, ts: targets[bi]),        # target class
    ]
    fig, ax = plt.subplots(n_rows, t + 1)
    for row in range(n_rows):
        dict_of_metric = {}
        popt_list = []
        for ti in range(t):
            dict_of_metric[ti] = []
            list_of_data = []
            list_of_fitted_data = []
            initial_guess = (3., 15, 14, 5, 5, 0., 10.)  # handcrafted parameters
            for j in range(b):
                data_noisy = cams_by_row[row][j, ti, :, :].numpy()
                # Fallback guess centered on the CAM peak, used on fit failure.
                x_init, y_init = np.where(data_noisy == np.amax(data_noisy))
                initial_guess_2 = (np.amax(data_noisy), x_init[0], y_init[0], 5, 5, 0., 10.)
                data_noisy = np.reshape(data_noisy, h * w)
                list_of_data.append(data_noisy)
                try:
                    popt, pcov = opt.curve_fit(twoD_Gaussian, (x, y), data_noisy, p0=initial_guess, maxfev=50000)
                except RuntimeError:
                    print('[INFO] RuntimeError, reset initial guess')
                    popt, pcov = opt.curve_fit(twoD_Gaussian, (x, y), data_noisy, p0=initial_guess_2, maxfev=50000)
                popt_list.append(popt)
                initial_guess = popt  # warm-start the next sample's fit
                # Width metric: euclidean norm of the two fitted sigmas.
                dict_of_metric[ti].append(np.sqrt(popt[3]**2 + popt[4]**2))
                #dict_of_metric[ti].append(max(popt[3], popt[4]))
            for popt in popt_list:
                list_of_fitted_data.append(twoD_Gaussian((x, y), *popt))
            popt_list = []
            ax[row, ti].imshow(np.mean(list_of_data, 0).reshape(32, 32), cmap="rocket", origin='upper',
                               extent=(x.min(), x.max(), y.min(), y.max()))
            # Dashed gray contour: mean of the per-sample fits; solid white
            # contour: a single fit to the mean CAM.
            ax[row, ti].contour(x, np.flip(y), np.mean(list_of_fitted_data, 0).reshape(32, 32), [.35, .4, .45, .5], colors='gray', linewidths=.25, linestyles='dashed')
            popt, pcov = opt.curve_fit(twoD_Gaussian, (x, y), np.mean(list_of_data, 0), p0=(1., 15, 14, 1, 1, 0., 3.), maxfev=50000)
            ax[row, ti].contour(x, np.flip(y), (twoD_Gaussian((x, y), *popt)).reshape(32, 32), [.35, .4, .45, .5], colors='w', linewidths=.25)
            ax[row, ti].set_yticks([])
            ax[row, ti].set_xticks([])
            ax[0, 0].set_ylabel('current')
            ax[1, 0].set_ylabel('final')
            ax[2, 0].set_ylabel('target')
            ax[0, ti].annotate('$t_{}$'.format(ti), xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                               fontsize=9, ha='center', va='bottom',
                               bbox=dict(boxstyle='square', fc='white', ec='white'),
                               arrowprops=dict(arrowstyle='-[, widthB={}, lengthB=0.25, angleB=0'.format(3), lw=1.0))
        # Welch t-tests between all timestep pairs with Benjamini-Hochberg
        # FDR control. BUGFIX: the original recomputed this identical,
        # ti-independent table inside `for ti in range(t)`; compute it once.
        qstar = 0.05
        pval = np.ones([t, t])
        stval = np.ones([t, t])
        significance_table = np.zeros([t, t])
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    stval[k, j], pval[k, j] = st.ttest_ind(dict_of_metric[k], dict_of_metric[j], equal_var=False)
        sorted_pvals = np.sort(pval[pval < 1])
        bjq = np.arange(1, len(sorted_pvals) + 1) / \
            len(sorted_pvals) * qstar
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    if pval[k, j] in sorted_pvals[sorted_pvals - bjq <= 0]:
                        significance_table[k, j] = 1
                    else:
                        significance_table[k, j] = 0
        # Shrink the last column and draw the significance matrix there.
        box = ax[row, -1].get_position()
        ax[row, -1].set_position([box.x0 + 0.01, box.y0 + 0.02, box.width * 0.8, box.height * 0.8])
        ax[row, -1].set_zorder(-1)
        ax[row, -1].matshow(significance_table, cmap='Greys')
        ax[row, -1].set_xticklabels(['', '$t_0$', '$t_1$', '$t_2$'], fontsize=7)
        ax[row, -1].tick_params(labelbottom=True, labeltop=False,
                                right=False, top=False)
        ax[row, -1].set_yticklabels(['$t_0$', '$t_1$', '$t_2$', '$t_3$'], fontsize=7)
        ax[row, -1].set_xlim([-0.5, t - 1 - 0.5])
        ax[row, -1].set_ylim([t - 0.5, -0.5 + 1])
        ax[row, -1].spines['top'].set_visible(False)
        ax[row, -1].spines['right'].set_visible(False)
        # Legend swatch explaining the significance matrix.
        ax[-1, -1].add_patch(patches.Rectangle((0.0, 2.5 + 4), 1, 1, fill='black',
                                               color='black', alpha=1, clip_on=False))
        ax[-1, -1].text(0.0, 6.4 - 1.0 + 4, 'Significant difference \n(two-sided t-test, \nexpected FDR=0.05)',
                        fontsize=7, horizontalalignment='left',
                        verticalalignment='center')
    # Frame the t1 panel and label the figure 'B'.
    ax_in = ax[0, 1]
    rect = patches.Rectangle((ax_in.get_xlim()[0], ax_in.get_ylim()[0]), ax_in.get_xlim()[1] - ax_in.get_xlim()[0], ax_in.get_ylim()[1] - ax_in.get_ylim()[0], linewidth=1, edgecolor='black', facecolor='none')
    ax[0, 0].add_patch(rect)
    ax[0, 0].annotate('B', xy=(ax_in.get_xlim()[0], ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0], ax_in.get_ylim()[1]]) + np.array([-24, +12]), weight='bold', fontsize=24)
    plt.show()
def plot_cam_means2(cams_list, targets, probs, preds, filename):
    """
    Compare batch-mean CAM evolution across experimental conditions.

    One figure row per entry of cams_list (bottom-left / top-right / center
    target position). Per row: the mean CAM per timestep with a fitted
    2D-Gaussian contour and a '+' marker at the expected target location, a
    shared colorbar, and a gini-coefficient-over-time errorbar panel.
    Two-sided KS tests between timesteps (Benjamini-Hochberg FDR control)
    are computed per row and printed. The figure is saved as PDF.

    cams_list list of (b,t,n_classes,h,w) CAM tensors (normalized in place!)
    targets   (b) target labels (currently unused)
    probs     (b,t,topk) probabilities (unused)
    preds     list of (b,t,topk) prediction tensors, aligned with cams_list
    filename  output path, written as PDF

    Returns:
        ginis (b,t) gini coefficients of the last row processed.
    """
    b, t, n_classes, h, w = cams_list[0].shape
    n_rows = 3
    # Pixel grid used by the 2D Gaussian fits.
    x = np.linspace(0, 31, 32)
    y = np.linspace(0, 31, 32)
    x, y = np.meshgrid(x, y)
    # Normalize each CAM tensor to [0, 1] and gather, per (batch, timestep),
    # the CAM of the class predicted at the final timestep.
    cams_final = []
    for i, cams in enumerate(cams_list):
        min_val, min_args = torch.min(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
        cams -= torch.unsqueeze(min_val, dim=-1)
        max_val, max_args = torch.max(cams.view(b, t, n_classes, h * w), dim=-1, keepdim=True)
        cams /= torch.unsqueeze(max_val, dim=-1)
        uber_cam = []
        for timestep in range(t):
            topk_cam = []
            for batch in range(b):
                topk_cam.append(cams[batch, timestep, preds[i][batch, -1, 0], :, :])
            topk_cam = torch.stack(topk_cam, 0)
            uber_cam.append(topk_cam)
        cams_final.append(torch.stack(uber_cam, dim=1))
    cams_by_row = cams_final
    # Expected target offsets per row (bottom-left, top-right, center).
    row_x_shift = [-8, 8, 0]
    row_y_shift = [8, -8, 0]
    fig, ax = plt.subplots(n_rows, t + 1, gridspec_kw={
        'width_ratios': [1, 1, 1, 1, 1],
        'height_ratios': [1, 1, 1], 'left': 0.05, 'right': 0.875},)
    for row in range(n_rows):
        dict_of_metric = {}
        # Initial guess centered on the expected target position of this row.
        initial_guess = (3., 16. + row_x_shift[row], 16. + row_y_shift[row], 2., 2., 0., 0.)
        # TODO: Transfer upper and lower bound to plot_cam_means function
        lower_bound = (0, 0, 0, 0, 0., 0., -100.)
        upper_bound = (100, 32, 32, 32., 32., 2 * np.pi, 100.)
        # Shared color range for all panels of this row (renamed from
        # min/max to avoid shadowing the builtins).
        lo = np.mean(cams_by_row[row].numpy(), axis=0).min()
        hi = np.mean(cams_by_row[row].numpy(), axis=0).max()
        ginis = np.zeros([b, t])
        for ti in range(t):
            dict_of_metric[ti] = []
            list_of_data = []
            for j in range(b):
                data_noisy = cams_by_row[row][j, ti, :, :].numpy()
                ginis[j, ti] = metrics.gini(data_noisy)
                data_noisy = np.reshape(data_noisy, h * w)
                list_of_data.append(data_noisy)
                # Per-sample sparsity metric (gini of the flattened CAM).
                dict_of_metric[ti].append(metrics.gini(data_noisy))
            im = ax[row, ti].imshow(np.mean(list_of_data, 0).reshape(32, 32), cmap="rocket", origin='upper', vmin=lo, vmax=hi, extent=(x.min(), x.max(), y.min(), y.max()))
            # White contour: a single Gaussian fit to the mean CAM.
            popt, pcov = opt.curve_fit(twoD_Gaussian, (x, y), np.mean(list_of_data, 0), p0=initial_guess, bounds=(lower_bound, upper_bound), maxfev=50000)
            ax[row, ti].contour(x, np.flip(y), (twoD_Gaussian((x, y), *popt)).reshape(32, 32), levels=[.35, .4, .45, .5], colors='w', linewidths=.25)
            ax[row, ti].set_yticks([])
            ax[row, ti].set_xticks([])
            ax[0, ti].annotate('$t_{}$'.format(ti), xy=(0.5, 1.10), xytext=(0.5, 1.10), xycoords='axes fraction',
                               fontsize=12, ha='center', va='bottom',
                               bbox=dict(boxstyle='square', fc='white', ec='white'),
                               )
            # Row labels and '+' markers at the expected target position.
            ax[0, 0].set_ylabel('bottom-left', fontsize=12)
            ax[0, ti].plot(16 - row_y_shift[0], 16 - row_y_shift[0], marker="+", color='black', markersize=5.0, markeredgewidth=.25)
            ax[1, 0].set_ylabel('top-right', fontsize=12)
            ax[1, ti].plot(16 + row_y_shift[0], 16 + row_y_shift[0], marker="+", color='black', markersize=5.0, markeredgewidth=.25)
            ax[2, 0].set_ylabel('center', fontsize=12)
            ax[2, ti].plot(16, 16, marker="+", color='black', markersize=5.0, markeredgewidth=.25)
        # Two-sided KS tests between all timestep pairs, with
        # Benjamini-Hochberg FDR control; results are printed only.
        qstar = 0.05
        pval = np.ones([t, t])
        stval = np.ones([t, t])
        significance_table = np.zeros([t, t])
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    stval[k, j], pval[k, j] = st.ks_2samp(dict_of_metric[k], dict_of_metric[j], alternative='two-sided', mode='auto')
        print(np.round(pval, 4))
        print(np.round(stval, 4))
        sorted_pvals = np.sort(pval[pval < 1])
        bjq = np.arange(1, len(sorted_pvals) + 1) / \
            len(sorted_pvals) * qstar
        for k in range(t):
            for j in range(t):
                if k != j and k > j:
                    if pval[k, j] in sorted_pvals[sorted_pvals - bjq <= 0]:
                        significance_table[k, j] = 1
                    else:
                        significance_table[k, j] = 0
        print(significance_table)
        # Colorbar next to the last CAM column.
        divider = make_axes_locatable(ax[row, -2])
        cax = divider.append_axes("right", size="5%", pad=0.05)
        cbar = fig.colorbar(im, cax=cax, ticks=[lo, (lo + hi) / 2., hi])
        cax.tick_params(axis='both', which='major', labelsize=10)
        cax.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
        # Enlarge the last CAM axis so the colorbar does not squeeze it.
        box = ax[row, -2].get_position()
        ax[row, -2].set_position([box.x0, box.y0 - 0.014, box.width * 1.12, box.height * 1.12])
        # Gini-over-time errorbar panel in the last column.
        ax[row, -1].set_zorder(-1)
        box = ax[row, -1].get_position()
        ax[row, -1].set_position([box.x0 + 0.125, box.y0 + 0.02, box.width * 0.85, box.height * 0.85])
        print('[INFO] gini, row:{}'.format(row))
        print(ginis.mean(axis=0), ginis.std(axis=0))
        ax[row, -1].errorbar(np.arange(t), ginis.mean(axis=0), yerr=ginis.std(axis=0), xerr=None, fmt='o-', color='black')
        ax[0, -1].set_title("$g_c$")
        ax[row, -1].spines['top'].set_visible(False)
        ax[row, -1].spines['right'].set_visible(False)
        ax[row, -1].tick_params(axis='y', right=False, left=True, labelleft=True, labelright=False)
        ax[row, -1].set_xticks(np.arange(t))
        ax[row, -1].set_yticks([0, 0.1, 0.2, 0.3])
        ax[row, -1].set_ylim([0., 0.3])
        ax[row, -1].set_xlim([-0.25, 3.25])
        ax[row, -1].set_xticklabels(['$t_0$', '$t_1$', '$t_2$', '$t_3$'])
    # Figure label 'B' relative to the t1 panel.
    ax_in = ax[0, 1]
    ax[0, 0].annotate('B', xy=(ax_in.get_xlim()[0], ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0], ax_in.get_ylim()[1]]) + np.array([-10, +10]), weight='bold', fontsize=24)
    plt.savefig(filename, dpi=300, format='pdf')
    plt.close()
    return ginis
# -----------------
# tsne and softmax output functions
# -----------------
def plot_tsne_timetrajectories(representations, imgs, targets, points=1000, show_stimuli=False, show_indices=False, N='all', savefile='./../trained_models/tsnesave.npy', overwrite=False):
    """plot_tsne_timetrajectories is deprecated, use plot_tsne_evolution instead

    Run one joint t-SNE over all (sample, timestep) representations and draw a
    2x2 figure: the top row scatters the first and last timestep colored by
    class and overlays the trajectories of the trailing unoccluded samples;
    the bottom row grays out all classes except a hand-picked highlight set
    and can attach stimulus thumbnails to selected points.

    Args:
        representations: tensor of shape (points, time, feature, height, width);
            the last N_UNOCC entries are assumed to be unoccluded samples.
        imgs: stimulus images indexed as imgs[i, ti, 0] for thumbnail boxes.
        targets: 1-D tensor of integer class labels aligned with representations.
        points: keep only this many trailing samples for plotting.
        show_stimuli: if True, draw stimulus thumbnails next to their 2-D points.
        show_indices: if True, annotate each shown thumbnail with its sample index.
        N: 'all', an int (per-class random subsample size), or an index sequence
            (last element feeds the bottom-right plot, the rest the bottom-left).
        savefile: .npy path used to cache the t-SNE embedding between runs.
        overwrite: if True, recompute the embedding even when a cache exists.

    Returns:
        None; shows the figure via plt.show().
    """
    # Constants, maybe become variables later
    N_UNOCC = 10  # number of trailing samples treated as unoccluded
    # reduce dataset for plotting
    representations = representations[-points:]
    targets = targets[-points:]
    markers = ["o","v","s","D","H"]  # one marker shape per timestep
    classes = set(targets.numpy())
    # plotting
    markersizes = [10,30] #,10,30]
    alpha=1.0
    colors = sns.color_palette("colorblind", len(classes))
    points,time,feature,height,width = representations.shape
    representations = representations.view(points,time,-1)
    # old way where we would combine the representations into one data reduction
    representations = representations.view(points*time,-1).numpy()
    # restore or save tsne model
    if os.path.exists(savefile):
        projected_data = np.load(savefile)
        print('[INFO] Loaded tsne-file at {}'.format(savefile))
    else:
        projected_data = np.zeros([1])
    # recompute when the cached embedding does not match the current data size
    # NOTE(review): initial_dims receives a data row (representations[-1]),
    # not a dimension count; presumably ignored because use_pca=False — confirm
    # against the bhtsne wrapper.
    if (projected_data.shape[0] != representations.shape[0]) or overwrite:
        projected_data = bhtsne.run_bh_tsne(representations, no_dims=2, perplexity=25, verbose=True, use_pca=False, initial_dims=representations[-1], max_iter=1000) #10000
        np.save(savefile, projected_data)
    projected_data = projected_data.reshape(points, time, -1)
    # split the embedding into occluded samples and the trailing unoccluded ones
    x_data = projected_data[:-N_UNOCC,:,0] # (index, time)
    y_data = projected_data[:-N_UNOCC,:,1] # (index, time)
    tar = targets[:-N_UNOCC]
    x_data_u = projected_data[-N_UNOCC:,:,0] # (index, time)
    y_data_u = projected_data[-N_UNOCC:,:,1] # (index, time)
    tar_u = targets[-N_UNOCC:]
    fig, axes = plt.subplots(2,2, sharex=False, sharey=False, figsize=(9,6))
    # top row: class-colored scatter at the first and last timestep
    for pltnr, ax in enumerate([axes[0,0],axes[0,1]]):
        for ti in [0,3]: #range(4)
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
                yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
                ax.scatter(xc,yc,c=colors[i], label=str(int(cla)), marker=markers[ti], alpha=alpha, s=markersizes[pltnr])
        # invisible white point acts as a spacer entry in the legend
        ax.scatter([0], [0], c='white', label=' ')
        # unoccluded trajectories
        for ti in range(time):
            for (i,cla) in enumerate(sorted((set(tar_u.numpy())))):
                xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
                yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
                ax.scatter(xc,yc,c='black', marker=markers[ti], alpha=alpha, s=markersizes[pltnr], label='$t_{}$'.format(ti))
        for i in range(N_UNOCC):
            ax.plot(x_data_u[i,:], y_data_u[i,:], color='black', linestyle='-', alpha=alpha)
    # bottom plots
    grays = ['lightgray'] * len(classes)
    fills = ['none'] * len(classes)
    colorset = [sns.color_palette(grays),sns.color_palette(grays)]
    marker_fills = [fills,fills]
    allhighlights = [[3,8],[9]] # TODO: find out what this does!
    # highlighted classes keep their original color and a filled marker;
    # everything else stays light gray / unfilled
    for pltnr in range(len(allhighlights)):
        for hl in allhighlights[pltnr]:
            colorset[pltnr][hl] = colors[hl]
            marker_fills[pltnr][hl] = 'full'
    for pltnr,ax in enumerate([axes[1,0],axes[1,1]]):
        # choose which sample indices get stimulus thumbnails (see N in docstring)
        if N=='all':
            n_indices = range(len(projected_data))
        elif isinstance(N, int):
            min_N_ind = N//len(set(targets.numpy()))
            n_indices = []
            for cla in classes:
                ind = np.where(tar == cla)[0]
                n_indices += list(np.random.choice(ind, min(min_N_ind, len(ind)), replace=False))
            # pad with random occluded samples until N indices are collected
            n_indices += list(np.random.randint(0,len(projected_data)-N_UNOCC,N-len(n_indices)))
            n_indices = np.array(n_indices)
        elif (pltnr==1):
            n_indices = [N[-1]]
        else:
            n_indices = N[:-1]
        if show_stimuli:
            artists = []
            for ti in range(time):
                for x0, y0, i in zip(projected_data[:,ti,0][n_indices], projected_data[:,ti,1][n_indices], n_indices):
                    # adapt the center of arrows intelligently
                    if ((y0 < 0) and (x0>5)) :
                        xc,yc = x0-5,y0+25 #x0+10,y0+20
                    elif ((y0 < 0) and (x0<5)):
                        xc,yc = x0-5,y0+40
                    elif (y0 > 0):
                        xc,yc = x0-20,y0-30
                    else:
                        xc,yc = x0,y0
                    #calculate scaling factor c
                    c = np.sqrt((1800./(xc**2 + yc**2))) # (-30., 30.)
                    ab = offsetbox.AnnotationBbox(makeMarker(imgs[i,ti,0], zoom=0.65*32./len(imgs[i,ti,0])), (x0, y0), xybox=(c*xc, c*yc), xycoords='data', boxcoords="offset points",
                        pad=0.3,bboxprops=dict(color=colorset[pltnr][int(targets[i])]) , arrowprops=dict(arrowstyle=patches.ArrowStyle("->", head_length=.2, head_width=.1)), frameon=True)
                    # ab2 = offsetbox.AnnotationBbox(makeMarker(tile_tensor_lowres[i], zoom=0.65*32./len(tile_tensor_lowres[i])), (x0, y0), xybox=(c*xc-30, c*yc), xycoords='data', boxcoords="offset points",
                    # pad=0.3,bboxprops=dict(color=colorset[pltnr][int(all_classes[i])]), frameon=True)
                    if show_indices:
                        ax.annotate('{}'.format(i), xy=(x0, y0), xytext=(x0, y0), zorder=-1)
                    # push crowded thumbnails behind the scatter points
                    if len(n_indices) >= 25:
                        ab.zorder=-1
                    artists.append(ax.add_artist(ab))
                    # if i == 622:
                    #     artists.append(ax.add_artist(ab2))
        for ti in [0,3]: #range(4)
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
                yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
                ax.scatter(xc,yc,c=colorset[pltnr][i], label=str(int(cla)), marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][i]), alpha=alpha, s=markersizes[0])
        # invisible white point acts as a spacer entry in the legend
        ax.scatter([0], [0], c='white', label=' ')
        # unoccluded trajectories
        for ti in range(time):
            for (i,cla) in enumerate(sorted((set(tar_u.numpy())))):
                xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
                yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
                ax.scatter(xc,yc,c='lightgray', marker=markers[ti], alpha=alpha, s=markersizes[0], label='$t_{}$'.format(ti)) #label='${}_{}$'.format(cla[0],cla[1])
        for i in range(N_UNOCC):
            ax.plot(x_data_u[i,:], y_data_u[i,:], color='lightgray', linestyle='-', alpha=alpha)
        # plot unoccluded trajectories as highlights
        for ti in range(time):
            for (i,cla) in enumerate(sorted((set(tar_u.numpy())))):
                xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
                yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
                ax.scatter(xc,yc,c='lightgray', marker=markers[ti], alpha=alpha, s=markersizes[0])
        for i in allhighlights[pltnr]:
            xd = x_data_u[i,:]
            yd = y_data_u[i,:]
            ax.plot(xd, yd, color='black', linestyle='-', alpha=alpha)
            for j in range(len(xd)):
                ax.scatter(xd[j],yd[j],c='black', marker=markers[j], alpha=alpha, s=markersizes[0],zorder=9999)
    # build a curated legend from the scatter-call order above:
    # first 10 class entries, the white spacer, then one entry per timestep.
    # NOTE(review): the slice offsets (20:21, -20+i*10) encode the exact
    # number of labeled scatter calls — any change to the plotting order
    # above breaks this indexing.
    handles, labels = axes[0,1].get_legend_handles_labels()
    #handles = handles[:10] + handles[-5:]
    handles = handles[:10] + handles[20:21] + [handles[-20+i*10] for i in range(time)]
    #labels = labels[:10] + labels[-5:]
    labels = labels[:10] + labels[20:21] + [labels[-20+i*10] for i in range(time)]
    axes[0,1].legend(handles, labels, title='class label', loc='center left', bbox_to_anchor=(1, 0), frameon=False)
    bottom, top = plt.ylim()
    # general setup
    axes[0,0].set_ylabel('t-SNE dimension 1')
    axes[1,0].set_ylabel('t-SNE dimension 1')
    axes[1,0].set_xlabel('t-SNE dimension 2')
    axes[1,1].set_xlabel('t-SNE dimension 2')
    # zoom panel B into the neighborhood of unoccluded sample 5 at the last timestep
    ax_in = axes[0,1]
    ax_in.set_xlim([x_data_u[5,-1] - 7, x_data_u[5,-1] + 7])
    ax_in.set_ylim([y_data_u[5,-1] - 7, y_data_u[5,-1] + 7])
    # Create a Rectangle patch
    rect = patches.Rectangle((ax_in.get_xlim()[0],ax_in.get_ylim()[0]),ax_in.get_xlim()[1]-ax_in.get_xlim()[0],ax_in.get_ylim()[1]-ax_in.get_ylim()[0],linewidth=1,edgecolor='black',facecolor='none')
    # Add the patch to the Axes
    axes[0,0].add_patch(rect)
    # Annotate
    axes[0,0].annotate('B', xy=(ax_in.get_xlim()[0],ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0],ax_in.get_ylim()[1]])+np.array([-2,+1]), weight='bold')
    # panel letters A-D
    for n, ax in enumerate(axes.flatten()):
        ax.text(-0.1, 1.05, string.ascii_uppercase[n], weight='bold', transform=ax.transAxes, size=18)
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.3)
    plt.show()
    pass
def plot_tsne_evolution(representations, imgs, targets, show_stimuli=False, show_indices=False, N='all', savefile='./../trained_models/tsnesave', overwrite=False):
    """Plot how t-SNE embeddings of the representations evolve over timesteps.

    Unlike plot_tsne_timetrajectories, this computes a separate t-SNE
    embedding per timestep, then shifts each timestep's cloud along x so the
    timesteps sit side by side in one panel. Produces a 2x2 figure: top row
    with class-colored clouds plus black-edged unoccluded points, bottom row
    with gray clouds, class highlights and unoccluded-class centroids.

    Args:
        representations: tensor of shape (points, time, feature, height, width);
            the last N_UNOCC entries are assumed to be unoccluded samples.
        imgs: stimulus images indexed as imgs[i, ti, 0] for thumbnail boxes.
        targets: 1-D tensor of integer class labels (converted to numpy).
        show_stimuli: if True, draw stimulus thumbnails next to their points.
        show_indices: if True, annotate each thumbnail with its sample index.
        N: 'all', an int (per-class random subsample size), or an index
            sequence (last element feeds the bottom-right panel).
        savefile: path prefix for the cached embedding ('<savefile>.npy') and
            the matching targets ('<savefile>_targets.npy').
        overwrite: if True, recompute the embedding even when a cache exists.

    Returns:
        None; shows the figure via plt.show().
    """
    # hack to mitigate output
    from matplotlib.axes._axes import _log as matplotlib_axes_logger
    matplotlib_axes_logger.setLevel('ERROR')
    # Constants, maybe become variables later
    N_UNOCC = 1000  # number of trailing samples treated as unoccluded
    targets = targets.numpy()
    classes = [0,1,2,3,4,5,6,7,8,9]
    markers = ["o","v","s","D","H"]
    # same markers for all timesteps
    markers = ["o","o","o","o","o"]
    markersizes = [10,10] #[10,30]
    alpha=1.0
    colors = sns.color_palette("colorblind", len(classes))
    points,time,feature,height,width = representations.shape
    representations = representations.view(points,time,-1)
    # learn tsne embedding
    # -----
    # we calculate tsne for each timestep seperately
    if os.path.exists(savefile + '.npy'):
        projection = np.load(savefile + '.npy')
        print('[INFO] Loaded tsne-file at {}'.format(savefile))
    else:
        projection = np.zeros([1])
    # shape-[1] sentinel means "no cache found"
    # NOTE(review): initial_dims receives a data row (time_rep[-1]), not a
    # dimension count; presumably ignored because use_pca=False — confirm.
    if (projection.shape[0] == 1) or overwrite:
        projection = np.zeros([points, time, 2])
        for ti in range(time):
            time_rep = representations[:,ti,:].numpy()
            projected_data = bhtsne.run_bh_tsne(time_rep, no_dims=2, perplexity=25, verbose=True, use_pca=False, initial_dims=time_rep[-1], max_iter=1000) #10000
            projection[:,ti,:] = projected_data
        np.save(savefile + '.npy', projection)
        np.save(savefile + '_targets.npy', targets)
    else:
        # cached embeddings must be paired with the targets they were built from
        targets = np.load(savefile + '_targets.npy')
    projected_data = projection
    # split the embedding into occluded samples and the trailing unoccluded ones
    x_data = projected_data[:-N_UNOCC,:,0] # (index, time)
    y_data = projected_data[:-N_UNOCC,:,1] # (index, time)
    tar = targets[:-N_UNOCC]
    x_data_u = projected_data[-N_UNOCC:,:,0] # (index, time)
    y_data_u = projected_data[-N_UNOCC:,:,1] # (index, time)
    tar_u = targets[-N_UNOCC:]
    # calculate unoccluded centroids
    x_data_uc = np.zeros([len(classes),time])
    y_data_uc = np.zeros([len(classes), time])
    tar_uc = np.zeros(len(classes))
    for ti in range(time):
        for (i, cla) in enumerate(classes):
            x_data_uc[i,ti] = np.mean([p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla])
            y_data_uc[i,ti]= np.mean([p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla])
            tar_uc[i] = cla
    # show images for sanity check
    # for i, t in enumerate(tar_u[-10:]):
    #     print(t)
    #     plt.imshow(imgs[-N_UNOCC+i][0,0,:,:])
    #     plt.show()
    #
    # plot of last timestep
    # fig, ax = plt.subplots(figsize=(9,6))
    # for (i, cla) in enumerate(classes):
    #     xc = [p for (j,p) in enumerate(x_data[:,-1]) if tar[j]==cla]
    #     yc = [p for (j,p) in enumerate(y_data[:,-1]) if tar[j]==cla]
    #     ax.scatter(xc,yc,c=colors[i], label=str(int(cla)), marker=markers[3], alpha=alpha, s=markersizes[0])
    # plt.show()
    # start of the plots
    # -----
    # shift data according to timestep so each timestep forms its own cloud
    x_spread = x_data.std()
    for ti in range(time):
        x_data[:,ti] = x_data[:,ti] + ti * x_spread*6 #6
        x_data_u[:,ti] = x_data_u[:,ti] + ti * x_spread*6
        x_data_uc[:,ti] = x_data_uc[:,ti] + ti * x_spread*6
    fig, axes = plt.subplots(2,2, sharex=False, sharey=False, figsize=(18,6))
    # top row: class-colored clouds per timestep plus bracket labels $t_i$
    for pltnr, ax in enumerate([axes[0,0],axes[0,1]]):
        for ti in range(time):
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
                yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
                ax.scatter(xc,yc,c=colors[i], label=str(int(cla)), marker=markers[ti], alpha=alpha, s=markersizes[pltnr])
            # timestep label with a bracket spanning the cloud
            bracket_ypos = 1.05*y_data.max()
            data_cloud = np.concatenate([x_data[:,ti], x_data_u[:,ti]])
            bracket_xpos = data_cloud.mean()
            bracket_width = data_cloud.std()/3
            ax.annotate('$t_{}$'.format(ti), xy=(bracket_xpos, bracket_ypos), xytext=(bracket_xpos, bracket_ypos), xycoords='data',
                fontsize=9, ha='center', va='bottom',
                bbox=dict(boxstyle='square', fc='white', ec='white'),
                arrowprops=dict(arrowstyle='-[, widthB={}, lengthB=0.25, angleB=0'.format(bracket_width), lw=1.0))
        # invisible white point acts as a spacer entry in the legend
        ax.scatter([0], [0], c='white', label=' ')
        ax.axis('off')
        # only second plot gets the unoccluded markers
        # unoccluded trajectories
        for ti in range(time):
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
                yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
                ax.scatter(xc,yc,c=colors[i], marker=markers[ti], alpha=alpha, s=markersizes[pltnr], edgecolor='black', label='$t_{}$'.format(ti))
    # start of the bottom plots
    # -----
    grays = ['lightgray'] * len(classes)
    fills = ['none'] * len(classes)
    colorset = [sns.color_palette("colorblind", len(classes)), sns.color_palette(grays)]
    marker_fills = [fills,fills]
    allhighlights = [[3,8],[9]]
    # highlighted classes keep their original color and a filled marker
    for pltnr in range(len(allhighlights)):
        for hl in allhighlights[pltnr]:
            colorset[pltnr][hl] = colors[hl]
            marker_fills[pltnr][hl] = 'full'
    for pltnr, ax in enumerate([axes[1,0],axes[1,1]]):
        # choose which sample indices get stimulus thumbnails (see N in docstring)
        if N=='all':
            n_indices = range(len(projected_data))
        elif isinstance(N, int):
            min_N_ind = N//len(set(targets))
            n_indices = []
            for cla in classes:
                ind = np.where(tar == cla)[0]
                n_indices += list(np.random.choice(ind, min(min_N_ind, len(ind)), replace=False))
            # pad with random occluded samples until N indices are collected
            n_indices += list(np.random.randint(0,len(projected_data)-N_UNOCC,N-len(n_indices)))
            n_indices = np.array(n_indices)
        elif (pltnr==1):
            n_indices = [N[-1]]
        else:
            n_indices = N[:-1]
        if show_stimuli:
            artists = []
            for ti in range(time):
                for x0, y0, i in zip(projected_data[:,ti,0][n_indices], projected_data[:,ti,1][n_indices], n_indices):
                    # adapt the center of arrows intelligently
                    if ((y0 < 0) and (x0>5)) :
                        xc,yc = x0-5,y0+25 #x0+10,y0+20
                    elif ((y0 < 0) and (x0<5)):
                        xc,yc = x0-5,y0+40
                    elif (y0 > 0):
                        xc,yc = x0-20,y0-30
                    else:
                        xc,yc = x0,y0
                    #calculate scaling factor c
                    c = np.sqrt((1800./(xc**2 + yc**2))) # (-30., 30.)
                    ab = offsetbox.AnnotationBbox(makeMarker(imgs[i,ti,0], zoom=0.65*32./len(imgs[i,ti,0])), (x0, y0), xybox=(c*xc, c*yc), xycoords='data', boxcoords="offset points",
                        pad=0.3,bboxprops=dict(color=colorset[pltnr][int(targets[i])]) , arrowprops=dict(arrowstyle=patches.ArrowStyle("->", head_length=.2, head_width=.1)), frameon=True)
                    # ab2 = offsetbox.AnnotationBbox(makeMarker(tile_tensor_lowres[i], zoom=0.65*32./len(tile_tensor_lowres[i])), (x0, y0), xybox=(c*xc-30, c*yc), xycoords='data', boxcoords="offset points",
                    # pad=0.3,bboxprops=dict(color=colorset[pltnr][int(all_classes[i])]), frameon=True)
                    if show_indices:
                        ax.annotate('{}'.format(i), xy=(x0, y0), xytext=(x0, y0), zorder=-1)
                    # push crowded thumbnails behind the scatter points
                    if len(n_indices) >= 25:
                        ab.zorder=-1
                    artists.append(ax.add_artist(ab))
                    # if i == 622:
                    #     artists.append(ax.add_artist(ab2))
        ax.axis('off')
    # bottom-left: full-color occluded data plus black-edged unoccluded centroids
    ax, pltnr = axes[1,0], 0
    for ti in range(time):
        for (i, cla) in enumerate(classes):
            # rest of the data
            xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
            yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][i], label=str(int(cla)), marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][i]), alpha=alpha, s=markersizes[0])
            # unoccluded centroids
            xc = [p for (j,p) in enumerate(x_data_uc[:,ti]) if tar_uc[j]==cla]
            yc = [p for (j,p) in enumerate(y_data_uc[:,ti]) if tar_uc[j]==cla]
            ax.scatter(xc,yc,c=colors[i], marker=markers[ti], alpha=alpha, s=markersizes[pltnr], edgecolor='black', label='$t_{}$'.format(ti))
    # for i in allhighlights[pltnr]:
    #     xd = x_data_u[i,:]
    #     yd = y_data_u[i,:]
    #     for j in range(len(xd)):
    #         ax.scatter(xd[j],yd[j],c='black', marker=markers[j], alpha=alpha, s=markersizes[0],
    #             zorder=9999)
    # bottom-right: grayed-out view with only the highlight class in color
    ax, pltnr = axes[1,1], 1
    for ti in range(time):
        for (i, cla) in enumerate(classes):
            # unoccluded data
            xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
            yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][i], label=str(int(cla)), marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][i]), alpha=alpha, s=markersizes[0])
            # rest of the data
            xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
            yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][i], label=str(int(cla)), marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][i]), alpha=alpha, s=markersizes[0])
    # build a curated legend from the scatter-call order above; the slice
    # offsets (40:41, -40+i*10) encode the exact number of labeled scatter
    # calls — NOTE(review): changing the plotting order breaks this indexing.
    handles, labels = axes[0,1].get_legend_handles_labels()
    handles = handles[:10] + handles[40:41] # + [handles[-40+i*10] for i in range(time)]
    labels = labels[:10] + labels[40:41] + [labels[-40+i*10] for i in range(time)]
    from matplotlib.lines import Line2D
    # proxy artists stand in for the per-timestep entries dropped above
    handles += [Line2D([0], [0], marker=markers[i], color='w', label='', markerfacecolor='black', markersize=6) for i in range(time)]
    axes[0,1].legend(handles, labels, title='class label', loc='center left', bbox_to_anchor=(1, 0), frameon=False)
    bottom, top = plt.ylim()
    # general setup
    axes[0,0].set_ylabel('t-SNE dimension 1')
    axes[1,0].set_ylabel('t-SNE dimension 1')
    axes[1,0].set_xlabel('t-SNE dimension 2')
    axes[1,1].set_xlabel('t-SNE dimension 2')
    # ax_in = axes[0,1]
    # ax_in.set_xlim([x_data_u[5,-1] - 7, x_data_u[5,-1] + 7])
    # ax_in.set_ylim([y_data_u[5,-1] - 7, y_data_u[5,-1] + 7])
    #
    # # Create a Rectangle patch
    # rect = patches.Rectangle((ax_in.get_xlim()[0],ax_in.get_ylim()[0]),ax_in.get_xlim()[1]-ax_in.get_xlim()[0],ax_in.get_ylim()[1]-ax_in.get_ylim()[0],linewidth=1,edgecolor='black',facecolor='none')
    # # Add the patch to the Axes
    # axes[0,0].add_patch(rect)
    # Annotate
    # axes[0,0].annotate('B', xy=(ax_in.get_xlim()[0],ax_in.get_ylim()[1]), xytext=np.array([ax_in.get_xlim()[0],ax_in.get_ylim()[1]])+np.array([-2,+1]), weight='bold')
    # panel letters A-D
    for n, ax in enumerate(axes.flatten()):
        ax.text(-0.1, 1.05, string.ascii_uppercase[n], weight='bold', transform=ax.transAxes, size=18)
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.3)
    plt.show()
    pass
def plot_tsne_evolution2(representations, imgs, targets, show_stimuli=True, show_indices=True, N=25, savefile='./../trained_models/tsnesave', overwrite=False, filename='tsne.pdf'):
    """Second-generation version of plot_tsne_evolution with a 3x1 layout.

    Computes (or loads from cache) one t-SNE embedding per timestep, shifts
    the clouds side by side along x, and draws three stacked panels: full
    class-colored clouds (A), the same with stimulus thumbnails (B), and a
    grayed-out view highlighting selected classes with unoccluded centroids
    (C). Saves the figure to `filename` as a PDF and also shows it.

    Args:
        representations: tensor of shape (points, time, feature, height, width);
            the last N_UNOCC entries are assumed to be unoccluded samples.
        imgs: stimulus images indexed as imgs[i, ti, 0] for thumbnail boxes.
        targets: 1-D tensor of integer class labels (converted to numpy).
        show_stimuli: if True, draw stimulus thumbnails next to their points.
        show_indices: if True, annotate each thumbnail with its sample index.
        N: 'all', an int (per-class random subsample size), or a 2-element
            sequence of index collections (first for panel B, last for C).
        savefile: path prefix for the cached embedding ('<savefile>.npy') and
            matching targets ('<savefile>_targets.npy').
        overwrite: if True, recompute the embedding even when a cache exists.
        filename: output path for the saved PDF figure.

    Returns:
        None; saves to `filename` and shows the figure.
    """
    # hack to mitigate output
    from matplotlib.axes._axes import _log as matplotlib_axes_logger
    matplotlib_axes_logger.setLevel('ERROR')
    # Constants, maybe become variables later
    N_UNOCC = 1000  # number of trailing samples treated as unoccluded
    targets = targets.numpy()
    classes = [0,1,2,3,4,5,6,7,8,9]
    markers = ["o","v","s","D","H"]
    # same markers for all timesteps
    markers = ["o","o","o","o","o"]
    markersizes = [3,3] #[10,30]
    alpha=1.0
    colors = sns.color_palette("colorblind", len(classes))
    points,time,feature,height,width = representations.shape
    representations = representations.view(points,time,-1)
    # learn tsne embedding
    # -----
    # we calculate tsne for each timestep seperately
    if os.path.exists(savefile + '.npy'):
        projection = np.load(savefile + '.npy')
        print('[INFO] Loaded tsne-file at {}'.format(savefile))
    else:
        projection = np.zeros([1])
    # shape-[1] sentinel means "no cache found"
    # NOTE(review): initial_dims receives a data row (time_rep[-1]), not a
    # dimension count; presumably ignored because use_pca=False — confirm.
    if (projection.shape[0] == 1) or overwrite:
        projection = np.zeros([points, time, 2])
        for ti in range(time):
            time_rep = representations[:,ti,:].numpy()
            projected_data = bhtsne.run_bh_tsne(time_rep, no_dims=2, perplexity=25, verbose=True, use_pca=False, initial_dims=time_rep[-1], max_iter=1000) #10000
            projection[:,ti,:] = projected_data
        np.save(savefile + '.npy', projection)
        np.save(savefile + '_targets.npy', targets)
    else:
        # cached embeddings must be paired with the targets they were built from
        targets = np.load(savefile + '_targets.npy')
    projected_data = projection
    # split the embedding into occluded samples and the trailing unoccluded ones
    x_data = projected_data[:-N_UNOCC,:,0] # (index, time)
    y_data = projected_data[:-N_UNOCC,:,1] # (index, time)
    tar = targets[:-N_UNOCC]
    x_data_u = projected_data[-N_UNOCC:,:,0] # (index, time)
    y_data_u = projected_data[-N_UNOCC:,:,1] # (index, time)
    tar_u = targets[-N_UNOCC:]
    # calculate unoccluded centroids
    x_data_uc = np.zeros([len(classes),time])
    y_data_uc = np.zeros([len(classes), time])
    tar_uc = np.zeros(len(classes))
    for ti in range(time):
        for (i, cla) in enumerate(classes):
            x_data_uc[i,ti] = np.mean([p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla])
            y_data_uc[i,ti]= np.mean([p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla])
            tar_uc[i] = cla
    # start of the plots
    # -----
    # shift data according to timestep so each timestep forms its own cloud
    x_spread = x_data.std()
    for ti in range(time):
        x_data[:,ti] = x_data[:,ti] + ti * x_spread*6 #6
        x_data_u[:,ti] = x_data_u[:,ti] + ti * x_spread*6
        x_data_uc[:,ti] = x_data_uc[:,ti] + ti * x_spread*6
    fig, axes = plt.subplots(3,1, sharex=False, sharey=False, figsize=(9,6))
    # panels A and B: class-colored clouds per timestep
    for pltnr, ax in enumerate([axes[0],axes[1]]):
        for ti in range(time):
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
                yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
                ax.scatter(xc,yc,c=colors[i], label=str(int(cla)), marker=markers[ti], alpha=alpha, s=markersizes[pltnr])
        ax.axis('off')
        # only second plot gets the unoccluded markers
        # unoccluded trajectories
        for ti in range(time):
            for (i, cla) in enumerate(classes):
                xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
                yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
                ax.scatter(xc,yc,c=colors[i], marker=markers[ti], alpha=alpha, s=markersizes[pltnr], edgecolor='black', linewidth=0.5, zorder=10)
    # start of the bottom plots
    # -----
    grays = ['lightgray'] * len(classes)
    fills = ['none'] * len(classes)
    colorset = [sns.color_palette("colorblind", len(classes)), sns.color_palette(grays)]
    marker_fills = [fills,fills]
    allhighlights = [[3,8],[3]]
    # highlighted classes keep their original color and a filled marker
    for pltnr in range(len(allhighlights)):
        for hl in allhighlights[pltnr]:
            colorset[pltnr][hl] = colors[hl]
            marker_fills[pltnr][hl] = 'full'
    # panel C: grayed-out clouds, highlights redrawn on top, plus centroids
    ax, pltnr = axes[2], 1
    for ti in range(time):
        for (i, cla) in enumerate(classes):
            # rest of the data
            xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
            yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][cla], marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][cla]), alpha=alpha, s=markersizes[0])
            # unoccluded data
            # xc = [p for (j,p) in enumerate(x_data_u[:,ti]) if tar_u[j]==cla]
            # yc = [p for (j,p) in enumerate(y_data_u[:,ti]) if tar_u[j]==cla]
            # ax.scatter(xc,yc,c=colorset[pltnr][i], label=str(int(cla)), marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][i]), alpha=alpha, s=markersizes[0])
            # unoccluded centroids
            xc = [p for (j,p) in enumerate(x_data_uc[:,ti]) if tar_uc[j]==cla]
            yc = [p for (j,p) in enumerate(y_data_uc[:,ti]) if tar_uc[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][cla], marker=markers[ti], alpha=alpha, s=markersizes[pltnr], edgecolor='black', linewidth=0.5, zorder=10)
        # redraw the highlighted classes so they sit on top of the gray cloud
        for (i, cla) in enumerate(allhighlights[pltnr]):
            # rest of the data
            xc = [p for (j,p) in enumerate(x_data[:,ti]) if tar[j]==cla]
            yc = [p for (j,p) in enumerate(y_data[:,ti]) if tar[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][cla], marker=MarkerStyle(marker=markers[ti], fillstyle=marker_fills[pltnr][cla]), alpha=alpha, s=markersizes[0])
            # unoccluded centroids
            xc = [p for (j,p) in enumerate(x_data_uc[:,ti]) if tar_uc[j]==cla]
            yc = [p for (j,p) in enumerate(y_data_uc[:,ti]) if tar_uc[j]==cla]
            ax.scatter(xc,yc,c=colorset[pltnr][cla], marker=markers[ti], alpha=alpha, s=markersizes[pltnr], edgecolor='black', linewidth=0.5, zorder=10)
    # plot markers
    # -----
    # thumbnail overlays for panels B and C
    for pltnr, ax in enumerate(axes[1:]):
        # choose which sample indices get stimulus thumbnails (see N in docstring)
        if N=='all':
            n_indices = range(len(projected_data))
        elif isinstance(N, int):
            min_N_ind = N//len(set(targets))
            n_indices = []
            for cla in classes:
                ind = np.where(targets == cla)[0]
                n_indices += list(np.random.choice(ind, min(min_N_ind, len(ind)), replace=False))
            # pad with random occluded samples until N indices are collected
            n_indices += list(np.random.randint(0,len(projected_data)-N_UNOCC,N-len(n_indices)))
            n_indices = np.array(n_indices)
        elif (pltnr==0):
            n_indices = N[0]
        else:
            n_indices = N[-1]
        if show_stimuli:
            artists = []
            for ti in range(time):
                for x0, y0, i in zip(projected_data[:,ti,0][n_indices], projected_data[:,ti,1][n_indices], n_indices):
                    # adapt the center of arrows intelligently
                    # (undo the per-timestep x shift so offsets stay comparable)
                    if ((y0 < 0) and (x0>5)) :
                        xc,yc = x0 - ti * x_spread * 6 ,y0 + 10 #x0+10,y0+20
                    elif ((y0 < 0) and (x0<5)):
                        xc,yc = x0 - ti * x_spread * 6 ,y0 + 10
                    elif (y0 > 0):
                        xc,yc = x0 - ti * x_spread * 6 ,y0
                    else:
                        xc,yc = x0 - ti * x_spread * 6, y0
                    xc,yc = x0 - ti * x_spread * 4, y0
                    #calculate scaling factor c
                    c = np.sqrt((1500./(xc**2 + yc**2))) # (-30., 30.)
                    xy_box = np.array([c*xc, c*yc])
                    # handcrafted rearrangement
                    # (manual nudges for specific sample/timestep thumbnails
                    # that would otherwise overlap)
                    if i in [516] and ti == 0:
                        xy_box = xy_box - np.array([0,15])
                    elif i in [1629, 516] and ti == 1:
                        xy_box = xy_box + np.array([0,15])
                    elif i in [909] and ti == 1:
                        xy_box = xy_box - np.array([10,0])
                    elif i in [909] and ti == 2:
                        xy_box = xy_box + np.array([30,0])
                    elif i in [226] and ti == 2:
                        xy_box = xy_box + np.array([10,-10])
                    elif i in [1629, 516] and ti == 3:
                        xy_box = xy_box - np.array([20,13])
                    else:
                        pass
                    # handcrafted annotation
                    # (manual text labels for the same specific samples)
                    if i in [516] and ti == 0:
                        ax.annotate('{}'.format('3 [2,0]'), xy=(x0, y0), xytext=xy_box + np.array([-5,25]), zorder=-1)
                    elif i in [1629] and ti == 0:
                        ax.annotate('{}'.format('2'), xy=(x0, y0), xytext=xy_box + np.array([-20,18]), zorder=-1)
                    elif i in [909] and ti == 0:
                        ax.annotate('{}'.format('3 [2,8]'), xy=(x0, y0), xytext=xy_box + np.array([-10,-23]), zorder=-1)
                    elif i in [226] and ti == 0:
                        ax.annotate('{}'.format('2 [9,8]'), xy=(x0, y0), xytext=xy_box + np.array([0,-27]), zorder=-1)
                    else:
                        pass
                    ab = offsetbox.AnnotationBbox(makeMarker(imgs[i,ti,0], zoom=0.65*32./len(imgs[i,ti,0])), (x0, y0), xybox=xy_box, xycoords='data', boxcoords="offset points",
                        pad=0.1,bboxprops=dict(color=colorset[pltnr][int(targets[i])]) , arrowprops=dict(arrowstyle=patches.ArrowStyle("->", head_length=.2, head_width=.1)), frameon=True)
                    if show_indices:
                        ax.annotate('{}'.format(i), xy=(x0, y0), xytext=(x0, y0), zorder=-1)
                    # push crowded thumbnails behind the data, few in front
                    if len(n_indices) >= 100:
                        ab.zorder=-1
                    else:
                        ab.zorder=99
                    artists.append(ax.add_artist(ab))
                    # if i == 622:
                    #     artists.append(ax.add_artist(ab2))
        ax.axis('off')
    # general setup
    handles, labels = axes[0].get_legend_handles_labels()
    # keep only the first 10 entries (one per class); later scatter calls
    # repeat the same labels
    handles = handles[:10]
    labels = labels[:10]
    axes[0].legend(handles, labels, title='class label', loc='center left', bbox_to_anchor=(1, 0), frameon=False)
    bottom, top = plt.ylim()
    # timestep labels $t_i$ centered over each shifted cloud
    for ti in range(time):
        bracket_ypos = 1.10*y_data.max()
        data_cloud = np.concatenate([x_data[:,ti], x_data_u[:,ti]])
        bracket_xpos = data_cloud.mean()
        bracket_width = data_cloud.std()/3
        axes[0].annotate('$t_{}$'.format(ti), xy=(bracket_xpos, bracket_ypos), xytext=(bracket_xpos, bracket_ypos), xycoords='data',
            fontsize=10, ha='center', va='bottom',
            bbox=dict(boxstyle='square', fc='white', ec='white'),
            # arrowprops=dict(arrowstyle='-[, widthB={}, lengthB=0.25, angleB=0'.format(bracket_width), lw=1.0)
            )
    axes[0].set_ylabel('t-SNE dimension 1')
    axes[1].set_ylabel('t-SNE dimension 1')
    axes[2].set_ylabel('t-SNE dimension 1')
    axes[2].set_xlabel('t-SNE dimension 2')
    # panel letters A-C
    for n, ax in enumerate(axes.flatten()):
        ax.text(-0.1, 1.05, string.ascii_uppercase[n], weight='bold', transform=ax.transAxes, size=18)
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.3)
    plt.savefig(filename, dpi=300, format='pdf')
    plt.show()
    pass
def plot_relative_distances(representations, nhot_targets, representations_unocc, onehot_targets_unocc, filename):
    """Plot distances from occluded stimuli to unoccluded class centroids.

    For each timestep, computes the Euclidean distance of every occluded
    sample's representation to the unoccluded centroid of (a) its target
    class and (b, c) the two occluder classes given in `nhot_targets`, then
    forms relative distances d_target/d_occluder. Shows several exploratory
    distribution figures and saves a split violin plot of the relative
    distances over timesteps to '<filename>.pdf'.

    Args:
        representations: occluded-sample tensor of shape
            (points, time, feature, height, width).
        nhot_targets: (points, 3) label rows (a, b, c) = target class plus
            the two occluder classes for each occluded sample.
        representations_unocc: unoccluded-sample tensor; viewed with the same
            `points` count as `representations` — assumes both sets have the
            same number of samples, TODO confirm.
        onehot_targets_unocc: per-sample class labels for the unoccluded set.
            NOTE(review): compared elementwise to a scalar class below, so
            despite the name it looks like integer labels — confirm.
        filename: output path stem; figure is saved as '<filename>.pdf'.

    Returns:
        None; shows intermediate figures and writes the final PDF.
    """
    classes = 10
    n_occ = 2  # number of occluders per stimulus
    points,time,feature,height,width = representations.shape
    representations = representations.view(points,time,-1).numpy()
    representations_unocc = representations_unocc.view(points,time,-1).numpy()
    _,_,dim = representations.shape
    _,n_targets = nhot_targets.shape
    # get the centroid of the un-occluded representation for each class
    # and timestep
    centroid_unocc = np.zeros([classes, time, dim]) # (10,4,32)
    for (i,cla) in enumerate(range(classes)):
        r_sortedbyclass = np.array([p for (j,p) in enumerate(representations_unocc) if onehot_targets_unocc[j]==cla])
        centroid_unocc[cla,:,:] = np.mean(r_sortedbyclass, axis=0)
    # calculate the distances for the two occluder-centroids to the representation
    # of the occluded digits
    sim = metrics.Similarity(minimum=0.001)
    distances = np.zeros([points,time,n_targets])
    relative_distances = np.zeros([points,time,n_targets-1])
    # distances[:, :, 0] = to target centroid, 1/2 = to occluder centroids
    for ti in range(time):
        for i,(a,b,c) in enumerate(nhot_targets):
            distances[i,ti,0] = sim.euclidean_distance(
                representations[i,ti], centroid_unocc[a,ti])
            distances[i,ti,1] = sim.euclidean_distance(
                representations[i,ti], centroid_unocc[b,ti])
            distances[i,ti,2] = sim.euclidean_distance(
                representations[i,ti], centroid_unocc[c,ti])
    # calculate relative distances relative_distance = d_zur_8 / 0.5(d_zur_8 + d_zur_2)
    # for ti in range(time):
    #     relative_distances[:,ti,0] = distances[:,ti,0] / (0.5*(distances[:,ti,0] + distances[:,ti,1]))
    #     relative_distances[:,ti,1] = distances[:,ti,0] / (0.5*(distances[:,ti,0] + distances[:,ti,2]))
    # alternative relative distances
    # ratio of target distance to each occluder distance (< 1 means the
    # representation is closer to the target than to that occluder)
    for ti in range(time):
        relative_distances[:,ti,0] = distances[:,ti,0] / distances[:,ti,1]
        relative_distances[:,ti,1] = distances[:,ti,0] / distances[:,ti,2]
    # create distribution plots
    # -----
    fig, ax = plt.subplots()
    for ti in range(time):
        plot_distribution(relative_distances[:,ti,0], ax, lab='$t={}$'.format(ti))
    ax.legend()
    ax.set_title('relative distance target, occluder 1')
    # plt.savefig('A.pdf')
    plt.show()
    fig, ax = plt.subplots()
    for ti in range(time):
        plot_distribution(relative_distances[:,ti,1], ax, lab='$t={}$'.format(ti))
    ax.legend()
    ax.set_title('relative distance target, occluder 2')
    # plt.savefig('B.pdf')
    ax.annotate('B', xy=(ax.get_xlim()[0],ax.get_ylim()[1]), xytext=np.array([ax.get_xlim()[0],ax.get_ylim()[1]])+np.array([-7,+2]), weight='bold', fontsize=24)
    plt.show()
    fig, ax = plt.subplots()
    plot_distribution(distances[:,0,0], ax, lab='$target,t=0$')
    plot_distribution(distances[:,1,0], ax, lab='$target,t=1$')
    plot_distribution(distances[:,2,0], ax, lab='$target,t=2$')
    plot_distribution(distances[:,3,0], ax, lab='$target,t=3$')
    ax.set_title('absolute distance target to stimulus')
    ax.legend()
    # plt.savefig('C.pdf')
    plt.show()
    fig, ax = plt.subplots()
    plot_distribution(distances[:,0,1], ax, lab='$occ1,t=0$')
    plot_distribution(distances[:,1,1], ax, lab='$occ1,t=1$')
    plot_distribution(distances[:,2,1], ax, lab='$occ1,t=2$')
    plot_distribution(distances[:,3,1], ax, lab='$occ1,t=3$', xlabel='absolute distance')
    ax.set_title('absolute distance occluder 1 to stimulus')
    ax.legend()
    # plt.savefig('C.pdf')
    plt.show()
    print('[INFO] distance 1')
    print(relative_distances.mean(axis=0)[:,0], relative_distances.std(axis=0)[:,0])
    print('[INFO] distance 2')
    print(relative_distances.mean(axis=0)[:,1], relative_distances.std(axis=0)[:,1])
    fig, ax = plt.subplots(figsize=(4.5, 3.4),
        gridspec_kw=dict(bottom=0.15, left=0.15))
    # long-format frame: one row per (sample, timestep, occluder) triple with
    # columns data / timestep / occluder for seaborn's violinplot
    reldist_df = pd.DataFrame(
        np.hstack([
            np.vstack([relative_distances[:,0,0], np.repeat(0, points), np.repeat(1, points)]),
            np.vstack([relative_distances[:,1,0], np.repeat(1, points), np.repeat(1, points)]),
            np.vstack([relative_distances[:,2,0], np.repeat(2, points), np.repeat(1, points)]),
            np.vstack([relative_distances[:,3,0], np.repeat(3, points), np.repeat(1, points)])
            ,
            np.vstack([relative_distances[:,0,1], np.repeat(0, points), np.repeat(2, points)]),
            np.vstack([relative_distances[:,1,1], np.repeat(1, points), np.repeat(2, points)]),
            np.vstack([relative_distances[:,2,1], np.repeat(2, points), np.repeat(2, points)]),
            np.vstack([relative_distances[:,3,1], np.repeat(3, points), np.repeat(2, points)])
        ]).T, columns=['data', 'timestep', 'occluder'])
    with sns.axes_style("ticks"):
        sns.set_context("paper", font_scale=1.0, )#rc={"lines.linewidth": 0.5})
        fig, ax = plt.subplots(figsize=(4,3), gridspec_kw=dict(wspace=0.0, hspace=0.0,
            top=0.90,
            bottom=0.20,
            left=0.20,
            right=0.90))
        sns.violinplot(data=reldist_df, y='data', x='timestep', palette='Greys', hue='occluder', split=True, ax=ax)
        # dashed baseline at the t0 mean of the first relative distance
        ax.axhline(y=relative_distances[:,0,0].mean(), xmin=0, xmax=5, color='black', linestyle='--')
        #ax.axhline(y=0), xmin=0, xmax=5, color='black', linestyle='--')
        #ax.set_title('Relative distance - unoccluded target, unoccluded occluder')
        ax.set_xticklabels(['$t_0$','$t_1$','$t_2$','$t_3$'])
        ax.set_ylabel('Relative distance')
        ax.set_xlabel('Time step')
        ax.set_ylim(0,2.75) # 3.5 for fractional metric
        sns.despine(offset=10, trim=True)
        ax.legend(ax.get_legend_handles_labels()[0],['$d_{rel,1}$', '$d_{rel,2}$'],frameon=True, facecolor='white', edgecolor='white', framealpha=0.0, loc='upper right')
        # from statannot import add_stat_annotation
        # add_stat_annotation(ax, data=reldist_df, x='timestep', y='data', hue='occluder',
        #     box_pairs=[
        #         ((0,1),(1,1)),
        #         ((1,1),(2,1)),
        #         ((2,1),(3,1)),
        #         ((0,2),(1,2)),
        #         ((1,2),(2,2)),
        #         ((2,2),(3,2)),
        #         ((0,1),(2,1)),
        #         ((0,1),(3,1)),
        #         ((1,1),(3,1)),
        #         ((0,2),(2,2)),
        #         ((0,2),(3,2)),
        #         ((1,2),(3,2)),
        #
        #     ], test='Kolmogorov-Smirnov-ls', text_format='star', loc='inside', verbose=2)
        # panel letter B placed just outside the upper-left corner
        ax.annotate('B', xy=(ax.get_xlim()[0],ax.get_ylim()[1]), xytext=np.array([ax.get_xlim()[0],ax.get_ylim()[1]])+np.array([-0.95,0.25]), weight='bold', fontsize=15)
        plt.savefig('{}.pdf'.format(filename), dpi=300, format='pdf')
    pass
def plot_softmax_output(network_output, targets, images, filename):
    """
    Analyse and plot the per-timestep softmax output of a recurrent
    classifier.

    Parameters
    ----------
    network_output : tensor of shape (batchsize, time, classes) holding the
        raw (pre-softmax) class scores at every timestep.
    targets : per-sample ground-truth class indices (convertible via int()).
    images : input images; images[j, 0, 0] is used for thumbnail insets
        (assumes a (sample, time, channel, ...) layout -- TODO confirm).
    filename : path stem; figures are written to '<filename>A.pdf' and
        '<filename>B.pdf'.

    Side effects: prints classification statistics (correct / revised /
    reinforced / destroyed over time) and saves two PDF figures.
    Relies on module-level plotting helpers (plt, sns, offsetbox,
    makeMarker, torch, F, np).
    """
    prop_cycle = plt.rcParams['axes.prop_cycle']
    colors = prop_cycle.by_key()['color']
    batchsize,time,classes = network_output.shape
    softmax_output = torch.zeros([batchsize, time, classes])
    for ti in range(time):
        # calculate softmaxover time
        softmax_output[:,ti,:] = F.softmax(network_output[:,ti,:], 1)
    # find interesting samples
    # -----
    # samples already predicted correctly at the first timestep
    correct_t0 = []
    for i in range(batchsize):
        if (np.argmax(network_output[i, 0, :]) == int(targets[i])):
            correct_t0.append(i)
    # samples predicted correctly at the final timestep
    correct = []
    for i in range(batchsize):
        if (np.argmax(network_output[i, -1, :]) == int(targets[i])):
            correct.append(i)
    # samples whose prediction changed between t0 and the end, ending correct
    revised = []
    for i in range(batchsize):
        if (np.argmax(network_output[i, 0, :]) != np.argmax(network_output[i, -1, :])) and (np.argmax(network_output[i, -1, :]) == int(targets[i])):
            revised.append(i)
    # samples correct and stable across the last three timesteps
    reinforced = []
    for i in range(batchsize):
        if (np.argmax(network_output[i, -2, :]) == np.argmax(network_output[i, -1, :])) and (np.argmax(network_output[i, -3, :]) == np.argmax(network_output[i, -1, :])) and (np.argmax(network_output[i, -1, :]) == int(targets[i])):
            reinforced.append(i)
    # samples correct at t0 whose final prediction drifted away
    destroyed = []
    for i in range(batchsize):
        if (np.argmax(network_output[i, -1, :]) != np.argmax(network_output[i, 0, :])) and (np.argmax(network_output[i, 0, :]) == int(targets[i])):
            destroyed.append(i)
    print('[INFO] softmax output stats:')
    print('\t correct:\t {}, percentage: {}'.format(
        len(correct), len(correct)/batchsize))
    print('\t revised:\t {}, of all: {}, of correct: {}'.format(
        len(revised), len(revised)/batchsize, np.round(len(revised)/len(correct), 3)))
    print('\t reinforced:\t {}, of all: {}, of correct: {}'.format(
        len(reinforced), len(reinforced)/batchsize, np.round(len(reinforced)/len(correct), 3)))
    print('\t destroyed:\t {}, of all: {}, of false: {}, of correct_t0: {}'.format(
        len(destroyed), len(destroyed)/batchsize, np.round(len(destroyed)/(batchsize-len(correct)), 3), np.round(len(destroyed)/(len(correct_t0)), 3)))
    # look at interesting cases
    # -----
    # NOTE(review): the slice revised[30:30] is empty, so this per-sample
    # diagnostic loop is currently disabled; widen the slice to re-enable.
    for j in revised[30:30]:#range(55,60,1):
        fig, ax = plt.subplots()
        for ti in range(time):
            ax.plot(softmax_output[j,ti,:], label='$t_{}$'.format(ti))
        ax.set_yscale('log')
        ax.set_ylim([1e-8,3])
        # Shrink current axis by 20%
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        # Put a legend to the right of the current axis
        ax.legend(frameon=True, facecolor='white', edgecolor='white', framealpha=1.0, bbox_to_anchor=(1, .5), loc='center left')
        ab = offsetbox.AnnotationBbox(makeMarker(images[j,0,0], zoom=2.0), (.835, .775), xycoords='figure fraction', boxcoords="offset points",
                                      pad=0.3, frameon=True)
        ax.axvline(x=targets[j], ymin=0, ymax=2, color='black', linestyle='--')
        ax.add_artist(ab)
        ax.set_ylabel('Softmax output (probability)')
        ax.set_xlabel('Class')
        print('showing image no.', j)
        ax.text(-0.1, 1.01, '{}'.format(j), weight='bold',
                transform=ax.transAxes, size=40)
        plt.xticks(np.arange(0, classes, step=classes//10))
        plt.show()
    # calculate mean output
    # -----
    # group per-sample softmax trajectories by ground-truth class
    pointsofclass = {}
    for (i, cla) in enumerate(range(classes)):
        pointsofclass[i] = [p for (j, p) in enumerate(
            softmax_output.numpy()) if int(targets[j]) == cla]
    mean_output = np.zeros([classes, time, classes])
    error_output = np.zeros([classes, time, classes])
    for cla in range(classes):
        try:
            # standard error of the mean per class/timestep
            mean_output[cla, :, :] = np.array(pointsofclass[cla]).mean(axis=0)
            error_output[cla, :, :] = np.array(pointsofclass[cla]).std(
                axis=0) / np.sqrt(len(pointsofclass[cla]))
        except:
            print('error, class {} not found'.format(cla))
            mean_output[cla, :, :] = 0
            error_output[cla, :, :] = 0
    lstylist = ['-', '--', ':', '-.']
    markerlist = ['o', 'v', 's', 'D']
    fillstylelist = ['full', 'full', 'full', 'full']
    # panel B: mean softmax output per target class (2 x 5 grid)
    fig, axes = plt.subplots(2, 5, sharex=True, sharey=True, figsize=(9, 3.5),
                             gridspec_kw=dict(bottom=0.15,))
    # wspace=0.0, hspace=0.3,
    # top=0.90,
    # bottom=0.075,
    # left=0.05,
    # right=0.95),))
    for j, ax in enumerate(axes.flatten()):
        for ti in range(time):
            ax.plot(mean_output[j, ti, :], label='$t_{}$'.format(ti), marker=markerlist[ti], markersize=3)
            ax.fill_between(np.arange(
                0, classes, 1), mean_output[j, ti, :] + error_output[j, ti, :], mean_output[j, ti, :] - error_output[j, ti, :], alpha=0.25)
        # logscale!
        ax.set_yscale('log')
        ax.set_ylim([1e-4, 1])
        ax.axvline(x=int(j),
                   ymin=0, ymax=2, color='black', linestyle='--')
        # ax.add_artist(ab)
        # add titles with targets
        if j > 4:
            t_x, t_y = 0.025, 0.
        else:
            t_x, t_y = 0.875, 0.
        ax.text(
            # position text relative to Figure
            t_x, t_y, '{}'.format(j),
            ha='left', va='bottom',
            transform=ax.transAxes, fontsize=14, color=sns.color_palette("colorblind", 10)[j])
        #ax.add_patch(patches.Circle((1,1),1), ha='left', va='top', transform=ax.transAxes)
    axes[0, 4].legend(frameon=True, facecolor='white', edgecolor='white',
                      framealpha=1.0, bbox_to_anchor=(1.0, .72), loc='center left', title='time step')
    axes[0, 0].set_ylabel('Softmax output')
    axes[1, 0].set_ylabel('Softmax output')
    for i in range(5):
        axes[1, i].set_xlabel('Class')
    plt.xticks(np.arange(0, classes, step=classes//10))
    #plt.suptitle('Mean softmax output over candidates for each target')
    #plt.savefig('mean_softmax.pdf')
    axes[0,0].annotate('B', xy=(axes[0,0].get_xlim()[0],axes[0,0].get_ylim()[1]), xytext=np.array([axes[0,0].get_xlim()[0],axes[0,0].get_ylim()[1]])+np.array([-9,+2]), weight='bold', fontsize=16)
    plt.savefig('{}B.pdf'.format(filename), dpi=300, format='pdf')
    #plt.show()
    # fig, axes = plt.subplots(4, 5, sharex=True, sharey=True, figsize=(14, 10))
    # for j, ax in enumerate(axes.flatten()):
    # j += 150
    # for ti in range(time):
    # ax.plot(softmax_output[j, ti, :], label='$t_{}$'.format(ti), color=colors[ti], marker=markerlist[ti], markersize=3)
    #
    # ax.set_yscale('log')
    # ax.set_ylim([1e-9, 5])
    # ab = offsetbox.AnnotationBbox(makeMarker(images[j,0,0], zoom=1), (.9, .1), xycoords='axes fraction', boxcoords="offset points",
    # pad=0.3, frameon=True)
    # ax.axvline(x=targets[j],
    # ymin=0, ymax=2, color='black', linestyle='--')
    # ax.add_artist(ab)
    # ax.set_ylabel('Softmax output')
    # ax.set_xlabel('Class')
    #
    # axes[0, 4].legend(frameon=True, facecolor='white', edgecolor='white',
    # framealpha=1.0, bbox_to_anchor=(1.0, .8), loc='center left')
    #
    # plt.xticks(np.arange(0, classes, step=classes//10))
    # plt.suptitle('Softmax output for specific candidates')
    # #plt.savefig('specific_candidates.pdf')
    # plt.show()
    # lstylist = ['-', '--', ':', '-.']
    # markerlist = ['o', 'v', 's', 'D']
    # fillstylelist = ['full', 'full', 'full', 'full']
    # candlist = [87, 128, 206]
    # meanlist = [2, 3, 4]
    # fig, axes = plt.subplots(2, 3, sharex=False, sharey='row', figsize=(12, 8))
    #
    # for j, ax in zip(candlist, axes[0]):
    # for ti in range(time):
    # ax.plot(softmax_output[j, ti, :], label='$t_{}$'.format(
    # ti), linewidth=3, marker=markerlist[ti], markersize=7.0, fillstyle=fillstylelist[ti], color=colors[ti])
    #
    # ax.set_yscale('log')
    # ax.set_ylim([1e-10, 5])
    # ab = offsetbox.AnnotationBbox(makeMarker(images[j,0,0], zoom=1.6), (.805, .16), xycoords='axes fraction', boxcoords="offset points",
    # pad=0.3, frameon=True)
    # ax.axvline(x=targets[j], ymin=0,
    # ymax=2, color='black', linestyle='--', linewidth=2)
    # ax.add_artist(ab)
    # #ax.set_ylabel('Softmax output')
    # ax.set_xticks(np.arange(0, classes, step=classes//10))
    # # ax.set_xlabel('Class')
    # for j, ax in zip(meanlist, axes[1]):
    # for ti in range(time):
    # #ax.plot(mean_output[j,t,:], label='$t_{}$'.format(t), linewidth=3, marker=markerlist[t], markersize=7.0, fillstyle=fillstylelist[t], color=colors[t])
    # # ax.bar(np.arange(0,10,1),output_data[0,i,j,:],label='$t_{}$'.format(i))
    #
    # # fill between error
    # #ax.fill_between(np.arange(0,10,1), mean_output[j,t,:]+error_output[j,t,:], mean_output[j,t,:]-error_output[j,t,:], alpha=0.25)
    # # small outline for errors
    # # ax.plot(mean_output[j,t,:]+error_output[j,t,:], linewidth=1, color=colors[t]) #, linestyle=lstylist[t])
    # # ax.plot(mean_output[j,t,:]-error_output[j,t,:], linewidth=1, color=colors[t]) #, linestyle=lstylist[t])
    # ax.errorbar(np.arange(0, classes, 1), mean_output[j, ti, :], label='$t_{}$'.format(
    # ti), linewidth=3, marker=markerlist[ti], markersize=7.0, fillstyle=fillstylelist[ti], color=colors[ti], yerr=error_output[j, ti, :])
    #
    # # switch on or off logscale
    # ax.set_yscale('log')
    # ax.set_ylim([1e-4, 3])
    #
    #
    # ax.axvline(x=int(j), ymin=0, ymax=2, color='black',
    # linestyle='--', linewidth=2)
    # # ax.add_artist(ab)
    # #ax.set_ylabel('Softmax output')
    # ax.set_xlabel('Class label')
    # ax.set_xticks(np.arange(0, classes, step=classes//10))
    #
    # axes[0, 2].legend(frameon=True, facecolor='white', edgecolor='white', framealpha=1.0,
    # bbox_to_anchor=(1., .725), loc='center left', title='time step')
    # #axes[0,1].set_title('Specific candidates')
    # #axes[1,1].set_title('Mean softmax output per class (2,4,8)')
    # axes[0, 0].set_ylabel('Softmax output')
    # axes[1, 0].set_ylabel('Softmax output')
    #
    # axes[0, 0].text(-0.57, 1.03, 'A', weight='bold',
    # transform=axes[0, 0].transAxes, size=40)
    # axes[1, 0].text(-0.57, 1.03, 'B', weight='bold',
    # transform=axes[1, 0].transAxes, size=40)
    #
    # plt.subplots_adjust(left=None, bottom=None, right=0.88,
    # top=0.935, wspace=None, hspace=None)
    # # plt.savefig('os_softmax33avg.ps')
    # # plt.savefig('os_softmax33avg.pdf')
    # plt.show()
    # panel A: softmax trajectories for hand-picked candidate samples
    #candlist = [87, 128, 206, 24, 33]
    candlist = [55, 49, 330, 313, 342]
    #candlist = [342, 340, 339, 330, 313, 285, 264,]
    fig, axes = plt.subplots(1, 5, sharex=False, sharey='row', figsize=(9, 2))
    for j, ax in zip(candlist, axes):
        for ti in range(time):
            ax.plot(softmax_output[j, ti, :], label='$t_{}$'.format(
                ti), marker=markerlist[ti], markersize=3.0, fillstyle=fillstylelist[ti], color=colors[ti])
        ax.set_yscale('log')
        ax.set_ylim([1e-10, 5])
        ab = offsetbox.AnnotationBbox(makeMarker(images[j,0,0], zoom=0.60), (.85, .15), xycoords='axes fraction', boxcoords="offset points",
                                      pad=0.3, frameon=True)
        ax.axvline(x=targets[j], ymin=0,
                   ymax=2, color='black', linestyle='--')
        ax.add_artist(ab)
        ax.set_xticks(np.arange(0, classes, step=classes//10))
        ax.set_xlabel('Class')
        # add titles with targets
        t_x, t_y = 0.025, 0.
        ax.text(
            # position text relative to Figure
            t_x, t_y, '{}'.format(targets[j]),
            ha='left', va='bottom',
            transform=ax.transAxes, fontsize=14, color=sns.color_palette("colorblind", 10)[targets[j]])
    axes[4].legend(frameon=True, facecolor='white', edgecolor='white', framealpha=1.0,
                   bbox_to_anchor=(1., .725), loc='center left', title='time step')
    axes[0].set_ylabel('Softmax output')
    axes[0].annotate('A', xy=(axes[0].get_xlim()[0],axes[0].get_ylim()[1]), xytext=np.array([axes[0].get_xlim()[0],axes[0].get_ylim()[1]])+np.array([-9,+2]), weight='bold', fontsize=16)
    plt.subplots_adjust(left=None, bottom=0.25, right=None,
                        top=0.85, wspace=None, hspace=None)
    plt.savefig('{}A.pdf'.format(filename), dpi=300, format='pdf')
    #plt.show()
    pass
# ---------------------
# image transformations
# ---------------------
def normalize(x, inp_max=1, inp_min=-1):
    """
    Linearly rescale the numpy array *x* so its smallest element maps to
    ``inp_min`` and its largest element maps to ``inp_max``.

    Returns an array of the same shape as the input.
    """
    lo = x.min()
    hi = x.max()
    span = inp_max - inp_min
    return span * (x - lo) / (hi - lo) + inp_min
class MidPointNorm(mpl.colors.Normalize):
    """
    MidPointNorm inherits from Normalize. It is a class useful for
    visualizations with a bidirectional color-scheme. It chooses
    the middle of the colorbar to be in the middle of the data distribution.
    """

    def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False):
        """
        midpoint: data value mapped to the center (0.5) of the colormap;
        vmin/vmax/clip behave as in matplotlib.colors.Normalize.
        """
        mpl.colors.Normalize.__init__(self, vmin, vmax, clip)
        self.midpoint = midpoint

    def __call__(self, value, clip=None):
        """
        Map *value* into [0, 1] such that self.midpoint lands exactly on
        0.5; values below/above the midpoint are scaled independently.
        Raises ValueError when midpoint is outside (vmin, vmax) or
        vmin > vmax.
        """
        if clip is None:
            clip = self.clip
        result, is_scalar = self.process_value(value)
        self.autoscale_None(result)
        vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint
        if not (vmin < midpoint < vmax):
            raise ValueError("midpoint must be between maxvalue and minvalue.")
        elif vmin == vmax:
            result.fill(0)  # Or should it be all masked? Or 0.5?
        elif vmin > vmax:
            raise ValueError("maxvalue must be bigger than minvalue")
        else:
            vmin = float(vmin)
            vmax = float(vmax)
            if clip:
                mask = np.ma.getmask(result)
                result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax),
                                     mask=mask)
            # ma division is very slow; we can take a shortcut
            resdat = result.data
            # First scale to -1 to 1 range, than to from 0 to 1.
            resdat -= midpoint
            resdat[resdat > 0] /= abs(vmax - midpoint)
            resdat[resdat < 0] /= abs(vmin - midpoint)
            resdat /= 2.
            resdat += 0.5
            result = np.ma.array(resdat, mask=result.mask, copy=False)
        if is_scalar:
            result = result[0]
        return result

    def inverse(self, value):
        """
        Map normalized values in [0, 1] back into data space (inverse of
        __call__). Raises ValueError until vmin/vmax have been set.
        """
        if not self.scaled():
            raise ValueError("Not invertible until scaled")
        vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint
        # np.iterable replaces mpl.cbook.iterable, which was removed from
        # matplotlib (>= 3.4); behavior for array-like input is unchanged.
        if np.iterable(value):
            val = np.ma.asarray(value)
            val = 2 * (val - 0.5)
            val[val > 0] *= abs(vmax - midpoint)
            val[val < 0] *= abs(vmin - midpoint)
            val += midpoint
            return val
        else:
            # BUGFIX: this branch previously read the unbound local 'val'
            # ('val = 2 * (val - 0.5)'), raising UnboundLocalError for any
            # scalar input; the incoming 'value' is the correct operand.
            val = 2 * (value - 0.5)
            if val < 0:
                return val * abs(vmin - midpoint) + midpoint
            else:
                return val * abs(vmax - midpoint) + midpoint
# -----------------------
# activations and filters
# -----------------------
# TODO add functions to visualize activations and filters
# -----------------------------
# sprite images for tensorboard
# -----------------------------
def create_sprite_image(images):
    """
    create_sprite_image returns a sprite image consisting of the images
    passed as argument, tiled into a near-square grid (row-major order).

    *images* should be count x height x width, optionally with a trailing
    channel axis; unused grid cells are left as zeros. Returns an array of
    shape (height * n, width * n, channels).
    """
    if isinstance(images, list):
        images = np.array(images)
    img_h = images.shape[1]
    img_w = images.shape[2]
    # get image channels
    if len(images.shape) > 3:
        channels = images.shape[3]
    else:
        channels = 1
        # BUGFIX: a 3-D (count, h, w) stack used to crash below, because a
        # (h, w) tile cannot broadcast into the (h, w, 1) slice of the
        # sprite; add the explicit channel axis up front.
        images = images[..., np.newaxis]
    # smallest square grid that fits all images
    n_plots = int(np.ceil(np.sqrt(images.shape[0])))
    spriteimage = np.zeros((img_h * n_plots, img_w * n_plots, channels))
    for i in range(n_plots):
        for j in range(n_plots):
            this_filter = i * n_plots + j
            if this_filter < images.shape[0]:
                this_img = images[this_filter]
                spriteimage[i * img_h:(i + 1) * img_h,
                            j * img_w:(j + 1) * img_w] = this_img
    # built in support for stereoscopic images
    if (channels == 2) or (channels == 6):
        _, spriteimage = anaglyph(
            spriteimage[:, :, :channels // 2],
            spriteimage[:, :, channels // 2:])
    return spriteimage
def save_sprite_image(savedir, raw_images):
    """Assemble *raw_images* into a sprite sheet and write it to *savedir*."""
    sprite = create_sprite_image(raw_images)
    single_channel = sprite.shape[2] == 1
    if single_channel:
        # drop the channel axis and store with a reversed gray colormap
        plt.imsave(savedir, sprite[:, :, 0], cmap='gray_r')
    else:
        plt.imsave(savedir, sprite.astype(np.uint8))
# -----------------
# tensorboard specific
# -----------------
def add_pr_curve_tensorboard(class_enc, class_index, test_probs, test_preds, writer, global_step=0):
    '''
    Takes in a "class_index" from 0 to 9 and logs the corresponding
    precision-recall curve to tensorboard under its encoded class name.
    '''
    is_this_class = test_preds == class_index
    probs_this_class = test_probs[:, class_index]
    writer.add_pr_curve(class_enc[class_index],
                        is_this_class,
                        probs_this_class,
                        global_step=global_step)
    writer.close()
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# May 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# engine.py oN88888UU[[[/;::-. dP^
# The main file including dNMMNN888UU[[[/;:--. .o@P^
# the training loop ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
# for MNIST
from torchvision import utils, datasets
from torch.utils.data import Dataset, DataLoader, Subset
from torch.utils.tensorboard import SummaryWriter
import torchvision
import torchvision.transforms as transforms
import argparse
import os
import sys
import random
import time
import math
# custom functions
# -----
import utilities.afterburner as afterburner
import utilities.helper as helper
import utilities.visualizer as visualizer
from utilities.networks.buildingblocks.rcnn import RecConvNet, CAM, GLM
from utilities.dataset_handler import StereoImageFolderLMDB, StereoImageFolder, AffineTransform
def asMinutes(s):
    """
    Format a duration of *s* seconds as a human-readable 'Xm Ys' string.
    """
    m = math.floor(s / 60)
    s -= m * 60
    # BUGFIX: the format string was '%d %ds', which dropped the minutes
    # unit (e.g. '2 5s'); emit the conventional '%dm %ds'.
    return '%dm %ds' % (m, s)
def timeSince(since, percent):
    """
    Return a string with the elapsed time since *since* and, extrapolating
    from *percent* (fraction of work completed), the remaining time.
    """
    elapsed = time.time() - since
    estimated_total = elapsed / (percent)
    remaining = estimated_total - elapsed
    return '%s ( - %s)' % (asMinutes(elapsed), asMinutes(remaining))
# cross-platform development
from platform import system
# True when running on macOS (the local development machine)
IS_MACOSX = True if system() == 'Darwin' else False
# repository root: local macOS path versus the linux compute host
PWD_STEM = "/Users/markus/Research/Code/" if IS_MACOSX else "/home/mernst/git/"
# commandline arguments
# -----
# FLAGS
parser = argparse.ArgumentParser()
parser.add_argument(
    "-t",
    "--testrun",
    # type=bool,
    default=False,
    dest='testrun',
    action='store_true',
    help='reduced dataset configuration on local machine for testing')
parser.add_argument(
    "-c",
    "--config_file",
    type=str,
    default=PWD_STEM +
    'titan/experiments/001_noname_experiment/' +
    'files/config_files/config0.csv',
    help='path to the configuration file of the experiment')
parser.add_argument(
    "-n",
    "--name",
    type=str,
    default='i1',
    help='name of the run, i.e. iteration1')
# NOTE(review): argparse's type=bool treats any non-empty string as True
# (e.g. '-r False' still restores); consider action='store_true' instead.
parser.add_argument(
    "-r",
    "--restore_ckpt",
    type=bool,
    default=True,
    help='restore model from last checkpoint')
FLAGS = parser.parse_args()
# read the csv config and derive additional parameters (see utilities.helper)
CONFIG = helper.infer_additional_parameters(
    helper.read_config_file(FLAGS.config_file)
)
# run on GPU when available, otherwise fall back to CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# -----------------
# Functions for Checkpointing, Training, Testing and Evaluation
# -----------------
def checkpoint(epoch, model, optimizer, ckpt_dir, save_every, remove_last=True):
    """
    Persist model and optimizer state for *epoch* to
    '<ckpt_dir>checkpoint_epoch_<epoch>.pt' (ckpt_dir is a path prefix, not
    joined as a directory). When remove_last is set and epoch > 0, the
    checkpoint from `save_every` epochs earlier is deleted.
    """
    ckpt_out_path = ckpt_dir + "checkpoint_epoch_{}.pt".format(epoch)
    # stored epoch is epoch + 1 so a restore resumes with the NEXT epoch
    torch.save({
        'epoch': epoch + 1,
        'state_dict': model.state_dict(),
        'optimizer': optimizer.state_dict(),
    }, ckpt_out_path)
    if remove_last and epoch > 0:
        stale_path = ckpt_dir + "checkpoint_epoch_{}.pt".format(epoch - save_every)
        try:
            os.remove(stale_path)
        except FileNotFoundError:
            print('[INFO] ' +
                  "Old checkpoint_epoch_{}.pt could not be found/deleted".format(epoch - save_every))
def load_checkpoint(model, optimizer, ckpt_dir):
    """
    Restore the newest checkpoint found in *ckpt_dir* into the given model
    and optimizer. Both must be pre-built; only their states are updated.

    Returns (model, optimizer, start_epoch); start_epoch is 0 when no
    checkpoint file exists.
    """
    start_epoch = 0

    def epoch_of(fname):
        # 'checkpoint_epoch_12.pt' -> 12
        return int(fname.split('.')[0].split('_')[-1])

    candidates = sorted((f for f in os.listdir(ckpt_dir) if '.pt' in f),
                        key=epoch_of, reverse=True)
    if candidates:
        newest = candidates[0]
        state = torch.load(os.path.join(ckpt_dir, newest), map_location=device)
        start_epoch = state['epoch']
        model.load_state_dict(state['state_dict'])
        optimizer.load_state_dict(state['optimizer'])
        print("[INFO] Loaded checkpoint '{}' (continue: epoch {})"
              .format(newest, state['epoch']))
    else:
        print("[INFO] No checkpoint found, starting from scratch")
    return model, optimizer, start_epoch
def train_recurrent(input_tensor, target_tensor, network, optimizer, criterion, timesteps, stereo):
    """
    Run one optimization step on a single batch.

    The static input is repeated `timesteps` times along a new time axis;
    the loss is summed over all timesteps and backpropagated once. Returns
    (loss per item, accuracy of the final timestep) as Python floats.
    """
    optimizer.zero_grad()
    if stereo:
        # stereo batches arrive as a (left, right) pair; merge on channels
        input_tensor = torch.cat(input_tensor, dim=1)
    input_tensor = input_tensor.to(device)
    target_tensor = target_tensor.to(device)
    # replicate the static image along a new time axis
    sequence = input_tensor.unsqueeze(1).repeat(1, timesteps, 1, 1, 1)
    outputs, _ = network(sequence)
    loss = 0
    for t in range(outputs.shape[1]):
        step_logits = outputs[:, t, :]
        loss += criterion(step_logits, target_tensor)
        # accuracy is overwritten each step; the last timestep's survives
        topv, topi = step_logits.topk(1)
        accuracy = (topi == target_tensor.unsqueeze(1)).sum(
            dim=0, dtype=torch.float64) / topi.shape[0]
    loss.backward()
    optimizer.step()
    loss = loss / topi.shape[0]  # average loss per item
    return loss.item(), accuracy.item()
def test_recurrent(test_loader, network, criterion, epoch, timesteps, stereo):
    """
    Evaluate the network on the whole test set without gradients.

    The static input is repeated `timesteps` times along a new time axis.
    Loss is accumulated over every timestep (divided by batch size);
    accuracy uses the final timestep only. Both are averaged over batches
    before returning.

    Returns (loss, accuracy, confusion_matrix, precision_recall,
    visual_prediction) where visual_prediction is the predictions figure of
    the LAST batch only (it is recomputed and overwritten every batch).
    """
    loss = 0
    accuracy = 0
    confusion_matrix = visualizer.ConfusionMatrix(n_cls=network.fc.out_features)
    precision_recall = visualizer.PrecisionRecall(n_cls=network.fc.out_features)
    with torch.no_grad():
        for i, data in enumerate(test_loader):
            input_tensor, target_tensor = data
            if stereo:
                # stereo data arrives as (left, right); merge on channel axis
                input_tensor = torch.cat(input_tensor, dim=1)
            input_tensor, target_tensor = input_tensor.to(device), target_tensor.to(device)
            input_tensor = input_tensor.unsqueeze(1)
            input_tensor = input_tensor.repeat(1, timesteps, 1, 1, 1)
            outputs, _ = network(input_tensor)
            # predictions taken from the final timestep
            topv, topi = outputs[:,-1,:].topk(1)
            # other timesteps?
            for t in range(outputs.shape[1]):
                loss += criterion(outputs[:,t,:], target_tensor) / topi.shape[0]
            accuracy += (topi == target_tensor.unsqueeze(1)).sum(
                dim=0, dtype=torch.float64) / topi.shape[0]
            # update confusion matrix
            confusion_matrix.update(outputs[:,-1,:].cpu(), target_tensor.cpu())
            # update pr curves
            precision_recall.update(outputs[:,-1,:].cpu(), target_tensor.cpu())
            visual_prediction = visualizer.plot_classes_preds(outputs[:,-1,:].cpu(), input_tensor[:,-1,:,:,:].cpu(), target_tensor.cpu(), CONFIG['class_encoding'])
            #visual_prediction = None
    print(" " * 80 + "\r" + '[Testing:] E%d: %.4f %.4f' % (epoch,
        loss /(i+1), accuracy/(i+1)), end="\n")
    return loss /(i+1), accuracy/(i+1), confusion_matrix, precision_recall, visual_prediction
def evaluate_recurrent(dataset, network, batch_size, criterion, timesteps, stereo, projector=False):
    """
    Run the network over *dataset* in a fixed, seeded shuffle order and
    collect per-sample evaluation data.

    Returns (evaluation_data, embedding_data): evaluation_data maps
    'boolean_classification' to the per-sample correctness of the
    final-timestep prediction and 'softmax_output' to softmax over all
    timesteps; embedding_data is always None for now.

    `projector` is currently unused in this function body.
    """
    # create a random but deterministic order for the dataset (important for bc)
    torch.manual_seed(1234)
    shuffled_dataset = Subset(dataset, torch.randperm(len(dataset)).tolist())
    eval_loader = DataLoader(shuffled_dataset, batch_size=batch_size, num_workers=4, shuffle=False)

    def show(img):
        # debugging helper: display a (C, H, W) tensor with matplotlib
        import matplotlib.pyplot as plt
        npimg = img.numpy()
        plt.imshow(np.transpose(npimg, (1,2,0)), interpolation='nearest')
        plt.show()

    loss = 0
    accuracy = 0
    list_of_output_tensors = []
    list_of_bc_values = []
    with torch.no_grad():
        for i, data in enumerate(eval_loader):
            input_tensor, target_tensor = data
            if stereo:
                # stereo data arrives as (left, right); merge on channel axis
                input_tensor = torch.cat(input_tensor, dim=1)
            input_tensor, target_tensor = input_tensor.to(device), target_tensor.to(device)
            # Show a grid of images
            # show(torchvision.utils.make_grid(input_tensor, padding=8))
            input_tensor = input_tensor.unsqueeze(1)
            input_tensor = input_tensor.repeat(1, timesteps, 1, 1, 1)
            outputs, _ = network(input_tensor)
            topv, topi = outputs[:,-1,:].topk(1)
            # other timesteps?
            for t in range(outputs.shape[1]):
                loss += criterion(outputs[:,t,:], target_tensor) / topi.shape[0]
            accuracy += (topi == target_tensor.unsqueeze(1)).sum(
                dim=0, dtype=torch.float64) / topi.shape[0]
            # per-sample correctness of the final-timestep prediction
            list_of_bc_values.append(torch.eq(
                torch.argmax(outputs[:,-1,:], 1),
                target_tensor))
            list_of_output_tensors.append(F.softmax(outputs, dim=-1))
    bc_values = torch.cat(list_of_bc_values, 0).type(torch.int8)
    output_values = torch.cat(list_of_output_tensors, 0)
    print(" " * 80 + "\r" + '[Evaluation:] E%d: %.4f %.4f' % (-1,
        loss /(i+1), accuracy/(i+1)), end="\n")
    evaluation_data = \
        {'boolean_classification': np.array(bc_values.cpu()),
         'softmax_output': np.array(output_values.cpu())}
    embedding_data = None  # TODO implement at some point
    return evaluation_data, embedding_data
def trainEpochs(train_loader, test_loader, network, optimizer, criterion, writer, start_epoch, n_epochs, test_every, print_every, log_every, save_every, learning_rate, lr_decay, lr_cosine, lr_decay_rate, lr_decay_epochs, output_dir, checkpoint_dir):
    """
    Main training loop over epochs [start_epoch, n_epochs).

    Tests every `test_every` epochs, prints a running average every
    `print_every` steps, logs to tensorboard every `log_every` steps and
    checkpoints every `save_every` epochs. After the loop a final test and
    checkpoint are run, unless the restored start_epoch already equals
    n_epochs.
    """
    plot_losses = []
    # running sums, reset whenever their window is printed / logged
    print_loss_total = 0
    print_accuracy_total = 0
    plot_loss_total = 0
    plot_accuracy_total = 0
    len_of_data = len(train_loader)
    for epoch in range(start_epoch, n_epochs):
        # periodic evaluation on the test set
        if epoch % test_every == 0:
            test_loss, test_accurary, cm, pr, vp = test_recurrent(test_loader, network, criterion, epoch, CONFIG['time_depth'] + 1 + CONFIG['time_depth_beyond'], CONFIG['stereo'])
            writer.add_scalar('testing/loss', test_loss,
                              epoch * len_of_data)
            writer.add_scalar(
                'testing/accuracy', test_accurary, epoch * len_of_data)
            network.log_stats(writer, epoch * len_of_data)
            cm.to_tensorboard(writer, CONFIG['class_encoding'], epoch)
            #cm.print_misclassified_objects(CONFIG['class_encoding'], 5)
            pr.to_tensorboard(writer, CONFIG['class_encoding'], epoch)
            writer.add_figure('predictions vs. actuals', vp, epoch)
            writer.close()
        start = time.time()
        if lr_decay:
            helper.adjust_learning_rate(
                learning_rate,
                lr_cosine,
                lr_decay_rate,
                n_epochs,
                lr_decay_epochs,
                optimizer,
                epoch)
        for i_batch, sample_batched in enumerate(train_loader):
            loss, accuracy = train_recurrent(
                sample_batched[0], sample_batched[1],
                network, optimizer, criterion, CONFIG['time_depth'] + 1, CONFIG['stereo'])
            print_loss_total += loss
            plot_loss_total += loss
            print_accuracy_total += accuracy
            plot_accuracy_total += accuracy
            if (epoch * len_of_data + i_batch) % print_every == 0:
                # first window has fewer than print_every samples in it
                divisor = 1 if (epoch * len_of_data + i_batch) // print_every == 0 else print_every
                print_loss_avg = print_loss_total / divisor
                print_loss_total = 0
                print_accuracy_avg = print_accuracy_total / divisor
                print_accuracy_total = 0
                print(" " * 80 + "\r" +
                      '[Training:] E%d: %s (%d %d%%) %.4f %.4f'
                      % (epoch, timeSince(start, (i_batch + 1) / len_of_data),
                         i_batch, (i_batch + 1) / len_of_data * 100,
                         print_loss_avg, print_accuracy_avg), end="\r")
            if (epoch * len_of_data + i_batch) % log_every == 0:
                divisor = 1 if (epoch * len_of_data + i_batch) // log_every == 0 else log_every
                plot_loss_avg = plot_loss_total / divisor
                plot_loss_total = 0
                plot_accuracy_avg = plot_accuracy_total / divisor
                plot_accuracy_total = 0
                plot_losses.append(plot_loss_avg)
                writer.add_scalar(
                    'training/loss', plot_loss_avg,
                    epoch * len_of_data + i_batch)
                writer.add_scalar(
                    'training/accuracy', plot_accuracy_avg, epoch * len_of_data + i_batch)
                writer.close()
        if epoch % save_every == 0:
            checkpoint(epoch, network, optimizer, checkpoint_dir + 'network', save_every)
    if start_epoch < n_epochs:
        # final test after training; do not test if restarting from the same epoch
        test_loss, test_accurary, cm, pr, vp = test_recurrent(test_loader, network, criterion, n_epochs, CONFIG['time_depth'] + 1 + CONFIG['time_depth_beyond'], CONFIG['stereo'])
        writer.add_scalar('testing/loss', test_loss,
                          n_epochs * len_of_data)
        writer.add_scalar(
            'testing/accuracy', test_accurary, n_epochs * len_of_data)
        network.log_stats(writer, n_epochs * len_of_data)
        cm.to_tensorboard(writer, CONFIG['class_encoding'], n_epochs)
        cm.print_misclassified_objects(CONFIG['class_encoding'], 5)
        pr.to_tensorboard(writer, CONFIG['class_encoding'], n_epochs)
        writer.add_figure('predictions vs. actuals', vp, n_epochs)
        writer.close()
        checkpoint(n_epochs, network, optimizer, checkpoint_dir + 'network', save_every)
# -----------------
# Main Program
# -----------------
if __name__ == '__main__':
    # input transformation
    if CONFIG['color'] == 'grayscale':
        train_transform = transforms.Compose([
            transforms.Grayscale(),
            transforms.ToTensor(),
            transforms.Normalize((0.,), (1.,))
        ])
        test_transform = train_transform
    else:
        train_transform = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.,), (1.,))
        ])
        test_transform = train_transform
    # input dataset
    if CONFIG['dataset'] == 'mnist':
        train_dataset = datasets.MNIST(root=CONFIG['input_dir'], train=True,
                                       transform=transforms.Compose([
                                           #transforms.CenterCrop(32),
                                           transforms.ToTensor(),
                                           transforms.Normalize((0.,), (1.,))
                                           ,]),
                                       download=True)
        test_dataset = datasets.MNIST(root=CONFIG['input_dir'], train=False,
                                      transform=transforms.Compose([
                                          #transforms.CenterCrop(32),
                                          transforms.ToTensor(),
                                          transforms.Normalize((0.,), (1.,))
                                          ,]),
                                      download=True)
    elif 'osycb' in CONFIG['dataset']:
        print('[INFO] No LMDB-file available, using standard folder instead')
        if CONFIG['occlusion_percentage'] == 0:
            # occlusion_percentage 0 pools all levels: start from the 20%
            # folder and merge 40/60/80 below
            train_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], 20),
                train=True,
                stereo=CONFIG['stereo'],
                transform=train_transform
            )
            test_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], 20),
                train=False,
                stereo=CONFIG['stereo'],
                transform=test_transform
            )
            for percentage in [40,60,80]:
                train_dataset._add_data(CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], percentage))
                test_dataset._add_data(CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], percentage))
        else:
            train_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], CONFIG['occlusion_percentage']),
                train=True,
                stereo=CONFIG['stereo'],
                transform=train_transform
            )
            test_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}/{}'.format(CONFIG['dataset'], CONFIG['occlusion_percentage']),
                train=False,
                stereo=CONFIG['stereo'],
                transform=test_transform
            )
    else:
        # Datasets LMDB Style
        try:
            train_dataset = StereoImageFolderLMDB(
                db_path=CONFIG['input_dir'] + '/{}/{}_train.lmdb'.format(CONFIG['dataset'], CONFIG['dataset']),
                stereo=CONFIG['stereo'],
                transform=train_transform
            )
            test_dataset = StereoImageFolderLMDB(
                db_path=CONFIG['input_dir'] + '/{}/{}_test.lmdb'.format(CONFIG['dataset'], CONFIG['dataset']),
                stereo=CONFIG['stereo'],
                transform=test_transform
            )
        except:
            print('[INFO] No LMDB-file available, using standard folder instead')
            # Datasets direct import
            train_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}'.format(CONFIG['dataset']),
                train=True,
                stereo=CONFIG['stereo'],
                transform=train_transform
            )
            test_dataset = StereoImageFolder(
                root_dir=CONFIG['input_dir'] + '/{}'.format(CONFIG['dataset']),
                train=False,
                stereo=CONFIG['stereo'],
                transform=test_transform
            )
    train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=CONFIG['batchsize'], shuffle=True, num_workers=4)
    test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=CONFIG['batchsize'], shuffle=True, num_workers=4)
    output_dir, checkpoint_dir = helper.get_output_directory(CONFIG, FLAGS)
    stats_writer = SummaryWriter(output_dir)
    # configure network
    if CONFIG['connectivity'] == 'GLM':
        network = GLM(
            image_size=CONFIG['image_height']*CONFIG['image_width'], input_channels=CONFIG['image_channels'], num_targets=CONFIG['classes']).to(device)
    else:
        network = RecConvNet(
            CONFIG['connectivity'],
            kernel_size=CONFIG['kernel_size'],
            input_channels=CONFIG['image_channels'],
            n_features=CONFIG['n_features'],
            num_layers=CONFIG['network_depth'],
            num_targets=CONFIG['classes']
        ).to(device)
    criterion = nn.CrossEntropyLoss().to(device)
    optimizer = optim.Adam(network.parameters(), lr=CONFIG['learning_rate'], weight_decay=CONFIG['l2_lambda'])
    if FLAGS.restore_ckpt:
        network, optimizer, start_epoch = load_checkpoint(network, optimizer, checkpoint_dir)
    else:
        start_epoch = 0
    # -----------------
    # sketch pad for evaluation
    # -----------------
    if FLAGS.testrun:
        # load pretrained network
        network, optimizer, start_epoch = load_checkpoint(network, optimizer, '/Users/markus/Research/Code/titan/trained_models/BLT3_osmnist2r_stereo/')
        # look at test-error
        # -----
        # test_loss, test_accurary, cm, pr, vp = test_recurrent(test_loader, network, criterion, CONFIG['epochs'], CONFIG['time_depth'] + 1 + CONFIG['time_depth_beyond'], CONFIG['stereo'])
        # load library for analysis
        import utilities.publisher as publisher
        SAMPLE_SIZE = 10000  # default is 10000
        # visualize filters
        # -----
        #publisher.first_layer_network_filters(network, test_transform, CONFIG, sample_size=SAMPLE_SIZE, random_seed=1234)
        # softmax and tsne analysis
        # -----
        #publisher.fig_softmax_and_tsne(network, test_transform, CONFIG, sample_size=SAMPLE_SIZE, random_seed=1234)
        # analyze pixelwise concentration
        # -----
        #publisher.fig_concentration(network, test_transform, CONFIG, sample_size=SAMPLE_SIZE, random_seed=1234)
        # class activation map analysis
        # -----
        publisher.fig_cam(network, test_transform, CONFIG, sample_size=SAMPLE_SIZE, random_seed=1234)
        # a testrun performs analysis only and never falls through to training
        sys.exit()
    # training loop
    trainEpochs(
        train_loader, test_loader, network, optimizer, criterion,
        writer=stats_writer,
        start_epoch=start_epoch,
        n_epochs=CONFIG['epochs'],
        test_every=CONFIG['test_every'],
        print_every=CONFIG['write_every'],
        log_every=CONFIG['write_every'],
        save_every=CONFIG['test_every'],
        learning_rate=CONFIG['learning_rate'],
        lr_decay=CONFIG['lr_decay'],
        lr_cosine=CONFIG['lr_cosine'],
        lr_decay_rate=CONFIG['lr_decay_rate'],
        lr_decay_epochs=CONFIG['lr_decay_epochs'],
        output_dir=output_dir,
        checkpoint_dir=checkpoint_dir
    )
    # evaluation and afterburner
    # -----
    evaluation_data, embedding_data = evaluate_recurrent(test_dataset, network, CONFIG['batchsize'], criterion, CONFIG['time_depth'] + 1, CONFIG['stereo'])
    essence = afterburner.DataEssence()
    essence.distill(path=output_dir, evaluation_data=evaluation_data,
                    embedding_data=None)  # embedding_data (save space)
    essence.write_to_file(filename=CONFIG['output_dir'] +
                          FLAGS.config_file.split('/')[-1].split('.')[0] +
                          '{}'.format(FLAGS.name) + '.pkl')
    essence.plot_essentials(CONFIG['output_dir'].rsplit('/', 2)[0] +
                            '/visualization/' +
                            FLAGS.config_file.split('/')[-1].split('.')[0] +
                            '{}'.format(FLAGS.name) + '.pdf')
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# April 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# config.py oN88888UU[[[/;::-. dP^
# set and get experiment parameters dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import os
import numpy as np
# custom functions
# -----
from platform import system
# development happens on macOS ('Darwin'); everything else is treated as the
# Linux cluster
IS_MACOSX = True if system() == 'Darwin' else False
# repository root differs between the local machine and the cluster account
PWD_STEM = "/Users/markus/Research/Code/" if IS_MACOSX else "/home/mernst/git/"
# --------------------------
# main experiment parameters
# --------------------------
def get_par():
    """
    Get main parameters.

    For each experiment, change these parameters manually for different
    experiments. Every value is wrapped in a list so that run_engine can
    build the cartesian product over all parameter choices.
    """
    # par['name'] must be defined as a FLAG to engine, b/c it resembles the
    # iteration number that gets passed by the sbatch script
    # TODO: add documentation i.e. parameter possibilities
    return {
        'exp_name': ["noname_experiment"],
        # alternatives: osmnist2, ycb1_single
        'dataset': ["osmnist2r_reduced"],
        'n_occluders': [2],
        'occlusion_percentage': [0],
        'label_type': ["onehot"],
        # connectivity variants under study; full set:
        # ['B', 'BF', 'BK', 'BD', 'BT', 'BL', 'BLT']
        'connectivity': ['B', 'BF', 'BK', 'BD', 'BT', 'BL', 'BLT'],
        'BLT_longrange': [0],
        'time_depth': [3],
        'time_depth_beyond': [0],
        'feature_multiplier': [1],
        'keep_prob': [1.0],
        'batchnorm': [True],
        'stereo': [False],
        # alternative: 'color'
        'color': ['grayscale'],
        'write_every': [100],
        'test_every': [5],
        'buffer_size': [600000],
        'verbose': [False],
        'visualization': [False],
        'projector': [False],
        'batchsize': [500],
        'epochs': [100],
        'learning_rate': [0.004],
    }
# ----------------------------
# auxiliary network parameters
# ----------------------------
def get_aux():
    """
    Get auxiliary parameters.

    These auxiliary parameters do not have to be changed manually for the
    most part; configure once at setup. Values are lists so they can be
    crossed with the main parameters.
    """
    base = "{}titan/".format(PWD_STEM)
    # Info: None-values have to be strings b/c of csv text conversion
    return {
        'wdir': [base],
        # cluster alternative:
        # "/home/aecgroup/aecdata/Textures/occluded/datasets/"
        'input_dir': ["{}datasets/".format(base)],
        # cluster alternative:
        # "/home/aecgroup/aecdata/Results_python/markus/experiments/"
        'output_dir': ["{}experiments/".format(base)],
        'norm_by_stat': [False],
        'training_dir': [""],
        'validation_dir': [""],
        'test_dir': [""],
        'evaluation_dir': [""],
        'lr_decay': [True],
        'lr_cosine': [False],
        # comma-separated epoch list, e.g. '60, 75, 90'
        'lr_decay_epochs': ['90,'],
        'lr_decay_rate': [0.1],
        'l2_lambda': [0.],
        'global_weight_init_mean': ['None'],
        'global_weight_init_std': ['None'],
        'iterations': [1],
    }
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>matplotlib==3.4.3
numpy==1.20.3
scipy==1.7.1
pandas==1.3.3
seaborn==0.11.2
torch==1.9.1
torchvision==0.10.1
tensorboard==2.6.0
imageio==2.9.0
Pillow==8.3.2
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# April 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# run_engine.py oN88888UU[[[/;::-. dP^
# setup and initialization dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import numpy as np
import os
import errno
import shutil
import csv
import itertools
import argparse
# commandline arguments
# -----
# command-line options configure cluster resources and monitoring only;
# the experiment itself is configured in config.py
parser = argparse.ArgumentParser()
parser.add_argument(
    "-tbp",
    "--tensorboard_port",
    type=int,
    default=6006,
    help='port for tensorboard monitoring')
parser.add_argument(
    "-gpus",
    "--number_of_gpus",
    type=int,
    default=1,
    help='activate gpu acceleration')
parser.add_argument(
    "-nodes",
    "--number_of_nodes",
    type=int,
    default=1,
    help='spread jobs on more nodes')
parser.add_argument(
    "-mem",
    "--memory",
    type=int,
    default=20,
    help='memory to be reserved (GB)')
# module-level result; read later by SbatchDocument.gen_sbatch and __main__
args = parser.parse_args()
# custom functions
# -----
from config import get_par, get_aux
def mkdir_p(path):
    """
    mkdir_p takes a string path and creates a directory at this path
    (including missing parents) if it does not already exist, mimicking
    the shell's ``mkdir -p``.

    Raises FileExistsError if *path* exists but is not a directory,
    matching the original EEXIST/isdir check.
    """
    # exist_ok=True is the stdlib idiom for the former
    # try/except-EEXIST-and-isdir dance
    os.makedirs(path, exist_ok=True)
class SbatchDocument(object):
    """Build and submit the slurm sbatch script for one experiment.

    One slurm array task is created per configuration file; each task runs
    engine.py ``iterations`` times with ``--name i<iteration>``.
    Resource requests are read from the module-level argparse result
    ``args``.
    """
    def __init__(self, paths_to_config_files, files_dir, experiment_name,
                 iterations):
        super(SbatchDocument, self).__init__()
        self.experiment_name = experiment_name
        self.files_dir = files_dir
        self.iterations = iterations
        # the sbatch file is written immediately on construction
        self.write_to_file(paths_to_config_files)

    def gen_sbatch(self, paths_to_config_files):
        """Return the sbatch script text covering the given config-file
        paths (one array task per path)."""
        # bash array literal of all config paths, indexed below by
        # SLURM_ARRAY_TASK_ID
        bash_array = '('
        for p in paths_to_config_files:
            bash_array += '"' + p + '" '
        bash_array = bash_array.strip()
        bash_array += ')'
        # 10GB for mnist 15 for osmnist
        header = \
            "#!/bin/bash \n" + \
            "# \n" + \
            "#SBATCH --nodes=1 \n" + \
            "#SBATCH --ntasks-per-node=1 \n" + \
            "#SBATCH --cpus-per-task=4 \n" + \
            "#SBATCH --time=700:00:00 \n" + \
            "#SBATCH --mem={}GB \n".format(args.memory) + \
            "#SBATCH --reservation triesch-shared \n" + \
            "#SBATCH --exclude vane \n" + \
            "#SBATCH --partition=sleuths \n" + \
            "#SBATCH --job-name={} \n".format(self.experiment_name) + \
            "#SBATCH --mail-type=END \n" + \
            "#SBATCH --mail-user=<EMAIL> \n" + \
            "#SBATCH --output={}slurm_output/{}_slurm_%j.out \n".format(
                self.files_dir, self.experiment_name) + \
            "#SBATCH --array=0-{}%{} \n".format(len(paths_to_config_files)-1,
                                                args.number_of_nodes)
        if args.number_of_gpus > 0:
            # possible constraints: rtx2070super, rtx2080ti
            header += "#SBATCH --gres=gpu:{} \n\n".format(args.number_of_gpus)
        else:
            header += "\n"
        # loop body: run engine.py `iterations` times on this task's config
        middle = \
            'config_array={} \n'.format(bash_array) + \
            'j=$((SLURM_ARRAY_TASK_ID)) \n' + \
            'for i in `seq 1 1 {}` \n'.format(self.iterations) + \
            'do \n' + \
            ' echo "iteration $i" \n' + \
            ' echo "job $j" \n' + \
            ' srun python3 engine.py' + \
            ' --config_file ${config_array[$j]}' + \
            ' --name i$i\n' + \
            'done \n'
        footer = \
            '# --- end of experiment --- \n'
        # TODO: think about management for afterburner. Lookup if experiment is
        # still running. Then proceed with afterburner. -> self.config_dir
        return (header + middle + footer)

    def write_to_file(self, paths_to_config_files):
        """Write the generated script to <files_dir>/run_experiment.sbatch."""
        file = open("{}/run_experiment.sbatch".format(self.files_dir), "w")
        file.write(self.gen_sbatch(paths_to_config_files))
        file.close()
        pass

    def run_sbatch(self):
        """Submit the script with sbatch from within files_dir.

        NOTE(review): os.chdir changes the process-wide working directory
        as a side effect — confirm nothing later relies on the old cwd.
        """
        os.chdir(self.files_dir)
        os.system("sbatch run_experiment.sbatch")
        print("sbatch {}run_experiment.sbatch".format(self.files_dir))
        print("[INFO] running {} on cluster".format(self.experiment_name))
        pass
class ExperimentEnvironment(object):
    """Create the on-disk folder structure for a new experiment and copy
    the code needed to reproduce it into that structure.

    Expects a parameter dict whose 'wdir', 'output_dir' and 'exp_name'
    entries are single-element lists (see config.get_par/get_aux).
    """
    def __init__(self, parameters):
        super(ExperimentEnvironment, self).__init__()
        self.parameters = parameters
        self.working_directory = self.parameters['wdir'][0]
        self.output_directory = self.parameters["output_dir"][0]
        self.experiment_name = self.parameters["exp_name"][0]
        # side effects: creates folders and copies engine/config/utilities
        self.est_folder_structure()
        self.copy_experiment_files()

    def update_parameters(self):
        """Redirect 'output_dir'/'visualization_dir' to the freshly created
        experiment folders and return the (mutated) parameter dict."""
        self.parameters['output_dir'] = [self.data_dir]
        self.parameters['visualization_dir'] = [self.visualization_dir]
        return self.parameters

    def est_folder_structure(self):
        """
        est_folders establishes a folder structure given the parameters
        of the configuration.

        Experiment folders are named ``NNN_<exp_name>``; numbering
        continues from the lexicographically last existing folder.
        """
        list_of_previous_experiment_folders = os.listdir(self.output_directory)
        try:
            list_of_previous_experiment_folders.sort()
            # drop macOS finder metadata; remove() raises ValueError when
            # the entry is absent, hence the try/except
            list_of_previous_experiment_folders.remove(".DS_Store")
        except(ValueError):
            pass
        if len(list_of_previous_experiment_folders) == 0:
            experiment_number = 1
        else:
            # NOTE(review): assumes every folder name starts with a numeric
            # prefix before '_' — a stray folder would raise here; confirm
            experiment_number = \
                int(list_of_previous_experiment_folders[-1].split('_')[0]) + 1
        self.experiment_dir = self.output_directory + \
            "{0:0=3d}".format(experiment_number) + \
            "_{}/".format(self.experiment_name)
        self.config_dir = self.experiment_dir + "files/config_files/"
        self.files_dir = self.experiment_dir + "files/"
        self.data_dir = self.experiment_dir + "data/"
        self.visualization_dir = self.experiment_dir + "visualization/"
        self.slurm_dir = self.files_dir + "slurm_output/"
        mkdir_p(self.experiment_dir)
        mkdir_p(self.data_dir)
        mkdir_p(self.visualization_dir)
        mkdir_p(self.slurm_dir)
        mkdir_p(self.config_dir)

    def copy_experiment_files(self):
        """Snapshot engine.py, config.py and the utilities package into the
        experiment's files/ directory for reproducibility."""
        shutil.copyfile(self.working_directory +
                        "/network_engine/engine.py", self.files_dir +
                        "engine.py")
        shutil.copyfile(self.working_directory +
                        "/network_engine/config.py", self.files_dir +
                        "config.py")
        shutil.copytree(self.working_directory +
                        "/network_engine/utilities", self.files_dir +
                        "utilities",
                        ignore=shutil.ignore_patterns('tsne', '__pycache__',
                                                      '*.pyc', 'tmp*'))
        pass
class ExperimentConfiguration(object):
    """Expand a parameter dict (lists of candidate values) into concrete
    per-run configuration csv files."""

    def __init__(self, parameters):
        super(ExperimentConfiguration, self).__init__()
        self.infer_additional_parameters(parameters)

    def infer_additional_parameters(self, parameters):
        # placeholder: the full inference helper lives outside the
        # run_engine file; here the parameters are stored unchanged
        self.parameters = parameters

    def generate_single_configurations(self):
        """Return ``(keys, combinations)`` where *combinations* is the
        cartesian product over all parameter value lists."""
        combinations = list(itertools.product(*self.parameters.values()))
        return list(self.parameters.keys()), combinations

    def write_config_files(self, path_to_config_folder=''):
        """Write one ``config<i>.csv`` (key,value rows) per configuration
        into *path_to_config_folder* and return the written paths."""
        paths_to_config_files = []
        keys, cfs = self.generate_single_configurations()
        for i, combination in enumerate(cfs):
            config_path = path_to_config_folder + "config{}.csv".format(i)
            # context manager closes the handle deterministically; the
            # original passed open() straight to csv.writer and leaked
            # one file object per configuration
            with open(config_path, "w") as config_file:
                writer = csv.writer(config_file)
                for key, value in zip(keys, combination):
                    writer.writerow([key, value])
            paths_to_config_files.append(config_path)
        return paths_to_config_files
# ------------
# main program
# ------------
if __name__ == '__main__':
    # merge main and auxiliary parameters into one search space
    par, aux = get_par(), get_aux()
    par.update(aux)
    # generate main experiment structure
    # ----- (creates numbered experiment folder, copies code snapshot)
    environment = ExperimentEnvironment(par)
    par = environment.update_parameters()
    # generate configuration files
    # ----- (one csv per point in the parameter grid)
    config = ExperimentConfiguration(par)
    config_paths = config.write_config_files(environment.config_dir)
    # generate sbatch file
    # ----- (written on construction, then submitted)
    sbatch_file = SbatchDocument(config_paths, environment.files_dir,
                                 environment.experiment_name,
                                 par['iterations'][0])
    sbatch_file.run_sbatch()
    # start a tensorboard instance to monitor experiment
    # -----
    os.system("screen -dmS tb_monitor")
    # 'pt' is presumably a shell alias that prepares the python/pytorch
    # environment inside the screen session — TODO confirm
    os.system("screen -S tb_monitor -p 0 -X stuff \
        'pt\n '")
    os.system("screen -S tb_monitor -p 0 -X stuff \
        'tensorboard --logdir {} --port {}\n '".format(
            environment.data_dir, args.tensorboard_port))
    print("[INFO] monitoring {} in tensorboard at port {}".format(
        environment.experiment_name, args.tensorboard_port))
# _____________________________________________________________________________
# Description:
#
# This program is supposed to create a folder structure for the experiment,
# copy the necessary files to that environment, create an sbatch file and run
# the file given the environment-variables predefined in an external document.
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# March 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# rcnn.py oN88888UU[[[/;::-. dP^
# define recurrent networks dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import torch.nn as nn
import torch.nn.functional as F
import torch
# custom functions
# -----
import utilities.metrics as metrics
# -----------------
# Network Constructor
# -----------------
def return_same(x):
    """Identity passthrough; stands in for a disabled normalization layer."""
    return x
# -----------------
# Simplified Generalized Linear Model (GLM)
# -----------------
class GLM(nn.Module):
    """Generalized linear model baseline.

    Applies one shared linear readout to the flattened pixels of every
    frame of an image sequence. The raw input doubles as the returned
    "feature map" so the interface matches the recurrent models.
    """

    def __init__(self, image_size=1024, input_channels=1, num_targets=10):
        super(GLM, self).__init__()
        # flattened pixel count seen by the linear readout
        self.n_features = image_size * input_channels
        self.fc = nn.Linear(self.n_features, num_targets)

    def forward(self, x):
        """x: (batch, time, channels, height, width) ->
        (logits of shape (batch, time, targets), x)."""
        batch = x.size(0)
        logits = [self.fc(x[:, step].reshape(batch, self.n_features))
                  for step in range(x.size(1))]
        return torch.stack(logits, dim=1), x

    def log_stats(self, writer, step):
        """Write summary statistics of the readout parameters to tensorboard."""
        for tag, tensor in (('linear/fc_weights', self.fc.weight),
                            ('linear/fc_bias', self.fc.bias)):
            self._varstats2tb(tensor, tag, writer, step)

    def _varstats2tb(self, variable, name, writer, step):
        """Log mean/std/min/max/median of *variable* under network/<name>/."""
        variable = variable.detach()
        for stat, value in (('mean', variable.mean()),
                            ('std', variable.std()),
                            ('min', variable.min()),
                            ('max', variable.max()),
                            ('median', variable.median())):
            writer.add_scalar('network/{}/{}'.format(name, stat), value, step)
# -----------------
# Class Activation Mapping (CAM)
# -----------------
class CAM(nn.Module):
    """Class Activation Mapping wrapper.

    Wraps a network that returns ``(outputs, feature_maps)`` and exposes
    an ``fc`` readout. Per timestep, the feature maps are projected
    through the readout weights to obtain one activation map per class,
    upsampled to the input resolution.
    """
    def __init__(self, network):
        super(CAM, self).__init__()
        self.network = network

    def forward(self, x, topk=3):
        """Return ``(outputs, (cams, topk_prob, topk_arg))``.

        cams: (b, t, classes, H, W) upsampled activation maps;
        topk_prob/topk_arg: (b, t, topk) softmax probabilities and class
        indices, sorted descending.
        """
        outputs, feature_maps = self.network(x)
        cams = []
        topk_prob_list = []
        topk_arg_list = []
        b, t, c, h, w = feature_maps.size()
        for timestep in range(t):
            output = outputs[:,timestep,:]
            feature_map = feature_maps[:,timestep,:,:,:]
            probs = F.softmax(output, 1)
            # sort class probabilities descending per sample
            prob, args = torch.sort(probs, dim=1, descending=True)
            ## top k class probability
            topk_prob = prob[:,:topk]
            topk_arg = args[:,:topk]
            # generate class activation map: (b, h*w, c) @ (b, c, classes)
            feature_map = feature_map.view(b, c, h*w).transpose(1, 2)
            # NOTE(review): wrapping the weight in a fresh nn.Parameter on
            # every forward pass creates an unregistered copy of the
            # readout weights — consider using the tensor directly; confirm
            # this is not needed for gradient bookkeeping
            fc_weight = nn.Parameter(self.network.fc.weight.t().unsqueeze(0))
            fc_weight = fc_weight.repeat(b, 1, 1)
            cam = torch.bmm(feature_map, fc_weight).transpose(1, 2)
            ## top k class activation map
            cam = cam.view(b, -1, h, w)
            # top k sorting should be outsourced to the visualization?
            # topk_cam = []
            # for i in range(b):
            #     topk_cam.append(cam[i, topk_arg[i,:],:,:])
            # topk_cam = torch.stack(topk_cam, 0)
            # upsample maps to the input's spatial resolution
            cam_upsampled = F.interpolate(cam,
                (x.size(3), x.size(4)), mode='bilinear', align_corners=True)
            # NOTE(review): n_classes/fh/fw are unpacked but unused
            _,n_classes,fh,fw = cam_upsampled.shape
            cams.append(cam_upsampled)
            topk_prob_list.append(topk_prob)
            topk_arg_list.append(topk_arg)
        cams = torch.stack(cams, dim=1)
        topk_prob = torch.stack(topk_prob_list, dim=1)
        topk_arg = torch.stack(topk_arg_list, dim=1)
        return outputs, (cams, topk_prob, topk_arg)
# -----------------
# RC Classes
# -----------------
class RecConvCell(nn.Module):
    """One layer of a recurrent convolutional network.

    Sums up to three input streams -- bottom-up (B), lateral (L, recurrent
    within the layer) and top-down (T, transposed conv from the layer
    above) -- each with its own convolution and batch norm, then applies
    a ReLU. Which streams are active is selected by ``connectivity``.
    """

    def __init__(self, connectivity, input_channels, output_channels,
                 output_channels_above, kernel_size, bias):
        """
        Initialize RecConvCell.

        Parameters
        ----------
        connectivity: str
            Active streams, e.g. 'B', 'BL', 'BT' or 'BLT'.
        input_channels: int
            Number of channels of input tensor.
        output_channels: int
            Number of channels of the output tensor.
        output_channels_above: int
            Channel count of the layer above (source of top-down input).
        kernel_size: (int, int)
            Size of the convolutional kernel.
        bias: bool
            Whether or not to add the bias.
        """
        super(RecConvCell, self).__init__()
        self.connectivity = connectivity
        self.input_channels = input_channels
        self.output_channels = output_channels
        self.output_channels_above = output_channels_above
        self.kernel_size = kernel_size
        # 'same' padding (odd kernels keep the spatial size)
        self.padding = kernel_size[0] // 2, kernel_size[1] // 2
        self.bias = bias
        self.bottomup = nn.Conv2d(in_channels=self.input_channels,
                                  out_channels=self.output_channels,
                                  kernel_size=self.kernel_size,
                                  padding=self.padding,
                                  bias=self.bias)
        self.lateral = nn.Conv2d(in_channels=self.output_channels,
                                 out_channels=self.output_channels,
                                 kernel_size=self.kernel_size,
                                 padding=self.padding,
                                 bias=self.bias)
        # stride-2 transposed convolution upsamples the layer above
        # (which lives at half resolution) back to this layer's size
        self.topdown = nn.ConvTranspose2d(in_channels=self.output_channels_above,
                                          out_channels=self.output_channels,
                                          kernel_size=self.kernel_size,
                                          stride=(2, 2),
                                          padding=1,
                                          output_padding=1,
                                          groups=1,
                                          bias=True,
                                          dilation=1,
                                          padding_mode='zeros')
        self.bn_b = nn.BatchNorm2d(self.output_channels)
        self.bn_l = nn.BatchNorm2d(self.output_channels)
        self.bn_t = nn.BatchNorm2d(self.output_channels)
        self.bn_all = nn.BatchNorm2d(self.output_channels)
        self.activation = nn.ReLU()
        # local response normalization is currently disabled; nn.Identity()
        # replaces the former module-external `return_same` helper
        # (idiomatic, parameter-free, state_dict-compatible)
        # TODO: clean this up in order to use it properly
        # self.lrn = nn.LocalResponseNorm(2, alpha=0.0001, beta=0.75, k=1.0)
        self.lrn = nn.Identity()

    def forward(self, b_input, l_input, t_input):
        """Combine the active streams and return the new layer state.

        Branch order matters: 'BLT' must be tested before 'BL'/'BT'
        because 'BL' is a substring of 'BLT'.
        """
        b_conv = self.bn_b(self.bottomup(b_input))
        if 'BLT' in self.connectivity:
            l_conv = self.bn_l(self.lateral(l_input))
            t_conv = self.bn_t(self.topdown(t_input))
            next_state = self.lrn(self.activation(b_conv + l_conv + t_conv))
        elif 'BL' in self.connectivity:
            l_conv = self.bn_l(self.lateral(l_input))
            next_state = self.lrn(self.activation(b_conv + l_conv))
        elif 'BT' in self.connectivity:
            t_conv = self.bn_t(self.topdown(t_input))
            next_state = self.lrn(self.activation(b_conv + t_conv))
        else:
            next_state = self.lrn(self.activation(b_conv))
        return next_state

    def init_hidden(self, batch_size, image_size):
        """Return zero tensors (lateral, top-down) for this layer.

        The top-down placeholder lives at half resolution with the channel
        count of the layer above (it is fed through the stride-2 transposed
        convolution before use).
        """
        height, width = image_size
        l = torch.zeros(batch_size,
                        self.output_channels, height, width,
                        device=self.bottomup.weight.device)
        td = torch.zeros(batch_size,
                         self.output_channels_above, height // 2, width // 2,
                         device=self.topdown.weight.device)
        return (l, td)


class RecConv(nn.Module):
    """
    Stack of RecConvCells unrolled over time.

    Parameters:
        connectivity: stream configuration string passed to every cell
        input_dim: Number of channels in input
        hidden_dim: Number of hidden channels (int or list per layer)
        kernel_size: Size of kernel in convolutions (tuple or list of tuples)
        num_layers: Number of recurrent layers stacked on each other
        batch_first: Whether or not dimension 0 is the batch or not
        bias: Bias or no bias in Convolution
        pooling: 2x2 max-pool between layers (required for num_layers > 1)

    Input:
        A tensor of size B, T, C, H, W or T, B, C, H, W

    Output:
        forward: tensor of shape (B, T, hidden_dim[-2], H', W') with the
        top layer's activity at every timestep.

    Example:
        >> x = torch.rand((32, 10, 64, 128, 128))
        >> rcnn = RecConv('BLT', 64, 16, (3, 3), 2, batch_first=True)
        >> out = rcnn(x)
    """

    def __init__(self, connectivity, input_dim, hidden_dim, kernel_size,
                 num_layers, batch_first=False, bias=True, pooling=True):
        super(RecConv, self).__init__()
        self._check_kernel_size_consistency(kernel_size)
        # Make sure that both `kernel_size` and `hidden_dim` are lists having
        # len == num_layers; hidden_dim gets one extra entry so the top layer
        # has a (dummy) layer above for its top-down wiring
        kernel_size = self._extend_for_multilayer(kernel_size, num_layers)
        hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers + 1)
        if not len(kernel_size) == len(hidden_dim) - 1 == num_layers:
            raise ValueError('Inconsistent list length.')
        if not pooling and num_layers > 1:
            raise ValueError('Multiple layers without pooling are not supported')
        # TODO: implement multiple layers without pooling
        self.input_dim = input_dim
        self.hidden_dim = hidden_dim
        self.kernel_size = kernel_size
        self.num_layers = num_layers
        self.batch_first = batch_first
        self.bias = bias
        self.pooling = pooling
        cell_list = []
        for i in range(0, self.num_layers):
            cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i - 1]
            cell_list.append(RecConvCell(connectivity=connectivity,
                                         input_channels=cur_input_dim,
                                         output_channels=self.hidden_dim[i],
                                         output_channels_above=self.hidden_dim[i + 1],
                                         kernel_size=self.kernel_size[i],
                                         bias=self.bias))
        self.cell_list = nn.ModuleList(cell_list)
        if self.pooling:
            self.maxpool = nn.MaxPool2d((2, 2))

    def _prepare(self, input_tensor, hidden_state):
        """Normalize the input layout to batch-first and build the initial
        hidden state. Stateful operation (hidden_state given) is not
        implemented."""
        if not self.batch_first:
            # (t, b, c, h, w) -> (b, t, c, h, w)
            input_tensor = input_tensor.permute(1, 0, 2, 3, 4)
        b, _, _, h, w = input_tensor.size()
        if hidden_state is not None:
            # TODO: Implement stateful RCNN
            raise NotImplementedError()
        hidden_state = self._init_hidden(batch_size=b,
                                         image_size=(h, w),
                                         pooling=self.pooling)
        return input_tensor, hidden_state

    def _unroll(self, input_tensor, hidden_state):
        """Run the recurrence over all timesteps.

        Returns (top-layer output per timestep, per-layer activation lists
        per timestep). After each timestep, every layer's lateral input
        becomes its own output and its top-down input the output of the
        layer above (the top layer keeps its zero top-down input).
        This helper replaces the formerly duplicated bodies of forward()
        and return_activations().
        """
        seq_len = input_tensor.size(1)
        output_inner = []
        activations_inner = []
        for t in range(seq_len):
            layer_output_list = []
            cur_layer_input = input_tensor[:, t, :, :, :]
            for layer_idx in range(self.num_layers):
                l, td = hidden_state[layer_idx]
                cur_layer_input = self.cell_list[layer_idx](
                    b_input=cur_layer_input,
                    l_input=l,
                    t_input=td)
                layer_output_list.append(cur_layer_input)
                if self.pooling and (layer_idx < (self.num_layers - 1)):
                    cur_layer_input = self.maxpool(cur_layer_input)
            # update hidden states for the next timestep
            for layer_idx in range(self.num_layers - 1):
                hidden_state[layer_idx] = (layer_output_list[layer_idx],
                                           layer_output_list[layer_idx + 1])
            hidden_state[self.num_layers - 1] = (
                layer_output_list[self.num_layers - 1],
                hidden_state[self.num_layers - 1][-1])
            output_inner.append(cur_layer_input)
            activations_inner.append(layer_output_list)
        return output_inner, activations_inner

    def forward(self, input_tensor, hidden_state=None):
        """
        Parameters
        ----------
        input_tensor:
            5-D Tensor either of shape (t, b, c, h, w) or (b, t, c, h, w)
        hidden_state:
            must be None; stateful operation is not implemented

        Returns
        -------
        Tensor (b, t, c', h', w') with the top layer's output stacked over
        time.
        """
        input_tensor, hidden_state = self._prepare(input_tensor, hidden_state)
        output_inner, _ = self._unroll(input_tensor, hidden_state)
        return torch.stack(output_inner, dim=1)

    def return_activations(self, input_tensor, hidden_state=None):
        """Like forward(), but return the list (over time) of per-layer
        activation lists instead of the stacked top-layer output."""
        input_tensor, hidden_state = self._prepare(input_tensor, hidden_state)
        _, activations_inner = self._unroll(input_tensor, hidden_state)
        return activations_inner

    def _init_hidden(self, batch_size, image_size, pooling):
        """Zero-initialize the (lateral, top-down) state of every layer.

        With pooling, layer i operates at the input resolution divided by
        2**i (one 2x2 max-pool per preceding layer). BUGFIX: the original
        divided by 2*i, which produced wrong state sizes for more than
        three layers (identical results for num_layers <= 3).
        """
        init_states = []
        if pooling:
            for i in range(self.num_layers):
                scaled = tuple(dim // (2 ** i) for dim in image_size)
                init_states.append(
                    self.cell_list[i].init_hidden(batch_size, scaled))
        else:
            for i in range(self.num_layers):
                init_states.append(
                    self.cell_list[i].init_hidden(batch_size, image_size))
        return init_states

    @staticmethod
    def _check_kernel_size_consistency(kernel_size):
        """Raise unless kernel_size is a tuple or a list of tuples."""
        if not (isinstance(kernel_size, tuple) or
                (isinstance(kernel_size, list) and
                 all([isinstance(elem, tuple) for elem in kernel_size]))):
            raise ValueError('`kernel_size` must be tuple or list of tuples')

    @staticmethod
    def _extend_for_multilayer(param, num_layers):
        """Broadcast a scalar parameter to a per-layer list."""
        if not isinstance(param, list):
            param = [param] * num_layers
        return param
class RecConvNet(nn.Module):
    """Recurrent convolutional classifier.

    A RecConv trunk followed by global average pooling and a shared
    linear readout applied independently at every timestep.
    """

    def __init__(self, connectivity, kernel_size, input_channels=1,
                 n_features=32, num_layers=2, num_targets=10):
        super(RecConvNet, self).__init__()
        self.rcnn = RecConv(connectivity, input_channels, n_features,
                            kernel_size, num_layers, batch_first=True)
        self.fc = nn.Linear(n_features, num_targets)
        self.n_features = n_features

    def forward(self, x):
        """x: (b, t, c, h, w) -> (logits (b, t, targets), trunk feature map)."""
        feature_map = self.rcnn(x)
        # collapse the spatial dimensions via global average pooling
        pooled = feature_map.mean(dim=[-2, -1], keepdim=True)
        per_step_logits = []
        for step in range(pooled.size(1)):
            flat = pooled[:, step, :, :, :].view(pooled.size(0),
                                                 self.n_features)
            per_step_logits.append(self.fc(flat))
        return torch.stack(per_step_logits, dim=1), feature_map

    def log_stats(self, writer, step):
        """Log summary statistics of every cell's connection weights."""
        for idx, cell in enumerate(self.rcnn.cell_list):
            tag = 'layer{}'.format(idx + 1)
            self._varstats2tb(cell.bottomup.weight,
                              tag + '/B_weights', writer, step)
            self._varstats2tb(cell.bottomup.bias,
                              tag + '/B_bias', writer, step)
            if 'L' in cell.connectivity:
                self._varstats2tb(cell.lateral.weight,
                                  tag + '/L_weights', writer, step)
                self._varstats2tb(cell.lateral.bias,
                                  tag + '/L_bias', writer, step)
            # top-down weights only exist meaningfully above the first layer
            if ('T' in cell.connectivity) and (idx > 0):
                self._varstats2tb(cell.topdown.weight,
                                  tag + '/T_weights', writer, step)
                self._varstats2tb(cell.topdown.bias,
                                  tag + '/T_bias', writer, step)

    def _varstats2tb(self, variable, name, writer, step):
        """Log mean/std/min/max/median of *variable* under network/<name>/."""
        variable = variable.detach()
        for stat, value in (('mean', variable.mean()),
                            ('std', variable.std()),
                            ('min', variable.min()),
                            ('max', variable.max()),
                            ('median', variable.median())):
            writer.add_scalar('network/{}/{}'.format(name, stat), value, step)
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# April 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# metrics.py oN88888UU[[[/;::-. dP^
# a collection of dNMMNN888UU[[[/;:--. .o@P^
# metrics for ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import math
import random
import csv
import cProfile
import numpy as np
import hashlib
from fractions import Fraction
import torch
# calculate the gini coefficient from a numpy array
def gini(array):
    """Calculate the Gini coefficient of a numpy array.

    :param array: array-like of values (any shape; flattened internally).
        Integer input is accepted and promoted to float.
    :return: Gini coefficient in [0, 1) as a float.
    """
    # based on bottom eq:
    # http://www.statsdirect.com/help/generatedimages/equations/equation154.svg
    # from:
    # http://www.statsdirect.com/help/default.htm#nonparametric_methods/gini.htm
    # All values are treated equally, arrays must be 1d.
    # BUG FIX: work on a float64 copy — the in-place shifts below raised a
    # casting error for integer input, and a copy guarantees the caller's
    # data is never mutated.
    array = np.asarray(array, dtype=np.float64).flatten()
    if np.amin(array) < 0:
        # Values cannot be negative: shift so the minimum is zero
        array -= np.amin(array)
    # Values cannot be 0 (avoids 0/0 for an all-zero input):
    array += 0.0000001
    # Values must be sorted:
    array = np.sort(array)
    # Index per array element:
    index = np.arange(1, array.shape[0] + 1)
    # Number of array elements:
    n = array.shape[0]
    # Gini coefficient:
    return ((np.sum((2 * index - n - 1) * array)) / (n * np.sum(array)))
# calculate the gini coefficient from a torch array
def gini_torch(array):
    """Calculate the Gini coefficient of a torch tensor.

    :param array: tensor of values (any shape; flattened internally).
        Integer tensors are accepted and promoted to float.
    :return: 0-dim tensor with the Gini coefficient in [0, 1).
    """
    # based on bottom eq:
    # http://www.statsdirect.com/help/generatedimages/equations/equation154.svg
    # from:
    # http://www.statsdirect.com/help/default.htm#nonparametric_methods/gini.htm
    # All values are treated equally, arrays must be 1d.
    array = array.flatten()
    # BUG FIX: flatten() can return a view, so the in-place shifts below
    # mutated the caller's tensor; they also raised on integer dtypes.
    # Work on a floating-point copy instead.
    array = array.float() if not torch.is_floating_point(array) else array.clone()
    if torch.amin(array) < 0:
        # Values cannot be negative:
        array -= torch.amin(array)
    # Values cannot be 0:
    array += 0.0000001
    # Values must be sorted:
    array = torch.sort(array)[0]
    # Index per array element:
    index = torch.arange(1, array.shape[0] + 1)
    # Number of array elements:
    n = array.shape[0]
    # Gini coefficient:
    return ((torch.sum((2 * index - n - 1) * array)) / (n * torch.sum(array)))
memoization = {}
class Similarity:
    """
    This class contains instances of similarity / distance metrics.
    These are used in centroid based clustering algorithms to identify similar
    patterns and put them into the same homogeneous sub sets
    :param minimum: the minimum distance between two patterns
    (so you don't divide by 0)
    """

    def __init__(self, minimum):
        # floor returned by every metric so callers never divide by zero
        self.e = minimum
        self.vector_operators = VectorOperations()

    def manhattan_distance(self, p_vec, q_vec):
        """
        This method implements the manhattan distance metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the manhattan distance between vector one and two
        """
        return max(np.sum(np.fabs(p_vec - q_vec)), self.e)

    def square_euclidean_distance(self, p_vec, q_vec):
        """
        This method implements the squared euclidean distance metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the squared euclidean distance between vector one and two
        """
        diff = p_vec - q_vec
        return max(np.sum(diff**2), self.e)

    def euclidean_distance(self, p_vec, q_vec):
        """
        This method implements the euclidean distance metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the euclidean distance between vector one and two
        """
        return max(math.sqrt(self.square_euclidean_distance(p_vec, q_vec)),
                   self.e)

    def half_square_euclidean_distance(self, p_vec, q_vec):
        """
        This method implements the half squared euclidean distance metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the half squared euclidean distance between vector one and two
        """
        return max(0.5 * self.square_euclidean_distance(p_vec, q_vec), self.e)

    def cosine_similarity(self, p_vec, q_vec):
        """
        This method implements the cosine similarity metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the cosine similarity between vector one and two
        """
        # NOTE(review): product() is element-wise, so pq is a vector; builtin
        # max() on a multi-element array is ambiguous — confirm this is only
        # used with scalar-like inputs.
        pq = self.vector_operators.product(p_vec, q_vec)
        p_norm = self.vector_operators.norm(p_vec)
        q_norm = self.vector_operators.norm(q_vec)
        return max(pq / (p_norm * q_norm), self.e)

    def tanimoto_coefficient(self, p_vec, q_vec):
        """
        This method implements the cosine tanimoto coefficient metric
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the tanimoto coefficient between vector one and two
        """
        pq = self.vector_operators.product(p_vec, q_vec)
        p_square = self.vector_operators.square(p_vec)
        q_square = self.vector_operators.square(q_vec)
        return max(pq / (p_square + q_square - pq), self.e)

    def fractional_distance(self, p_vec, q_vec, fraction=Fraction(1, 2)):
        """
        This method implements the fractional distance metric. I have
        implemented memoization for this method to reduce
        the number of function calls required. The net effect is that the
        algorithm runs 400% faster. A similar approach
        can be used with any of the above distance metrics as well.
        :param p_vec: vector one
        :param q_vec: vector two
        :param fraction: the fractional distance value (power)
        :return: the fractional distance between vector one and two
        """
        memoize = False
        if memoize:
            key = self.get_key(p_vec, q_vec)
            x = memoization.get(key)
            if x is None:
                diff = p_vec - q_vec
                diff_fraction = np.abs(diff)**fraction
                result = max(math.pow(np.sum(diff_fraction), 1 / fraction),
                             self.e)
                # BUG FIX: the computed distance was never written back to
                # the cache, so memoization could never produce a hit
                memoization[key] = result
                return result
            else:
                return x
        else:
            diff = p_vec - q_vec
            diff_fraction = np.abs(diff)**fraction
            return max(math.pow(np.sum(diff_fraction), 1 / fraction), self.e)

    @staticmethod
    def get_key(p_vec, q_vec):
        """
        This method returns a unique hash value for two vectors. The hash
        value is the concatenation of the SHA-1 hex digests of vector one
        and vector two, so identical vector pairs always map to the same key.
        :param p_vec: vector one
        :param q_vec: vector two
        :return: a unique, repeatable hash string
        """
        # BUG FIX: str(hashlib.sha1(...)) returned the hash OBJECT's repr,
        # which embeds a memory address and differs on every call — making
        # the memoization cache useless. hexdigest() is the stable value.
        return hashlib.sha1(p_vec).hexdigest() + hashlib.sha1(q_vec).hexdigest()
class VectorOperations():
    """
    This class contains useful implementations of methods which can be
    performed on vectors
    """

    @staticmethod
    def product(p_vec, q_vec):
        """
        This method returns the element-wise product of two lists / vectors
        :param p_vec: vector one
        :param q_vec: vector two
        :return: the element-wise product of p_vec and q_vec
        """
        return p_vec * q_vec

    @staticmethod
    def square(p_vec):
        """
        This method returns the element-wise square of a vector
        :param p_vec: the vector to be squared
        :return: the squared value of the vector
        """
        return p_vec**2

    @staticmethod
    def norm(p_vec):
        """
        This method returns the Euclidean (L2) norm of a vector
        :param p_vec: the vector to be normed
        :return: the scalar norm sqrt(sum(v_i^2)) of the vector
        """
        # BUG FIX: previously returned np.sqrt(p_vec) — the ELEMENT-WISE
        # square root — instead of the vector norm used by
        # Similarity.cosine_similarity.
        return np.sqrt(np.sum(np.square(p_vec)))
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# April 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# dataset_handler.py oN88888UU[[[/;::-. dP^
# Pytorch dataloaders dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# standard libraries
# -----
import numpy as np
import os
import sys
import six
import string
import lmdb
import pickle
import msgpack
import tqdm
import pyarrow as pa
import torch
from torch.utils.data import Dataset, DataLoader
from torchvision.datasets import ImageFolder
from torchvision import transforms, datasets
from skimage import io, transform
from PIL import Image
# custom functions
# -----
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Standard Usecase
# -----
# For the standard usecase of having just one class you can use the built-in
# torchvision.datasets.ImageFolder dataset
class dynaMODataset(Dataset):
    """Dynamic Occluded MNIST Dataset"""

    def __init__(self, root_dir, transform=None, target_transform=None):
        """
        Args:
            root_dir (string): Directory with all the images, one
                sub-directory per class.
            transform (callable, optional): Optional transform to be applied
                to the image.
            target_transform (callable, optional): Optional transform to be
                applied to the target.
        """
        self.root_dir = root_dir
        self.transform = transform
        self.target_transform = target_transform
        self.height = 32
        self.width = 32
        self.paths_to_samples = []
        # walk <root>/<class>/<file> and record every sample path
        for cla in self._listdir_clean(root_dir):
            for name in self._listdir_clean(os.path.join(root_dir, cla)):
                self.paths_to_samples.append(
                    os.path.join(root_dir, cla, name))

    @staticmethod
    def _listdir_clean(path):
        """List a directory, dropping the macOS '.DS_Store' artifact."""
        entries = os.listdir(path)
        try:
            entries.remove('.DS_Store')
        except(ValueError):
            pass
        return entries

    def __len__(self):
        return len(self.paths_to_samples)

    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        path = self.paths_to_samples[idx]
        image_array = io.imread(path)
        # stack the horizontally concatenated frames into an (H, W, T) volume
        image_array = image_array.reshape(self.height, self.width, -1,
                                          order='F')
        # the digit sequence is encoded in the filename after the last '_'
        label_str = path.rsplit('_', 1)[-1].rsplit('.')[0]
        target = np.array([int(char) for char in label_str], dtype=np.uint8)
        if self.transform is not None:
            image_array = self.transform(image_array)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return {'image': image_array, 'target': target}
class ToSingle(object):
    """Reduce a target sequence to its final element as an int64 tensor."""

    def __call__(self, target_sequence):
        final_label = target_sequence[-1]
        return torch.tensor(final_label, dtype=torch.int64, device=device)
class ToTimeSeries(object):
    """Reshape concatenated frames into an (height, width, T) volume."""

    def __init__(self, height=32, width=32):
        self.height = height
        self.width = width

    def __call__(self, image):
        as_array = np.array(image)
        # Fortran order stacks the horizontally concatenated frames depth-wise
        return as_array.reshape(self.height, self.width, -1, order='F')
def raw_reader(path):
    """Read a file from disk and return its raw, undecoded bytes."""
    with open(path, 'rb') as handle:
        return handle.read()
def pil_loader(path):
    """Open an image file and return it as an RGB PIL image."""
    with open(path, 'rb') as handle:
        loaded = Image.open(handle)
        return loaded.convert('RGB')
class StereoImageFolder(Dataset):
    """Modified ImageFolder Structure to Import Stereoscopic Data

    Expects a directory layout of
    <root>/{train,test}/{left,right}/<class>/<file>, where left and right
    mirror each other file-for-file. Targets are parsed from the file path
    (see __getitem__).
    """
    def __init__(self, root_dir, train, stereo=False, loader=pil_loader, transform=None, target_transform=None, nhot_targets=False):
        """
        Args:
            root_dir (string): Directory with all the images.
            train (bool): select the train/ or test/ subtree.
            stereo (bool): if True, __getitem__ returns (left, right) pairs.
            loader (callable): file loader (PIL image by default).
            transform (callable, optional): Optional transform to be applied
            on a sample.
            target_transform (callable, optional): applied to the target.
            nhot_targets (bool): parse multiple '-'-separated labels from
                the filename instead of a single class label.
        """
        self.train = train
        self.transform = transform
        self.target_transform = target_transform
        self.paths_to_left_samples = []
        self.paths_to_right_samples = []
        self.height = 32
        self.width = 32
        self.loader = loader
        self.stereo = stereo
        self.nhot = nhot_targets
        # move through the filestructure to get a list of all images
        self._add_data(root_dir)
    def _add_data(self, root_dir):
        # Collect left-eye sample paths and derive the right-eye paths by
        # swapping the first 'left' path component for 'right'.
        root_dir = root_dir + '/train/left/' if self.train else root_dir + '/test/left/'
        objectclasses = os.listdir(root_dir)
        new_left_samples = []
        try:
            # drop the macOS directory artifact if present
            objectclasses.remove('.DS_Store')
        except(ValueError):
            pass
        for cla in objectclasses:
            class_folder = os.path.join(root_dir, cla)
            filenames = os.listdir(class_folder)
            try:
                filenames.remove('.DS_Store')
            except(ValueError):
                pass
            for name in filenames:
                new_left_samples.append(os.path.join(root_dir, cla, name))
                self.paths_to_left_samples.append(
                    os.path.join(root_dir, cla, name))
        for item in new_left_samples:
            # NOTE(review): assumes 'left' occurs exactly once in the path;
            # a class or file name containing 'left' would break this split.
            self.paths_to_right_samples.append(item.split('left')[0] + 'right' + item.split('left')[1])
    def _remove_data(self, n_samples, last_samples=True):
        # Drop n_samples entries from the end (default) or the front of the
        # sample lists, keeping left/right aligned.
        for i in range(n_samples):
            if last_samples:
                self.paths_to_left_samples.pop()
                self.paths_to_right_samples.pop()
            else:
                self.paths_to_left_samples.pop(0)
                self.paths_to_right_samples.pop(0)
    def __len__(self):
        return len(self.paths_to_left_samples)
    def __getitem__(self, idx):
        # Returns [image, target] or [(image_l, image_r), target] when
        # stereo=True. Targets are parsed from the sample's path.
        if torch.is_tensor(idx):
            idx = idx.tolist()
        img_name = self.paths_to_left_samples[idx]
        image = self.loader(img_name)
        target = []
        if self.nhot:
            # last three '-'-separated tokens of the stem are the labels
            t_list = self.paths_to_left_samples[idx].rsplit('.',1)[0].rsplit('-',3)[-3:]
            if len(t_list)==1:
                raise NotImplementedError('nhot targets not implemented for this dataset')
            target = np.array(t_list, dtype=np.int64)
        else:
            # token after the last '_' up to the next '/' is the class label
            t_list = self.paths_to_left_samples[idx].rsplit('_', 1)[-1].rsplit('/')[0]
            if t_list.__class__ == str:
                target = np.array(t_list, dtype=np.int64) # target.append(int(t_list))
            else:
                # fallback: treat each character as one digit label
                for t in self.paths_to_left_samples[idx].rsplit('_', 1)[-1].rsplit('/')[0]:
                    target.append(int(t))
                target = np.array(target, dtype=np.int64)
        if self.target_transform is not None:
            target = self.target_transform(target)
        if self.stereo:
            image_l = image
            image_r = self.loader(self.paths_to_right_samples[idx])
            if self.transform is not None:
                image_l = self.transform(image_l)
                image_r = self.transform(image_r)
            sample = [(image_l, image_r), target]
        else:
            if self.transform is not None:
                image = self.transform(image)
            sample = [image, target]
        return sample
class ImageFolderLMDB(Dataset):
    """ImageFolder-style dataset backed by an LMDB database.

    The database must contain '__len__' and '__keys__' metadata entries
    (as written by folder2lmdb) plus one pyarrow-serialized
    (image_bytes, label) tuple per key.
    """

    def __init__(self, db_path, transform=None, target_transform=None):
        self.db_path = db_path
        # read-only environment; locking/readahead disabled for fast reads
        self.env = lmdb.open(db_path, subdir=os.path.isdir(db_path),
                             readonly=True, lock=False,
                             readahead=False, meminit=False)
        with self.env.begin(write=False) as txn:
            # self.length = txn.stat()['entries'] - 1
            self.length = pa.deserialize(txn.get(b'__len__'))
            self.keys = pa.deserialize(txn.get(b'__keys__'))
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        # BUG FIX: __len__ was defined twice with identical bodies;
        # the duplicate has been removed.
        return self.length

    def __getitem__(self, index):
        env = self.env
        with env.begin(write=False) as txn:
            byteflow = txn.get(self.keys[index])
        unpacked = pa.deserialize(byteflow)
        # load image: decode the stored raw bytes into an RGB PIL image
        imgbuf = unpacked[0]
        buf = six.BytesIO()
        buf.write(imgbuf)
        buf.seek(0)
        img = Image.open(buf).convert('RGB')
        # load label
        target = unpacked[1]
        if self.transform is not None:
            img = self.transform(img)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img, target

    def __repr__(self):
        return self.__class__.__name__ + ' (' + self.db_path + ')'
class StereoImageFolderLMDB(Dataset):
    """LMDB-backed stereo dataset (see stereofolder2lmdb for the writer).

    __getitem__ lazily (re)opens the LMDB environment via _open_lmdb so
    that each DataLoader worker process can obtain its own handle.
    """

    def __init__(self, db_path, stereo=False, transform=None, target_transform=None):
        self.db_path = db_path
        #********
        self.env = lmdb.open(db_path, subdir=os.path.isdir(db_path),
                             readonly=True, lock=False,
                             readahead=False, meminit=False)
        with self.env.begin(write=False) as txn:
            # self.length = txn.stat()['entries'] - 1
            self.length = pa.deserialize(txn.get(b'__len__'))
            self.keys = pa.deserialize(txn.get(b'__keys__'))
        #********
        self.stereo = stereo
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        return self.length

    def _open_lmdb(self):
        """Open the environment and keep a long-lived read transaction."""
        self.env = lmdb.open(self.db_path, subdir=os.path.isdir(self.db_path),
                             readonly=True, lock=False,
                             readahead=False, meminit=False)
        # BUG FIX: the transaction used to be created in a `with` block and
        # assigned to self.txn after it was already closed; keep it open
        # for the lifetime of this dataset instead.
        self.txn = self.env.begin(write=False)
        self.length = pa.deserialize(self.txn.get(b'__len__'))
        self.keys = pa.deserialize(self.txn.get(b'__keys__'))

    def __getitem__(self, index):
        # BUG FIX: guard on 'txn' (not 'env') — __init__ always sets
        # self.env, so _open_lmdb never ran and self.txn was never created,
        # making every lookup fail with AttributeError.
        if not hasattr(self, 'txn'):
            self._open_lmdb()
        byteflow = self.txn.get(self.keys[index])
        unpacked = pa.deserialize(byteflow)
        # load the left image from its stored raw bytes
        imgbuf = unpacked[0][0]
        buf = six.BytesIO()
        buf.write(imgbuf)
        buf.seek(0)
        img_l = Image.open(buf).convert('RGB')
        # load label
        target = unpacked[1]
        if self.target_transform is not None:
            target = self.target_transform(target)
        if self.stereo:
            # decode the right image as well
            imgbuf = unpacked[0][1]
            buf = six.BytesIO()
            buf.write(imgbuf)
            buf.seek(0)
            img_r = Image.open(buf).convert('RGB')
            if self.transform is not None:
                img_l = self.transform(img_l)
                img_r = self.transform(img_r)
            return (img_l, img_r), target
        else:
            if self.transform is not None:
                img_l = self.transform(img_l)
            return img_l, target
def dumps_pyarrow(obj):
    """
    Serialize an object with pyarrow.

    Returns:
        Implementation-dependent bytes-like object
    """
    serialized = pa.serialize(obj)
    return serialized.to_buffer()
def folder2lmdb(dpath, name="train", write_frequency=5000, num_workers=16):
    """Convert an ImageFolder directory tree into a single LMDB database.

    Args:
        dpath: dataset root; images are read from <dpath>/<name>.
        name: subfolder to convert; also used for the output file name.
        write_frequency: commit the write transaction every N samples.
        num_workers: DataLoader worker processes used for reading files.

    Writes <dpath>/<name>.lmdb containing one pyarrow-serialized
    (raw_image_bytes, label) tuple per sample plus '__keys__' and
    '__len__' metadata entries.
    """
    directory = os.path.expanduser(os.path.join(dpath, name))
    print("Loading dataset from %s" % directory)
    # raw_reader keeps the encoded image bytes; decoding happens at load time
    dataset = ImageFolder(directory, loader=raw_reader)
    # identity collate_fn: keep samples as a list instead of stacking tensors
    data_loader = DataLoader(dataset, num_workers=num_workers, collate_fn=lambda x: x)
    lmdb_path = os.path.join(dpath, "%s.lmdb" % name)
    isdir = os.path.isdir(lmdb_path)
    print("Generate LMDB to %s" % lmdb_path)
    # map_size is 1 TiB: LMDB needs a maximum map size up front
    db = lmdb.open(lmdb_path, subdir=isdir,
                   map_size=1099511627776,
                   readonly=False,
                   meminit=False, map_async=True)
    print(len(dataset), len(data_loader))
    txn = db.begin(write=True)
    for idx, data in enumerate(data_loader):
        # print(type(data), data)
        image, label = data[0]
        txn.put(u'{}'.format(idx).encode('ascii'), dumps_pyarrow((image, label)))
        if idx % write_frequency == 0:
            print("[%d/%d]" % (idx, len(data_loader)))
            # commit periodically to bound the transaction's memory footprint
            txn.commit()
            txn = db.begin(write=True)
    # finish iterating through dataset
    txn.commit()
    keys = [u'{}'.format(k).encode('ascii') for k in range(idx + 1)]
    with db.begin(write=True) as txn:
        txn.put(b'__keys__', dumps_pyarrow(keys))
        txn.put(b'__len__', dumps_pyarrow(len(keys)))
    print("Flushing database ...")
    db.sync()
    db.close()
def stereofolder2lmdb(dpath, name, write_frequency=5000, num_workers=16):
    """Convert a stereo ImageFolder tree into train and test LMDB databases.

    Both splits are converted in one call; each sample is stored as a
    pyarrow-serialized ((left_bytes, right_bytes), label) tuple plus the
    '__keys__'/'__len__' metadata entries.

    Args:
        dpath: dataset root containing the train/ and test/ subtrees.
        name: base name; outputs <name>_train.lmdb and <name>_test.lmdb.
        write_frequency: commit the write transaction every N samples.
        num_workers: DataLoader worker processes used for reading files.
    """
    directory = os.path.expanduser(dpath)
    print("Loading dataset from %s" % directory)
    for train_bool in [True, False]:
        finalname = name + '_train' if train_bool else name + '_test'
        # raw_reader keeps the encoded bytes; decoding happens at load time
        dataset = StereoImageFolder(directory, stereo=True, train=train_bool, loader=raw_reader)
        data_loader = DataLoader(dataset, num_workers=num_workers, collate_fn=lambda x: x)
        lmdb_path = os.path.join(dpath, "%s.lmdb" % finalname)
        isdir = os.path.isdir(lmdb_path)
        print("Generate LMDB to %s" % lmdb_path)
        # map_size is 1 TiB: LMDB needs a maximum map size up front
        db = lmdb.open(lmdb_path, subdir=isdir,
                       map_size=1099511627776,
                       readonly=False,
                       meminit=False, map_async=True)
        print(len(dataset), len(data_loader))
        txn = db.begin(write=True)
        for idx, data in enumerate(data_loader):
            # print(type(data), data)
            (image_l, image_r), label = data[0]
            txn.put(u'{}'.format(idx).encode('ascii'), dumps_pyarrow(((image_l, image_r), label)))
            if idx % write_frequency == 0:
                print("[%d/%d]" % (idx, len(data_loader)))
                # commit periodically to bound transaction memory
                txn.commit()
                txn = db.begin(write=True)
        # finish iterating through dataset
        txn.commit()
        keys = [u'{}'.format(k).encode('ascii') for k in range(idx + 1)]
        with db.begin(write=True) as txn:
            txn.put(b'__keys__', dumps_pyarrow(keys))
            txn.put(b'__len__', dumps_pyarrow(len(keys)))
        print("Flushing database ...")
        db.sync()
        db.close()
class RandomData(Dataset):
    """Synthetic dataset of random 32x32 image sequences with random labels."""

    def __init__(self, length=45, timesteps=4, constant_over_time=False, transform=None):
        self.length = length
        self.timesteps = timesteps
        self.transform = transform
        if constant_over_time:
            # draw one random frame per sample and repeat it across time
            frames = torch.randint(255, [length, 1, 32, 32], dtype=torch.float)
            self.data = frames.unsqueeze(1).repeat(1, timesteps, 1, 1, 1)
        else:
            # independent random frames at every timestep
            self.data = torch.randint(255, [length, timesteps, 1, 32, 32], dtype=torch.float)
        # one random digit label (0..8) per sample
        self.labels = torch.randint(9, [length], dtype=torch.float)

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        sequence = self.data[idx]
        label = self.labels[idx]
        if self.transform:
            sequence = self.transform(sequence)
        return sequence, label
class AffineTransform:
    """Translate an image by a fixed (x_shift, y_shift) offset.

    Applies torchvision's affine with zero rotation, unit scale and zero
    shear, i.e. a pure translation.
    """

    def __init__(self, x_shift, y_shift):
        self.x_shift = x_shift
        self.y_shift = y_shift

    def __call__(self, x):
        translation = [self.x_shift, self.y_shift]
        return transforms.functional.affine(x, 0, translation, 1.0, 0)
if __name__ == "__main__":
    # CLI entry point: convert an image-folder dataset to LMDB.
    import argparse

    def _str2bool(value):
        """Parse common truthy strings.

        BUG FIX: argparse's `type=bool` treats ANY non-empty string as True,
        so `--stereo False` silently enabled stereo conversion.
        """
        return str(value).lower() in ('true', '1', 'yes', 'y')

    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--folder", type=str)
    parser.add_argument('-n', '--name', type=str, default="dataset")
    parser.add_argument('-p', '--procs', type=int, default=20)
    parser.add_argument("-os", "--stereo", type=_str2bool, default=False)
    args = parser.parse_args()
    if args.stereo:
        stereofolder2lmdb(args.folder, num_workers=args.procs, name=args.name)
    else:
        folder2lmdb(args.folder, num_workers=args.procs, name=args.name)
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# July 2021 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# publisher.py oN88888UU[[[/;::-. dP^
# create paperready dNMMNN888UU[[[/;:--. .o@P^
# figures and plots ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.utils.data import Dataset, DataLoader, Subset
import torchvision.transforms as transforms
import torchvision
import os
import sys
from PIL import Image, ImageOps
import matplotlib.pyplot as plt
# statsmodels for ANOVA
import pandas as pd
import statsmodels.api as sm
from statsmodels.formula.api import ols
from statsmodels.stats.anova import AnovaRM
from scipy.stats import shapiro, levene, ks_2samp, ttest_ind
# custom functions
# -----
import utilities.visualizer as visualizer
from utilities.networks.buildingblocks.rcnn import RecConvNet, CAM
from utilities.dataset_handler import StereoImageFolderLMDB, StereoImageFolder, AffineTransform
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# ----------------
# Helper Functions
# ----------------
def eta_squared(aov):
    """Append an 'eta_sq' effect-size column to a statsmodels ANOVA table."""
    total_ss = sum(aov['sum_sq'])
    aov['eta_sq'] = 'NaN'
    # each effect row's share of the total sum of squares; the residual
    # (last) row is left as NaN by the partial-index assignment
    aov['eta_sq'] = aov[:-1]['sum_sq'] / total_ss
    return aov
def omega_squared(aov):
    """Append an 'omega_sq' effect-size column to a statsmodels ANOVA table.

    Assumes the residual row is the last row of the table, as produced by
    statsmodels' anova_lm.
    """
    # mean squared error from the residual (last) row; use .iloc for
    # positional access — the previous chained `aov['sum_sq'][-1]` relied on
    # the deprecated positional fallback of label-based Series indexing
    mse = aov['sum_sq'].iloc[-1] / aov['df'].iloc[-1]
    aov['omega_sq'] = 'NaN'
    # effect rows only; the residual row stays NaN via index alignment
    aov['omega_sq'] = (aov[:-1]['sum_sq'] - (aov[:-1]['df'] * mse)) / (sum(aov['sum_sq']) + mse)
    return aov
def show(img):
    """Display channel 0 of a CHW tensor with matplotlib's matshow."""
    hwc = np.transpose(img.numpy(), (1, 2, 0))
    plt.matshow(hwc[:, :, 0], interpolation='nearest', cmap='viridis')
def generate_hidden_representation(test_loader, network, timesteps, stereo):
    """Run the network over a loader and collect pooled features, inputs,
    targets and raw outputs as concatenated tensors."""
    feats, ins, tars, outs = [], [], [], []
    with torch.no_grad():
        for batch in test_loader:
            input_tensor, target_tensor = batch
            if stereo:
                # concatenate left/right views along the channel dimension
                input_tensor = torch.cat(input_tensor, dim=1)
            input_tensor = input_tensor.to(device)
            target_tensor = target_tensor.to(device)
            # unroll the static image over time: (B,C,H,W) -> (B,T,C,H,W)
            input_tensor = input_tensor.unsqueeze(1).repeat(1, timesteps, 1, 1, 1)
            outputs, features = network(input_tensor)
            # global average pooling over the spatial dimensions
            feats.append(features.mean(dim=[-2, -1], keepdim=True))
            ins.append(input_tensor)
            tars.append(target_tensor)
            outs.append(outputs)
    return (torch.cat(feats, dim=0), torch.cat(ins, dim=0),
            torch.cat(tars, dim=0), torch.cat(outs, dim=0))
def generate_class_activation(test_loader, network, timesteps, stereo):
    """Compute class-activation maps for every batch of a loader.

    Returns concatenated (cams, inputs, targets, outputs, topk_probabilities,
    topk_predictions) tensors.
    """
    cam = CAM(network)
    # TODO: Solve the unroll-timestep handling as a function parameter
    #timesteps = configuration_dict['time_depth'] + 1 + configuration_dict['time_depth_beyond']
    cams_l, ins_l, tars_l = [], [], []
    outs_l, probs_l, preds_l = [], [], []
    with torch.no_grad():
        for batch in test_loader:
            input_tensor, target_tensor = batch
            if stereo:
                # concatenate left/right views along the channel dimension
                input_tensor = torch.cat(input_tensor, dim=1)
            input_tensor = input_tensor.to(device)
            target_tensor = target_tensor.to(device)
            # unroll the static image over time: (B,C,H,W) -> (B,T,C,H,W)
            input_tensor = input_tensor.unsqueeze(1).repeat(1, timesteps, 1, 1, 1)
            outputs, (cams, topk_prob, topk_pred) = cam(input_tensor)
            cams_l.append(cams)
            ins_l.append(input_tensor)
            tars_l.append(target_tensor)
            outs_l.append(outputs)
            probs_l.append(topk_prob)
            preds_l.append(topk_pred)
    return (torch.cat(cams_l, dim=0), torch.cat(ins_l, dim=0),
            torch.cat(tars_l, dim=0), torch.cat(outs_l, dim=0),
            torch.cat(probs_l, dim=0), torch.cat(preds_l, dim=0))
def compare_concentration_mass(rgb_loader, test_loader, network, timesteps, stereo):
    """Measure how CAM mass distributes over target/occluder/overlap/background.

    rgb_loader yields colour-coded segmentation images (R: target,
    G: occluder, B: overlap, none: background) — assumed to iterate in
    lockstep with test_loader; TODO confirm the two loaders share ordering.

    Returns eight (num_samples, timesteps) arrays: per-region CAM-mass
    percentages and their per-pixel-normalized counterparts, in the order
    target, target_pixel, occluder, occluder_pixel, overlap, overlap_pixel,
    background, background_pixel.
    """
    cam = CAM(network)
    with torch.no_grad():
        target_percentages = []
        target_pixel_percentages = []
        occluder_percentages = []
        occluder_pixel_percentages = []
        overlap_percentages = []
        overlap_pixel_percentages = []
        background_percentages = []
        background_pixel_percentages = []
        for i, (data, rgb) in enumerate(zip(test_loader, rgb_loader)):
            input_tensor, target_tensor = data
            if stereo:
                # concatenate left/right views along the channel dimension
                input_tensor = torch.cat(input_tensor, dim=1)
            input_tensor, target_tensor = input_tensor.to(device), target_tensor.to(device)
            # unroll the static image over time: (B,C,H,W) -> (B,T,C,H,W)
            input_tensor = input_tensor.unsqueeze(1)
            input_tensor = input_tensor.repeat(1, timesteps, 1, 1, 1)
            outputs, (cams, topk_prob, topk_pred) = cam(input_tensor)
            b,t,n_classes,h,w = cams.shape
            # normalize cams to 0,1
            # -----
            # offset by minimum of each upsampled activation map
            min_val, min_args = torch.min(cams.view(b,t,n_classes,h*w), dim=-1, keepdim=True)
            cams -= torch.unsqueeze(min_val, dim=-1)
            # divide by the maximum of each activation map
            max_val, max_args = torch.max(cams.view(b,t,n_classes,h*w), dim=-1, keepdim=True)
            cams /= torch.unsqueeze(max_val, dim=-1)
            # normalize by the sum of each upsampled activation map
            #sum_val = torch.sum(cams, dim=[-2,-1], keepdim=True)
            #cams /= sum_val
            # per-batch accumulators, one row per sample, one column per step
            mass_perc_on_target = np.zeros([b, timesteps])
            mass_perc_on_target_pixel = np.zeros([b, timesteps])
            mass_perc_on_occluder = np.zeros([b, timesteps])
            mass_perc_on_occluder_pixel = np.zeros([b, timesteps])
            mass_perc_on_background = np.zeros([b, timesteps])
            mass_perc_on_background_pixel = np.zeros([b, timesteps])
            mass_perc_on_overlap = np.zeros([b, timesteps])
            mass_perc_on_overlap_pixel = np.zeros([b, timesteps])
            # R: target, G: occluder, B: overlap, None: background)
            # R and B: targets
            tar_pixels = rgb[0][0][:, 0, :, :]# + rgb[0][0][:, 2, :, :]
            # G and B: occluders
            occ_pixels = rgb[0][0][:, 1, :, :]# + rgb[0][0][:, 2, :, :]
            #
            ovl_pixels = rgb[0][0][:, 2, :, :]
            # None
            background_pixels = (-1)*(rgb[0][0][:, 0, :, :] + rgb[0][0][:, 1, :, :] + rgb[0][0][:, 2, :, :]) + 1
            for ti in range(timesteps):
                for ind in range(b):
                    # filter out incorrect classifications
                    #if topk_pred[ind,-1,0] == target_tensor[ind, 0]:
                    if True:
                        # CAM for the top-1 predicted class at this timestep
                        c = cams[ind, ti, topk_pred[ind,ti,0], :, :]
                        total_mass = c.sum()
                        # fraction of CAM mass inside each region, plus the
                        # same fraction normalized by the region's pixel count
                        mass_perc_on_target[ind, ti] = (c[tar_pixels[ind] > 0].sum() / total_mass)
                        mass_perc_on_target_pixel[ind, ti] = mass_perc_on_target[ind, ti] / (tar_pixels[ind] > 0).sum()
                        mass_perc_on_occluder[ind, ti] = (c[occ_pixels[ind] > 0].sum() / total_mass)
                        mass_perc_on_occluder_pixel[ind, ti] = mass_perc_on_occluder[ind, ti] / (occ_pixels[ind] > 0).sum()
                        mass_perc_on_background[ind, ti] = (c[background_pixels[ind] > 0].sum() / total_mass)
                        mass_perc_on_background_pixel[ind, ti] = mass_perc_on_background[ind, ti] / (background_pixels[ind] > 0).sum()
                        mass_perc_on_overlap[ind, ti] = (c[ovl_pixels[ind] > 0].sum() / total_mass)
                        mass_perc_on_overlap_pixel[ind, ti] = mass_perc_on_overlap[ind, ti] / (ovl_pixels[ind] > 0).sum()
                    else:
                        pass
            target_percentages.append(mass_perc_on_target.copy())
            target_pixel_percentages.append(mass_perc_on_target_pixel.copy())
            occluder_percentages.append(mass_perc_on_occluder.copy())
            occluder_pixel_percentages.append(mass_perc_on_occluder_pixel.copy())
            background_percentages.append(mass_perc_on_background.copy())
            background_pixel_percentages.append(mass_perc_on_background_pixel.copy())
            overlap_percentages.append(mass_perc_on_overlap.copy())
            overlap_pixel_percentages.append(mass_perc_on_overlap_pixel.copy())
        # stack the per-batch accumulators into (num_samples, timesteps)
        target_percentages = np.concatenate(target_percentages, axis=0)
        target_pixel_percentages = np.concatenate(target_pixel_percentages, axis=0)
        occluder_percentages = np.concatenate(occluder_percentages, axis=0)
        occluder_pixel_percentages = np.concatenate(occluder_pixel_percentages, axis=0)
        background_percentages = np.concatenate(background_percentages, axis=0)
        background_pixel_percentages = np.concatenate(background_pixel_percentages, axis=0)
        overlap_percentages = np.concatenate(overlap_percentages, axis=0)
        overlap_pixel_percentages = np.concatenate(overlap_pixel_percentages, axis=0)
    return target_percentages, target_pixel_percentages, occluder_percentages, occluder_pixel_percentages, overlap_percentages, overlap_pixel_percentages, background_percentages, background_pixel_percentages
# ----------------
# Analysis Functions
# ----------------
def fig_cam(network, test_transform, configuration_dict, sample_size, random_seed):
np.random.seed(random_seed)
test_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/{}'.format(configuration_dict['dataset']),
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform,
nhot_targets=True
)
rep_sample = list(np.random.choice(range(len(test_dataset)), size=sample_size, replace=False))
test_subset = torch.utils.data.Subset(test_dataset, rep_sample)
test_loader = torch.utils.data.DataLoader(test_subset, batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
cams, img, tar, out, topk_prob, topk_pred = generate_class_activation(test_loader, network, configuration_dict['time_depth'] + 1, configuration_dict['stereo'])
# filter correct predictions - best topk at last timestep = target
correct_indices = (tar[:,0] == topk_pred[:, -1, 0])
# show means for correct predictions
# visualizer.plot_cam_means(
# cams[correct_indices],
# tar[correct_indices,0],
# topk_prob[correct_indices],
# topk_pred[correct_indices]
# )
#visualizer.plot_cam_samples(cams, img, tar, topk_prob, topk_pred, list_of_indices=[948,614,541], filename='{}/fig8a_cam_samples.pdf'.format(configuration_dict['visualization_dir']))
#visualizer.plot_cam_samples_alt(cams, img, tar, topk_prob, topk_pred, list_of_indices=[948,614,541], filename='{}/fig8a_cam_samples_alt.pdf'.format(configuration_dict['visualization_dir']))
# np.random.choice(np.arange(1000),10)
# visualizer.plot_cam_samples(cams, img, tar, topk_prob, topk_pred, list_of_indices=[972, 51, 205, 227, 879, 538, 112, 741, 309, 289])
# visualizer.plot_cam_samples_alt(cams, img, tar, topk_prob, topk_pred, list_of_indices=[972, 51, 205, 227, 879, 538, 112, 741])
#
# for i in range(10):
# visualizer.plot_cam_samples_alt(cams, img, tar, topk_prob, topk_pred, list_of_indices=list(np.random.choice(np.arange(1000),8)))
c3,t3,prob3,pred3 = [],[],[],[]
for ds in ['osmnist2rf_br_reduced','osmnist2rf_tl_reduced','osmnist2rf_c_reduced']:
test_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/{}'.format(ds),
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform,
nhot_targets=True
)
# delete subset generation from final evaluation
test_subset = torch.utils.data.Subset(test_dataset, rep_sample)
test_loader = torch.utils.data.DataLoader(test_subset,
#test_loader = torch.utils.data.DataLoader(test_dataset,
batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
cams, img, tar, out, topk_prob, topk_pred = generate_class_activation(test_loader, network, configuration_dict['time_depth'] + 1, configuration_dict['stereo'])
c3.append(cams)
t3.append(tar)
prob3.append(topk_prob)
pred3.append(topk_pred)
gini_coefficients = visualizer.plot_cam_means2(c3, t3, prob3, pred3, filename='{}/fig8b_cam_means.pdf'.format(configuration_dict['visualization_dir']))
# ANOVA of gini-coefficients
dataframe = pd.DataFrame({'id':np.arange(gini_coefficients.shape[0]), 't0':gini_coefficients[:,0], 't1':gini_coefficients[:,1], 't2':gini_coefficients[:,2], 't3':gini_coefficients[:,3]})
melted_df = pd.melt(dataframe, id_vars=['id'])
melted_df.rename(columns = {'variable':'timesteps'}, inplace=True)
melted_df['condition'] = np.repeat('background', melted_df.shape[0])
# # normality assumption
# for i in range(gini_coefficients.shape[1]):
# stat, p = shapiro(gini_coefficients[:,i])
# print('Wilk-Shapiro: t{}: statistics={}, p={}'.format(i, stat, p))
# # interpret
# alpha = 0.05
# if p > alpha:
# print('Sample looks Gaussian (fail to reject H0)')
# else:
# print('Sample does not look Gaussian (reject H0)')
#
# # homogeneity of variance assumption
# stat, p = levene(gini_coefficients[:,0], gini_coefficients[:,1], gini_coefficients[:,2],gini_coefficients[:,3], center='median')
# print('Levene: statistics={}, p={}'.format(stat, p))
# stat, p = levene(gini_coefficients[:,0], gini_coefficients[:,1], gini_coefficients[:,2],gini_coefficients[:,3], center='mean')
# print('Levene: statistics={}, p={}'.format(stat, p))
#
# if p > alpha:
# print('Groups have equal variance (fail to reject H0)')
# else:
# print('Groups do not have equal variance (reject H0)')
# # standard ANOVA
# print('[INFO] Standard one-way ANOVA')
# melted_df.boxplot('value', by='timesteps')
# plt.show()
#
# linear_model= ols('value ~ timesteps', data=melted_df).fit()
# #print(linear_model.summary())
# aov_table = sm.stats.anova_lm(linear_model, typ=2)
# eta_squared(aov_table)
# omega_squared(aov_table)
# print(aov_table.round(4))
#
# from statsmodels.stats.oneway import anova_oneway
#
# print('[INFO] Repeated Measures one-way ANOVA')
# aovrm = AnovaRM(melted_df, 'value', 'id', within=['timesteps'])
# res = aovrm.fit()
# print(res)
#
# (F(3,2997)=890.7, p=0.000)
# posthoc
# tukey HSD or multiple t-tests with bonferroni-correction
pass
def fig_softmax_and_tsne(network, test_transform, configuration_dict, sample_size, random_seed):
np.random.seed(random_seed)
test_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/{}'.format(configuration_dict['dataset']),
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform,
nhot_targets=True
)
rep_sample = list(np.random.choice(range(len(test_dataset)), size=sample_size, replace=False))
test_subset = torch.utils.data.Subset(test_dataset, rep_sample)
test_loader = torch.utils.data.DataLoader(test_subset, batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
feat, img, tar, out = generate_hidden_representation(test_loader, network, configuration_dict['time_depth'] + 1, configuration_dict['stereo'])
# prepare tsne analysis
# -----
test_dataset_unoccluded = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/{}'.format('osmnist2_0occ'),
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform
)
test_subset_unoccluded = torch.utils.data.Subset(test_dataset_unoccluded, rep_sample)
test_loader_unoccluded = torch.utils.data.DataLoader(test_subset_unoccluded, batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
featu, imgu, taru, _ = generate_hidden_representation(test_loader_unoccluded, network, configuration_dict['time_depth'] + 1, configuration_dict['stereo'])
# hand the data to the visualization functions
# -----
highlights=[[1629,226],[516,909]] #[[1672,812,1629,226],[516,909]]
visualizer.plot_tsne_evolution2(
torch.cat([feat,featu], dim=0),
torch.cat([img,imgu], dim=0),
torch.cat([tar[:,0],taru], dim=0),
show_indices=False, N=highlights,
overwrite=False, filename='{}/fig6_tsne.pdf'.format(configuration_dict['visualization_dir']))
visualizer.plot_softmax_output(out, tar[:,0], img, filename='{}/fig5_softmax'.format(configuration_dict['visualization_dir']))
visualizer.plot_relative_distances(feat, tar, featu, taru, filename='{}/fig7_distance'.format(configuration_dict['visualization_dir']))
def first_layer_network_filters(network, test_transform, configuration_dict, sample_size, random_seed):
np.random.seed(random_seed)
test_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/{}'.format(configuration_dict['dataset']),
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform,
nhot_targets=True
)
rep_sample = list(np.random.choice(range(len(test_dataset)), size=sample_size, replace=False))
test_subset = torch.utils.data.Subset(test_dataset, rep_sample)
test_loader = torch.utils.data.DataLoader(test_subset, batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
fl_weights = network.rcnn.cell_list[0].bottomup.weight.detach()
# im_left = Image.open('/Users/markus/Desktop/JOVstuff/grating.png')
# im_left = ImageOps.grayscale(im_left)
# im_left = torch.from_numpy(np.array(im_left, dtype=np.float32))
#
# im_right = Image.open('/Users/markus/Desktop/JOVstuff/grating_shift1.png')
# im_right = ImageOps.grayscale(im_right)
# im_right = torch.from_numpy(np.array(im_right, dtype=np.float32))
#
# im_tensor = torch.stack([im_left, im_right], dim=0)
# im_tensor = torch.unsqueeze(im_tensor, 0)
# you need a batch of useful data to make sure the bn-values are useful
# im_tensor = im_tensor.repeat(1, configuration_dict['time_depth'] + 1, 1, 1, 1)
#combine it with the first X samples of the testloader
testbatch, _ = next(iter(test_loader))
if configuration_dict['stereo']:
testbatch = torch.cat(testbatch, dim=1)
testbatch = testbatch.unsqueeze(1)
testbatch = testbatch.repeat(1, configuration_dict['time_depth'] + 1, 1, 1, 1)
# testbatch[0] = im_tensor
activations = network.rcnn.return_activations(testbatch)
imgno = 0
for i in range(0,1):
for ti in range(configuration_dict['time_depth'] + 1):
show(torchvision.utils.make_grid(activations[ti][0][i].reshape(32,1,32,32), padding=2, normalize=False, scale_each=False))
#plt.savefig('/Users/markus/Desktop/activations/{:03d}.png'.format(imgno))
imgno += 1
#plt.close()
plt.show()
# for ti in range(configuration_dict['time_depth'] + 1):
# show(torchvision.utils.make_grid(activations[ti][1][i].reshape(32,1,16,16), padding=2, normalize=False, scale_each=False))
# plt.show()
if configuration_dict['stereo']:
show(torchvision.utils.make_grid(fl_weights.reshape(32,1,6,3), padding=2, normalize=True, scale_each=True))
else:
show(torchvision.utils.make_grid(fl_weights.reshape(32,1,3,3), padding=2, normalize=True, scale_each=True))
plt.show()
def fig_concentration(network, test_transform, configuration_dict, sample_size, random_seed):
# set random seed
np.random.seed(random_seed)
# get concentration of sensititivy (COS) dataset
cos_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/concentration_of_sensitivity/osmnist2r/',
train=False,
stereo=configuration_dict['stereo'],
transform=test_transform,
nhot_targets=True
)
rep_sample = list(np.random.choice(range(len(cos_dataset)), size=sample_size, replace=False))
cos_subset = torch.utils.data.Subset(cos_dataset, rep_sample)
cos_loader = torch.utils.data.DataLoader(cos_subset,
batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
# get rgb information dataset
# rgb dataset needs a transform without grayscale
rgb_dataset = StereoImageFolder(
root_dir=configuration_dict['input_dir'] + '/concentration_of_sensitivity/osmnist2r_rgb/',
train=False,
stereo=configuration_dict['stereo'],
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.,), (1.,))]),
nhot_targets=True
)
rgb_subset = torch.utils.data.Subset(rgb_dataset, rep_sample)
rgb_loader = torch.utils.data.DataLoader(rgb_subset,
batch_size=configuration_dict['batchsize'], shuffle=False, num_workers=4)
tp, tpp, op, opp, ovlp, ovlpp, bp, bpp = compare_concentration_mass(rgb_loader, cos_loader, network, configuration_dict['time_depth'] + 1, configuration_dict['stereo'])
visualizer.plot_concentration_mass(tp, op, ovlp, bp, filename='{}/fig8c_percentage.pdf'.format(configuration_dict['visualization_dir']))
visualizer.plot_concentration_mass(tpp, opp, ovlpp, bpp, filename='{}/fig8c_pixelpercentage.pdf'.format(configuration_dict['visualization_dir']))
pass
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
<file_sep># CAR_torch
## Occluded Object Recognition Codebase
<p align="center">
<img src="https://github.com/mrernst/CAR_torch/blob/master/img/OSCAR_fmnist.png" width="375">
CAR_torch stands for Convolutional Architectures with Recurrence, pytorch implementation. It is the codebase used for the journal publication "Recurrent Processing Improves Occluded Object Recognition and Gives Rise to Perceptual Hysteresis" [1].
If you make use of this code please cite as follows:
[1] **<NAME>., <NAME>., & <NAME>. (2021). Recurrent Processing Improves Occluded Object Recognition and Gives Rise to Perceptual Hysteresis. In Journal of Vision**
## Getting started with the repository
* Download the OSCAR version 2 datasets from Zenodo and put the in their respective folders in /datasets [](https://doi.org/10.5281/zenodo.4085133)
* Configure the config.py file
* Start an experiment on a slurm cluster using run_engine.py or on your local machine with engine.py
### Prerequisites
* [numpy](http://www.numpy.org/)
* [scipy](https://www.scipy.org/)
* [scikitlearn](http://scikit-learn.org/)
* [matplotlib](https://matplotlib.org/)
* [pytorch](https://www.pytorch.org/)
### Directory structure
```bash
.
├── datasets
│ ├── cifar10 # CIFAR10
│ ├── cifar100 # CIFAR100
│ ├── mnist # MNIST
│ ├── dynaMO # Dynamic Occluded MNIST (experimental)
│ ├── osfashionmnist2c # OS-fashion-MNIST
│ ├── osmnist2c # OS-MNIST
│ ├── osfashionmnist2r # OS-fashion-MNIST
│ ├── osmnist2r # OS-MNIST
│ ├── osycb2 # OS-YCB
├── network_engine
│ ├── utilities
│ │ ├── afterburner.py # Combines experiment files post-hoc
│ │ ├── dataset_handler.py # Pytorch dataloaders for different datasets
│ │ ├── helper.py # Helper functions
│ │ ├── metrics.py # Distance metrics for high-dim. analysis
│ │ ├── publisher.py # Create 'Paper-ready' plots
│ │ ├── visualizer.py # Visualization functions
│ │ ├── networks
│ │ │ ├── buildingblocks
│ │ │ │ ├── rcnn.py # Dynamic network for recurrent networks
│ │ │ │ ├── convlstm.py # Convolutional LSTM Networks (experimental)
│ ├── engine.py # Main Program
│ ├── config.py # Experiment Parameters
│ ├── run_engine.py # Setup and Run Experiments
├── experiments # Experiment saves
├── LICENSE # MIT License
├── README.md # ReadMe File
└── requirements.txt # conda/pip requirements
```
### Installation guide
#### Forking the repository
Fork a copy of this repository onto your own GitHub account and `clone` your fork of the repository into your computer, inside your favorite SORN folder, using:
`git clone "PATH_TO_FORKED_REPOSITORY"`
#### Setting up the environment
Install [Python 3.9](https://www.python.org/downloads/release/python-395/) and the [conda package manager](https://conda.io/miniconda.html) (use miniconda). Navigate to the project directory inside a terminal and create a virtual environment (replace <ENVIRONMENT_NAME>, for example, with `recurrentnetworks`) and install the [required packages](requirements.txt):
`conda create -n <ENVIRONMENT_NAME> --file requirements.txt python=3.7`
Activate the virtual environment:
`source activate <ENVIRONMENT_NAME>`
## License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details
<file_sep>#!/usr/bin/python
#
# Project Titan
# _____________________________________________________________________________
#
# _.oo.
# November 2020 _.u[[/;:,. .odMMMMMM'
# .o888UU[[[/;:-. .o@P^ MMM^
# afterburner.py oN88888UU[[[/;::-. dP^
# evaluation of experiments dNMMNN888UU[[[/;:--. .o@P^
# ,MMMMMMN888UU[[/;::-. o@^
# NNMMMNN888UU[[[/~.o@P^
# <NAME> 888888888UU[[[/o@^-..
# oI8888UU[[[/o@P^:--..
# .@^ YUU[[[/o@^;::---..
# oMP ^/o@P^;:::---..
# .dMMM .o@^ ^;::---...
# dMMMMMMM@^` `^^^^
# YMMMUP^
# ^^
# _____________________________________________________________________________
#
#
# Copyright 2021 <NAME>
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# _____________________________________________________________________________
# ----------------
# import libraries
# ----------------
# standard libraries
# -----
import torch
import numpy as np
import pandas as pd
import sys
import os
import pickle
import matplotlib.pyplot as plt
import seaborn as sns
import matplotlib.style as mplstyle
from matplotlib.ticker import MaxNLocator
from matplotlib import collections
from matplotlib.patches import Rectangle
from scipy import stats
from collections import namedtuple
from cycler import cycler
from tensorboard.backend.event_processing import event_accumulator
from tensorboard.backend.event_processing import event_multiplexer
# custom functions
# -----
def read_single_summary(path_to_tfevent, chist=0, img=0, audio=0, scalars=0,
hist=0):
ea = event_accumulator.EventAccumulator(path_to_tfevent, size_guidance={
event_accumulator.COMPRESSED_HISTOGRAMS: chist,
event_accumulator.IMAGES: img,
event_accumulator.AUDIO: audio,
event_accumulator.SCALARS: scalars,
event_accumulator.HISTOGRAMS: hist,
})
ea.Reload()
ea.Tags()
return ea
def read_multiple_runs(path_to_project, chist=0, img=0, audio=0, scalars=0,
hist=0):
# use with event_multiplexer (multiplexes different events together
# useful for retraining I guess...)
em = event_multiplexer.EventMultiplexer(size_guidance={
event_accumulator.COMPRESSED_HISTOGRAMS: chist,
event_accumulator.IMAGES: img,
event_accumulator.AUDIO: audio,
event_accumulator.SCALARS: scalars,
event_accumulator.HISTOGRAMS: hist,
})
em.AddRunsFromDirectory(path_to_project)
# load data
em.Reload()
return em
def convert_em_to_df(multiplexer):
# this needs to be better and be able to cope with different scales
# sort into training and testing/network
df_dict = {}
if len(multiplexer.Runs()) == 1:
# figure out separate runs progressively
entries = {}
for run in multiplexer.Runs().keys():
for tag in multiplexer.Runs()[run]["scalars"]:
if tag.split('/')[0] not in entries.keys():
entries[tag.split('/')[0]] = []
entries[tag.split('/')[0]].append(tag)
for run in entries:
run_df = pd.DataFrame()
for tag in entries[run]:
tag_df = pd.DataFrame(multiplexer.Scalars(list(multiplexer.Runs().keys())[0], tag))
tag_df = tag_df.drop(tag_df.columns[[0]], axis=1)
run_df[tag] = tag_df.value
run_df["step"] = tag_df.step
df_dict[run] = run_df
else:
for run in multiplexer.Runs().keys():
# create fresh empty dataframe
run_df = pd.DataFrame()
for tag in multiplexer.Runs()[run]["scalars"]:
tag_df = pd.DataFrame(multiplexer.Scalars(run, tag))
tag_df = tag_df.drop(tag_df.columns[[0]], axis=1)
run_df[tag] = tag_df.value
run_df["step"] = tag_df.step
df_dict[run] = run_df
return df_dict
# ----------------
# Afterburner Classes
# ----------------
class DataEssence(object):
"""docstring for DataEssence."""
def __init__(self):
super(DataEssence, self).__init__()
def write_to_file(self, filename):
with open(filename, 'wb') as output: # Overwrites any existing file.
pickle.dump(self.essence, output, pickle.HIGHEST_PROTOCOL)
def read_from_file(self, filename):
with open(filename, 'rb') as input:
self.essence = pickle.load(input)
def distill(self, path, evaluation_data, embedding_data=None):
self.essence = self._read_tfevents(path)
# needless_keys = [l for l in list(self.essence.keys()) if '/' in l]
# for key in needless_keys:
# self.essence.pop(key, None)
self.essence['evaluation'] = evaluation_data
if embedding_data:
self.essence['embedding'] = embedding_data
def _read_tfevents(self, path):
em = read_multiple_runs(path)
df = convert_em_to_df(em)
return df
def plot_essentials(self, savefile):
# start figure
fig, axes = plt.subplots(3, 3, sharex='all', figsize=[7, 7])
# plot images onto figure
self._plot_traintest_lcurve(axes)
# self._plot_timebased_lcurve(axes) # TODO: not functional for TITAN
self._plot_parameter_lcurve(fig)
# save figure
fig.suptitle(savefile.rsplit('/')[-1])
fig.savefig(savefile)
pass
def _plot_traintest_lcurve(self, axes):
training_acc = \
self.essence['training']['training/accuracy'].tolist()
training_partacc = \
self.essence['training']['training/accuracy']\
.tolist()
training_loss = \
self.essence['training']['training/loss'].tolist()
training_steps = \
self.essence['training']['step'].tolist()
testing_acc = \
self.essence['testing']['testing/accuracy'].tolist()
testing_partacc = \
self.essence['testing']['testing/accuracy'].tolist()
testing_loss = \
self.essence['testing']['testing/loss'].tolist()
testing_steps = \
self.essence['testing']['step'].tolist()
axes[0, 0].plot(training_steps, training_acc)
axes[0, 0].plot(testing_steps, testing_acc)
axes[0, 0].set_title('accuracy')
axes[0, 2].plot(training_steps, training_partacc)
axes[0, 2].plot(testing_steps, testing_partacc)
axes[0, 2].set_title('partial accuracy')
axes[0, 1].plot(training_steps, training_loss)
axes[0, 1].plot(testing_steps, testing_loss)
axes[0, 1].set_title('loss')
pass
def _plot_timebased_lcurve(self, axes):
# TODO: not yet functional, but we also don't have tensorboard logs for other timesteps
sorted_list_of_keys = self.essence['testing'].keys().tolist()
sorted_list_of_keys.sort()
timedepth = int(sorted_list_of_keys[-1].rsplit('_')[-1])
sorted_list_of_keys = \
[key for key in sorted_list_of_keys if 'testtime_beyond' in key]
sorted_list_of_keys = \
[key.rsplit('_', 1)[0] for key in sorted_list_of_keys]
sorted_list_of_keys = list(set(sorted_list_of_keys))
sorted_list_of_keys.sort()
for i in range(len(sorted_list_of_keys)):
for j in range(timedepth + 1):
axes[1, i].plot(self.essence['testing']['step'].tolist(),
self.essence['testing']['{}_{}'.format(
sorted_list_of_keys[i], j)].tolist(),
label=sorted_list_of_keys[i].split('/')[-1])
axes[1, i].set_title(sorted_list_of_keys[i].split('/')[-1])
pass
def _plot_parameter_lcurve(self, fig):
sorted_list_of_keys = self.essence['network'].keys().tolist()
sorted_list_of_keys.sort()
sorted_list_of_keys = \
[key for key in sorted_list_of_keys if 'network' in key]
sorted_list_of_keys = \
[key.rsplit('/', 1)[0].split('/', 1)[1] for key in sorted_list_of_keys]
sorted_list_of_keys = list(set(sorted_list_of_keys))
sorted_list_of_keys = \
[key for key in sorted_list_of_keys if '_weights' in key]
sorted_list_of_keys.sort()
# for i in range(len(sorted_list_of_keys)):
for i, ax in enumerate(fig.axes[3:]):
if i < len(sorted_list_of_keys):
ax.plot(
self.essence['network']['step'].tolist(),
self.essence['network']['network/{}/mean'.format(
sorted_list_of_keys[i])].tolist(),
label=sorted_list_of_keys[i].split('/')[1],
color='tab:blue')
ax.plot(
self.essence['network']['step'].tolist(),
self.essence['network']['network/{}/median'.format(
sorted_list_of_keys[i])].tolist(),
label=sorted_list_of_keys[i].split('/')[1],
color='tab:blue',
linestyle='dashed')
ax.plot(
self.essence['network']['step'].tolist(),
self.essence['network']['network/{}/min'.format(
sorted_list_of_keys[i])].tolist(),
label=sorted_list_of_keys[i].split('/')[1],
color='tab:blue')
ax.plot(
self.essence['network']['step'].tolist(),
self.essence['network']['network/{}/max'.format(
sorted_list_of_keys[i])].tolist(),
label=sorted_list_of_keys[i].split('/')[1],
color='tab:blue')
ax.fill_between(
self.essence['network']['step'],
self.essence['network']['network/{}/mean'.format(
sorted_list_of_keys[i])] +
self.essence['network']['network/{}/std'.format(
sorted_list_of_keys[i])],
self.essence['network']['network/{}/mean'.format(
sorted_list_of_keys[i])] -
self.essence['network']['network/{}/std'.format(
sorted_list_of_keys[i])],
color='tab:blue',
alpha=0.3
)
ax.set_title(
sorted_list_of_keys[i])
pass
# TODO: write these visualizer functions / look at the ICANN code for inspiration
def visualize_softmax_output(self):
pass
def vizualize_tsne_analysis(self):
pass
class EssenceCollection(object):
"""docstring for EssenceCollection."""
def __init__(self, remove_files=False):
super(EssenceCollection, self).__init__()
path_to_file = os.path.realpath(__file__)
self.path_to_experiment = path_to_file.rsplit('/', 3)[0]
self.collection = self.collect_data_essences(
self.path_to_experiment, remove_files)
def collect_data_essences(self, path_to_experiment, remove_files):
# gather and read all files in files/essence/
collection = {}
essence = DataEssence()
path_to_data = path_to_experiment + "/data/"
for file in os.listdir(path_to_data):
if file.endswith(".pkl") and file.startswith("conf"):
config_name = file.split('.')[0].rsplit('i', 1)[0]
iteration_number = file.split('.')[0].rsplit('i', 1)[-1]
essence.read_from_file(os.path.join(path_to_data, file))
if config_name in collection.keys():
collection[config_name][iteration_number] = essence.essence
else:
collection[config_name] = {}
collection[config_name][iteration_number] = essence.essence
# delete file
if remove_files:
os.remove(os.path.join(path_to_data, file))
print("[INFO] File '{}' deleted.".format(file))
else:
pass
return collection
def write_to_file(self, filename):
with open(filename, 'wb') as output: # Overwrites any existing file.
pickle.dump(self.collection, output, pickle.HIGHEST_PROTOCOL)
def read_from_file(self, filename):
with open(filename, 'rb') as input:
self.collection = pickle.load(input)
def _plot_barplot_comparison(self, filename):
configurations_to_plot = "all"
# mplstyle.use('default')
rects = []
n_groups = len(self.collection.keys())
n_iterations = 100 # high number so that true value is smaller
for configuration in self.collection.keys():
n_iterations = min(n_iterations,
len(self.collection[configuration].keys()))
n_bars = 1
data = np.zeros([n_iterations, n_bars, n_groups])
means = np.zeros([n_bars, n_groups])
stderror = np.zeros([n_bars, n_groups])
mcnemar_data = {}
ev_it = 1
list_of_configurations = list(self.collection.keys())
list_of_configurations.sort()
for j, configuration in enumerate(list_of_configurations):
list_of_iterations = list(
self.collection['config{}'.format(j)].keys())
list_of_iterations.sort()
for it, iteration in enumerate(list_of_iterations):
data[it, 0, j] = 1 - \
self.collection[configuration][iteration]['testing']['testing/accuracy'].values[-1]
mcnemar_data[it, 0, j] = \
self.collection[configuration][iteration]['evaluation']['boolean_classification']
chancelevel = 1 - 1. / 10.
means = np.mean(data, axis=0)
stderror = np.std(data, axis=0) / np.sqrt(n_iterations)
# printout
# -----
print((means).round(3))
print((stderror).round(3))
fig_width = 8.0 / 6 * n_groups if n_groups > 4 else 6.0
fig_height = 6.0
fig, ax = plt.subplots(figsize=[fig_width, fig_height])
index = np.arange(n_groups)
bar_width = 0.8
opacity = 1
error_config = {'ecolor': '0.3'}
current_palette = sns.dark_palette(
sns.color_palette('colorblind')[0], 5, reverse=True)
current_palette = sns.dark_palette(
sns.color_palette('Set3')[0], 5, reverse=True)
current_palette = sns.dark_palette(
'LightGray', 5, reverse=True)
current_palette = sns.dark_palette(
'Black', 5, reverse=False)
for k in range(n_bars):
rects.append(ax.bar(index + k * bar_width, means[k], bar_width,
alpha=opacity,
yerr=stderror[k], error_kw=error_config,
label='{}'.format('configuration'),
linewidth=0, color=current_palette[k]))
# , edgecolor=current_palette[(k+1)],
# hatch=hatches[k]))
ax.set_xlabel('Network configuration')
ax.set_ylabel('Error rate')
ax.set_title('experiment: {}'.format(
self.path_to_experiment.rsplit('/')[-1]), fontsize=10)
ax.set_ylim([0, ax.get_ylim()[1] * 1]) # *1.25])
ax.set_xticks(index + bar_width * (n_bars / 2. - 0.5))
# ax.set_xticklabels([arch[:-1] for arch in ARCHARRAY])
ax.legend(frameon=True, facecolor='white',
edgecolor='white', framealpha=1.0)
ax.grid(axis='y', zorder=0, alpha=0.5)
ax.set_axisbelow(True)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=True) # labels along the bottom edge are off
ax.spines['left'].set_visible(False)
ax.axhline(y=chancelevel, xmin=0, xmax=6, color='black')
ax.text(0.2, chancelevel + 0.005, r'chancelevel')
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
# Put a legend to the right of the current axis
ax.legend(frameon=True, facecolor='white', edgecolor='white',
framealpha=1.0, bbox_to_anchor=(1, .9), loc='center left')
# mcnemar's table
for i in range(n_bars):
qstar = 0.05
pval = np.ones([n_groups * n_bars, n_groups * n_bars])
chi_table = np.ones([n_groups * n_bars, n_groups * n_bars])
significance_table = np.zeros(
[n_groups * n_bars, n_groups * n_bars])
vertexlist = []
for k in range(n_groups * n_bars):
for j in range(n_groups * n_bars):
if k != j and k > j:
mcnemar_table = 2 * \
mcnemar_data[(ev_it - 1), i, k] - \
mcnemar_data[(ev_it - 1), i, j]
# i.e. nhot encoding
if (len(mcnemar_data[(ev_it - 1), i, k].shape) > 1):
ak = np.sum(
np.array(
[e for e in mcnemar_table == 1.]), axis=1)
bk = np.sum(
np.array(
[e for e in mcnemar_table == 2.]), axis=1)
ck = np.sum(
np.array(
[e for e in mcnemar_table == -1.]), axis=1)
dk = np.sum(
np.array(
[e for e in mcnemar_table == 0]), axis=1)
chi_sq_denom = np.sum(
((bk - ck) / mcnemar_table.shape[-1]))**2
chi_sq_count = np.sum(
((bk - ck) / mcnemar_table.shape[-1])**2)
chi_sq = chi_sq_denom / chi_sq_count
else:
a = mcnemar_table[mcnemar_table == 1.].shape[0]
b = mcnemar_table[mcnemar_table == 2.].shape[0]
c = mcnemar_table[mcnemar_table == -1.].shape[0]
d = mcnemar_table[mcnemar_table == 0].shape[0]
try:
chi_sq = ((b - c)**2) / (b + c)
except(ZeroDivisionError):
chi_sq = 1.
# stats.chi2.pdf(chi_sq , 1) #
pval[k, j] = 1 - stats.chi2.cdf(chi_sq, 1)
chi_table[k, j] = chi_sq
print(np.round(pval, 4))
print(np.round(chi_table, 4))
sorted_pvals = np.sort(pval[pval < 1])
bjq = np.arange(1, len(sorted_pvals) + 1) / \
len(sorted_pvals) * qstar
for k in range(n_groups * n_bars):
for j in range(n_groups * n_bars):
if k != j and k > j:
if pval[k, j] in sorted_pvals[sorted_pvals - bjq <= 0]:
significance_table[k, j] = 1
if j < 3 and k > 2 and (means[i][j] < means[i][k]):
vertexlist.append(
[[j + 0.25, k - 0.5],
[j + 0.5, k - 0.25],
[j + 0.5, k - 0.5]])
else:
significance_table[k, j] = 0
ax = plt.axes([0.8, 0.3, .2, .2])
ax.matshow(significance_table, cmap='Greys')
# ax.set_xticklabels(, fontsize=8)#fontsize=65)
ax.tick_params(labelbottom='on', labeltop='off',
top='off', right='off')
# ax.set_yticklabels(, fontsize=8)#fontsize=65)
# ax.set_xlim([-0.5, n_groups * n_bars - 1 - 0.5])
# ax.set_ylim([n_groups * n_bars - 0.5, -0.5 + 1])
ax.set_xlim([-0.5, n_groups * n_bars - 0.5])
ax.set_ylim([n_groups * n_bars - 0.5, -0.5 ])
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
#ax.axhline(y=2.5, xmin=-0.5, xmax=2.5, color='white', linewidth=1)
#ax.axvline(x=2.5, ymin=-0.5, ymax=2.5, color='white', linewidth=1)
for vertices in vertexlist:
pc = collections.PolyCollection(
(vertices,), color='white', edgecolor="none")
ax.add_collection(pc)
ax.add_patch(Rectangle((-2.5, 6.9), 1, 1, fill='black',
color='black', alpha=1, clip_on=False))
ax.text(-2.5, 9.4, 'Significant difference \n(two-sided McNemar ' + \
'test, \nexpected FDR=0.05)',
fontsize=8, horizontalalignment='left',
verticalalignment='center')
# plt.show()
plt.savefig(filename)
fig, ax = plt.subplots(figsize=(12, 12))
ax.matshow(significance_table, cmap='Greys')
ax.set_xticklabels([arch for arch in list_of_configurations], fontsize=65)
ax.tick_params(labelbottom='on', labeltop='off',
top='off', right='off')
ax.set_yticklabels(
[arch for arch in list_of_configurations], fontsize=65)
# ax.set_xlim([-0.5, n_groups * n_bars - 1 - 0.5])
# ax.set_ylim([n_groups * n_bars - 0.5, -0.5 + 1])
ax.set_xlim([-0.5, n_groups * n_bars - 0.5])
ax.set_ylim([n_groups * n_bars - 0.5, -0.5 ])
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
#ax.axhline(y=2.5, xmin=-0.5, xmax=2.5, color='white')
#ax.axvline(x=2.5, ymin=-0.5, ymax=2.5, color='white')
for vertices in vertexlist:
pc = collections.PolyCollection(
(vertices,), color='white', edgecolor="none")
ax.add_collection(pc)
# ax.set_ylabel('Network Architecture')
plt.tight_layout()
plt.savefig(filename.rsplit('.', 1)[0] + '_sigtable.pdf')
if __name__ == "__main__":
# import argparse
# parser = argparse.ArgumentParser()
# parser.add_argument("-c", "--config", type=str)
#
# args = parser.parse_args()
print('[INFO] afterburner running, collecting data')
ess_coll = EssenceCollection(remove_files=False) # TODO: Change back to True once this works reliably
ess_coll._plot_barplot_comparison(
ess_coll.path_to_experiment +
'/visualization/{}_comparision.pdf'.format(
ess_coll.path_to_experiment.rsplit('/')[-1]))
ess_coll.write_to_file(
ess_coll.path_to_experiment +
'/data/{}.pkl'.format(
ess_coll.path_to_experiment.rsplit('/')[-1]))
# _____________________________________________________________________________
# -----------------
# top-level comment
# -----------------
# medium level comment
# -----
# low level comment
| e30c8c2cc4f74aef185e703743818714c2b0cd18 | [
"Markdown",
"Python",
"Text"
] | 12 | Python | mrernst/CAR_torch | b98d2d2bc53c5a712ffb7fddf2a2ad9a40566a79 | 0053c2a2fac263da1fa6ec38a4e837170cdb6702 |
refs/heads/master | <repo_name>sven199109/montreal<file_sep>/lib/generators/montreal/install/install_generator.rb
module Montreal
  module Generators
    # Rails generator that installs Montreal into the host application.
    # Invoked as `rails g montreal:install NAME`, it writes an initializer
    # and a sample redis connection-pool config, both named after NAME.
    class InstallGenerator < Rails::Generators::NamedBase
      source_root File.expand_path("templates", __dir__)

      # Render config/initializers/<name>.rb from the ERB template and
      # copy the bundled redis.yml sample to config/<name>.yml.
      def create_install_file
        initializer_target = "config/initializers/#{file_name}.rb"
        config_target = "config/#{file_name}.yml"
        template "montreal.rb", initializer_target
        copy_file "redis.yml", config_target
      end
    end
  end
end
<file_sep>/README.md
# Montreal
Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/montreal`. To experiment with that code, run `bin/console` for an interactive prompt.
`montreal` is a gem for using a Redis cache through a connection pool.
## Installation
Add this line to your application's Gemfile:
```ruby
gem 'montreal'
```
And then execute:
$ bundle
Or install it yourself as:
$ gem install montreal
## Usage
```ruby
rails g montreal:install CONFIG_NAME
```
This will generate two files named `config/CONFIG_NAME.yml` and `config/initializers/CONFIG_NAME.rb`.
You can see that there is a config for redis in your `config/CONFIG_NAME.yml` named `cache`, this will generate a method in the class `Montreal`, so you can use the code `Montreal.cache` to return a redis connection pool.
If you are trying to get a connection, try that `Montreal.cache.with { |cli| ... }`, the `cli` is a connection that you can use.
Also you can add more config in `config/CONFIG_NAME.yml`, only to ensure that the format is like `cache`, you can also configure nothing about host, port and so on, but you should finger out a connection pool name like `cache` to generate a method `Montreal.YOUR_CONNECTION_POOL`.
## Development
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/sven199109/montreal. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
## License
The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
<file_sep>/Gemfile
source 'https://ruby.taobao.org'
gem 'connection_pool'
gem 'redis'
# Specify your gem's dependencies in montreal.gemspec
gemspec
<file_sep>/lib/generators/montreal/install/templates/montreal.rb
<%# Initializer template rendered by montreal:install: points Montreal at
    the generated YAML config and builds the connection pools at boot.
    This ERB comment is stripped from the rendered file. -%>
<% module_namespacing do -%>
Montreal.config_path = Rails.root.join("config", "<%= file_name %>.yml")
Montreal.load
<% end -%>
<file_sep>/lib/montreal.rb
require "montreal/version"
require "rails/all"
require 'connection_pool'
require 'redis'
require "montreal/engine"
# Defines one lazily-built Redis connection pool per entry in the YAML
# config file: a `cache` entry yields a `Montreal.cache` accessor.
module Montreal
  class << self
    # Callers may point this at any YAML file before calling +load+.
    attr_writer :config_path

    # Path to the YAML config; defaults to the bundled template.
    def config_path
      @config_path ||= File.expand_path("../generators/montreal/install/templates/redis.yml", __FILE__)
    end

    # Reads the config section for the current Rails environment and
    # defines one singleton accessor per pool name. Each accessor builds
    # its ConnectionPool of Redis clients on first use and memoizes it.
    def load
      # YAML.load_file opens *and closes* the file; the previous
      # YAML.load(File.new(...)) leaked the file handle until GC.
      config = YAML.load_file(::Montreal.config_path)[Rails.env]
      config.each do |name, options|
        options = {} if options.blank?
        pool_size = options["pool"] || 5
        timeout = options["timeout"] || 5
        define_singleton_method(name) do
          pool = instance_variable_get("@#{name}")
          unless pool
            pool = ::ConnectionPool.new(size: pool_size, timeout: timeout) do
              options["host"] ||= 'localhost'
              options["port"] ||= 6379
              Redis.new(options)
            end
            instance_variable_set("@#{name}", pool)
          end
          pool
        end
      end
    end
  end
end
<file_sep>/lib/montreal/engine.rb
# NOTE(review): the file is named engine.rb but this defines a Railtie,
# not a full Rails::Engine.
module Montreal
  class Railtie < ::Rails::Railtie
    # Registered to run before the 'montreal' initializer. The automatic
    # load is deliberately left disabled: applications call Montreal.load
    # from the initializer generated by the install generator instead.
    initializer 'montreal_load', before: 'montreal' do
      #::Montreal.load
    end
  end
end
| 714d4ea1eab95f0988f45cf623bc23b7a169b062 | [
"Markdown",
"Ruby"
] | 6 | Ruby | sven199109/montreal | 8c1c552e42c0fbb3c019459e0d36a5d47682d7d5 | 4896ccbd220df220e0798302cd1889c27fbef2a3 |
refs/heads/master | <file_sep>Boroda
======
Boroda is a tiny library for SQL SELECT query generation. The library provides a DSL which is as close as possible to SQL. Just look at the code:
require 'boroda'
sql = Boroda.build do
from :posts, :users
select posts.*
where (posts.author_id == users.id) & (users.name == '<NAME>')
end
The result:
SELECT posts.*
FROM posts, users
WHERE (posts.author_id = users.id) AND (users.name = '<NAME>')
As you see we are writing SQL queries in a pure ruby. Let's try to make something a little bit more complex.
min_rating = 5
sql = Boroda.build do
from :posts => :p
left join :comments => :c
on c.post_id == p.id
select p.id, p.title, p.content, c.id.count => :comment_count
group by p.id
where (p.title.like '%programming%') | # select all posts containing 'programming' in the title
(p.rating > min_rating) # or having the rating greater than 5
order by p.created_at.desc
limit 10
offset 20
end
The result:
SELECT p.id, p.title, p.content, COUNT(c.id) AS comment_count
FROM posts AS p
LEFT JOIN comments AS c
ON c.post_id = p.id
WHERE (p.title LIKE '%programming%') OR (p.rating > 5)
GROUP BY p.id
ORDER BY p.created_at DESC
LIMIT 10
OFFSET 20
Now let's see in general how to write queries using Boroda. Due to some technical limitations it was necessary to change the order of SQL statements. The `from` method must be called first. A table name should be a symbol. You can specify aliases of tables by passing a hash to the method, as is done in the second code snippet. Next you should specify `join`. The order in which you can call DSL methods:
from tables
[[left|right] [outer|inner] join tables
on condition | using columns
[..]]
[select columns]
[ where condition
| group by columns
| having condition
| order by columns
| limit number
| offset number ]*
In other words, you can call all methods from the last group in any order. Boroda will take care of building a correct SQL query.
Next operators which are used in `condition` in `where` and `having` clauses have the same meaning which they have in SQL:
+, -, *, /, >, <, >=, <=.
Due to some limitations in Ruby on operator overloading several operators vary from their SQL originals:
a == b # => a = b
a <=> b # => a <> b
(a) & (b) # => (a) AND (b)
(a) | (b) # => (a) OR (b)
**Warning!** It is absolutely necessary to use brackets around the operands in the last two cases. Otherwise you can get unexpected results. This is because these two operators have a very high priority in Ruby.
I don't recommend to use Boroda in production as far as it could be vulnerable to SQL injections.
By the way, boroda (борода) is the Russian for 'beard'.
<file_sep>require 'boroda'
puts '- ' * 40
sql = Boroda.build do
from :posts
select :id, :name
where (id == 5) & (name == "q'we")
end
puts sql
puts '- ' * 40
sql = Boroda.build do
from :posts, :users
select posts.*
where (posts.author_id == users.id) & (users.name == '<NAME>')
end
puts sql
puts '- ' * 40
# A more complex example: join, aggregate alias, grouping and paging.
sql = Boroda.build do
  from :posts => :p
  left join :comments => :c
  on c.post_id == p.id
  select p.id, p.title, p.content, c.id.count => :comment_count
  group by p.id
  where (p.title.like '%programming%') | # select all posts containing 'programming' in the title
        (p.rating > 5)                  # or with a rating greater than 5
  order by p.created_at.desc
  limit 10
  offset 20
end
puts sql
puts '- ' * 40
max_count = 7
sql = Boroda.build do
from :posts, :accounts => :a, :users => :u
left join :avatars, :secret_passwords => :pass
on posts.id == avatars.post_id
select posts.title, a.address.count => :adr, posts.name.count => :p_name,
(posts.rating.max + 5) => :max_posts_rating,
posts.price.avg => :avg_price,
func(:concat, posts.name, ' * ', posts.last_name) => :full_name
where (a.max_rate > max_count + 6) | (avg_price <=> 100.5) & (a.id <=> [1, 5, 9, 7, nil])
order by posts.name, a.price.desc
limit 5
end
puts sql
puts '- ' * 40
current_user_id = 5
PostsLimit = 10
page = 7
sql = Boroda.build do
from :posts => :p
left inner join :comments => :c
on p.id == c.post_id
select p.id, p.title, p.content, c.id.count => :comment_count
group by p.id
where (p.user_id == current_user_id) & (p.active == true) & (func(:year, p.created_at) <=> 2008) & p.title.like('ruby%')
having comment_count > 0
order by p.created_at.desc
limit PostsLimit
offset PostsLimit * (page - 1)
end
puts sql
| 8e6adef39b26e02ebad69148d58024302de2655e | [
"Markdown",
"Ruby"
] | 2 | Markdown | dmitryck/boroda | 0e07deb1192ceef893118ddedd5ecaf9c3085f7f | 0f9c2af5120de17c67d4deeb07d5ea14f131d91f |
refs/heads/master | <repo_name>AlexNava/sketchbook<file_sep>/SerialTest/SerialTest.ino
// Serial line protocol buffers: commands arrive as newline-terminated text.
const int INBUFSIZE = 64;   // receive-buffer capacity, including the NUL terminator
const int OUTBUFSIZE = 64;  // reserved for outgoing messages (not used yet)
char inBuf[INBUFSIZE] = ""; // accumulates the current incoming line
char inBufPtr = 0;          // next write index into inBuf
                            // NOTE(review): char works since INBUFSIZE <= 127,
                            // but an int/uint8_t index would be clearer
// Drain every pending serial byte into inBuf. Carriage returns are
// stripped; a newline terminates the command and resets the write index.
// Returns true when a complete line is sitting in inBuf (inBufPtr == 0).
// NOTE(review): if more bytes follow the newline within the same drain,
// they start overwriting the just-completed line — same as the original.
bool dequeueSerial()
{
  while (Serial.available() > 0)
  {
    char incoming;
    Serial.readBytes(&incoming, 1);

    if (incoming == '\r')
      continue;                   // strip carriage returns

    if (incoming == '\n')
    {
      inBuf[inBufPtr] = '\0';     // terminate the command string
      inBufPtr = 0;
      continue;
    }

    // Regular byte: store it unless the buffer is full (the last slot
    // is reserved for the terminator); overflow bytes are dropped.
    if (inBufPtr < INBUFSIZE - 1)
      inBuf[inBufPtr++] = incoming;
  }
  return (inBufPtr == 0);
}
// One-time board initialization: pin directions and the serial link.
void setup() {
  pinMode(13, OUTPUT);  // typically the onboard LED (not driven yet)
  pinMode(12, INPUT);
  // initialize serial communication
  Serial.begin(115200);
}
// Handshake then telemetry: broadcast "YO" once a second until the host
// replies with a "GO" line, then stream raw A0 readings.
void loop() {
  // Keep looping here until the init string is received on the serial port
  // meanwhile, keep sending a keepalive string
  bool bInit = false;
  while (!bInit)
  {
    Serial.write("YO");
    delay(1000);
    if (dequeueSerial()
      && (inBuf[0] != 0)
      && (strcmp(inBuf, "GO") == 0))
      bInit = true;
  }
  // Wait for queries and setup
  // NOTE(review): bSetup is never set to true, so this telemetry loop runs
  // forever (placeholder for the query/setup phase); loop() effectively
  // executes only once.
  bool bSetup = false;
  while (!bSetup)
  {
    int sensorValue = analogRead(A0);
    // print out the value you read:
    Serial.println(sensorValue);
    delay(1);
  }
}
| c9c7935a5800478a9243b54cfe57401553252310 | [
"C++"
] | 1 | C++ | AlexNava/sketchbook | c8f890e87325ee97b845b44c143b7920df015157 | af8e4a18e295467d8b1d704212e0dc19e8e34f78 |
refs/heads/master | <repo_name>M-Porter/budgeter-mobile<file_sep>/www/js/ember-templates.js
Ember.TEMPLATES["application"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
var buffer = '', stack1, helper, options, escapeExpression=this.escapeExpression, helperMissing=helpers.helperMissing, self=this;
function program1(depth0,data) {
var buffer = '';
data.buffer.push("\n <div ");
data.buffer.push(escapeExpression(helpers.action.call(depth0, "moveSlider", "0", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0],types:["STRING","STRING"],data:data})));
data.buffer.push(">\n <span class=\"icon\"><i class=\"fa fa-usd\"></i></span>\n <span class=\"tab-label\">Budget</span>\n </div>\n ");
return buffer;
}
function program3(depth0,data) {
var buffer = '';
data.buffer.push("\n <div ");
data.buffer.push(escapeExpression(helpers.action.call(depth0, "moveSlider", "100", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0],types:["STRING","STRING"],data:data})));
data.buffer.push(">\n <span class=\"icon\"><i class=\"fa fa-money\"></i></span>\n <span class=\"tab-label\">Spent</span>\n </div>\n ");
return buffer;
}
function program5(depth0,data) {
var buffer = '';
data.buffer.push("\n <div ");
data.buffer.push(escapeExpression(helpers.action.call(depth0, "moveSlider", "200", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0],types:["STRING","STRING"],data:data})));
data.buffer.push(">\n <span class=\"icon\"><i class=\"fa fa-clock-o\"></i></span>\n <span class=\"tab-label\">History</span>\n </div>\n ");
return buffer;
}
function program7(depth0,data) {
var buffer = '';
data.buffer.push("\n <div ");
data.buffer.push(escapeExpression(helpers.action.call(depth0, "moveSlider", "300", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0],types:["STRING","STRING"],data:data})));
data.buffer.push(">\n <span class=\"icon\"><i class=\"fa fa-bar-chart-o\"></i></span>\n <span class=\"tab-label\">Visualize</span>\n </div>\n ");
return buffer;
}
function program9(depth0,data) {
var buffer = '';
data.buffer.push("\n <div ");
data.buffer.push(escapeExpression(helpers.action.call(depth0, "moveSlider", "400", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0],types:["STRING","STRING"],data:data})));
data.buffer.push(">\n <span class=\"icon\"><i class=\"fa fa-cog\"></i></span>\n <span class=\"tab-label\">Settings</span>\n </div>\n ");
return buffer;
}
data.buffer.push("<header class=\"bar bar-nav\" style=\"background: #dedede;\">\n <h1 class=\"title\" style=\"color: #555; font-size: 1.5em;\">Budgeter</h1>\n <div class=\"budgeter-bar\">\n <div class=\"f\" ");
data.buffer.push(escapeExpression((helper = helpers.bindAttr || (depth0 && depth0.bindAttr),options={hash:{
'style': ("aBarWidth")
},hashTypes:{'style': "STRING"},hashContexts:{'style': depth0},contexts:[],types:[],data:data},helper ? helper.call(depth0, options) : helperMissing.call(depth0, "bindAttr", options))));
data.buffer.push("></div>\n <div class=\"h\" ");
data.buffer.push(escapeExpression((helper = helpers.bindAttr || (depth0 && depth0.bindAttr),options={hash:{
'style': ("bBarWidth")
},hashTypes:{'style': "STRING"},hashContexts:{'style': depth0},contexts:[],types:[],data:data},helper ? helper.call(depth0, options) : helperMissing.call(depth0, "bindAttr", options))));
data.buffer.push("></div>\n <div class=\"p\" ");
data.buffer.push(escapeExpression((helper = helpers.bindAttr || (depth0 && depth0.bindAttr),options={hash:{
'style': ("cBarWidth")
},hashTypes:{'style': "STRING"},hashContexts:{'style': depth0},contexts:[],types:[],data:data},helper ? helper.call(depth0, options) : helperMissing.call(depth0, "bindAttr", options))));
data.buffer.push("></div>\n <div class=\"a\" ");
data.buffer.push(escapeExpression((helper = helpers.bindAttr || (depth0 && depth0.bindAttr),options={hash:{
'style': ("dBarWidth")
},hashTypes:{'style': "STRING"},hashContexts:{'style': depth0},contexts:[],types:[],data:data},helper ? helper.call(depth0, options) : helperMissing.call(depth0, "bindAttr", options))));
data.buffer.push("></div>\n <div class=\"o\" ");
data.buffer.push(escapeExpression((helper = helpers.bindAttr || (depth0 && depth0.bindAttr),options={hash:{
'style': ("eBarWidth")
},hashTypes:{'style': "STRING"},hashContexts:{'style': depth0},contexts:[],types:[],data:data},helper ? helper.call(depth0, options) : helperMissing.call(depth0, "bindAttr", options))));
data.buffer.push("></div>\n </div>\n</header>\n\n<div class=\"content\" style=\"padding-bottom: 48px; padding-top: 48px;\">\n ");
stack1 = helpers._triageMustache.call(depth0, "outlet", {hash:{},hashTypes:{},hashContexts:{},contexts:[depth0],types:["ID"],data:data});
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n</div>\n\n<div id=\"slider-bar\" class=\"bottom-floater\"></div>\n<nav class=\"bar bar-tab\" style=\"background: #dedede; border-top: 1px solid rgba(0,0,0,0.20);\">\n ");
stack1 = (helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{
'class': ("tab-item")
},hashTypes:{'class': "STRING"},hashContexts:{'class': depth0},inverse:self.noop,fn:self.program(1, program1, data),contexts:[depth0],types:["STRING"],data:data},helper ? helper.call(depth0, "index", options) : helperMissing.call(depth0, "link-to", "index", options));
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n ");
stack1 = (helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{
'class': ("tab-item")
},hashTypes:{'class': "STRING"},hashContexts:{'class': depth0},inverse:self.noop,fn:self.program(3, program3, data),contexts:[depth0],types:["STRING"],data:data},helper ? helper.call(depth0, "spent", options) : helperMissing.call(depth0, "link-to", "spent", options));
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n ");
stack1 = (helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{
'class': ("tab-item")
},hashTypes:{'class': "STRING"},hashContexts:{'class': depth0},inverse:self.noop,fn:self.program(5, program5, data),contexts:[depth0],types:["STRING"],data:data},helper ? helper.call(depth0, "history", options) : helperMissing.call(depth0, "link-to", "history", options));
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n ");
stack1 = (helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{
'class': ("tab-item")
},hashTypes:{'class': "STRING"},hashContexts:{'class': depth0},inverse:self.noop,fn:self.program(7, program7, data),contexts:[depth0],types:["STRING"],data:data},helper ? helper.call(depth0, "visuals", options) : helperMissing.call(depth0, "link-to", "visuals", options));
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n ");
stack1 = (helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{
'class': ("tab-item")
},hashTypes:{'class': "STRING"},hashContexts:{'class': depth0},inverse:self.noop,fn:self.program(9, program9, data),contexts:[depth0],types:["STRING"],data:data},helper ? helper.call(depth0, "settings", options) : helperMissing.call(depth0, "link-to", "settings", options));
if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
data.buffer.push("\n</nav>");
return buffer;
});
Ember.TEMPLATES["history"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
data.buffer.push("<h3>History</h3>");
});
Ember.TEMPLATES["index"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
data.buffer.push("<h3>Index</h3>");
});
Ember.TEMPLATES["settings"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
data.buffer.push("<h3>Settingd</h3>");
});
Ember.TEMPLATES["spent"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
data.buffer.push("<h3>Spent</h3>");
});
Ember.TEMPLATES["visuals"] = Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
data.buffer.push("<h3>Visuals</h3>");
});<file_sep>/www/js/ember-app.js
App = Ember.Application.create();
App.Router.map( function() {
this.route('index', { path: '/'});
this.route('spent', { path: '/spent'});
this.route('history', { path: '/history'});
this.route('visuals', { path: '/visuals'});
this.route('settings', { path: '/settings'});
});
/********************
Ember Routes
*********************/
App.IndexRoute = Ember.Route.extend({
model: function() {
return ['red', 'yellow', 'blue'];
}
});
/********************
Ember Controllers
*********************/
// Controls the colored tab-indicator bar above the content area: the bar
// segment for the active tab is widened (40%) while the other four share
// an equal inactive width (15%).
App.ApplicationController = Ember.Controller.extend({
    aBarWidth: 'width: ' + 40 + '%',
    bBarWidth: 'width: ' + 15 + '%',
    cBarWidth: 'width: ' + 15 + '%',
    dBarWidth: 'width: ' + 15 + '%',
    eBarWidth: 'width: ' + 15 + '%',

    actions: {
        // pos is the slider offset as a string ('0', '100', ... '400'),
        // so the active tab index is pos / 100.
        moveSlider: function(pos) {
            var activeWidth = 40;
            var inactiveWidth = 15;
            slider(pos);

            var bars = ['a', 'b', 'c', 'd', 'e'];
            var activeIndex = parseInt(pos, 10) / 100;
            // Preserve the original switch's behavior of ignoring any
            // position that is not one of the five known offsets.
            if (isNaN(activeIndex) || activeIndex % 1 !== 0 ||
                activeIndex < 0 || activeIndex >= bars.length) {
                return;
            }
            // One loop replaces five near-identical switch branches.
            for (var i = 0; i < bars.length; i++) {
                var width = (i === activeIndex) ? activeWidth : inactiveWidth;
                this.set(bars[i] + 'BarWidth', 'width: ' + width + '%');
            }
        }
    }
});
/********************
Ember Views
*********************/
App.IndexView = Ember.View.extend({
templateName: 'index'
});
App.SpentView = Ember.View.extend({
templateName: 'spent'
});
App.HistoryView = Ember.View.extend({
templateName: 'history'
});
App.VisualsView = Ember.View.extend({
templateName: 'visuals'
});
App.SettingsView = Ember.View.extend({
templateName: 'settings'
});<file_sep>/Gruntfile.js
module.exports = function(grunt) {
grunt.initConfig({
emberTemplates: {
compile: {
options: {
templateFileExtensions: /\.hbs/,
amd: false,
concatenate: true,
precompile: true,
templateBasePath: /www\/js\/templates\//,
},
files: {
"www/js/ember-templates.js": ["www/js/templates/*.hbs"]
}
}
},
'http-server': {
'dev': {
root: 'www/',
port: 1337,
host: "0.0.0.0",
defaultExt: "html",
runInBackground: true
}
},
watch: {
emberTemplates: {
files: 'www/js/templates/*.hbs',
tasks: ['emberTemplates']
}
}
});
grunt.loadNpmTasks('grunt-ember-templates');
grunt.loadNpmTasks('grunt-http-server');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.registerTask('default', ['emberTemplates', 'http-server','watch']);
};<file_sep>/README.md
**Application deprecated and no longer being improved**
This appliation is no longer being worked on. Application has gone native rather than using Phonegap.
#Budgeter - Mobile Application
##T.O.C.
* [Progress](#progress)
* [TODO](#todo)
* [IN PROGRESS](#in-progress)
* [DONE](#done)
* [Design](#design)
* [Colors](#colors)
* [Fonts](#fonts)
* [Grid & Metrics](#grid--metrics)
* [Grunt](#grunt)
* [Simple HTTP Server](#http-server)
* [Ember Template Precompiling](#ember-template-precompile)
* [Watch](#watch)
##Progress
####TODO
* User Login (local db vs server)
* Budget Page
* Spent Page
* History Page
* Graphs & Charts Page (Visuals)
* Settings Page
####IN PROGRESS
* Application Layout & Ember Setup
####DONE
* Nothing :(
##Design
<img src="http://i.imgur.com/ao53LrI.png" width="200">
####Colors
<table>
<tr>
<td colspan="3"><strong>Text</strong></td>
</tr>
<tr>
<td>body text</td>
<td>#333333</td>
<td><img src="http://placehold.it/100x35/333333/333333"></td>
</tr>
<tr>
<td>body secondary text</td>
<td>#dedede</td>
<td><img src="http://placehold.it/100x35/dedede/dedede"></td>
</tr>
<tr>
<td colspan="3"><strong>Backgrounds</strong></td>
</tr>
<tr>
<td>body background</td>
<td>#f0f0f0</td>
<td><img src="http://placehold.it/100x35/f0f0f0/f0f0f0"></td>
</tr>
<tr>
<td>header/footer background</td>
<td>#dedede</td>
<td><img src="http://placehold.it/100x35/dedede/dedede"></td>
</tr>
<tr>
<td>section bar - first</td>
<td>#428bca</td>
<td><img src="http://placehold.it/100x35/428bca/428bca"></td>
</tr>
<tr>
<td>section bar - second</td>
<td>#5cb85c</td>
<td><img src="http://placehold.it/100x35/5cb85c/5cb85c"></td>
</tr>
<tr>
<td>section bar - third</td>
<td>#5bc0de</td>
<td><img src="http://placehold.it/100x35/5bc0de/5bc0de"></td>
</tr>
<tr>
<td>section bar - fourth</td>
<td>#f0ad4e</td>
<td><img src="http://placehold.it/100x35/f0ad4e/f0ad4e"></td>
</tr>
<tr>
<td>section bar - fifth</td>
<td>#d9534f</td>
<td><img src="http://placehold.it/100x35/d9534f/d9534f"></td>
</tr>
</table>
####Fonts
####Grid & Metrics
##Grunt
####HTTP Server
A basic server is run from [0.0.0.0:1337](http://0.0.0.0:1337) to do basic testing
of the mobile app within a web browser.
####Ember Template Precompile
Ember templates compiled from **src/** to **dist/**.
####Watch
Ember template file changes are watched for. If changes occured,
templates recompiled and force web live reload.
##License
Copyright 2014 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| 2a8fabcd5472553f1f24433bca241740c78d6085 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | M-Porter/budgeter-mobile | e40189a9af96798f237f9c8b3e69359203f75b64 | 4cb529d47480381961c30576e1c1290a119833ff |
refs/heads/master | <repo_name>SchoolJCUHBL/SAPs<file_sep>/SAPs/main.hpp
//C++ Standard Libraries
#include <cmath>
#include <fstream>
#include <iostream>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
//external libraries
#include "concurrentqueue.hpp" //lockfree queue
#include <gmpxx.h> //MPIR of GNU-MP BigInt libraries
//Object to store all parameters for the queue
class Parameters
{
public:
std::vector< std::vector< bool > > grid;
int startrow;
int length;
int row;
int col;
int remaining;
};
void printVector(std::vector<std::vector<bool> > &grid); //print the vector to the screen for debugging purposes
void initForbidden(std::vector<std::vector<bool>> &grid, const unsigned int length); //set primary off-limits fields
void ProduceStep(std::vector<std::vector<bool>> &grid, mpz_class &counter, const int &startrow, const int &length, int row, int col, int remaining, int &depth); //Produce queue jobs
void TakeStep(std::vector<std::vector<bool>> &grid, mpz_class &counter, const int &startrow, const int &length, int row, int col, int remaining); //General walking function
void IncreaseCCC(mpz_class &Incr); //Thread-safe increase of the total counter
mpz_class ReadCCC(); //Thread-safe read from the total counter, not absolutely necessary
int checkInput(); //check if the given input meets the requirements of >4 and an even number
void WorkerFunc(); //Retrieves jobs from queue en proceeds calculating them with TakeStep
void HostWorkerFunc(unsigned int n, bool quiet, int d);
int main(int argc,char *argv[]); //Main function. Variables are initialized and all parts of the code are managed.
<file_sep>/SAPs/main.cpp
#include "main.hpp"
std::mutex tex;
mpz_class CCCounter;
moodycamel::ConcurrentQueue<Parameters> q;
moodycamel::ProducerToken ptok(q);
/*
    Debug helper: dump the grid to stdout, one row per line, printing
    each cell as 0 (free) or 1 (blocked/visited).
*/
void printVector(std::vector<std::vector<bool> > &grid)
{
    for (const auto &row : grid)
    {
        for (bool cell : row)
        {
            std::cout << cell;
        }
        std::cout << std::endl;
    }
}
/*
    Pre-mark cells no valid SAP can ever touch (set to 1 = forbidden):
      - the one-cell border around the whole grid,
      - part of the first interior column above the start row (length > 4),
      - every interior cell whose Manhattan distance from the start cell
        exceeds roughly length/2 — a closed path of `length` steps can
        never reach farther than half its length from the start.
*/
void initForbidden(std::vector<std::vector<bool>> &grid, const unsigned int length)
{
    // Row of the fixed start cell (same formula as in main()).
    int starty=(length-6)/2+1;
    for ( std::vector< std::vector<bool> >::size_type i = 0; i < grid.size(); ++i )
    {
        // Left and right border columns.
        grid.at(i).at(0)=1;
        grid.at(i).at(grid.at(i).size()-1)=1;
        if (i<=(length-6)/2 && length>4) //if length == 4, no other fields should be off limits, else the grid would become too small.
        {
            // Block column 1 above the start row (only for length > 4).
            grid.at(i).at(1)=1;
        }
    }
    for ( std::vector< std::vector<bool> >::size_type i = 0; i < grid.at(0).size(); ++i )
    {
        // Top and bottom border rows.
        grid.at(0).at(i)=1;
        grid.at(grid.size()-1).at(i)=1;
    }
    if (length>4)
    {
        for ( int i = 1; i < (int)grid.size()-1; ++i )
        {
            for (int j = 1; j < (int)grid.at(i).size()-1; ++j)
            {
                if (i<starty)
                {
                    // Above the start row the distance budget is one
                    // step tighter than below it.
                    if (abs(i-starty)+abs(j-1)>(int)length/2-1)
                    {
                        grid.at(i).at(j)=1;
                    }
                }
                else
                {
                    if (abs(i-starty)+abs(j-1)>(int)length/2)
                    {
                        grid.at(i).at(j)=1;
                    }
                }
            }
        }
    }
}
/*
    Producer half of the search: walks the first `depth` steps of every
    path itself; once a partial path has consumed `depth` steps
    (remaining < length - depth), it stops recursing and instead enqueues
    one job per free neighbour cell onto the global work queue `q` for
    the worker threads to finish with TakeStep.
    Paths that close within the first `depth` steps are counted directly
    into `counter`. The goal cell is (startrow+1, 1), one row below the
    start; reaching it with exactly one remaining step closes the SAP.
*/
void ProduceStep(std::vector<std::vector<bool>> &grid, mpz_class &counter, const int &startrow, const int &length, int row, int col, int remaining, int &depth)
{
    if (!grid.at(row).at(col))
    {
        if (row==startrow+1 && col==1 && remaining==1)
        {
            // Goal reached with the final step: one complete SAP.
            ++counter;
        }
        else
        {
            // Prune when the goal is out of reach within the remaining
            // step budget (Manhattan distance).
            if (abs(row-startrow-1)+abs(col-1) <= remaining)
            {
                // Touching the goal with steps to spare would close the
                // loop early: dead end (deliberate empty statement).
                if (row==startrow+1 && col==1 && remaining>1);
                else
                {
                    // Mark the cell while it is on the current path.
                    grid.at(row).at(col)=true;
                    if (remaining>=length-depth)
                    {
                        // Still within the producer's depth budget:
                        // keep walking in-process.
                        ProduceStep(grid,counter,startrow,length,row,col+1,remaining-1,depth);
                        ProduceStep(grid,counter,startrow,length,row+1,col,remaining-1,depth);
                        ProduceStep(grid,counter,startrow,length,row,col-1,remaining-1,depth);
                        ProduceStep(grid,counter,startrow,length,row-1,col,remaining-1,depth);
                    }
                    else
                    {
                        // Depth budget spent: snapshot the walk state and
                        // enqueue one continuation job per free neighbour.
                        Parameters taskparm;
                        taskparm.grid = grid;
                        taskparm.startrow = startrow;
                        taskparm.length = length;
                        taskparm.remaining = remaining-1;
                        if (!grid.at(row).at(col+1))
                        {
                            taskparm.row = row;
                            taskparm.col = col+1;
                            q.enqueue(ptok, taskparm);
                        }
                        if (!grid.at(row+1).at(col))
                        {
                            taskparm.row = row+1;
                            taskparm.col = col;
                            q.enqueue(ptok, taskparm);
                        }
                        if (!grid.at(row).at(col-1))
                        {
                            taskparm.row = row;
                            taskparm.col = col-1;
                            q.enqueue(ptok, taskparm);
                        }
                        if (!grid.at(row-1).at(col))
                        {
                            taskparm.row = row-1;
                            taskparm.col = col;
                            q.enqueue(ptok, taskparm);
                        }
                    }
                    // Backtrack: free the cell for other paths.
                    grid.at(row).at(col)=false;
                }
            }
        }
    }
}
/*
    Recursive depth-first walker: counts every self-avoiding path that
    reaches the goal cell (startrow+1, 1) with exactly one remaining
    step. Cells are marked while on the current path and unmarked on
    backtrack; unreachable branches are pruned by Manhattan distance.
*/
void TakeStep(std::vector<std::vector<bool>> &grid, mpz_class &counter, const int &startrow, const int &length, int row, int col, int remaining)
{
    // Blocked or already-visited cell: dead end.
    if (grid.at(row).at(col))
    {
        return;
    }
    const bool atGoal = (row == startrow + 1 && col == 1);
    // Goal reached with the final step: one complete SAP.
    if (atGoal && remaining == 1)
    {
        ++counter;
        return;
    }
    // Prune: goal unreachable within the step budget, or goal touched
    // too early (which would close the loop before all steps are used).
    if (abs(row - startrow - 1) + abs(col - 1) > remaining || atGoal)
    {
        return;
    }
    // Walk on: mark, try all four neighbours, then backtrack.
    grid.at(row).at(col) = true;
    TakeStep(grid, counter, startrow, length, row, col + 1, remaining - 1);
    TakeStep(grid, counter, startrow, length, row + 1, col, remaining - 1);
    TakeStep(grid, counter, startrow, length, row, col - 1, remaining - 1);
    TakeStep(grid, counter, startrow, length, row - 1, col, remaining - 1);
    grid.at(row).at(col) = false;
}
// Thread-safe accumulation into the global result counter.
void IncreaseCCC(mpz_class &Incr)
{
    std::lock_guard<std::mutex> guard(tex);
    CCCounter += Incr;
}
// Thread-safe snapshot of the global result counter.
mpz_class ReadCCC()
{
    std::lock_guard<std::mutex> guard(tex);
    return CCCounter;
}
/*
    Interactively read the SAP length from stdin.
    Returns the first value that is an even integer >= 4.
*/
int checkInput()
{
    int n = 0;
    //prompt user for length
    std::cout << "Enter the length in steps of the SAP (integer, even, larger or equal to 4) : ";
    std::cin >> n;
    //while the input doesn't meet the requirements, keep asking.
    while (std::cin.fail() || (n % 2) != 0 || n < 4) {
        if (std::cin.fail()) {
            // Non-numeric input leaves std::cin in a failed state with the
            // offending characters still buffered; without this recovery
            // the loop spun forever re-reading the same bad input.
            std::cin.clear();
            std::cin.ignore(1000000, '\n');
        }
        std::cout << "Error, Wrong number! Please enter an even integer, larger or equal to 4 : ";
        std::cin >> n;
    }
    return n;
}
// Worker-thread entry point: drains jobs from the global lock-free queue
// and finishes each partial walk with TakeStep, tallying into a
// thread-local counter that is merged into the global total exactly once
// at the end (keeps lock traffic off the hot path).
void WorkerFunc()
{
    mpz_class counter;    // thread-local tally
    Parameters recvparm;  // reused job buffer
    while (q.try_dequeue_from_producer(ptok, recvparm))
    {
        TakeStep(recvparm.grid,counter,recvparm.startrow,recvparm.length,recvparm.row,recvparm.col,recvparm.remaining);
    }
    IncreaseCCC(counter);
}
// Same job-draining loop as WorkerFunc, but runs on the main thread and
// periodically prints progress.
//   n     - initial queue size, the denominator of the percentage
//   quiet - suppress progress output when true
//   d     - print progress once every d completed jobs
void HostWorkerFunc(unsigned int n, bool quiet, int d)
{
    mpz_class counter;    // thread-local tally, merged once at the end
    Parameters recvparm;
    int i=0;              // completed-job count (drives the report rate)
    if (n==0)
    {
        // guard the percentage computation against division by zero
        n=1;
    }
    while (q.try_dequeue_from_producer(ptok, recvparm))
    {
        TakeStep(recvparm.grid,counter,recvparm.startrow,recvparm.length,recvparm.row,recvparm.col,recvparm.remaining);
        if (++i%d==0 && !quiet)
        {
            // remaining/total plus percent complete (size_approx is approximate)
            std::cout << q.size_approx() << "/" << n << " " << (1.0-((float)q.size_approx()/(float)n))*100.0 << "%" << std::endl;
        }
    }
    IncreaseCCC(counter);
}
int main(int argc,char *argv[])
{
int SAPlength = 0;
bool quiet = false;
bool superquiet = false;
int depth=7;
int ReportRate = 30;
std::ofstream outputFile;
outputFile.open("SAPsLog.csv", std::ios::out | std::ios::app);
//check if length was an argument or if it is still necessary to ask it in runtime
if (outputFile.is_open())
{
if (argc <= 1)
{
SAPlength = checkInput();
std::cout << "SAP length is: " << SAPlength << std::endl << std::endl;
}
else
{
std::istringstream ss(argv[1]);
int x;
if (!(ss >> x))
{
SAPlength = checkInput();
std::cout << "SAP length is: " << SAPlength << std::endl << std::endl;
}
else
{
SAPlength = x;
}
if (argc > 2)
{
std::string arg2(argv[2]);
if (arg2=="-q")
{
quiet=true;
}
if (arg2=="-qq")
{
superquiet=true;
}
}
}
if (!quiet && !superquiet)
{
std::cout << "Initializing startup variables" << std::endl;
}
//calculate the dimensions of the grid the program is going to walk over.
int width=SAPlength/2+2;
int height;
int startrow;
if (SAPlength==4) //exception for length 4 because else the grid will be too small
{
height=4;
startrow=1;
}
else
{
height=SAPlength/2+(2+(SAPlength-6)/2);
startrow=(SAPlength-6)/2+1;
}
if (SAPlength>=28)
{
ReportRate = 15;
}
if (SAPlength>=34)
{
ReportRate = 5;
}
std::vector<std::thread> threads;
mpz_class counter;
//initialize grid and set primary off-limits fields
if (!quiet && !superquiet)
{
std::cout <<width << "x" << height << " Grid initializing...";
}
std::vector< std::vector< bool > > grid ( height, std::vector<bool> ( width, 0 ) );
initForbidden(grid,SAPlength);
if (!quiet && !superquiet)
{
std::cout << " Complete"<< std::endl;
printVector(grid);
std::cout << "Initialize queue...";
std::cout << " Complete"<< std::endl;
std::cout << "Produce and Queue subjobs...";
}
//Set first field to forbidden and force first step to right
grid.at(startrow).at(1)=true;
ProduceStep(grid,counter,startrow,SAPlength,startrow,2,SAPlength-1,depth);
IncreaseCCC(counter);
if (!quiet && !superquiet)
{
std::cout << " Complete"<< std::endl;
std::cout << "Approximately " << q.size_approx() << " items in queue" <<std::endl;
std::cout << "Starting Consumer Threads..."<<std::endl;
}
unsigned int queueSize = q.size_approx();
unsigned concurentThreadsSupported = std::thread::hardware_concurrency();
unsigned concurentThreadsUsed = 0;
if (queueSize > 2512)
{
switch(concurentThreadsSupported)
{
case 0:
if (!quiet && !superquiet)
{
std::cout << "Request for amount of threads failed, spawning 0 additional threads for safety" <<std::endl;
}
break;
case 1:
if (!quiet && !superquiet)
{
std::cout << "This Machine reports: 1 available thread, 0 additional threads will be spawned" <<std::endl;
}
break;
default:
concurentThreadsUsed = concurentThreadsSupported-1;
if (!quiet && !superquiet)
{
std::cout << "This Machine reports: "<< concurentThreadsSupported << " available threads." <<std::endl;
std::cout << concurentThreadsUsed << " additional Threads will be spawned (main thread will also occupy one thread)." <<std::endl;
}
}
for (unsigned int i = 0; i<concurentThreadsUsed; ++i)
{
if (!quiet && !superquiet)
{
std::cout << "Thread " << i+1 << "/" << concurentThreadsUsed << "spawned" <<std::endl;
}
threads.emplace_back(std::thread(WorkerFunc));
}
if (!quiet && !superquiet)
{
std::cout << " Complete"<<std::endl;
std::cout << "Waiting for Threads to Finish..." << std::endl;
}
}
HostWorkerFunc(queueSize, superquiet, ReportRate);
for(auto& thread : threads)
{
thread.join();
}
if (!quiet && !superquiet)
{
std::cout << "Finished!" << std::endl << "Total Correct Paths: ";
}
std::cout << ReadCCC() << std::endl;
outputFile << SAPlength << ";" << ReadCCC() << std::endl;
//end program successfully
if (!quiet && !superquiet)
{
std::cout << "Program finished"<< std::endl;
}
}
outputFile.close();
return 0;
}
<file_sep>/README.md
SAPs
====
Self Avoiding Polygons searcher
A program written by <NAME>, <NAME> & <NAME>
This program may be copied or altered for educational or research purposes only.
A reference and a place in the credits list would be nice. | 831a7e6ae269935dda9a46f19f43dac4c1320e5b | [
"Markdown",
"C++"
] | 3 | C++ | SchoolJCUHBL/SAPs | 917bb63ffac16306e67d8018f6032f4364539644 | 2cf87615e2366e9f61c5364c0f75c28c44ce9b23 |
refs/heads/master | <file_sep>#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/time.h>
#include <pthread.h>
#include <omp.h>
#include <CL/cl.h>
#include "../heads/world.h"
#include "../heads/clerrors.h"
/*
Kreiraj nov svet(visina, sirina):
-svet je na zacetku polje mrtvih celic
*/
/*
 * Allocate a size_x-by-size_y world whose cells all start dead (0).
 * size_x is the number of rows (height), size_y the number of columns
 * (width). The caller owns the returned world; free it with destroyWorld.
 */
world* createWorld(int size_x, int size_y) {
    int row;
    world *w = (world*) malloc(sizeof(world));
    w->height = size_x;
    w->width = size_y;
    w->area = (int**) malloc(sizeof(int*) * size_x);
    for(row = 0; row < size_x; row++) {
        w->area[row] = (int*) calloc(size_y, sizeof(int));
    }
    return w;
}
/*
Metoda presteje zive sosede celice(i, j) v svetu w po Moorovem pravilu 3x3
*/
/*
 * Count the live neighbors of cell (i, j) in world w using the Moore
 * neighborhood of radius 1 (the 3x3 box around the cell, excluding the
 * cell itself). Positions outside the world boundary count as dead.
 */
int numOfNeighbors_Moore_3x3(int i, int j, world *w) {
    int n, m, num_of_neighbors = 0;
    for(n = -1; n < 2; n++) {
        for(m = -1; m < 2; m++) {
            if(n == 0 && m == 0) continue; /* skip the cell itself */
            /* only look at in-bounds positions */
            if((i + n) >= 0 && (i + n) < w->height && (j + m) >= 0 && (j + m) < w->width) {
                if(w->area[i+n][j+m] > 0) {
                    num_of_neighbors++;
                }
            }
        }
    }
    return num_of_neighbors;
}
/*
 * Decide the next state of cell (i, j) under classic Conway rules with the
 * 3x3 Moore neighborhood: a live cell survives with 2 or 3 live neighbors,
 * a dead cell is born with exactly 3. The result is written into area[j],
 * which is the cell's slot in the next-generation row buffer. A dead cell
 * that is not born is left untouched, so callers must pass rows that are
 * zero-initialized (the simulators calloc them).
 */
void cell_destiny_3x3(int i, int j, world *w, int *area) {
    int non = numOfNeighbors_Moore_3x3(i, j, w);
    if(w->area[i][j] > 0) {
        /* live cell: survives on 2 or 3 neighbors, otherwise dies */
        if(non == 2 || non == 3) {
            area[j] = 1;
        }
        else {
            area[j] = 0;
        }
    }
    else {
        /* dead cell: born on exactly 3 neighbors */
        if(non == 3) {
            area[j] = 1;
        }
    }
}
/*
Metoda presteje zive sosede celice(i, j) v svetu w po Moorovem pravilu 5x5
*/
/*
 * Count the live neighbors of cell (i, j) in world w using the Moore
 * neighborhood of radius 2 (a 5x5 box, the center cell excluded).
 * Out-of-bounds positions count as dead.
 */
int numOfNeighbors_Moore_5x5(int i, int j, world *w) {
    int row, col, alive = 0;
    for(row = i - 2; row <= i + 2; row++) {
        if(row < 0 || row >= w->height) continue;
        for(col = j - 2; col <= j + 2; col++) {
            if(col < 0 || col >= w->width) continue;
            if(row == i && col == j) continue; /* the cell itself */
            if(w->area[row][col] > 0) alive++;
        }
    }
    return alive;
}
/*
 * Decide the next state of cell (i, j) under this project's 5x5-neighborhood
 * variant of the rules: a live cell survives only with exactly 3 live
 * neighbors, a dead cell is born with exactly 4. The result is written into
 * area[j] (the cell's slot in the next-generation row buffer). A dead cell
 * that is not born is left untouched, so callers must pass zero-initialized
 * rows (the simulators calloc them).
 */
void cell_destiny_5x5(int i, int j, world *w, int *area) {
    int non = numOfNeighbors_Moore_5x5(i, j, w);
    if(w->area[i][j] > 0) {
        /* live cell: survives only on exactly 3 neighbors */
        if(non == 3) {
            area[j] = 1;
        }
        else {
            area[j] = 0;
        }
    }
    else {
        /* dead cell: born on exactly 4 neighbors */
        if(non == 4) {
            area[j] = 1;
        }
    }
}
/*
Dodaj novo polje v svet in sprosti resurse starega polja
*/
/*
 * Swap the world's grid for `area`: every old row and the old row-pointer
 * table are freed, and ownership of `area` passes to the world. `area`
 * must have the same dimensions as the world.
 */
void addNewArea(world *w, int **area) {
    int row;
    for(row = 0; row < w->height; row++) {
        free(w->area[row]);
    }
    free(w->area);
    w->area = area;
}
/*
Vrne novo polje velikosti height x width
*/
/*
 * Allocate and return a zero-filled height x width grid.
 * The caller owns the returned rows and row-pointer table.
 */
int** createNewArea(int height, int width) {
    int row;
    int **grid = (int**) malloc(sizeof(int*) * height);
    for(row = 0; row < height; row++) {
        grid[row] = (int*) calloc(width, sizeof(int));
    }
    return grid;
}
/*
Simulira eno generacijo sveta na eni niti
*/
/*
 * Advance the world by one generation on a single thread, using the
 * 5x5-neighborhood rule. A fresh zero-filled grid is built row by row
 * and then swapped into the world (the old grid is freed by addNewArea).
 */
void simulateOneCicle(world *w) {
    int i, j, **area = (int**) malloc(sizeof(int*) * w->height);
    for(i = 0; i < w->height; i++) {
        /* calloc: cell_destiny_5x5 relies on untouched cells staying 0 */
        area[i] = (int*) calloc(sizeof(int), w->width);
        for(j = 0; j < w->width; j++) {
            cell_destiny_5x5(i, j, w, area[i]);
        }
    }
    addNewArea(w, area);
}
/*
Funkcija primerja svetova in vrne 1 ce sta enaka in 0 ce sta razlicna
*/
/*
 * Compare two worlds cell by cell.
 * Returns 1 when the dimensions and every cell match, 0 otherwise.
 */
int compareWorldAreas(world *w1, world *w2) {
    int i, j;
    /* different dimensions can never be equal */
    if(w1->width != w2->width || w1->height != w2->height) {
        return 0;
    }
    for(i = 0; i < w1->height; i++) {
        for(j = 0; j < w1->width; j++) {
            if(w1->area[i][j] != w2->area[i][j]) {
                return 0;
            }
        }
    }
    return 1;
}
/*
Sprosti resurse za svet
*/
/*
 * Release every row of the grid, the row-pointer table, and the
 * world struct itself.
 */
void destroyWorld(world *w) {
    int row;
    for(row = 0; row < w->height; row++) {
        free(w->area[row]);
    }
    free(w->area);
    free(w);
}
/*
 * Print the world grid to stdout, one row per line, followed by a line
 * with the world dimensions.
 * NOTE(review): "%3u" formats an int as unsigned — harmless for the 0/1
 * cell values used here, but technically mismatched; confirm if cells can
 * ever be negative.
 */
void printWorld(world *w) {
    int i, j;
    for(i = 0; i < w->height; i++) {
        for(j = 0; j < w->width; j++) {
            printf("%3u ", w->area[i][j]);
        }
        printf("\n");
    }
    printf("World size[height:%d, width:%d]\n", w->height, w->width);
}
/*
Simulira v neskoncno z izpisom v konzolo na eni niti
*/
/*
 * Run the single-threaded simulation forever, printing each generation
 * to the console. Never returns; exit with Ctrl+C.
 */
void simulate(world *w) {
    while(1) {
        simulateOneCicle(w);
        printWorld(w);
    }
}
/*
Simulira max generacij na eni niti in vrne double v milisekundah porabljenega casa
*/
/*
 * Run `max` generations single-threaded and return the wall-clock time
 * spent, in milliseconds (measured with gettimeofday).
 */
double simulateMax(world *w, int max) {
    struct timeval t1, t2;
    int i;
    gettimeofday(&t1, NULL);
    for(i = 0; i < max; i++) {
        simulateOneCicle(w);
    }
    gettimeofday(&t2, NULL);
    /* seconds part in ms, plus the microseconds part converted to ms */
    double time_elapsed = (t2.tv_sec - t1.tv_sec) * 1000;
    time_elapsed += (double)(t2.tv_usec - t1.tv_usec) / 1000;
    return time_elapsed;
}
/*
 * Run `max` generations using OpenMP and return the wall-clock time in
 * milliseconds. Each generation parallelizes over rows: every iteration
 * of the outer j-loop allocates and fills its own row of the new grid,
 * so threads never write the same row. `k` is explicitly private; `j`
 * is the parallel loop index (private by default); `w` is shared.
 */
double simulateOMPMax(world *w, int max) {
    struct timeval t1, t2;
    int i;
    gettimeofday(&t1, NULL);
    for(i = 0; i < max; i++) {
        int j, k, **area = (int**) malloc(sizeof(int*) * w->height);
        #pragma omp parallel for shared(area) private(k)
        for(j = 0; j < w->height; j++) {
            /* calloc: cell_destiny_5x5 relies on untouched cells staying 0 */
            area[j] = (int*) calloc(sizeof(int), w->width);
            for(k = 0; k < w->width; k++) {
                cell_destiny_5x5(j, k, w, area[j]);
            }
        }
        addNewArea(w, area);
    }
    gettimeofday(&t2, NULL);
    double time_elapsed = (t2.tv_sec - t1.tv_sec) * 1000;
    time_elapsed += (double)(t2.tv_usec - t1.tv_usec) / 1000;
    return time_elapsed;
}
/*
Funkcija ki jo izvaja nit
*/
/*
 * Thread entry point for simulateMultyOneCicle: computes the next
 * generation for rows [p->min, p->max) of the shared world, writing
 * results into the caller-provided p->area rows. Always returns NULL.
 */
void* doSomething(void *arg) {
    param *p = (param*) arg;
    int i, j; /* the old unused local `non` has been removed */
    for(i = p->min; i < p->max; i++) {
        for(j = 0; j < p->w->width; j++) {
            cell_destiny_5x5(i, j, p->w, p->area[i]);
        }
    }
    return NULL;
}
/*
Simulira eno generacijo sveta na threadCount nitih in za vsako
generacijo naredi niti in nakoncu jih 'pobije'
*/
/*
 * Advance the world by one generation using threadCount worker threads.
 * The rows are split into contiguous chunks of size height/threadCount + 1;
 * each thread fills its chunk of a fresh grid, and after all threads join
 * the new grid is swapped into the world. Threads are created and joined
 * anew for every generation (see simulateMaxMulty2 for the persistent
 * variant).
 */
void simulateMultyOneCicle(world *w, int threadCount) {
    pthread_t t[threadCount];
    param p[threadCount];
    int **newArea = createNewArea(w->height, w->width), i, min = 0, max, delta = w->height/threadCount + 1;
    for(i = 0; i < threadCount; i++) {
        max = min + delta;
        /* clamp the chunk to the world: surplus threads get an empty range */
        if(min >= w->height) {
            min = w->height;
            max = w->height;
        }
        else if(max > w->height) {
            max = w->height;
        }
        p[i].min = min;
        p[i].max = max;
        p[i].area = newArea;
        p[i].w = w;
        pthread_create(&t[i], NULL, doSomething, (void*) &p[i]);
        min += delta;
    }
    for(i = 0; i < threadCount; i++) {
        pthread_join(t[i], NULL);
    }
    addNewArea(w, newArea);
}
/*
Simulates max generations on threadCount threads and returns the elapsed time in milliseconds as a double
*/
/*
 * Run `max` generations with threadCount threads (threads are respawned
 * every generation by simulateMultyOneCicle) and return the wall-clock
 * time in milliseconds.
 */
double simulateMaxMulty(world *w, int threadCount, int max) {
    struct timeval t1, t2;
    int i;
    gettimeofday(&t1, NULL);
    for(i = 0; i < max; i++) {
        simulateMultyOneCicle(w, threadCount);
    }
    gettimeofday(&t2, NULL);
    double time_elapsed = (t2.tv_sec - t1.tv_sec) * 1000;
    time_elapsed += (double)(t2.tv_usec - t1.tv_usec) / 1000;
    return time_elapsed;
}
/*
Simulira v neskoncno z izpisom v konzolo na threadCount nitih
-exit = ctrl + c
*/
/*
 * Run the multithreaded simulation forever, printing each generation to
 * the console with a two-second pause between frames. Never returns;
 * exit with Ctrl+C.
 */
void simulateMulty(world *w, int threadCount) {
    while(1) {
        simulateMultyOneCicle(w, threadCount);
        /* sleep() takes an unsigned int; the old 2.0 literal was a double
           that was silently truncated to 2 */
        sleep(2);
        printWorld(w);
    }
}
/*
Funkcija ki jo izvaja nit
*/
/* Scratch grid shared by all worker threads; each thread writes only its
   own row range [min, max). Allocated by simulateMaxMulty2. */
static int **tmpArea;
/* Second barrier: ensures every thread has swapped its rows into the world
   before any thread begins computing the next generation. */
static pthread_barrier_t barrierMove;
/*
 * Thread entry point for simulateMaxMulty2: a persistent worker that runs
 * p->num_iter generations over its row range [p->min, p->max).
 * Per generation: (1) compute next-generation rows into the shared tmpArea;
 * (2) wait at p->barrier until all threads have finished computing;
 * (3) swap the computed rows into the world and give tmpArea fresh
 * zeroed rows; (4) wait at barrierMove before starting the next generation.
 * Always returns NULL.
 */
void* doSomething2(void *arg) {
    param *p = (param*) arg;
    int i, j, k, m; /* the old unused local `non` has been removed */
    for(k = 0; k < p->num_iter; k++) {
        for(i = p->min; i < p->max; i++) {
            for(j = 0; j < p->w->width; j++) {
                cell_destiny_5x5(i, j, p->w, tmpArea[i]);
            }
        }
        /* everyone must finish reading the old world before rows are swapped */
        pthread_barrier_wait(p->barrier);
        for(m = p->min; m < p->max; m++) {
            free(p->w->area[m]);
            p->w->area[m] = tmpArea[m];
            tmpArea[m] = calloc(sizeof(int), p->w->width);
        }
        /* everyone must finish swapping before the next generation starts */
        pthread_barrier_wait(&barrierMove);
    }
    return NULL;
}
/*
Simulira max generacij na threadCount nitih in vrne double v milisekundah porabljenega casa
*/
/*
 * Run `max` generations with threadCount persistent threads (spawned once,
 * synchronized per generation with two barriers — see doSomething2) and
 * return the wall-clock time in milliseconds.
 * NOTE(review): tmpArea (and its rows) is never freed after the threads
 * join, so each call leaks one grid — TODO confirm and fix.
 */
double simulateMaxMulty2(world *w, int threadCount, int max) {
    struct timeval t1, t2;
    pthread_t t[threadCount];
    param p[threadCount];
    pthread_barrier_t barrier;
    pthread_barrier_init(&barrier, NULL, threadCount);
    pthread_barrier_init(&barrierMove, NULL, threadCount);
    tmpArea = createNewArea(w->height, w->width);
    /* chunk rows as in simulateMultyOneCicle: height/threadCount + 1 per thread */
    int i, min_t = 0, max_t, delta = w->height/threadCount + 1;
    gettimeofday(&t1, NULL);
    //printf("st.niti:%d, visina sveta:%d\n", threadCount, w->height);
    for(i = 0; i < threadCount; i++) {
        max_t = min_t + delta;
        /* clamp the chunk; surplus threads get an empty range */
        if(min_t >= w->height) {
            min_t = w->height;
            max_t = w->height;
        }
        else if(max_t > w->height) {
            max_t = w->height;
        }
        p[i].min = min_t;
        p[i].max = max_t;
        p[i].area = tmpArea;
        p[i].w = w;
        p[i].barrier = &barrier;
        p[i].num_iter = max;
        //printf("%d, %d\n", min_t, max_t);
        pthread_create(&t[i], NULL, doSomething2, (void*) &p[i]);
        min_t += delta;
    }
    //printf("-----------------------\n");
    for(i = 0; i < threadCount; i++) {
        pthread_join(t[i], NULL);
    }
    gettimeofday(&t2, NULL);
    double time_elapsed = (t2.tv_sec - t1.tv_sec) * 1000;
    time_elapsed += (double)(t2.tv_usec - t1.tv_usec) / 1000;
    pthread_barrier_destroy(&barrier);
    pthread_barrier_destroy(&barrierMove);
    return time_elapsed;
}
/*
 * Read the OpenCL kernel source at `path` into a newly malloc'd,
 * NUL-terminated string (at most MAX_SOURCE_SIZE bytes of source).
 * Returns NULL when the file cannot be opened. The caller owns the
 * returned buffer.
 */
char* read_kernel(char* path) {
    int MAX_SOURCE_SIZE = 1 << 14;
    FILE *fp;
    char *source_str;
    size_t source_size;
    fp = fopen(path, "r");
    if(fp == NULL) {
        return NULL; /* previously this crashed in fread on a missing file */
    }
    /* +1 so the terminating NUL fits even when the file fills the buffer
       (the old code wrote one byte past the allocation in that case) */
    source_str = (char*)malloc(MAX_SOURCE_SIZE + 1);
    source_size = fread(source_str, 1, MAX_SOURCE_SIZE, fp);
    source_str[source_size] = '\0';
    fclose(fp);
    return source_str;
}
/*
 * Flatten the world grid into a newly allocated row-major float vector
 * (the layout the OpenCL kernel expects). The caller owns the result.
 */
float* mat2vec(world *w) {
    int row, col;
    float *vec = (float*) malloc(sizeof(float) * w->height * w->width);
    float *out = vec;
    for(row = 0; row < w->height; row++) {
        for(col = 0; col < w->width; col++) {
            *out++ = (float) w->area[row][col];
        }
    }
    return vec;
}
/*
 * Expand a row-major float vector back into a freshly allocated
 * height x width int matrix (values truncated to int).
 * The caller owns the returned rows and row-pointer table.
 */
int** vec2mat(float *vec, int height, int width) {
    int row, col;
    int **mat = (int**) malloc(sizeof(int*) * height);
    for(row = 0; row < height; row++) {
        mat[row] = (int*) malloc(sizeof(int) * width);
        for(col = 0; col < width; col++) {
            mat[row][col] = (int) vec[row * width + col];
        }
    }
    return mat;
}
/*
 * Run `max` generations on the GPU via OpenCL and return the wall-clock
 * time in milliseconds. Uses the first platform's first GPU device, the
 * "simulateOne" kernel from src/kernel_local.cl, and ping-pongs two device
 * buffers between generations. The final generation is copied back into w.
 */
double simulatemax_ocl(world *w, int max) {
    cl_int ret;
    /* platform and device discovery: first platform, GPU devices only */
    cl_platform_id platform_id[10];
    cl_uint ret_num_platforms;
    ret = clGetPlatformIDs(10, platform_id, &ret_num_platforms);
    cl_device_id device_id[10];
    cl_uint ret_num_devices;
    ret = clGetDeviceIDs(platform_id[0], CL_DEVICE_TYPE_GPU, 10,
            device_id, &ret_num_devices);
    cl_context context = clCreateContext(NULL, 1, &device_id[0], NULL, NULL, &ret);
    cl_command_queue command_queue = clCreateCommandQueue(context, device_id[0], 0, &ret);
    /* upload the current grid as a flat float vector; the host-side copy
       is freed right away (the old code leaked it) */
    float *init_vec = mat2vec(w);
    cl_mem world_mem_obj = clCreateBuffer(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
            w->height*w->width*sizeof(float), init_vec, &ret);
    free(init_vec);
    cl_mem new_world_mem_obj = clCreateBuffer(context, CL_MEM_WRITE_ONLY,
            w->height*w->width*sizeof(float), NULL, &ret);
    /* compile the kernel program */
    char *kernel_source = read_kernel("src/kernel_local.cl");
    cl_program program = clCreateProgramWithSource(context, 1, (const char **)&kernel_source,
            NULL, &ret);
    ret = clBuildProgram(program, 1, &device_id[0], NULL, NULL, NULL);
    /* fetch the build log (useful when debugging kernel compilation) */
    size_t build_log_len;
    char *build_log;
    ret = clGetProgramBuildInfo(program, device_id[0], CL_PROGRAM_BUILD_LOG,
            0, NULL, &build_log_len);
    build_log =(char*) malloc(sizeof(char)*(build_log_len+1));
    ret = clGetProgramBuildInfo(program, device_id[0], CL_PROGRAM_BUILD_LOG,
            build_log_len, build_log, NULL);
    // printf("%s\n", build_log);
    free(build_log);
    cl_kernel kernel = clCreateKernel(program, "simulateOne", &ret);
    struct timeval t1, t2;
    int i;
    /* host-side result buffer, allocated once: the old code re-malloc'd it
       every generation and leaked all but the last allocation */
    float *new_world = malloc(sizeof(float)*w->height*w->width);
    gettimeofday(&t1, NULL);
    for(i = 0; i < max; i++) {
        clSetKernelArg(kernel, 0, sizeof(cl_mem), (void *)&world_mem_obj);
        clSetKernelArg(kernel, 1, sizeof(cl_mem), (void *)&new_world_mem_obj);
        clSetKernelArg(kernel, 2, sizeof(cl_int), (void *)&w->height);
        clSetKernelArg(kernel, 3, sizeof(cl_int), (void *)&w->width);
        size_t local_item_size[2] = {WORKGROUP_SIZE, WORKGROUP_SIZE};
        size_t global_item_size[2] = {GLOBALITEM_SIZE, GLOBALITEM_SIZE};
        ret = clEnqueueNDRangeKernel(command_queue, kernel, 2, NULL,
                global_item_size, local_item_size, 0, NULL, NULL);
        // printf("kernel error: %s\n", getErrorString(ret));
        /* the buffer holds floats; the old code read sizeof(int) bytes,
           which only worked because sizeof(int) == sizeof(float) here */
        ret = clEnqueueReadBuffer(command_queue, new_world_mem_obj, CL_TRUE, 0,
                w->height*w->width*sizeof(float), new_world, 0, NULL, NULL);
        ret = clFlush(command_queue);
        ret = clFinish(command_queue);
        if(i < max - 1) {
            /* ping-pong: this generation's output becomes the next input */
            cl_mem tmp_mem = world_mem_obj;
            world_mem_obj = new_world_mem_obj;
            new_world_mem_obj = tmp_mem;
        }
    }
    gettimeofday(&t2, NULL);
    double time_elapsed = (t2.tv_sec - t1.tv_sec) * 1000;
    time_elapsed += (double)(t2.tv_usec - t1.tv_usec) / 1000;
    addNewArea(w, vec2mat(new_world, w->height, w->width));
    free(new_world); /* vec2mat made a copy; the old code leaked this */
    free(kernel_source); /* the old code leaked the kernel source too */
    ret = clReleaseKernel(kernel);
    ret = clReleaseProgram(program);
    ret = clReleaseMemObject(world_mem_obj);
    ret = clReleaseMemObject(new_world_mem_obj);
    ret = clReleaseCommandQueue(command_queue);
    ret = clReleaseContext(context);
    return time_elapsed;
}<file_sep>#ifndef _WORLD_GRAPHICS_H_
#define _WORLD_GRAPHICS_H_
/* Window dimensions, in pixels, for the SDL visualization. */
#define WINDOW_WIDTH 1000
#define WINDOW_HEIGHT 1000
/* Number of worker threads used by the threaded simulator calls. */
#define THREADS_COUNT 8
int initSDL(world*);              /* create window, renderer and textures */
void destroySDL();                /* release all SDL resources */
void render(world*);              /* draw one frame of the world */
void SDL_Simulate_Game_of_Life(); /* run the interactive simulation loop */
#endif
<file_sep>#include <stdio.h>
#include <SDL2/SDL.h>
#include "../heads/world.h"
#include "../heads/world_graphics.h"
#include "../heads/world_test.h"
SDL_Window *main_window = NULL;
SDL_Renderer *main_renderer = NULL;
SDL_Surface *loading_surface = NULL;
SDL_Texture *background_texture = NULL, *cellular_cell = NULL, *worldground_texture = NULL;
SDL_Rect *src_rect, *dest_rect;
SDL_Event *event;
/*
 * Interactive demo: create a world with the demo seed pattern, open the
 * SDL window, and loop — rendering a frame and advancing one generation
 * (on the GPU) while no SDL events are pending. When an event arrives the
 * inner loop exits and the event is inspected; pressing 'q' quits.
 * NOTE(review): `event` is heap-allocated in initSDL and its contents are
 * only meaningful after SDL_PollEvent returned 1; the key check below also
 * reads event->key for non-keyboard events — confirm this is acceptable.
 */
void SDL_Simulate_Game_of_Life() {
    world *w = createWorld(WORLD_SIZE_HEIGHT, WORLD_SIZE_WIDTH);
    initInfWorld(w);
    initSDL(w);
    int run = 1;
    while(run) {
        while(!SDL_PollEvent(event)) {
            render(w);
            SDL_Delay(60);
            /* alternative simulators, kept for experimentation: */
            //simulateOneCicle(w);
            //simulateMultyOneCicle(w, THREADS_COUNT);
            //simulateMaxMulty2(w, THREADS_COUNT, 1);
            //simulateOMPMax(w, 1);
            simulatemax_ocl(w, 1);
        }
        /* if the 'q' key was pressed, end the program */
        if(event->key.keysym.sym == SDLK_q) {
            printf("Exited by keyboard input!!!\n");
            run = 0;
        }
    }
    destroySDL();
    destroyWorld(w);
}
/*
Sprosti in zapri vse strukture za grafiko:
*/
void destroySDL() {
SDL_DestroyTexture(cellular_cell);
SDL_DestroyTexture(background_texture);
SDL_DestroyRenderer(main_renderer);
SDL_DestroyWindow(main_window);
free(src_rect);
free(dest_rect);
free(event);
SDL_Quit();
}
/*
Inicializacija grafike:
*/
/*
 * Initialize the SDL visualization: allocate the shared rect/event
 * helpers, create the main window and renderer, and load the three
 * textures (background, world ground, live cell) from BMP files.
 * Always returns 0.
 * NOTE(review): SDL_Init is never called and SDL_PollEvent(NULL) is
 * unusual; return values of the SDL calls are not checked — confirm
 * this works on the target setup.
 */
int initSDL(world *w) {
    src_rect = (SDL_Rect*) malloc(sizeof(SDL_Rect));
    dest_rect = (SDL_Rect*) malloc(sizeof(SDL_Rect));
    event = (SDL_Event*) malloc(sizeof(SDL_Event));
    SDL_PollEvent(NULL);
    main_window = SDL_CreateWindow("SDL: Game of Life! - Press key: 'q' to exit", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, WINDOW_WIDTH, WINDOW_HEIGHT, 0);
    main_renderer = SDL_CreateRenderer(main_window, -1, 0);
    loading_surface = SDL_LoadBMP("textures/background.bmp");
    background_texture = SDL_CreateTextureFromSurface(main_renderer, loading_surface);
    SDL_FreeSurface(loading_surface);
    loading_surface = SDL_LoadBMP("textures/worldground.bmp");
    worldground_texture = SDL_CreateTextureFromSurface(main_renderer, loading_surface);
    SDL_FreeSurface(loading_surface);
    loading_surface = SDL_LoadBMP("textures/yellow.bmp");
    cellular_cell = SDL_CreateTextureFromSurface(main_renderer, loading_surface);
    SDL_FreeSurface(loading_surface);
    return 0;
}
/*
Funkcija za renderiranje slik na window
*/
/*
 * Draw one frame: the background, then the world-ground area centered in
 * the window, then one cell sprite per live cell. Cell size is the window
 * size divided (integer division) by the world size, so a border of
 * (window - cells*cellsize)/2 pixels is left on each side.
 */
void render(world *w) {
    SDL_RenderCopy(main_renderer, background_texture, NULL, NULL);
    /* integer cell dimensions and centering offsets */
    int c_x = WINDOW_HEIGHT/w->height;
    int c_y = WINDOW_WIDTH/w->width;
    int totalSizeHeight = c_x*w->height;
    int totalSizeWidth = c_y*w->width;
    int offHeight = (WINDOW_HEIGHT - totalSizeHeight) / 2;
    int offWidth = (WINDOW_WIDTH - totalSizeWidth) / 2;
    src_rect->x = offWidth;
    src_rect->y = offHeight;
    src_rect->w = totalSizeWidth;
    src_rect->h = totalSizeHeight;
    dest_rect->x = offWidth;
    dest_rect->y = offHeight;
    dest_rect->w = totalSizeWidth;
    dest_rect->h = totalSizeHeight;
    SDL_RenderCopy(main_renderer, worldground_texture, src_rect, dest_rect);
    /* from here on src_rect selects a cell-sized patch of the cell texture */
    src_rect->x = 2;
    src_rect->y = 2;
    src_rect->w = WINDOW_WIDTH/w->width;
    src_rect->h = WINDOW_HEIGHT/w->height;
    dest_rect->x = 0;
    dest_rect->y = 0;
    dest_rect->w = src_rect->w;
    dest_rect->h = src_rect->h;
    int i,j;
    for(i = 0; i < w->height; i++) {
        for(j = 0; j < w->width; j++) {
            if(w->area[i][j] > 0) {
                dest_rect->x = src_rect->w*j + offWidth;
                dest_rect->y = src_rect->h*i + offHeight;
                SDL_RenderCopy(main_renderer, cellular_cell, src_rect, dest_rect);
            }
        }
    }
    SDL_RenderPresent(main_renderer);
}<file_sep>#ifndef _WORLD_TEST_H_
#define _WORLD_TEST_H_
/* Result of one timing analysis: tab holds the per-run times (ms),
   povp the mean, sum the total, len the number of runs. */
typedef struct {
    double *tab, povp, sum;
    int len;
} casovna_t;
void initInfWorld(world*);  /* demo seed pattern (world must be >= 94x94) */
void initInf2World(world*); /* 2x2 blocks in the four corners */
void initWorld(world*);     /* hand-placed seed pattern */
void initTest(world*);      /* small test pattern */
double SE(casovna_t*, int); /* standard error of the mean of tab */
/* Timing analyses; args: repetitions, height, width, iterations, [threads,] DEBUG */
casovna_t* casovna_analiza_1_nit(int, int, int, int, int);
casovna_t* casovna_analiza_vec_niti(int, int, int, int, int, int);
casovna_t* casovna_analiza_vec_niti_OMP(int, int, int, int, int, int);
casovna_t* casovna_analiza_vec_niti2(int, int, int, int, int, int);
casovna_t* casovna_analiza_openCL(int, int, int, int, int);
#endif<file_sep>#ifndef _WORLD_H_
#define _WORLD_H_
/* Default world dimensions and generation count for the demo. */
#define WORLD_SIZE_HEIGHT 150
#define WORLD_SIZE_WIDTH 150
#define MAX_ITERATION 500
/* OpenCL work partitioning: local work-group edge and global NDRange edge. */
#define WORKGROUP_SIZE 8
#define GLOBALITEM_SIZE 512
/* A cellular-automaton world: `area` is a height x width grid where a
   value > 0 means the cell is alive. */
typedef struct {
    int width, height, **area;
} world;
/* Per-thread work description for the pthread simulators: a thread owns
   rows [min, max) of world w, writes results into `area`, and — for the
   persistent-thread variant — loops num_iter generations using `barrier`. */
typedef struct {
    int min, max, **area, num_iter;
    world *w;
    pthread_barrier_t *barrier;
} param;
/* world lifecycle and helpers */
world* createWorld(int, int);
void destroyWorld(world*);
void printWorld(world*);
void addNewArea(world*, int**);
int** createNewArea(int , int);
int compareWorldAreas(world*, world*);
/* per-cell transition rules */
void cell_destiny_3x3(int, int, world*, int*);
void cell_destiny_5x5(int, int, world*, int*);
int numOfNeighbors_Moore_3x3(int, int, world*);
int numOfNeighbors_Moore_5x5(int, int, world*);
/* single-threaded simulators */
void simulate(world*);
void simulateOneCicle(world*);
double simulateMax(world*, int);
/* OpenMP simulator */
double simulateOMPMax(world*, int);
/* pthread simulators (doSomething* are the thread entry points) */
void* doSomething(void*);
void simulateMulty(world*, int);
void simulateMultyOneCicle(world*, int);
double simulateMaxMulty(world*, int, int);
void* doSomething2(void *arg);
double simulateMaxMulty2(world*, int , int);
/* OpenCL simulator and its conversion helpers */
double simulatemax_ocl(world*, int);
char* read_kernel(char*);
float* mat2vec(world*);
int** vec2mat(float*, int, int);
#endif
<file_sep>echo -e "\e[1;32mPrevajam c programe:\e[0m"
# Build script: announce the sources being compiled, then build everything
# in one gcc invocation.
echo -e "\e[1;32mPrevajam c programe:\e[0m"
echo -e "\e[1;34m---src/world.c\e[0m"
echo -e "\e[1;34m---src/world_graphics.c\e[0m"
echo -e "\e[1;34m---src/world_test.c\e[0m"
echo -e "\e[1;34m---./main.c\e[0m"
# Links SDL2, OpenCL (CUDA 8.0 headers, NVIDIA driver libs), pthreads,
# OpenMP and libm; prints a green success/failure message.
gcc main.c src/world.c src/world_graphics.c src/world_test.c -I/usr/local/cuda-8.0/include -L/usr/lib/nvidia-367 -lSDL2 -lOpenCL -pthread -fopenmp -lm -o main && echo -e "\e[1;32mUSPELO!!\e[0m" || echo -e "\e[1;32mNI USPELO!!\e[0m"
<file_sep>#include <SDL2/SDL.h>
#include "heads/world.h"
#include "heads/world_graphics.h"
#include "heads/world_test.h"
/*
./makefile
*/
int main(int argc, char **argv) {
    /* Run the graphical cellular-automaton simulation; the commented
       blocks below are the timing-analysis experiments that were used
       to produce the benchmark numbers. */
    //graficna simulacija cellularnega avtomata
    SDL_Simulate_Game_of_Life();
    //printf("threads:%d, procs: %d\n", omp_get_thread_num(), omp_get_num_threads());
    //:parametri funkcije casovna_analiza_1(vec)_nit(i): #ponovitev, visina, sirina, st. iteracij, #niti, DEBUG(izpisuje vmesne rezultate = 1, ne izpisuj = 0)
    /*
    casovna_analiza_1_nit (25, 100, 100, 100, 1);
    casovna_analiza_vec_niti (25, 100, 100, 100, 2, 1);
    casovna_analiza_vec_niti2 (25, 100, 100, 100, 2, 1);
    casovna_analiza_vec_niti_OMP(25, 100, 100, 100, 2, 1);
    casovna_analiza_openCL (25, 100, 100, 100, 1);
    */
    // casovna_analiza_vec_niti_OMP (2, 800, 800, 100, 2, 1);
    // casovna_analiza_openCL (2, 800, 800, 100, 1);
    /*
    casovna_analiza_openCL (25, 100, 100, 100, 1);
    casovna_analiza_openCL (25, 200, 100, 100, 1);
    casovna_analiza_openCL (25, 400, 100, 100, 1);
    casovna_analiza_openCL (25, 800, 100, 100, 1);
    casovna_analiza_openCL (25, 100, 100, 100, 1);
    casovna_analiza_openCL (25, 100, 200, 100, 1);
    casovna_analiza_openCL (25, 100, 400, 100, 1);
    casovna_analiza_openCL (25, 100, 800, 100, 1);
    casovna_analiza_openCL (25, 100, 100, 100, 1);
    casovna_analiza_openCL (25, 100, 100, 200, 1);
    casovna_analiza_openCL (25, 100, 100, 400, 1);
    casovna_analiza_openCL (25, 100, 100, 800, 1);
    */
    return 0;
}
<file_sep>#include <SDL2/SDL.h>
#include <stdlib.h>
#include <math.h>
#include <omp.h>
#include "../heads/world.h"
#include "../heads/world_graphics.h"
#include "../heads/world_test.h"
#define TEST_COUNT 3
#define START_ITER 1000
#define DELTA_ITER 500
#define TEST_HEIGHT 100
#define TEST_WIDTH 250
/*
 * Standard error of the mean of the samples in c->tab: the sample
 * standard deviation (n-1 denominator) divided by sqrt(n), using the
 * precomputed mean c->povp. Prints the value when DEBUG is nonzero.
 */
double SE(casovna_t *c, int DEBUG) {
    int i;
    double dev, sum_sq = 0.0, se;
    for(i = 0; i < c->len; i++) {
        dev = c->tab[i] - c->povp;
        sum_sq += dev * dev;
    }
    se = sqrt(sum_sq/(c->len - 1))/sqrt(c->len);
    if(DEBUG) printf("---SE: %f\n", se);
    return se;
}
/*
 * Timing analysis for the OpenCL simulator: run st_iteracij generations
 * n times on a fresh visina x sirina world and record the per-run times,
 * their sum, mean and count in the returned casovna_t. The caller owns
 * the returned struct and its tab array.
 */
casovna_t* casovna_analiza_openCL(int n, int visina, int sirina, int st_iteracij, int DEBUG) {
    casovna_t* ct = malloc(sizeof(casovna_t));
    world *w = createWorld(visina, sirina);
    ct->tab = malloc(sizeof(double)*n);
    int i;
    double sum_time = 0, tmp_time;
    if(DEBUG) printf("---v:%d, s:%d, st_iteracij:%d :: %3d OCL---\n", visina, sirina, st_iteracij, omp_get_num_threads());
    for(i = 0; i < n; i++) {
        tmp_time = simulatemax_ocl(w, st_iteracij);
        if(DEBUG) printf("cas %3d: %.2f ms\n", i, tmp_time);
        ct->tab[i] = tmp_time;
        sum_time += tmp_time;
    }
    ct->sum = sum_time;
    sum_time /= n;
    ct->povp = sum_time;
    ct->len = n;
    if(DEBUG) {
        printf("povprecen cas: %.4f ms\n", sum_time);
        printf("standard error: %.4f ms\n", SE(ct, 0));
        printf("\n\n");
    }
    destroyWorld(w); /* the world was previously leaked on every call */
    return ct;
}
/*
 * Timing analysis for the single-threaded simulator: run st_iteracij
 * generations n times on a fresh visina x sirina world and record the
 * per-run times, their sum, mean and count in the returned casovna_t.
 * The caller owns the returned struct and its tab array.
 */
casovna_t* casovna_analiza_1_nit(int n, int visina, int sirina, int st_iteracij, int DEBUG) {
    casovna_t* ct = malloc(sizeof(casovna_t));
    world *w = createWorld(visina, sirina);
    ct->tab = malloc(sizeof(double)*n);
    int i;
    double sum_time = 0, tmp_time;
    if(DEBUG) printf("---v:%d, s:%d, st_iteracij:%d :: 1 NIT---\n", visina, sirina, st_iteracij);
    for(i = 0; i < n; i++) {
        tmp_time = simulateMax(w, st_iteracij);
        if(DEBUG) printf("cas %3d: %.2f ms\n", i, tmp_time);
        ct->tab[i] = tmp_time;
        sum_time += tmp_time;
    }
    ct->sum = sum_time;
    sum_time /= n;
    ct->povp = sum_time;
    ct->len = n;
    if(DEBUG) {
        printf("povprecen cas: %.4f ms\n", sum_time);
        printf("standard error: %.4f ms\n", SE(ct, 0));
        printf("\n\n");
    }
    destroyWorld(w); /* the world was previously leaked on every call */
    return ct;
}
/*
 * Timing analysis for the OpenMP simulator with st_niti threads: run
 * st_iteracij generations n times on a fresh visina x sirina world and
 * record the per-run times, their sum, mean and count in the returned
 * casovna_t. The caller owns the returned struct and its tab array.
 */
casovna_t* casovna_analiza_vec_niti_OMP(int n, int visina, int sirina, int st_iteracij, int st_niti, int DEBUG) {
    casovna_t* ct = malloc(sizeof(casovna_t));
    world *w = createWorld(visina, sirina);
    ct->tab = malloc(sizeof(double)*n);
    int i;
    double sum_time = 0, tmp_time;
    omp_set_num_threads(st_niti);
    if(DEBUG) printf("---v:%d, s:%d, st_iteracij:%d :: %3d OMP---\n", visina, sirina, st_iteracij, omp_get_num_threads());
    for(i = 0; i < n; i++) {
        tmp_time = simulateOMPMax(w, st_iteracij);
        if(DEBUG) printf("cas %3d: %.2f ms\n", i, tmp_time);
        ct->tab[i] = tmp_time;
        sum_time += tmp_time;
    }
    ct->sum = sum_time;
    sum_time /= n;
    ct->povp = sum_time;
    ct->len = n;
    if(DEBUG) {
        printf("povprecen cas: %.4f ms\n", sum_time);
        printf("standard error: %.4f ms\n", SE(ct, 0));
        printf("\n\n");
    }
    destroyWorld(w); /* the world was previously leaked on every call */
    return ct;
}
/*
 * Timing analysis for the per-generation pthread simulator with st_niti
 * threads: run st_iteracij generations n times on a fresh visina x sirina
 * world and record the per-run times, their sum, mean and count in the
 * returned casovna_t. The caller owns the returned struct and its tab array.
 */
casovna_t* casovna_analiza_vec_niti(int n, int visina, int sirina, int st_iteracij, int st_niti, int DEBUG) {
    casovna_t* ct = malloc(sizeof(casovna_t));
    world *w = createWorld(visina, sirina);
    ct->tab = malloc(sizeof(double)*n);
    int i;
    double sum_time = 0, tmp_time;
    if(DEBUG) printf("---v:%d, s:%d, st_iteracij:%d :: %3d NITI---\n", visina, sirina, st_iteracij, st_niti);
    for(i = 0; i < n; i++) {
        tmp_time = simulateMaxMulty(w, st_niti, st_iteracij);
        if(DEBUG) printf("cas %3d: %.2f ms\n", i, tmp_time);
        ct->tab[i] = tmp_time;
        sum_time += tmp_time;
    }
    ct->sum = sum_time;
    sum_time /= n;
    ct->povp = sum_time;
    ct->len = n;
    if(DEBUG) {
        printf("povprecen cas: %.4f ms\n", sum_time);
        //printf("casovna_t:%p tab:%p len:%d\n", ct, ct->tab, ct->len);
        printf("standard error: %.4f ms\n", SE(ct, 0));
        printf("\n\n");
    }
    destroyWorld(w); /* the world was previously leaked on every call */
    return ct;
}
/*
 * Timing analysis for the persistent-thread pthread simulator with st_niti
 * threads: run st_iteracij generations n times on a fresh visina x sirina
 * world and record the per-run times, their sum, mean and count in the
 * returned casovna_t. The caller owns the returned struct and its tab array.
 */
casovna_t* casovna_analiza_vec_niti2(int n, int visina, int sirina, int st_iteracij, int st_niti, int DEBUG) {
    casovna_t* ct = malloc(sizeof(casovna_t));
    world *w = createWorld(visina, sirina);
    //initTest(w);
    //printWorld(w);
    if(DEBUG) {printf("multy2:\n");}
    ct->tab = malloc(sizeof(double)*n);
    int i;
    double sum_time = 0, tmp_time;
    if(DEBUG) printf("---v:%d, s:%d, st_iteracij:%d :: %3d NITI---\n", visina, sirina, st_iteracij, st_niti);
    for(i = 0; i < n; i++) {
        tmp_time = simulateMaxMulty2(w, st_niti, st_iteracij);
        //printWorld(w);
        if(DEBUG) printf("cas %3d: %.2f ms\n", i, tmp_time);
        ct->tab[i] = tmp_time;
        sum_time += tmp_time;
    }
    ct->sum = sum_time;
    sum_time /= n;
    ct->povp = sum_time;
    ct->len = n;
    if(DEBUG) {
        printf("povprecen cas: %.4f ms\n", sum_time);
        //printf("casovna_t:%p tab:%p len:%d\n", ct, ct->tab, ct->len);
        printf("standard error: %.4f ms\n", SE(ct, 0));
        printf("\n\n");
    }
    destroyWorld(w); /* the world was previously leaked on every call */
    return ct;
}
/*
 * Seed the world with a small hand-picked test pattern near the
 * top-left corner. The world must be at least 6x6.
 */
void initTest(world *w) {
    static const int cells[7][2] = {
        {5, 3}, {4, 4}, {3, 4}, {3, 2}, {2, 4}, {1, 3}, {3, 5}
    };
    int i;
    for(i = 0; i < 7; i++) {
        w->area[cells[i][0]][cells[i][1]] = 1;
    }
}
/*
 * Seed the world with a hand-placed starting pattern (several small
 * figures around rows 12-19). The world must be large enough to hold
 * coordinates up to (19, 36).
 */
void initWorld(world *w) {
    w->area[15][2] = 1;
    w->area[15][1] = 1;
    w->area[16][2] = 1;
    w->area[16][1] = 1;
    w->area[15][11] = 1;
    w->area[16][11] = 1;
    w->area[17][11] = 1;
    w->area[14][12] = 1;
    w->area[13][13] = 1;
    w->area[13][14] = 1;
    w->area[18][12] = 1;
    w->area[19][13] = 1;
    w->area[19][14] = 1;
    w->area[18][16] = 1;
    w->area[17][17] = 1;
    w->area[16][17] = 1;
    w->area[16][15] = 1;
    w->area[15][17] = 1;
    w->area[14][16] = 1;
    w->area[16][18] = 1;
    w->area[14][21] = 1;
    w->area[15][21] = 1;
    w->area[16][21] = 1;
    w->area[14][22] = 1;
    w->area[15][22] = 1;
    w->area[16][22] = 1;
    w->area[17][23] = 1;
    w->area[17][25] = 1;
    w->area[18][25] = 1;
    w->area[13][23] = 1;
    w->area[13][25] = 1;
    w->area[12][25] = 1;
    w->area[13][35] = 1;
    w->area[13][36] = 1;
    w->area[14][35] = 1;
    w->area[14][36] = 1;
}
/*
Inicializa zacetni svet: kvadrati v vogalih
*/
/*
 * Seed the world with a 2x2 block in each of its four corners.
 * The world must be at least 4x4.
 */
void initInf2World(world *w) {
    int rows[2], cols[2], r, c, dr, dc;
    rows[0] = 0;
    rows[1] = w->height - 2;
    cols[0] = 0;
    cols[1] = w->width - 2;
    for(r = 0; r < 2; r++) {
        for(c = 0; c < 2; c++) {
            for(dr = 0; dr < 2; dr++) {
                for(dc = 0; dc < 2; dc++) {
                    w->area[rows[r] + dr][cols[c] + dc] = 1;
                }
            }
        }
    }
}
/*
Inicializa zacetni svet za demo, svet mora biti vecji ali enak 94x94 celic
*/
/*
 * Seed the demo world: 2x2 blocks in the four corners plus assorted
 * figures scattered over the grid. The world must be at least 94x94
 * cells so that all hard-coded coordinates are in bounds.
 */
void initInfWorld(world *w) {
    /* 2x2 blocks in the four corners */
    w->area[0][0] = 1;
    w->area[0][1] = 1;
    w->area[1][0] = 1;
    w->area[1][1] = 1;
    w->area[0][w->width-1] = 1;
    w->area[0][w->width-2] = 1;
    w->area[1][w->width-1] = 1;
    w->area[1][w->width-2] = 1;
    w->area[w->height-1][0] = 1;
    w->area[w->height-1][1] = 1;
    w->area[w->height-2][0] = 1;
    w->area[w->height-2][1] = 1;
    w->area[w->height-1][w->width-1] = 1;
    w->area[w->height-1][w->width-2] = 1;
    w->area[w->height-2][w->width-1] = 1;
    w->area[w->height-2][w->width-2] = 1;
    /* additional hand-placed figures */
    w->area[4][4] = 1;
    w->area[4][5] = 1;
    w->area[5][4] = 1;
    w->area[5][5] = 1;
    w->area[3][80] = 1;
    w->area[3][81] = 1;
    w->area[4][80] = 1;
    w->area[4][81] = 1;
    w->area[10][10] = 1;
    w->area[10][11] = 1;
    w->area[10][12] = 1;
    w->area[9][12] = 1;
    w->area[8][11] = 1;
    w->area[20][80] = 1;
    w->area[20][81] = 1;
    w->area[20][82] = 1;
    w->area[19][80] = 1;
    w->area[18][81] = 1;
    w->area[50][50] = 1;
    w->area[50][51] = 1;
    w->area[50][52] = 1;
    w->area[60][60] = 1;
    w->area[60][61] = 1;
    w->area[60][62] = 1;
    w->area[61][63] = 1;
    w->area[61][61] = 1;
    w->area[61][62] = 1;
    w->area[90][90] = 1;
    w->area[91][90] = 1;
    w->area[90][91] = 1;
    w->area[91][91] = 1;
    w->area[92][92] = 1;
    w->area[92][93] = 1;
    w->area[93][92] = 1;
    w->area[93][93] = 1;
}
| 20fcd23050bd3686c3d9781226195fa8cec0a381 | [
"C",
"Shell"
] | 8 | C | kristanm1/Game_of_life | 48a142586a2927ace6628123fd8eee3f75ddcc45 | 4438dafd4c226504d6a52bea0f61989c59d9c45c |
refs/heads/master | <repo_name>chrisberkhout/ffaker<file_sep>/README.rdoc
= ffaker
Fast Faker == Faker refactored. http://rubygems.org/gems/ffaker
= Faker
A port of Perl's Data::Faker library that generates fake data. http://rubygems.org/gems/faker
== Usage
require 'ffaker'
Faker::Name.name => "<NAME>"
Faker::Internet.email => "<EMAIL>"
== Ffaker / Faker
The Faker and Ffaker APIs are mostly the same, although the ffaker API keeps
growing with its users' additions. In general, the only difference is that you
need to:
gem install ffaker
and then
require 'ffaker'
instead of "faker" with only one f.
== Faster? does it really matter ?
Ffaker was initially written in an effort to speed up a slow spec suite. Bear
in mind, if your spec suite is slow, chances are the generation of random data
will not account for much of the run time.
Since then, the original faker gem has become faster (at the time ffaker was
written, faker did not appear to be maintained). Nevertheless, ffaker is still
about 20x faster than faker.
$ cat scripts/benchmark.rb
# ...
N = 10_000
Benchmark.bm do |rep|
rep.report("generating #{ N } names") do
N.times do
Faker::Name.name
end
end
end
* ruby 1.8.7 (2011-02-18 patchlevel 334) [x86_64-linux], MBARI 0x6770, Ruby Enterprise Edition 2011.03
generating 10000 names (faker 0.9.5) 1.500000 0.000000 1.500000 ( 1.506865)
generating 10000 names (ffaker 1.5.0) 0.070000 0.000000 0.070000 ( 0.067526)
* ruby 1.9.2p180 (2011-02-18 revision 30909) [x86_64-linux]
generating 10000 names (faker 0.9.5) 1.030000 0.020000 1.050000 ( 1.046116)
generating 10000 names (ffaker 1.7.0) 0.040000 0.000000 0.040000 ( 0.045917)
== Contributors
* <NAME> ( http://github.com/jamesarosen/ffaker ).
* <NAME> ( http://github.com/rafaelss/ffaker ).
* <NAME> ( https://github.com/bdigital ).
* qichunren ( http://github.com/qichunren ).
* <NAME> ( https://github.com/kristjan )
* <NAME> ( https://github.com/rstacruz )
* <NAME> ( https://github.com/7even )
* <NAME> (https://github.com/marvin)
* Port 80 Labs (https://github.com/port80labs)
* <NAME> (https://github.com/swcool)
* <NAME> (https://github.com/chrisbloom7)
== Changelog
* 1.8.0
Added Faker::HTMLIpsum module inspired in http://html-ipsum.com/ (Thanks <NAME>loom, https://github.com/EmmanuelOga/ffaker/pull/18)
* 1.7.0
Added Faker::LoremCN (Thanks Shane Weng, Shane Weng, https://github.com/swcool, https://github.com/EmmanuelOga/ffaker/pull/16/files)
* 1.6.0
Added Faker::NameDE (Thanks David Noelte, https://github.com/marvin, https://github.com/EmmanuelOga/ffaker/pull/17)
Added Faker::Internet#disposable_email (Thanks Port 80 Labs, https://github.com/port80labs, https://github.com/EmmanuelOga/ffaker/pull/15)
* 1.5.0
Added Faker::NameRU (Thanks <NAME>, https://github.com/7even, https://github.com/EmmanuelOga/ffaker/pull/14)
* 1.4.0
Added Faker::Product and Faker::Education ( Thanks <NAME>, https://github.com/EmmanuelOga/ffaker/pull/12 )
* 1.3.0
Faker::Lorem.word as a convenience method, instead of Faker::Lorem.words(1).first.
* 1.2.0
New Faker::NameCN module (contributions by qichunren)
Faker::NameCN.first_name # => 鑫洋
Faker::NameCN.last_name # => 禹
Faker::NameCN.name # => 俊伶漫
* 1.1.0
Api additions (contributions by <NAME>)
Faker::Internet.uri(protocol)
Faker::Internet.http_url
Faker::Internet.ip_v4_address
* 0.4.0
Api additions by <NAME>
Faker::Geolocation.lat # => 40.6609944585817
Faker::Geolocation.lng # => -73.8454648940358
Faker::Address.neighborhood # => "Renton West"
== TODO
* Even though the API is pretty simple, better rdoc documentation would not hurt.
== Note on Patches/Pull Requests
* Fork the project.
* Make your feature addition or bug fix.
* Add tests for it. This is important so I don't break it in a
future version unintentionally.
* Commit; do not modify the rakefile, version, or history.
  (If you want to have your own version, that is fine, but bump the version
  in a separate commit by itself so I can ignore it when I pull.)
* Send me a pull request. Bonus points for topic branches.
== Copyright
Copyright (c) 2010 <NAME>. See LICENSE for details.
Copyright (c) 2007 <NAME>
<file_sep>/lib/ffaker/phone_number.rb
module Faker
  # Generator for US-style phone numbers.
  module PhoneNumber
    extend ModuleUtils
    extend self

    # Returns a random phone number in one of twenty formats: dashed,
    # dotted, parenthesised area code, or "1-" prefixed, each optionally
    # followed by a 3-5 digit extension. Placeholders are filled in by
    # Faker.numerify.
    def phone_number
      Faker.numerify case rand(20)
      when 0 then '###-###-#### x#####'
      when 1 then '###-###-#### x####'
      when 2 then '###-###-#### x###'
      when 3..4 then '###-###-####'
      when 5 then '###.###.#### x#####'
      when 6 then '###.###.#### x####'
      when 7 then '###.###.#### x###'
      when 8..9 then '###.###.####'
      when 10 then '(###)###-#### x#####'
      when 11 then '(###)###-#### x####'
      when 12 then '(###)###-#### x###'
      when 13..14 then '(###)###-####'
      when 15 then '1-###-###-#### x#####'
      when 16 then '1-###-###-#### x####'
      when 17 then '1-###-###-#### x###'
      when 18..19 then '1-###-###-####'
      end
    end

    # Returns a random phone number in the plain ###-###-#### format.
    # Defined as a plain instance method (like #phone_number) so that
    # `extend self` exposes it as a module method — this keeps the
    # definition style consistent within the module; callers still use
    # Faker::PhoneNumber.short_phone_number exactly as before.
    def short_phone_number
      Faker.numerify('###-###-####')
    end
  end
end
<file_sep>/test/test_array_utils.rb
require 'helper'
require 'set'
# Exercises Faker::ArrayUtils' const-array behaviour: freezing, random
# element selection, random picking of n elements, and shuffling.
class TestArrayUtils < Test::Unit::TestCase
  def setup
    @letters = Faker::ArrayUtils.const_array("a".."c")
    @expected = Set.new("a".."c")
  end

  it "provides a way of freezing the elements and itself" do
    assert @letters.respond_to?(:freeze_all)
    @letters.freeze_all
    assert @letters.frozen?
    @letters.each do |element|
      assert element.frozen?
    end
  end

  it "provides a way of getting a random element" do
    seen = Set.new
    1000.times { seen << @letters.rand }
    # With 1000 draws from 3 elements, every element should appear.
    assert seen == @expected
  end

  it "provides a way of getting n random elements" do
    assert_equal @letters.sort, @letters.random_pick(3).sort
    (1..3).each do |count|
      1000.times do
        picked = @letters.random_pick(count)
        assert_equal count, picked.length
        picked.each do |element|
          assert @expected.include?(element)
        end
      end
    end
  end

  it "provides a way of shuffling the array" do
    assert_equal @letters.sort, @letters.shuffle.sort
    reorderings = 0
    1000.times do
      shuffled = @letters.shuffle
      assert shuffled.to_set == @expected
      reorderings += 1 if shuffled != @letters
    end
    # At least one of 1000 shuffles should differ from the original order.
    assert reorderings > 0
  end
end
<file_sep>/lib/ffaker/education.rb
module Faker
  # Generates fake degrees, majors and school names.
  module Education
    extend ModuleUtils
    extend self

    # Abbreviated degree, e.g. "BS in Industrial Arts".
    def degree_short
      "#{DEGREE_SHORT_PREFIX.rand} in #{major}"
    end

    # Spelled-out degree, e.g. "Bachelor of Science in Marketing Studies".
    def degree
      "#{DEGREE_PREFIX.rand} in #{major}"
    end

    # Adjective + noun field of study.
    def major
      "#{MAJOR_ADJ.rand} #{MAJOR_NOUN.rand}"
    end

    # Invented place-style name, e.g. "Greenwood".
    def school_name
      SCHOOL_PREFIX.rand + SCHOOL_SUFFIX.rand
    end

    # 50/50 split between a US state name and an invented place name.
    def school_generic_name
      rand(2) == 0 ? Address::STATE.rand : school_name
    end

    # Full institution name in one of four patterns.
    def school
      roll = rand(5)
      if roll < 2
        "#{school_name} #{SCHOOL_TYPE.rand}"
      elsif roll == 2
        "#{school_generic_name} #{SCHOOL_ADJ.rand} #{SCHOOL_TYPE.rand}"
      elsif roll == 3
        "#{SCHOOL_UNI.rand} of #{school_generic_name}"
      else
        "#{school_generic_name} #{SCHOOL_TYPE.rand} of #{MAJOR_NOUN.rand}"
      end
    end

    DEGREE_SHORT_PREFIX = k %w(AB BS BSc MA MD DMus DPhil)
    DEGREE_PREFIX = k ['Bachelor of Science', 'Bachelor of Arts', 'Master of Arts', 'Doctor of Medicine', 'Bachelor of Music', 'Doctor of Philosophy']
    MAJOR_ADJ = k (%w(Business Systems Industrial Medical Financial Marketing Political Social) + ['Human Resource'])
    MAJOR_NOUN = k %w(Science Arts Administration Engineering Management Production Economics Architecture Accountancy Education Development Philosophy Studies)
    SCHOOL_PREFIX = k %w(Green South North Wind Lake Hill Lark River Red White)
    SCHOOL_SUFFIX = k %w(wood dale ridge ville point field shire shore crest spur well side coast)
    SCHOOL_ADJ = k %w(International Global Polytechnic National)
    SCHOOL_TYPE = k %w(School University College Institution Academy)
    SCHOOL_UNI = k %w(University College)
  end
end
<file_sep>/lib/ffaker.rb
module Faker
  VERSION = "1.8.0"

  require 'ffaker/utils/module_utils'
  extend ModuleUtils

  # Pool of lowercase letters used by .letterify.
  LETTERS = k('a'..'z')

  # Replaces every '#' in +number_string+ with a random digit and returns
  # the result as a NEW string. The previous implementation used the
  # destructive gsub!, which raises when handed the frozen format strings
  # produced by ModuleUtils#k (e.g. AddressDE::ZIP_FORMATS) and also
  # mutated the caller's argument; the non-destructive gsub fixes both
  # (and satisfies test_zip_code_frozen, which expects an unfrozen result).
  def self.numerify(number_string)
    number_string.gsub(/#/) { rand(10).to_s }
  end

  # Replaces every '?' in +letter_string+ with a random lowercase letter,
  # returning a new string (non-destructive for the same reasons as
  # .numerify).
  def self.letterify(letter_string)
    letter_string.gsub(/\?/) { LETTERS.rand }
  end

  # Fills in both '#' (digit) and '?' (letter) placeholders in +string+.
  def self.bothify(string)
    letterify(numerify(string))
  end

  autoload :Address, 'ffaker/address'
  autoload :AddressDE, 'ffaker/address_de'
  autoload :Company, 'ffaker/company'
  autoload :Education, 'ffaker/education'
  autoload :HTMLIpsum, 'ffaker/html_ipsum'
  autoload :Internet, 'ffaker/internet'
  autoload :Lorem, 'ffaker/lorem'
  autoload :LoremCN, 'ffaker/lorem_cn'
  autoload :Name, 'ffaker/name'
  autoload :NameCN, 'ffaker/name_cn'
  autoload :NameRU, 'ffaker/name_ru'
  autoload :NameDE, 'ffaker/name_de'
  autoload :PhoneNumber, 'ffaker/phone_number'
  autoload :Product, 'ffaker/product'
  autoload :Geolocation, 'ffaker/geolocation'
  # NOTE(review): VERSION is already assigned above, so this autoload is
  # effectively inert — confirm whether 'ffaker/version' is still needed.
  autoload :VERSION, 'ffaker/version'
end
<file_sep>/test/test_address.rb
require 'helper'
# Smoke tests for Faker::Address: each generator returns a plausible
# non-empty string matching a loose character-class pattern.
class TestAddress < Test::Unit::TestCase
  def test_city
    value = Faker::Address.city
    assert_match(/[ a-z]+/, value)
  end

  def test_city_prefix
    value = Faker::Address.city_prefix
    assert_match(/[ a-z]/, value)
  end

  def test_city_suffix
    value = Faker::Address.city_suffix
    assert_match(/[ a-z]/, value)
  end

  def test_secondary_address
    value = Faker::Address.secondary_address
    assert_match(/[ a-z]/, value)
  end

  def test_street_address
    value = Faker::Address.street_address
    assert_match(/[ a-z]/, value)
  end

  def test_street_name
    value = Faker::Address.street_name
    assert_match(/[ a-z]/, value)
  end

  def test_street_suffix
    value = Faker::Address.street_suffix
    assert_match(/[ a-z]/, value)
  end

  def test_uk_country
    value = Faker::Address.uk_country
    assert_match(/[ a-z]/, value)
  end

  def test_uk_county
    value = Faker::Address.uk_county
    assert_match(/[ a-z]/, value)
  end

  def test_uk_postcode
    value = Faker::Address.uk_postcode
    assert_match(/[ a-z]/, value)
  end

  def test_us_state
    value = Faker::Address.us_state
    assert_match(/[ a-z]/, value)
  end

  def test_us_state_abbr
    value = Faker::Address.us_state_abbr
    assert_match(/[A-Z]/, value)
  end

  def test_zip_code
    value = Faker::Address.zip_code
    assert_match(/[0-9]/, value)
  end

  # Generated zip codes must be fresh, mutable strings.
  def test_zip_code_frozen
    value = Faker::Address.zip_code
    assert value.frozen? == false
  end

  def test_neighborhood
    value = Faker::Address.neighborhood
    assert_match(/[ a-z]+/, value)
  end
end
<file_sep>/lib/ffaker/internet.rb
module Faker
  # Generators for email addresses, user names, domains, URLs and IPs.
  module Internet
    extend ModuleUtils
    extend self

    # "user.name@generated-domain.tld"-style address.
    def email(name = nil)
      [ user_name(name), domain_name ].join('@')
    end

    # returns an email address of an online disposable email service (like tempinbox.com).
    # you can really send an email to these addresses an access it by going to the service web pages.
    def disposable_email(name = nil)
      [ user_name(name), DISPOSABLE_HOSTS.rand ].join('@')
    end

    # Address at one of the big free mail providers.
    def free_email(name = nil)
      "#{user_name(name)}@#{HOSTS.rand}"
    end

    # Builds a lowercase login. With +name+ given, its words are shuffled
    # and joined with '.' or '_'; otherwise it is built from random
    # first/last names.
    def user_name(name = nil)
      if name
        parts = ArrayUtils.shuffle(name.scan(/\w+/)).join(ArrayUtils.rand(%w(. _)))
        parts.downcase
      else
        case rand(2)
        when 0
          Name.first_name.gsub(/\W/, '').downcase
        when 1
          # Non-destructive gsub here: the name data comes from frozen
          # constant arrays (ModuleUtils#k), so the previous in-place
          # `each {|n| n.gsub!(...) }` raised on frozen strings.
          parts = [ Name.first_name, Name.last_name ].map { |n| n.gsub(/\W/, '') }
          parts.join(ArrayUtils.rand(%w(. _))).downcase
        end
      end
    end

    def domain_name
      "#{domain_word}.#{domain_suffix}"
    end

    # First word of a random company name, stripped of punctuation and
    # lowercased.
    def domain_word
      Company.name.split(' ').first.gsub(/\W/, '').downcase
    end

    def domain_suffix
      DOMAIN_SUFFIXES.rand
    end

    # e.g. uri("https") #=> "https://example.co.uk"
    def uri(protocol)
      "#{protocol}://#{domain_name}"
    end

    def http_url
      uri("http")
    end

    # Dotted quad with each octet in 0..255.
    def ip_v4_address
      (1..4).map { BYTE.random_pick(1) }.join(".")
    end

    BYTE = k((0..255).to_a.map { |n| n.to_s })
    HOSTS = k %w(gmail.com yahoo.com hotmail.com)
    DISPOSABLE_HOSTS = k %w(mailinator.com suremail.info spamherelots.com binkmail.com safetymail.info tempinbox.com)
    DOMAIN_SUFFIXES = k %w(co.uk com us uk ca biz info name)
  end
end
<file_sep>/lib/ffaker/name_de.rb
module Faker
  # German name generator. Mixes in Faker::Name so helpers not overridden
  # here fall back to the generic (English) implementation.
  # NOTE(review): the "<NAME>" tokens in the data below look like
  # redaction/extraction artifacts, not real names — confirm against the
  # upstream data before shipping.
  module NameDE
    include Faker::Name
    extend ModuleUtils
    extend self

    # Full name; roughly 1 in 10 carries an academic/courtesy prefix.
    def name
      case rand(10)
      when 0 then "#{prefix} #{first_name} #{last_name}"
      else "#{first_name} #{last_name}"
      end
    end

    # Random German given name.
    def first_name
      FIRST_NAMES.rand
    end

    # Random German surname.
    def last_name
      LAST_NAMES.rand
    end

    # Random courtesy/academic prefix (e.g. "Dr.", "Prof.").
    def prefix
      PREFIXES.rand
    end

    FIRST_NAMES = k %w(<NAME>
      <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME> Isabell Is<NAME>
      <NAME> Isabel<NAME>
      Franziska <NAME>
      <NAME>
      <NAME> Kimberley <NAME>
      <NAME> Annabelle Annabell Annabel An<NAME> <NAME>
      <NAME>
      <NAME> Olivia <NAME> S<NAME> N<NAME> Mar<NAME> Cheyenne
      Chayenne Annalena Car<NAME> Tabea Aliyah A<NAME> J<NAME> Alissa Alyssa Anastasia
      Mar<NAME> Noemi Lene Milla Rosalie Lu<NAME> Evelin Eveline Fenja Tessa
      <NAME>
      <NAME> Ver<NAME> Liana Livia Mal<NAME>
      <NAME> <NAME> Meike The<NAME>
      <NAME> <NAME> Thalia Alea Rieke Rike Svenja Liliana Jan<NAME> Alisha
      Ava Kaja Kaya Caja Liv Rosa Val<NAME> Joyce Selin Ina Aleyna Enya Jamie Naomi <NAME>
      <NAME> <NAME> Melek <NAME> M<NAME>
      <NAME> Alma Eda <NAME> <NAME> <NAME> Jenny <NAME>
      Cassandra Kassandra Esila Felicia Malia Smilla Alena Amelia Aurora Ceyda Juliane Leandra Lilith Madita
      Melisa Nika Summer Fatima Ilayda Joleen Malina Sandra Jasmina Katja Medina An<NAME> Val<NAME>
      Madleen Aliya Charlotta Eleni Hailey Mailin Denise Fine Flora Madeleine Sena Vivian Ann Annemarie Asya
      Christin Kristin Jara J<NAME> Tara Viola Alia Ellen Enie Lydia Milana Nala Adriana
      Aimee Anja Chantal Elise Elsa Gina Jo<NAME> Malea Mar<NAME> Talea Thalea Tilda Delia Joana
      Kiana Mel<NAME> Sude Amanda Enna Esther Holly Irem Marlena Mirja Phoebe Rahel Verena <NAME>
      <NAME> Catrin Katrin Kathrin Lavinia Lenia N<NAME> As<NAME>
      <NAME> Maila Mareike Selena Soey Ylvi Ylvie Zara Abby Ayse <NAME>
      <NAME> Luzie Nila S<NAME> Sydney Tina Ty<NAME> C<NAME>
      Inga J<NAME> Nia Ruby Stine Sunny Tamia Tiana Alara Charleen Collien Fanny Fatma
      Felina Ines Jane Maxima Tarja Adelina Alica Dila Elanur Elea Gloria Jamila Kate Loreen Lou Maxi
      Melody Nela Rania Sabrina Ariana Charline Christine Cosima Leia Leya Leonora Lindsay Megan Naemi
      Nahla Sahra Saphira Serafina Stina Toni Tony Yaren Abigail Ece Evelina Frederike Inka Irma Kayra
      Mariam Mar<NAME> Violetta Yagmur Celin Eleonora Felia Femke Finia Hedda Hedi Henrike Jody
      <NAME> Lilliana Luca Luka Maira Naila Naima Natalia Neela Sal<NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME> Felix Tim <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME> <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME> <NAME> <NAME> <NAME>
      <NAME>
      <NAME> <NAME> <NAME> <NAME>
      <NAME> <NAME> <NAME> <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME> <NAME>
      <NAME> <NAME> <NAME>
      <NAME>
      <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME> <NAME>
      <NAME> <NAME> <NAME> <NAME>
      <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME>
      <NAME> <NAME> Hanno <NAME> <NAME>
      Milow <NAME> <NAME>
      <NAME> <NAME> Jendrik <NAME> O<NAME> <NAME> Sydn<NAME>
      <NAME> Amon Benny <NAME>
      <NAME> <NAME> <NAME>
      <NAME> Enzo)
    LAST_NAMES = k %w(<NAME> <NAME>
      <NAME>
      <NAME>
      <NAME> <NAME> <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME> <NAME>
      <NAME>
      <NAME> Sauer <NAME>
      Schmenke <NAME> <NAME>)
    PREFIXES = k %w(<NAME> Dr. Prof.)
  end
end
<file_sep>/lib/ffaker/product.rb
module Faker
  # Invents brand names, product names and model numbers.
  module Product
    extend ModuleUtils
    extend self

    # Brand name: two stock syllables glued together (~5/12), a coined
    # pronounceable word (~6/12), or a short initialism (~1/12).
    def brand
      roll = rand(12)
      if roll <= 4
        B1.rand + B2.rand
      elsif roll <= 10
        coined = START.rand + VOWELS.rand + SUFFIX.rand
        coined += ADDON.rand if rand(2) == 0
        coined.capitalize
      else
        letters(2..3)
      end
    end

    # One or two distinct adjectives followed by a noun.
    def product_name
      if rand(2) == 0
        "#{ADJ.rand} #{NOUN.rand}"
      else
        "#{[ADJ.rand, ADJ.rand].uniq.join(" ")} #{NOUN.rand}"
      end
    end

    # Brand plus product name, e.g. "Sonix Digital Tuner".
    def product
      "#{brand} #{product_name}"
    end

    # String of uppercase letters; when +n+ is a Range, a random length
    # within it is chosen.
    def letters(n)
      count = n.is_a?(Range) ? n.to_a.shuffle.first : n
      (0...count).map { LETTERS.rand.upcase }.join
    end

    # Model designation such as "N90" or "N-9400".
    def model
      if rand(2) == 0
        "#{LETTERS.rand.upcase}#{rand(90)}" # N90
      else
        "#{letters(0..rand(2))}-#{rand(9900)}" # N-9400
      end
    end

    B1 = k %w(So Lu Sir Bri Reu Gen Fin Pana Sine Co Aqua Am Ca Cyg Tech After Sub One Tri)
    B2 = k %w(nix cell sync func balt sche pod)
    VOWELS = k %w(a e i o u ou ie y io)
    START = k %w(tr br p ph)
    SUFFIX = k %w(ck ns nce nt st ne re ffe ph)
    ADDON = k %w(wood forge func)
    ADJ = k %w(Air Gel Auto Power Tag Audible HD GPS Portable Disc Electric Performance Side Video Input Output Direct Remote Digital)
    NOUN = k %w(Filter Compressor System Viewer Mount Case Adapter Amplifier Bridge Bracket Kit Transmitter Receiver Tuner Controller Component)
  end
end
<file_sep>/test/test_faker_name_cn.rb
# encoding: utf-8
require 'helper'
class TestFakerNameCN < Test::Unit::TestCase
  def setup
    @tester = Faker::NameCN
  end

  # A full Chinese name is surname + given name, so it must be longer
  # than two characters. Uses @tester (set up above) instead of
  # referencing Faker::NameCN directly, so the fixture is actually used.
  def test_name
    assert @tester.name.length > 2
  end
end
<file_sep>/test/test_company.rb
require 'helper'
# Smoke tests for Faker::Company generators.
class TestCompany < Test::Unit::TestCase
  def test_bs
    first_sample = Faker::Company.bs
    second_sample = Faker::Company.bs
    assert_match(/[ a-z]+/, first_sample)
    assert_match(/\s/, second_sample)
  end

  def test_catch_phrase
    first_sample = Faker::Company.catch_phrase
    second_sample = Faker::Company.catch_phrase
    assert_match(/[ a-z]+/, first_sample)
    assert_match(/\s/, second_sample)
  end

  def test_name
    value = Faker::Company.name
    assert_match(/[ a-z]+/, value)
  end

  def test_suffix
    value = Faker::Company.suffix
    assert_match(/[ a-z]+/i, value)
  end
end
<file_sep>/lib/ffaker/address_de.rb
module Faker
  # German address data. Mixes in Faker::Address so helpers not
  # overridden here fall back to the generic implementation.
  module AddressDE
    include Faker::Address
    extend ModuleUtils
    extend self

    # Five-digit German postal code ('#####' filled by Faker.numerify).
    def zip_code
      Faker.numerify ZIP_FORMATS.rand
    end

    # Random German federal state (Bundesland).
    def state
      STATE.rand
    end

    # Random German city name.
    def city
      CITY.rand
    end

    ZIP_FORMATS = k ['#####']
    # German federal states, umlauts transliterated (e.g. "Thueringen").
    # NOTE(review): only 15 entries — "Sachsen-Anhalt" appears to be
    # missing from the 16 Bundeslaender; confirm before relying on this.
    STATE = k ['Baden-Wuerttemberg', 'Bayern', 'Berlin', 'Brandenburg', 'Bremen',
      'Hamburg', 'Hessen', 'Mecklenburg-Vorpommern', 'Niedersachsen', 'Nordrhein-Westfalen',
      'Rheinland-Pfalz', 'Saarland', 'Sachsen', 'Schleswig-Holstein',
      'Thueringen']
    # German city names, umlauts transliterated.
    # NOTE(review): %w splits on whitespace, so multi-word names such as
    # "Bergisch Gladbach" below become two separate entries — confirm
    # whether that is intended.
    CITY = k %w(Aach Aachen Aalen Abenberg Abensberg Achern Achim Adelsheim Adenau Adorf Ahaus Ahlen
      Ahrensburg Aichach Aichtal Aken Albstadt Alfeld Allendorf Allstedt Alpirsbach Alsfeld
      Alsdorf Alsleben Altdorf Altena Altenau Altenberg Altenburg Altenkirchen Altensteig
      Altentreptow Altlandsberg Alzenau Alzey Amberg Amorbach Andernach Angermuende Anklam
      Annaberg-Buchholz Annaburg Annweiler Ansbach Apolda Arendsee Arneburg Arnis Arnsberg
      Arnstadt Arnstein Artern Arzberg Aschaffenburg Aschersleben Asperg Attendorn Aub
      Aue Auerbach Augsburg Augustusburg Aulendorf Auma Aurich Babenhausen Bacharach Backnang
      Baden-Baden Baesweiler Baiersdorf Balingen Ballenstedt Balve Bamberg Barby Bargteheide
      Barmstedt Baernau Barntrup Barsinghausen Barth Baruth Bassum Battenberg Baumholder Baunach
      Baunatal Bautzen Bayreuth Bebra Beckum Bedburg Beelitz Beerfelden Beeskow Beilngries Beilstein
      Belgern Belzig Bendorf Benneckenstein Bensheim Berching Berga Bergen
      Bergheim Bergisch Gladbach Bergkamen Bergneustadt Berka Berlin Bernburg Bernkastel-Kues
      Bernsdorf Bersenbrueck Besigheim Betzdorf Betzenstein Beverungen Bexbach Biedenkopf
      Bielefeld Biesenthal Bietigheim-Bissingen Billerbeck Birkenfeld Bischofswerda Bismark
      Bitburg Bitterfeld Blankenburg Blankenhain Blaubeuren Bleckede Bleicherode Blieskastel
      Blomberg Blumberg Bobingen Bocholt Bochum Bockenem Bodenwerder Bogen Boizenburg Bonn
      Bopfingen Boppard Borgentreich Borgholzhausen Borken Borken Borkum Borna Bornheim Bottrop
      Boxberg Brackenheim Brake Brakel Bramsche Brand-Erbisdorf Brandis Braubach Braunfels
      Braunlage Braeunlingen Braunsbedra Braunschweig Breckerfeld Bredstedt Brehna Bremen
      Bremerhaven Bretten Breuberg Brilon Brotterode Bruchsal Brueck Brueel Bruehl Brunsbuettel
      Bruessow Buchen Buchloe Bueckeburg Buckow Buedelsdorf Buedingen Buehl Buende Bueren Burg
      Burgau Burgbernheim Burgdorf Buergel Burghausen Burgkunstadt Burglengenfeld Burgstaedt
      Burgwedel Burladingen Burscheid Buerstadt Buttelstedt Buttstaedt Butzbach Buetzow Buxtehude
      Calau Calbe Calw Camburg Castrop-Rauxel Celle Cham Chemnitz Clausthal-Zellerfeld Clingen
      Cloppenburg Coburg Cochem Coesfeld Colditz Coswig Coswig Cottbus Crailsheim Creglingen
      Creuzburg Crimmitschau Crivitz Cuxhaven Dachau Dahlen Dahn Damme Dannenberg
      Dargun Darmstadt Dassel Dassow Datteln Daun Deggendorf Deidesheim Delbrueck Delitzsch
      Delmenhorst Demmin Derenburg Dessau Detmold Dettelbach Dieburg Diemelstadt Diepholz Dierdorf
      Dietenheim Dietfurt Dietzenbach Diez Dillenburg Dillingen Dingolfing Dinkelsbuehl Dinklage
      Dinslaken Dippoldiswalde Dissen Ditzingen Doberlug-Kirchhain Dohna Dommitzsch
      Donaueschingen Donzdorf Dorfen Dormagen Dornhan Dornstetten Dorsten Dortmund Dransfeld
      Drebkau Dreieich Drensteinfurt Dresden Drolshagen Duderstadt Duisburg Duelmen Dueren
      Duesseldorf Ebeleben Eberbach Ebermannstadt Ebern Ebersbach Ebersberg Eberswalde Eckartsberga
      Eckernfoerde Edenkoben Egeln Eggenfelden Eggesin Ehingen Ehrenfriedersdorf Eibelstadt
      Eibenstock Eichstaett Eilenburg Einbeck Eisenach Eisenberg Eisenberg Eisenhuettenstadt
      Eisfeld Eisleben Eislingen Elbingerode Ellingen Ellrich Ellwangen Elmshorn Elsfleth Elsterberg
      Elsterwerda Elstra Elterlein Eltmann Eltville Elzach Elze Emden Emmendingen Emmerich Emsdetten
      Endingen Engen Enger Ennepetal Ennigerloh Eppelheim Eppingen Eppstein Erbach Erbach Erbendorf
      Erding Erftstadt Erfurt Erkelenz Erkner Erkrath Erlangen Erlenbach Erwitte Eschborn Eschenbach
      Eschershausen Eschwege Eschweiler Esens Espelkamp Essen Esslingen Ettenheim Ettlingen Euskirchen
      Eutin Falkenberg Falkensee Falkenstein Fehmarn Fellbach Felsberg Feuchtwangen Filderstadt
      Finsterwalde Fladungen Flensburg Forchheim Forchtenberg Forst Frankenau Frankenberg Frankenberg
      Frankenthal Frankfurt Franzburg Frauenstein Frechen Freiberg Freilassing Freinsheim Freising
      Freital Freren Freudenberg Freudenberg Freudenstadt Freyburg Freystadt Freyung Friedberg Friedberg
      Friedland Friedland Friedrichroda Friedrichsdorf Friedrichshafen Friedrichstadt Friedrichsthal
      Friesack Friesoythe Fritzlar Frohburg Fulda Gadebusch Gaggenau Gaildorf Gammertingen Garbsen
      Gardelegen Garding Gartz Gau-Algesheim Gebesee Gedern Geesthacht Gefell Gefrees Gehrden Gehren
      Geilenkirchen Geisa Geiselhoering Geisenfeld Geisenheim Geising Geisingen Geislingen Geithain
      Geldern Gelnhausen Gelsenkirchen Gemuenden Gengenbach Genthin Georgsmarienhuette Gera Gerabronn
      Gerbstedt Geretsried Geringswalde Gerlingen Germering Germersheim Gernrode Gernsbach Gernsheim
      Gerolstein Gerolzhofen Gersfeld Gersthofen Gescher Geseke Gevelsberg Geyer Gifhorn
      Gladbeck Gladenbach Glashuette Glauchau Glinde Gluecksburg Glueckstadt Gnoien Goch Goldberg
      Goldkronach Gommern Goeppingen Go<NAME> Grabow
      Grafenau Graefenberg Graefenhainichen Graefenthal Grafenwoehr Gransee Grebenau Grebenstein
      Greding Greifswald Greiz Greven Grevenbroich Grevesmuehlen Griesheim Grimma Grimmen
      Groebzig Groeditz Groitzsch Gronau Gronau Groeningen
      Gruenberg Gruenhain-Beierfeld Gruensfeld Gruenstadt Guben Gudensberg Gueglingen
      Gummersbach Gundelsheim Guentersberge Guenzburg Gunzenhausen Guesten Guestrow Guetersloh
      Guetzkow Haan Hachenburg Hadamar Hadmersleben Hagen Hagenbach Hagenow Haiger Haigerloch
      Hainichen Haiterbach Halberstadt Haldensleben Halle Halle Hallenberg Hallstadt Halver
      Hamburg Hameln Hamm Hammelburg Hamminkeln Hanau Hannover Harburg Hardegsen Haren Harsewinkel
      Hartenstein Hartha Harzgerode Haseluenne Hasselfelde Hattingen Hatzfeld Hausach
      Hauzenberg Havelberg Havelsee Hayingen Hechingen Hecklingen Heide Heideck Heidelberg
      Heidenau Heilbronn Heiligenhafen Heiligenhaus Heilsbronn Heimbach Heimsheim Heinsberg
      Heitersheim Heldrungen Helmbrechts Helmstedt Hemau Hemer Hemmingen Hemmoor Hemsbach Hennef
      Hennigsdorf Heppenheim Herbolzheim Herborn Herbrechtingen Herbstein Herdecke Herdorf Herford
      Heringen Hermeskeil Hermsdorf Herne Herrenberg Herrieden Herrnhut Hersbruck Herten Herzberg
      Herzogenaurach Herzogenrath Hettingen Hettstedt Heubach Heusenstamm Hilchenbach Hildburghausen
      Hilden Hildesheim Hillesheim Hilpoltstein Hirschau Hirschberg Hirschhorn Hitzacker Hockenheim
      Hof Hofgeismar Hohenleuben Hohenmoelsen Hohnstein Hoehr-Grenzhausen Hollfeld Holzgerlingen
      Holzminden Homberg Homberg Homburg Hornbach Hornberg Hornburg Hoerstel Horstmar Hoexter Hoya
      Hoyerswerda Hoym Hueckelhoven Hueckeswagen Huefingen Huenfeld Hungen Huerth Husum Ibbenbueren
      Ichenhausen Idar-Oberstein Idstein Illertissen Ilmenau Ilsenburg Ilshofen Immenhausen Ingelfingen
      Ingolstadt Iphofen Iserlohn Isselburg Itzehoe Jarmen Jena Jerichow Jessen Jever Joachimsthal
      Johanngeorgenstadt Joehstadt Juelich Jueterbog Kaarst Kahla Kaisersesch Kaiserslautern Kalbe Kalkar
      Kaltenkirchen Kaltennordheim Kamen Kamenz Kamp-Lintfort Kandel Kandern Kappeln Karben Karlsruhe
      Karlstadt Kassel Kastellaun Katzenelnbogen Kaub Kaufbeuren Kehl Kelbra Kelheim Kelkheim Kellinghusen
      Kelsterbach Kemberg Kemnath Kempen Kempten Kenzingen Kerpen Ketzin Kevelaer Kiel Kierspe
      Kindelbrueck Kirchberg Kirchberg Kirchen Kirchenlamitz Kirchhain Kirchheimbolanden Kirn Kirtorf
      Kitzingen Kitzscher Kleve Knittlingen Koblenz Kohren-Sahlis Kolbermoor Konstanz Konz Korbach
      Korntal-Muenchingen Kornwestheim Korschenbroich Kraichtal Kranichfeld Krautheim Krefeld Kremmen
      Krempe Kreuztal Kronach Kroppenstedt Krumbach Kuehlungsborn Kulmbach Kuelsheim Kuenzelsau
      Kupferberg Kuppenheim Kusel Kyllburg Kyritz Laage Laatzen Ladenburg Lage Lahnstein Laichingen
      Lambrecht Lampertheim Landsberg Landshut Landstuhl Langelsheim Langen Langen Langenau Langenburg
      Langenfeld Langenhagen Langenselbold Langenzenn Langewiesen Lassan Laubach Lauchhammer Lauchheim
      Lauda-Koenigshofen Laufen Laufenburg Lauingen Laupheim Lauscha Lauta Lauter Lauterbach Lauterecken
      Lauterstein Lebach Lebus Leer Lehesten Lehrte Leichlingen Leimen Leinefelde-Worbis Leinfelden-Echterdingen
      Leipheim Leipzig Leisnig Lemgo Lengefeld Lengenfeld Lengerich Lennestadt Lenzen Leonberg Leun Leuna
      Leutenberg Leutershausen Leverkusen Lich Lichtenau Lichtenberg Lichtenfels Lichtenstein Liebenau
      Liebenwalde Lieberose Liebstadt Lindau Lindau Linden Lindenfels Lindow Lingen Linnich Lippstadt
      Loebau Loebejuen Loburg Lohmar Lohne Loehne Loitz Lollar Lommatzsch Loeningen Lorch Lorch Loerrach
      Lorsch Loewenstein Luebbecke Luebben Luebeck Luebtheen Luebz Luechow Lucka Luckau
      Luckenwalde Luedenscheid Luedinghausen Ludwigsburg Ludwigsfelde Ludwigslust Ludwigsstadt Luegde
      Lueneburg Luenen Lunzenau Luetjenburg Luetzen Lychen Magdala Magdeburg Mahlberg Mainbernheim
      Mainburg Maintal Mainz Malchin Malchow Mannheim Manderscheid Mansfeld Marburg Marienberg
      Marienmuenster Markdorf Markgroeningen Markkleeberg Markneukirchen Markranstaedt Marktbreit
      Marktheidenfeld Marktleuthen Marktoberdorf Marktredwitz Marktsteft Marl Marlow Marne Marsberg
      Maulbronn Maxhuette-Haidhof Mayen Mechernich Meckenheim Medebach Meerane Meerbusch Meersburg
      Meinerzhagen Meiningen Meisenheim Meldorf Melle Mellrichstadt Melsungen Memmingen
      Menden Mendig Mengen Meppen Merkendorf Merseburg Merzig Meschede Mettmann
      Metzingen Meuselwitz Meyenburg Michelstadt Miesbach Miltenberg Mindelheim Minden Mirow
      Mittenwalde Mitterteich Mittweida Moers Monheim Monschau Montabaur Moerfelden-Walldorf
      Moringen Mosbach Moessingen Muecheln Muegeln Muehltroff Muelheim-Kaerlich Muellheim Muellrose
      Muenchberg Muencheberg Muenchen Muenchenbernsdorf Munderkingen Muennerstadt Muensingen
      Munster Muenster Muenstermaifeld Muenzenberg Murrhardt Mutzschen Mylau Nabburg Nagold Naila
      Nassau Nastaetten Nauen Naumburg Naumburg Naunhof Nebra Neckarbischofsheim Neckargemuend
      Neckarsteinach Neckarsulm Nerchau Neresheim Netphen Nettetal Netzschkau Neubrandenburg Neubukow
      Neubulach Neudenau Neuenbuerg Neuenhaus Neuenrade Neuenstein Neuerburg Neuffen Neugersdorf
      Neu-Isenburg Neukalen Neukirchen Neukirchen-Vluyn Neukloster Neumark Neunkirchen Neuruppin
      Neusalza-Spremberg Neuss Neustadt Neustadt-Glewe Neustrelitz Neutraubling Neu-Ulm Neuwied
      Nidda Niddatal Nidderau Nideggen Niebuell Niedenstein Niederkassel Niedernhall Niederstetten
      Niederstotzingen Nieheim Niemegk Nienburg Nienburg Niesky Nittenau Norden Nordenham Norderney
      Norderstedt Nordhausen Nordhorn Noerdlingen Northeim Nortorf Nossen Nuernberg Nuertingen
      Oberasbach Oberhausen Oberhof Oberkirch Oberkochen Oberlungwitz Obermoschel Obernkirchen
      Ober-Ramstadt Oberriexingen Obertshausen Oberursel Oberviechtach Oberwesel Oberwiesenthal
      Ochsenfurt Ochsenhausen Ochtrup Oderberg Oebisfelde Oederan Oelde Oelsnitz Oer-Erkenschwick
      Oerlinghausen Oestrich-Winkel Offenburg Ohrdruf Oehringen Olbernhau Oldenburg Olfen Olpe
      Olsberg Oppenau Oppenheim Oranienbaum Oranienburg Orlamuende Ornbau Ortenberg Ortrand Oschatz
      Oschersleben Osnabrueck Osterburg Osterburken Osterfeld Osterhofen Osterholz-Scharmbeck
      Osterwieck Ostfildern Osthofen Oestringen Ostritz Otterberg Otterndorf Ottweiler Overath Owen
      Paderborn Papenburg Pappenheim Parchim Parsberg Pasewalk Passau Pattensen Pegau Pegnitz Peine
      Peitz Penig Penkun Penzberg Penzlin Perleberg Petershagen Pfarrkirchen Pforzheim Pfreimd
      Pfullendorf Pfullingen Pfungstadt Philippsburg Pinneberg Pirmasens Pirna Plattling Plaue Plauen
      Plettenberg Pleystein Plochingen Ploen Pocking Pohlheim Polch Potsdam Pottenstein
      Preetz Premnitz Prenzlau Pressath Prettin Pretzsch Prichsenstadt Pritzwalk Pruem Pulheim
      Pulsnitz Putbus Putlitz Puettlingen Quakenbrueck Quedlinburg Querfurt Quickborn Rabenau
      Radeberg Radebeul Radeburg Radegast Radevormwald Raguhn Rahden Rain Ramstein-Miesenbach Ranis
      Ransbach-Baumbach Rastatt Rastenberg Rathenow Ratingen Ratzeburg Rauenberg Raunheim
      Rauschenberg Ravensburg Ravenstein Recklinghausen Rees Regen Regensburg Regis-Breitingen
      Rehau Rehburg-Loccum Rehna Reichelsheim Reinbek Reinfeld Reinheim Remagen Remda-Teichel
      Remscheid Renchen Rendsburg Rennerod Renningen Rerik Rethem Reutlingen Rheda-Wiedenbrueck
      Rhede Rheinau Rheinbach Rheinberg Rheine Rheinfelden Rheinsberg Rheinstetten Rhens Rhinow
      Ribnitz-Damgarten Richtenberg Riedenburg Riedlingen Rieneck Riesa Rietberg Rinteln Rochlitz
      Rockenhausen Rodalben Rodenberg Roedental Roedermark Rodewisch Rodgau Roding Roemhild Romrod
      Ronneburg Ronnenberg Rosenfeld Rosenheim Rosenthal Rostock Rotenburg Roth Roetha Rothenfels
      Roettingen Rottweil Roetz Rudolstadt Ruhla Ruhland Runkel Ruesselsheim Ruethen Saalburg-Ebersdorf
      Saalfeld Saarburg Saarlouis Sachsenhagen Sachsenheim Salzgitter Salzkotten Salzwedel Sandau
      Sandersleben Sangerhausen Sarstedt Sassenberg Sassnitz Sayda Schafstaedt Schalkau Schauenstein
      Scheer Scheibenberg Scheinfeld Schelklingen Schenefeld Schieder-Schwalenberg
      Schifferstadt Schildau Schillingsfuerst Schiltach Schirgiswalde Schkeuditz Schkoelen
      Schleiden Schleiz Schleswig Schlettau Schleusingen Schlieben Schlitz Schlotheim Schluechtern
      Schluesselfeld Schmalkalden Schmallenberg Schmoelln Schnackenburg Schnaittenbach Schneeberg
      Schneverdingen Schongau Schoeningen Schoensee Schoenwald Schopfheim Schoeppenstedt Schorndorf
      Schortens Schotten Schramberg Schraplau Schriesheim Schrobenhausen Schrozberg Schuettorf
      Schwaan Schwabach Schwabmuenchen Schwaigern Schwalbach Schwalmstadt Schwandorf Schwanebeck
      Schwarzenbek Schwarzenborn Schwarzheide Schweich Schweinfurt Schwelm Schwerin Schwerte
      Schwetzingen Sebnitz Seehausen Seehausen Seelow Seelze Seesen Sehnde Seifhennersdorf Selb
      Selbitz Seligenstadt Selm Selters Senden Sendenhorst Senftenberg Siegburg Siegen
      Sigmaringen Simbach Sindelfingen Singen Sinsheim Sinzig Soest Solingen Solms Soltau
      Soemmerda Sondershausen Sonneberg Sonnewalde Sonthofen Sontra Spaichingen Spalt Spangenberg
      Spenge Speyer Spremberg Springe Sprockhoevel Stade Stadtallendorf Stadthagen Stadtilm
      Stadtlengsfeld Stadtlohn Stadtoldendorf Stadtprozelten Stadtroda Stadtsteinach Starnberg
      Staufenberg Stavenhagen Stein Steinach Steinbach Steinfurt Steinheim Stendal
      Sternberg Stockach Stolberg Stolberg Stolpen Storkow Straelen Stralsund Strasburg
      Straubing Strausberg Strehla Stromberg Stutensee Stuttgart Suhl Sulingen Sulzburg
      Syke Tann Tanna Tauberbischofsheim Taucha Taunusstein Tecklenburg Tegernsee Telgte Teltow
      Templin Tengen Tessin Teterow Tettnang Teublitz Teuchern Teupitz Teuschnitz Thale Thannhausen
      Tharandt Themar Thum Tirschenreuth Titisee-Neustadt Tittmoning Todtnau
      Toenisvorst Toenning Torgau Torgelow Tornesch Traben-Trarbach Traunreut Traunstein Trebbin
      Treffurt Trendelburg Treuchtlingen Treuen Treuenbrietzen Tribsees Trier Triptis Trochtelfingen
      Troisdorf Trossingen Trostberg Tuebingen Tuttlingen Twistringen
      Uebigau-Wahrenbrueck Ueckermuende Uelzen Uetersen Uffenheim Uhingen Ulm Ulrichstein Ummerstadt
      Unkel Unna Usedom Usingen Uslar <NAME> Varel Vechta Velbert Velburg
      Velden Vellberg Vellmar Velten Verden Veringenstadt Versmold Viechtach Vienenburg Viernheim
      Viersen Villingen-Schwenningen Vilsbiburg Vilseck Visselhoevede Vlotho Voerde
      Volkach Volkmarsen Vreden Waechtersbach Wadern Waghaeusel Wahlstedt Waiblingen Waibstadt
      Waischenfeld Waldeck Waldenbuch Waldenburg Waldenburg Waldershof Waldheim Waldkappel Waldkirch
      Waldkirchen Waldkraiburg Waldmuenchen Waldsassen Waldshut-Tiengen Walldorf Wallduern Wallenfels
      Walsrode Waltershausen Waltrop Wanfried Wanzleben Warburg Waren Warendorf Warin Warstein Wassenberg
      Wassertruedingen Wasungen Wedel Weener Wegberg Wegeleben Wehr Weida Weikersheim Weilburg Weimar
      Weingarten Weinheim Weinsberg Weinstadt Weismain Weiterstadt Welzheim
      Welzow Wemding Werben Werdau Werder Werdohl Werl Wermelskirchen Wernau
      Werne Werneuchen Wernigerode Wertheim Werther Wertingen Wesel Wesenberg Wesselburen Wesseling Westerburg
      Westerland Westerstede Wetter Wetter Wettin Wetzlar Widdern Wiehe Wiehl Wiesbaden Wiesmoor Wiesensteig
      Wiesloch Wildberg Wildemann Wildenfels Wildeshausen Wilhelmshaven Willebadessen Willich
      Wilsdruff Wilster Wilthen Windischeschenbach Windsbach Winnenden Winsen Winterberg Wipperfuerth Wirges
      Wismar Wissen Witten Wittenberg Wittenberge Wittenburg Wittichenau Wittlich Wittingen Wittmund Witzenhausen
      Woldegk Wolfach Wolfen Wolfenbuettel Wolfhagen Wolframs-Eschenbach Wolfratshausen Wolfsburg Wolfstein
      Wolgast Wolkenstein Wolmirstedt Worms Wriezen Wuelfrath Wunsiedel Wunstorf Wuppertal Wuerselen Wurzbach
      Wuerzburg Wurzen Wustrow Xanten Zahna Zehdenick Zeitz Zell Zella-Mehlis Zerbst
      Zeulenroda-Triebes <NAME> <NAME>)
  end
end
<file_sep>/lib/ffaker/lorem_cn.rb
# encoding: utf-8
module Faker
  # Based on Perl's Text::Lorem
  # Chinese lorem-ipsum generator drawing from a pool of characters,
  # common words and four-character idioms (chengyu).
  module LoremCN
    extend ModuleUtils
    extend self

    # Single random word.
    def word
      WORDS.rand
    end

    # Array of +num+ random words.
    def words(num = 3)
      WORDS.random_pick(num)
    end

    # Run of words concatenated (no separator) and terminated with ','.
    # NOTE(review): the trailing comma is the halfwidth ',' — possibly the
    # fullwidth '，' was intended; confirm before changing.
    def sentence(word_count = 4)
      s = words(word_count + rand(6))
      s = s.join
      "#{s},"
    end

    # Array of sentences. A singleton to_s is attached so that converting
    # the array to a string joins the sentences with spaces and swaps the
    # final character (the last sentence's trailing comma) for '。'.
    def sentences(sentence_count = 3)
      s = (1..sentence_count).map { sentence }
      def s.to_s
        result = self.join(' ')
        result[-1] = '。'
        result
      end
      s
    end

    # One paragraph string (relies on the singleton to_s defined above).
    def paragraph(sentence_count = 3)
      sentences(sentence_count + rand(3)).to_s
    end

    # Array of paragraph strings.
    def paragraphs(paragraph_count = 3)
      (1..paragraph_count).map { paragraph }
    end

    WORDS = k %w(
      瞥 瞅 望 瞄 瞪 盯 观察 凝视 注视 看望 探望 瞻仰 扫视 环视 仰望 俯视 鸟瞰 俯瞰 远望 眺望 了望
      讲 曰 讨论 议论 谈论 交流 交谈 嚷 吼 嚎 啼 鸣 嘶 嘶叫 嚎叫 叫嚷 首 元 甲 子 首先 首屈一指 名列前茅
      吱呀 喀嚓 扑哧 哗啦 沙沙 咕咚 叮当 咕噜 嗖嗖 唧唧喳喳 叽叽喳喳 轰轰隆隆 叮叮当当 叮叮咚咚 哗哗啦啦
      鸟语花香 春暖花开 阳春三月 万物复苏 春风轻拂 烈日当空 暑气逼人 大汗淋漓 挥汗如雨 乌云翻滚
      秋高气爽 五谷丰登 万花凋谢 天高云淡 落叶沙沙 三九严寒 天寒地冻 雪花飞舞 寒冬腊月 千里冰封
      头重脚轻 指手画脚 愁眉苦脸 心明眼亮 目瞪口呆 张口结舌 交头接耳 面黄肌瘦 眼明手快 眼高手低 昂首挺胸
      心灵手巧 摩拳擦掌 摩肩接踵 鼠目寸光 谈虎色变 兔死狐悲 龙马精神 杯弓蛇影 马到成功 与虎谋皮 亡羊补牢
      雄狮猛虎 鹤立鸡群 狗急跳墙 叶公好龙 声名狼籍 狐假虎威 画蛇添足 九牛一毛 鸡犬不宁 一箭双雕 惊弓之鸟
      胆小如鼠 打草惊蛇 鸡飞蛋打 指鹿为马 顺手牵羊 对牛弹琴 鸟语花香 虎背熊腰 杀鸡儆猴 莺歌燕舞 鸦雀无声
      鱼目混珠 鱼龙混杂 龙争虎斗 出生牛犊 望女成凤 望子成龙 狗尾续貂 爱屋及乌 螳臂当车 蛛丝马迹 投鼠忌器
      门口罗雀 管中窥豹 马到成功 龙马精神 马失前蹄 指鹿为马 一马当先 闻鸡起舞 鹤立鸡群 杀鸡取卵 鸡犬不宁
      鸡飞蛋打 小试牛刀 九牛一毛 牛头马面 牛鬼蛇神 牛马不如 一诺千金 一鸣惊人 一马当先 一触即发 一气呵成
      一丝不苟 一言九鼎 一日三秋 一落千丈 一字千金 一本万利 一手遮天 一文不值 一贫如洗 一身是胆 一毛不拔
      二三其德 两面三刀 两肋插刀 两败俱伤 两情相悦 两袖清风 两全其美 三生有幸 三思而行 三令五申 三头六臂
      三更半夜 三顾茅庐 四面楚歌 四面八方 四海为家 四通八达 四平八稳 四分五裂 五大三粗 五光十色 五花八门
      五体投地 五谷丰登 五彩缤纷 五湖四海 六神无主 六根清净 六道轮回 六亲不认 七零八落 七嘴八舌 七高八低
      七窍生烟 七上八下 七折八扣 七拼八凑 八面玲珑 八面威风 八仙过海,各显神通 九霄云外 九牛一毛 九死一生
      九鼎一丝 十指连心 十面埋伏 十字街头 十全十美 十年寒窗 十万火急 十拿九稳 13.带有颜色的词语:桃红柳绿
      万紫千红 青红皂白 黑白分明 绿意盎然 绿树成阴 素车白马 万古长青 漆黑一团 灯红酒绿 面红耳赤 青山绿水
      白纸黑字 青黄不接 金灿灿 黄澄澄 绿莹莹 红彤彤 红艳艳 红通通 白茫茫 黑乎乎 黑压压 鹅黄 乳白 湖蓝 枣红
      雪白 火红 梨黄 孔雀蓝 柠檬黄 象牙白 苹果绿 五彩缤纷 五光十色 万紫千红 绚丽多彩 色彩斑斓 千姿百态 千姿万状
      姿态万千 形态多样 形态不一 不胜枚举 数不胜数 不可胜数 不计其数 成千上万 成群结队 人山人海 排山倒海
      琳琅满目 车水马龙 铺天盖地 满山遍野 变化多端 变幻莫测 千变万化 瞬息万变 一泻千里 一目十行 快如闪电
      移步换影 健步如飞 光阴似箭 日月如梭 星转斗移 流星赶月 慢慢 缓缓 冉冉 徐徐 缓慢 一眨眼 一瞬间 刹那间
      顷刻间 霎时间 时而 去世 已故 牺牲 逝世 与世长辞 为国捐躯 驾崩 苦思冥想 静思默想
      绞尽脑汁 拾金不昧 舍己为人 视死如归 坚贞不屈 不屈不挠 身材魁梧 亭亭玉立 老态龙钟 西装革履 婀娜多姿
      洗耳恭听 昂首阔步 拳打脚踢 交头接耳 左顾右盼 扬眉吐气 怒目而视 火眼金睛 面红耳赤 热泪盈眶
      泪流满面 泪如雨下 泪眼汪汪 泪如泉涌 嚎啕大哭 喜笑颜开 眉开眼笑 哈哈大笑 嫣然一笑 微微一笑
      忐忑不安 惊慌失措 闷闷不乐 激动人心 笑容可掬 微微一笑 开怀大笑 喜出望外 乐不可支
      火冒三丈 怒发冲冠 勃然大怒 怒气冲冲 咬牙切齿 可憎可恶 十分可恶 深恶痛绝 疾恶如仇 恨之入骨
      伤心落泪 欲哭无泪 失声痛哭 泣不成声 潸然泪下 无精打采 顾虑重重 忧愁不安 愁眉苦脸 闷闷不乐
      激动不已 激动人心 百感交集 激动万分 感慨万分 舒舒服服 高枕无忧 无忧无虑 悠然自得 心旷神怡
      迫不及待 急急忙忙 急不可待 操之过急 焦急万分 追悔莫及 悔恨交加 于心不安 深感内疚 羞愧难言
      心灰意冷 大失所望 灰心丧气 毫无希望 黯然神伤 惊弓之鸟 提心吊胆 惊惶失措 惊恐万状 惶惶不安
      深入浅出 借尸还魂 买空卖空 内忧外患 前呼后拥 异口同声 声东击西 三长两短 凶多吉少 不进则退 大同小异 大公无私
      承上启下 天长日久:天崩地裂 天老地荒 理直气壮 云开日出 长短不同 黑白相间 表里如一 喜怒哀乐 安危冷暖 生死存亡
      茫雾似轻 枫叶似火 骄阳似火 秋月似钩 日月如梭:雪花如席 雪飘如絮 细雨如烟 星月如钩 碧空如洗 暴雨如注 吉祥如意
      视死如归 挥金如土 疾走如飞 一见如故 和好如初 心急如焚 早出晚归 眉清目秀 月圆花好 李白桃红 心直口快
      水落石出 水滴石穿 月白风清 字正腔圆 口蜜腹剑 雨打风吹 虎啸龙吟 龙争虎斗 走马观花:废寝忘食 张灯结彩 招兵买马
      争分夺秒 坐井观天 思前顾后 投桃报李 行云流水 乘热打铁 生离死别 舍近求远 返老还童 载歌载舞 难舍难分
      能屈能伸 蹑手蹑脚 有始有终 若即若离 古色古香 无影无踪 无牵无挂 无边无际 无情无义 无忧无虑 无缘无故 无穷无尽
      不干不净 不清不楚 不明不白 不闻不问 不伦不类 不吵不闹 不理不睬 自言自语 自说自话 自吹自擂 自私自利 自高自大
      自暴自弃 自给自足 时隐时现 时高时低 时明时暗 时上时下 半信半疑 半明半昧 半梦半醒 半推半就 神采奕奕 星光熠熠
      小心翼翼 炊烟袅袅 白雪皑皑 烈日灼灼 赤日炎炎 绿浪滚滚 波浪滚滚 云浪滚滚 麦浪滚滚 热浪滚滚 江水滚滚 车轮滚滚
      果实累累 秋实累累 硕果累累 果实累累 尸骨累累 弹孔累累 白骨累累 生气勃勃 生机勃勃 生气勃勃 朝气勃勃 兴致勃勃
      雄心勃勃 千军万马 千言万语 千变万化 千山万水 千秋万代 千丝万缕 千奇百怪:千锤百炼 千方百计 千疮百孔 千姿百态
      前因后果 前呼后拥 前思后想 前赴后继 前仰后合 前倨后恭 天经地义 天罗地网 天昏地暗 天诛地灭 天南地北 天荒地老
      有眼无珠 有气无力 有始无终 有备无患 有恃无恐 有勇无谋 有名无实 东倒西歪 东张西望 东奔西走 东拉西扯 东拼西凑
      东邻西舍 东鳞西爪 迫在眉睫 千钧一发 燃眉之急 十万火急 震耳欲聋 惊天动地 震天动地 响彻云霄 众志成城 齐心协力
      同心同德 万众一心 废寝忘食 刻苦钻研 争分夺秒 精益求精 专心致志 全神贯注 聚精会神 一心一意 议论纷纷 各抒己见
      七嘴八舌 争论不休 车水马龙 人山人海 人声鼎沸 摩肩接踵 生龙活虎 人流如潮 振奋人心 洁白无瑕 白璧无瑕 冰清玉洁
      洁白如玉 言而有信 一言九鼎 一诺千金 信守诺言 毅然决然 当机立断 雷厉风行 前所未有 空前绝后 绝无仅有 史无前例
      犹豫不决 出尔反尔 优柔寡断 狐疑不决 浩浩荡荡 气势磅礴 气势恢弘 气势非凡 枝繁叶茂 绿树成阴 绿阴如盖
      闻名于世 举世闻名 闻名天下 大名鼎鼎 手足无措 手忙脚乱 手舞足蹈 足下生辉 赞不绝口 赞叹不已 连连称赞 叹为观止
      慷慨激昂 壮志凌云 铿锵有力 语气坚定 汹涌澎湃 波涛汹涌 白浪滔天 惊涛骇浪 风平浪静 水平如镜 波光粼粼 碧波荡漾
      旭日东升 绵绵细雨 桃红柳绿 艳阳高照 山河壮丽 高山峻岭 危峰兀立 连绵不断 飞流直下 一泻千里 万丈瀑布 水帘悬挂
      雄鸡报晓 红日东升 朝霞辉映 金光万道 中午时分 丽日当空 艳阳高照 当午日明 暮色苍茫 夕阳西下 天色模糊 晚风习习
      华灯初上 月明星稀 灯火通明 漫漫长夜 万家灯火 夜幕降临 狂风暴雨 倾盆大雨 瓢泼大雨 暴风骤雨 秋雨绵绵 绵绵细雨
      细雨如烟 淅淅沥沥 暴雨如注 风和日丽 天高云淡 万里无云 秋高气爽 纷纷扬扬 粉妆玉砌 银妆素裹 白雪皑皑 冰雪消融
      冰天雪地 白雪皑皑 雪花飞舞 大雪封门 雪中送炭 和风拂面 风狂雨猛 秋风凉爽 北风呼啸 轻风徐徐 令人发指 丧失人性
    )
  end
end
<file_sep>/test/test_module_utils.rb
require 'helper'
class TestModuleUtils < Test::Unit::TestCase
# NOTE(review): spec-style `it ... do` is not plain Test::Unit syntax --
# presumably a spec layer (e.g. test/spec or shoulda) is pulled in via
# 'helper'; confirm, otherwise this example is never executed.
it "provides a k method for generating constant arrays" do
obj = Object.new
obj.extend Faker::ModuleUtils
# k must return a frozen array whose elements are frozen too.
result = obj.k ["1","2","3"]
assert result.frozen?
result.each {|e| assert e.frozen? }
end
end
<file_sep>/lib/ffaker/utils/module_utils.rb
require 'ffaker/utils/array_utils'
module Faker
  # Mixin shared by the data modules. Provides a terse helper for
  # declaring their constant word lists.
  module ModuleUtils
    # Wrap +list+ via Faker::ArrayUtils.const_array so the collection and
    # its elements can be used as immutable constants.
    def k(list)
      Faker::ArrayUtils.const_array(list)
    end
  end
end
<file_sep>/lib/ffaker/name_cn.rb
# encoding: utf-8
module Faker
module NameCN
extend ModuleUtils
extend self
# Full Chinese name: family name first is *not* used here -- this
# variant returns given name followed by family name.
def name
  first_name + last_name
end
# Random given name drawn from FIRST_NAMES.
def first_name
FIRST_NAMES.rand
end
# Random family name drawn from LAST_NAMES.
def last_name
LAST_NAMES.rand
end
# Full Chinese name in the conventional order: family name, then
# given name.
def last_first
  last_name + first_name
end
LAST_NAMES = k %w(赵 钱 孙 李 周 吴 郑 王 冯 陈 褚 卫 蒋 沈 韩 杨 朱 秦 尤
许 何 吕 施 张 孔 曹 严 华 金 魏 陶 姜 戚 谢 邹 喻 柏 水 窦 章 云 苏 潘 葛 奚
范 彭 郎 鲁 韦 昌 马 苗 凤 花 方 俞 任 袁 柳 酆 鲍 史 唐 费 廉 岑 薛 雷 贺 倪
汤 滕 殷 罗 毕 郝 邬 安 常 乐 于 时 傅 皮 卞 齐 康 伍 余 元 卜 顾 孟 平 黄
和 穆 萧 尹 姚 邵 湛 汪 祁 毛 禹 狄 米 贝 明 臧 计 伏 成 戴 谈 宋 茅 庞 熊 纪
舒 屈 项 祝 董 梁 杜 阮 蓝 闵 席 季 麻 强 贾 路 娄 危 江 童 颜 郭 梅 盛 林 刁
锺 徐 邱 骆 高 夏 蔡 田 樊 胡 凌 霍 虞 万 支 柯 昝 管 卢 莫 经 房 裘 缪 干 解
应 宗 丁 宣 贲 邓 郁 单 杭 洪 包 诸 左 石 崔 吉 钮 龚
程 嵇 邢 滑 裴 陆 荣 翁 荀 羊 於 惠 甄 麴 家 封 芮 羿
储 靳 汲 邴 糜 松 井 段 富 巫 乌 焦 巴 弓 牧 隗 山 谷
车 侯 宓 蓬 全 郗 班 仰 秋 仲 伊 宫 宁 仇 栾 暴 甘 钭
历 戎 祖 武 符 刘 景 詹 束 龙 叶 幸 司 韶 郜 黎 蓟 溥
印 宿 白 怀 蒲 邰 从 鄂 索 咸 籍 赖 卓 蔺 屠 蒙 池 乔
阳 郁 胥 能 苍 双 闻 莘 党 翟 谭 贡 劳 逄 姬 申 扶 堵
冉 宰 郦 雍 却 璩 桑 桂 濮 牛 寿 通 边 扈 燕 冀 僪 浦
尚 农 温 别 庄 晏 柴 瞿 阎 充 慕 连 茹 习 宦 艾 鱼 容
向 古 易 慎 戈 廖 庾 终 暨 居 衡 步 都 耿 满 弘 匡 国
文 寇 广 禄 阙 东 欧 殳 沃 利 蔚 越 夔 隆 师 巩 厍 聂
晁 勾 敖 融 冷 訾 辛 阚 那 简 饶 空 曾 毋 沙 乜 养 鞠
须 丰 巢 关 蒯 相 查 后 荆 红 游 竺 权 逮 盍 益 桓 公
万俟 司马 上官 欧阳 夏侯 诸葛 闻人 东方 赫连 皇甫 尉迟
公羊 澹台 公冶 宗政 濮阳 淳于 单于 太叔 申屠 公孙 仲孙
轩辕 令狐 钟离 宇文 长孙 慕容 司徒 司空 召 有 舜
叶赫那拉 丛 岳 寸 贰 皇 侨 彤 竭 端 赫 实 甫 集 象 翠
狂 辟 典 良 函 芒 苦 其 京 中 夕 之 章佳 那拉 冠 宾 香
果 依尔根觉罗 依尔觉罗 萨嘛喇 赫舍里 额尔德特 萨克达
钮祜禄 他塔喇 喜塔腊 讷殷富察 叶赫那兰 库雅喇 瓜尔佳
舒穆禄 爱新觉罗 索绰络 纳喇 乌雅 范姜 碧鲁 张廖 张简
图门 太史 公叔 乌孙 完颜 马佳 佟佳 富察 费莫 蹇 称 诺
来 多 繁 戊 朴 回 毓 税 荤 靖 绪 愈 硕 牢 买 但 巧 枚
撒 泰 秘 亥 绍 以 壬 森 斋 释 奕 姒 朋 求 羽 用 占 真
穰 翦 闾 漆 贵 代 贯 旁 崇 栋 告 休 褒 谏 锐 皋 闳 在
歧 禾 示 是 委 钊 频 嬴 呼 大 威 昂 律 冒 保 系 抄 定
化 莱 校 么 抗 祢 綦 悟 宏 功 庚 务 敏 捷 拱 兆 丑 丙
畅 苟 随 类 卯 俟 友 答 乙 允 甲 留 尾 佼 玄 乘 裔 延
植 环 矫 赛 昔 侍 度 旷 遇 偶 前 由 咎 塞 敛 受 泷 袭
衅 叔 圣 御 夫 仆 镇 藩 邸 府 掌 首 员 焉 戏 可 智 尔
凭 悉 进 笃 厚 仁 业 肇 资 合 仍 九 衷 哀 刑 俎 仵 圭
夷 徭 蛮 汗 孛 乾 帖 罕 洛 淦 洋 邶 郸 郯 邗 邛 剑 虢
隋 蒿 茆 菅 苌 树 桐 锁 钟 机 盘 铎 斛 玉 线 针 箕 庹
绳 磨 蒉 瓮 弭 刀 疏 牵 浑 恽 势 世 仝 同 蚁 止 戢 睢
冼 种 涂 肖 己 泣 潜 卷 脱 谬 蹉 赧 浮 顿 说 次 错 念
夙 斯 完 丹 表 聊 源 姓 吾 寻 展 出 不 户 闭 才 无 书
学 愚 本 性 雪 霜 烟 寒 少 字 桥 板 斐 独 千 诗 嘉 扬
善 揭 祈 析 赤 紫 青 柔 刚 奇 拜 佛 陀 弥 阿 素 长 僧
隐 仙 隽 宇 祭 酒 淡 塔 琦 闪 始 星 南 天 接 波 碧 速
禚 腾 潮 镜 似 澄 潭 謇 纵 渠 奈 风 春 濯 沐 茂 英 兰
檀 藤 枝 检 生 折 登 驹 骑 貊 虎 肥 鹿 雀 野 禽 飞 节
宜 鲜 粟 栗 豆 帛 官 布 衣 藏 宝 钞 银 门 盈 庆 喜 及
普 建 营 巨 望 希 道 载 声 漫 犁 力 贸 勤 革 改 兴 亓
睦 修 信 闽 北 守 坚 勇 汉 练 尉 士 旅 五 令 将 旗 军
行 奉 敬 恭 仪 母 堂 丘 义 礼 慈 孝 理 伦 卿 问 永 辉
位 让 尧 依 犹 介 承 市 所 苑 杞 剧 第 零 谌 招 续 达
忻 六 鄞 战 迟 候 宛 励 粘 萨 邝 覃 辜 初 楼 城 区 局
台 原 考 妫 纳 泉 老 清 德 卑 过 麦 曲 竹 百 福 言
第五 佟 爱 年 笪 谯 哈 墨 南宫 赏 伯 佴 佘 牟 商 西门
东门 左丘 梁丘 琴 后 况 亢 缑 帅 微生 羊舌 海 归 呼延
南门 东郭 百里 钦 鄢 汝 法 闫 楚 晋 谷梁 宰父 夹谷 拓跋
壤驷 乐正 漆雕 公西 巫马 端木 颛孙 子车 督 仉 司寇 亓官
鲜于 锺离 盖 逯 库 郏 逢 阴 薄 厉 稽 闾丘 公良 段干 开
光 操 瑞 眭 泥 运 摩 伟 铁 迮)
FIRST_NAMES = k %w(宝昌 佩均 之启 书宣 光弘 诚智 喜佩 欢莹 瑜念
昱行 雨钰 百鑫 睿妤 贤俐 枝淑 丰恬 宜玫 贵孝 汝勇 卉齐 宣华 康伟
季虹 为博 安慧 亮绿 军彬 容城 世帆 金宏 重元 坚夫 思凯 恆琳 妤书
琪幸 旻幸 松祯 宣源 淑峰 诚妃 夙仪 倩蓉 姿伶 纬妹 年琴 康贞 巧吉
其芸 铭桂 忠江 幼孝 雨乐 典昀 兴绮 志中 玟惟
协定 忠孝 洁安 淑定 雯东 仪年 心中 琇修 丞达
冰雅 昭仲 孟鑫 雅亚 孝郁 其峰 薇芷 智和 兆乔
翰星 颖意 亚新 纯苹 信念 以瑞 邦钰 添宜 思乔
天惟 昌淳 卉廷 豪华 江吟 柏豪 羽佳 孝哲 韵麟
儒年 丞坚 骏欣 琦全 柏宇 长念 汝名 治玫 嘉旺
枝易 念一 民乐 文雄 利英 俊苹 俞嘉 雯吟 琇旭
玲嘉 白富 伯名 璇孜 昀昆 秉安 秀桂 鸿迪 念琇
致年 修纯 映郁 忠妤 恒鑫 皓俊 隆彬 杰苓 俊雄
幼嘉 子善 尚湖 盈钰 清妤 佑华 恒汉 白哲 协纬
茵绍 吉龙 翰欣 定杰 少原 亚萱 舜欣 天亨 青茜
一乔 恬齐 上映 妍韦 利正 展冰 明荣 然士 玉茜
夫顺 茂全 明梦 清芸 如发 皓桦 宸鑫 泓豪 柏治
吉行 和嘉 仁行 建新 行信 昌良 珍舜 江钰 慈白
佐民 如原 义如 恒哲 奕行 玮瑞 柔来 柏茹 辛绮
江原 伟来 坤萱 瑞枝 孜汉 爱昌 盛纯 沛蓁 洁智
和宸 筠芬 克刚 明桦 琦美 正哲 萱娟 韦祥 必隆
如航 百隆 幸宣 祯行 丞然 尧旺 倩幸 琦岳 皓娥
胤安 月纶 均湖 人月 昇孝 旻夫 睿皓 阳琬 平恩
劭弘 尹玉 宸宁 意慈 舜音 阿琬 东男 爱伦 和行
以其 志俊 廷江 姿君 弘善 中芳 成亦 桓政 信斌
迪琦 欣怡 劭青 钰甫 皓坚 平诚 介玟 礼宇 侑青
佩妏 诚铭 方韦 珮莲 初帆 奕峰 延欣 郁嘉 梦盈
嘉雄 辰洋 克白 南君 苓妹 珍士 娇正 法甫 孜定
长桦 其坤 俐达 妏妹 智君 添弘 原芳 台谷 人轩
洋杰 如冰 屏政 湖德 能扬 玟珍 郁希 一意 成淑
昆坚 盛吟 初辛 柔瑶 靖芸 仲欣 钰昌 德州 伟源
昱治 雅弘 伊妹 安雪 仕月 群紫 登慧 璇光 恩君
珮侑 亨诚 士诚 毓映 廷吉 政怡 安紫 姵季 立仪
之一 卉裕 慈伦 巧伟 致孜 刚妹 妏珍 珮能 昀能
典靖 政嘉 忠洁 纹智 江汉 侑萱 石琪 瑜人 禾宇
蕙臻 芸安 伶秀 珍岳 仪如 琬利 采勇 少珊 恬仁
儒泉 英刚 克廷 妏南 典夫 之名 茂萍 春定 民政
新妤 百乐 雅迪 宣萍 郁豪 智怡 珮梦 钰明 柔正
文岳 颖蓁 凡仪 正汝 林谦 美爱 启爱 佩 纯利 绍铭
茜彦 亦南 辰博 映凯 怡绿 然云 谷香 奇菱 宇绍
延霞 茵盛 为成 可人 亨琦 明惟 紫玲 合玲 薇绿
永坤 盈俐 玫昇 玲伶 小忠 仲弘 铭治 水念 郁俐
禾刚 思梅 以富 爱恬 文钰 元城 振君 铭源 克花
军恬 博吟 慧幸 方星 育名 右治 妙桦 纯珍 泰宏
振香 启卿 荣恭 冠迪 长霞 燕谦 青亨 江莲 柏秀
乐岑 富祥 惠孜 士芸 卉乔 绍仪 泓宸 毓哲 弘莹
淳原 星隆 书忠 吉云 然杰 昇来 立梅 晋瑄 纹欣
希妹 芳琦 以彬 华松 中心 卉鑫 翰伶 桓原 展霖
玲坚 吟美 隆绮 容辰 品修 之皓 苓海 采州 财珍
白豪 孜雄 萱俐 财如 信乔 又季 杰瑶 璇城 思妮
如士 秀德 雪其 芸紫 乐齐 玲宇 郁桓 宥昆 宣任
士夫 隆治 廷绍 玉善 琪宜 侑娟 绮名 昇屏 芸舜
英妤 梦倩 伸君 安纯 音嘉 玄君 志生 山发 皇强
坚舜 香芝 火书 致盈 春俐 玲俊 修男 云淳 幼隆
雯佳 旻琪 延法 庭山 于瑶 书竹 维莹 屏宜
瑶任 梅人 哲皓 顺孝 重良 成珮 俞萍 禾仁 竹诚
家博 新苹 智人 山甫 芝隆 贤志 青臻 梅湖 尚生
元华 威芸 宥辉 茹鑫 卉凌 靖辉 妮侑 亭仁 音皓
延龙 上来 宣岑 书琬 鸿郁 洁昇 姵育 为菱 名新
骏书 能琪 白铭 凤幸 勇绿 良源 佑淑 骏宸 丹亦
宛泉 新良 然宜 政绮 宸瑜 伯坤 妙韦 春琪 真旺
柔冰 信喜 辛铭 育诚 木诚 竹睿 竹士 真念 佐谕
星皓 礼恭 新宇 佑霞 杰夫 人蓉 思维 君玉 昆凡
小琴 俐军 佩如 友玟 其昆 芳峰 纬毓 伟乐 左洋
宛芳 萱莲 杰香 妏恭 屏帆 行睿 博孜 勇倩 振娟
迪齐 香心 正勇 奕枝 宛意 念然 夫桦 喜月 冠任
力侑 茂易 中沛 政月 伊华 克宏 裕琳 登云 昱侑
思男 玄雪 吟蓁 韵慈 旺智 丰如 诚裕 柏湖 忠宁
惠汉 盈宇 彦昇 乐祥 阳琦 昆轩 雨钰 勇妃 舜瑄
喜哲 旭琳 旻纬 杰迪 立菱 利贵 善真 韦瑞 安惠
雪博 东人 力莹 枝雨 宏禾 士昆 宁均 廷霖 虹智
城坚 智佩 奇雪 珍心 上龙 阳青 卉妃 岑良 湖卿
新郁 乔孝 俐舜 亮凡 昕茂 沛舜 子妤 皓吉 沛汉
洋亚 白轩 雨纶 伶雄 行文 珮康 致芳 兆萱 安瑄
甫谦 姵富 千东 贵菁 妍茜 恬妮 俊伶 芷睿 瑜名
瑜峰 能纬 纯瑄 姵爱 妤睿 琦辛 松香 琬苹 妤治
智泰 奇莹 凯蓉 中惟 燕妤 中宸 年仁 正孝 城桦
义俐 品男 薇坚 昆阳 品群 育莹 亭毓 维紫 宣迪
易心 孝羽 妏虹 虹亨 骏哲 淑信 仰钰 铭玟 荣月
伸民 初贵 合雪 宜祯 典然 致信 幼沛 香智 立岳
伯尧 和容 承茜 羽维 玉迪 仪嘉 易雯 鸿蓁 季蓁
承夫 重生 劭峰 玟俊 仕宜 合纯 燕忠 立旺 兴伦
昭雨 丹仪 必政 汉郁 孝瑞 念辛 岑汝 欣蓉 纬刚 秋
容易 信任 夫 光婷 初志 玄萍 以安 水娟 林茜 靖映
乐松 坤喜 恬扬 芸瑄 群惟 桓坤 凡莹 芷雅 仪文
芃湖 月岑 雪海 维茵 铭旺 慧中 荣桂 冠宁 羽芸
人茂 夫东 妮念 紫钰 中伟 左慧 建士 民荣 希忠
百芷 子均 妏湖 奇雅 礼舜 彦元 新鑫 尚仲 郁齐
枝臻 添佳 辰成 晋珊 亨茵 兰宣 若绮 禾东 思睿
桂伯 春亨 夙嘉 光容 家绿 勇昆 惠倩 祯香 希志
奇东 灿郁 月伶 一汝 依友 夙忠 雨谦 政威 尹隆
廷新 裕吉 长希 弘隆 郁音 俐孝 惟音 宥毓 仲纬
扬皓 方桦 添臻 舜羽 孝绿 南雯 劭信 儒映 年伶
建意 妮峰 桓军 颖亚 苓民 仲花 廷青 富梅 幸轩
怡珠 振蓁 世真 承春 贵蓉 南莹 洁维 群婷 百修
慈阳 昭梦 惠源 雯隆 致士 美弘 光昇 瑶凯 杰旺
毓昌 绿恬 富纯 佳维 千中 彦芷 茹山 秉伟 康纶
映意 鑫祥 杰德 修美 宛雅 新乐 梅芝 德怡 晋麟
夙其 美良 函良 芃勇 琇航 行妮 纯祯 玉利 岳易
邦贞 巧群 民州 志仲 贵夫 柏舜 雅伦 惟阳 乃仁
又政 香士 胤帆 勇年 希喜 彦行 天茹 右城 屏元
枝琳 郁秀 云姗 姵书 子吉 亨芸 云沛 育汉 尹全
成花 伟东 夫海 云隆 芝桦 函志 亚瑞 姿人 于荣
善政 胤洋 春念 勇宸 洁淑 坤秋 星宸 政铭 雅孜
合云 昀发 燕宸 意仲 燕靖 铭雯 皇芳 惠人 中芸
莹芳 希杰 任洋 廷婷 卉冰 安一 莹达 孟辰 迪光
姵善 奇婷 韵昇 百达 睿义 扬桦 冰慈 昀诚 茂斌
孜容 星映 祥纯 映宣 又雄 慈玫 原霞 振玫 仕康
慈紫 劭桓 意军 毓维 山名 方纬 璇珊 星桦 鑫洋
苓乔 添香 屏蓁 伦君 永贵)
end
end
<file_sep>/lib/ffaker/html_ipsum.rb
module Faker
# Loosely based on http://html-ipsum.com/
# Requires Faker::Lorem module
module HTMLIpsum
extend ModuleUtils
extend self
# HTML anchor with a lorem fragment as href, and independently
# generated lorem phrases as title attribute and link text.
#
# Fix: use +capitalize+ instead of +capitalize!+ -- the bang form
# returns nil when the string is already capitalized, which would
# render an empty title/text.
def a(word_count = 2)
  "<a href=\"##{word}\" title=\"#{words(word_count).capitalize}\">#{words(word_count).capitalize}</a>"
end
# Wrap lorem text in a <p> tag.
#
# options:
#   :fancy          - body comes from fancy_string (mixed inline markup)
#   :include_breaks - use paragraphs (plural) / pass breaks through
def p(count = 3, options = {})
  opts = {:fancy => false, :include_breaks => false}.merge(options)
  body = if opts[:fancy]
    fancy_string(count, opts[:include_breaks])
  else
    opts[:include_breaks] ? send(:paragraphs, count) : send(:paragraph, count)
  end
  "<p>#{body}</p>"
end
# Definition list with +definitions+ <dt>/<dd> pairs.
#
# Fix: +capitalize+ instead of +capitalize!+ (the bang form returns nil
# when the word is already capitalized, blanking the <dt>).
def dl(definitions = 2)
  s = "<dl>"
  definitions.times do
    s << "<dt>#{words(1).capitalize}</dt><dd>#{paragraph 2}</dd>"
  end
  s << "</dl>"
end
# Unordered list of +items+ short entries (two-word sentences).
def ul_short(items = 3)
  entries = (1..items).map { "<li>#{sentence 2}</li>" }
  "<ul>#{entries.join}</ul>"
end
# Unordered list of +items+ long entries (two-sentence paragraphs).
def ul_long(items = 3)
  entries = (1..items).map { "<li>#{paragraph 2}</li>" }
  "<ul>#{entries.join}</ul>"
end
# Ordered list of +items+ short entries (two-word sentences).
def ol_short(items = 3)
  entries = (1..items).map { "<li>#{sentence 2}</li>" }
  "<ol>#{entries.join}</ol>"
end
# Ordered list of +items+ long entries (two-sentence paragraphs).
def ol_long(items = 3)
  entries = (1..items).map { "<li>#{paragraph 2}</li>" }
  "<ol>#{entries.join}</ol>"
end
# Unordered list of +items+ anchor tags.
def ul_links(items = 3)
  entries = (1..items).map { "<li>#{a 1}</li>" }
  "<ul>#{entries.join}</ul>"
end
# HTML table with a four-column header and +rows+ body rows; the last
# column of each row is a link.
#
# Fix: +capitalize+ instead of +capitalize!+ (the bang form returns nil
# when the word is already capitalized, blanking the cell).
def table(rows = 3)
s = "<table>
<thead>
<tr>
<th>#{word.capitalize}</th>
<th>#{word.capitalize}</th>
<th>#{word.capitalize}</th>
<th>#{word.capitalize}</th>
</tr>
</thead>
<tbody>"
rows.times do
s << "<tr>
<td>#{words(1).capitalize}</td>
<td>#{words(1).capitalize}</td>
<td>#{words(1).capitalize}</td>
<td>#{a}</td>
</tr>"
end
s << "</tbody>
</table>"
end
# A full page-like fragment: heading, fancy paragraphs, a table, an
# ordered and an unordered list, a blockquote and a code sample.
# Each rand(4) may be 0, so individual sections can be empty.
#
# Fix: +capitalize+ instead of +capitalize!+ (the bang form returns nil
# when the string is already capitalized, blanking the heading).
def body
s = "<h1>#{words(2).capitalize}</h1>"
rand(4).times do
s << "<p>#{fancy_string}</p>"
end
s << table(rand(4))
s << "<h2>#{words(2).capitalize}</h2>
<ol>"
rand(4).times do
s << "<li>#{paragraph 1}</li>"
end
s << "</ol>
<blockquote><p>#{paragraphs 3}</p></blockquote>
<h3>#{words(2).capitalize}</h3>
<ul>"
rand(4).times do
s << "<li>#{paragraph 1}</li>"
end
s << "</ul>
<pre><code>
##{word} h1 a {
display: block;
width: 300px;
height: 80px;
}
</code></pre>"
end
# String of +count+ fragments randomly picked from a pool that mixes
# inline markup (<strong>, <em>, <code>, a link) with plain lorem
# paragraphs, joined by "<br>" or a space.
#
# Fixes: the local previously shadowed the +a+ helper method -- renamed
# so the "#{a 2}" call below is unambiguously the anchor helper; and
# +capitalize+ replaces +capitalize!+, whose nil return would blank the
# <strong> fragment.
def fancy_string(count = 3, include_breaks = false)
  sep = include_breaks ? "<br>" : " "
  pool = k([
    "<strong>#{words(2).capitalize}</strong>.",
    "<em>#{paragraph}</em>",
    "<code>#{words 2}</code>",
    "#{a 2}"
  ] + Faker::Lorem::paragraphs(count))
  pool.random_pick(count).join(sep)
end
private
# Thin delegates to Faker::Lorem so the markup builders above can use
# short names. The plural variants additionally join the returned
# arrays into a single string.
def word
Faker::Lorem::word
end
def words(word_count = 3)
Faker::Lorem::words(word_count).join(' ')
end
def sentence(word_count = 3)
Faker::Lorem::sentence(word_count)
end
def sentences(sentence_count = 3)
Faker::Lorem::sentences(sentence_count).join(' ')
end
def paragraph(sentence_count = 3)
Faker::Lorem::paragraph(sentence_count)
end
def paragraphs(paragraph_count = 3)
Faker::Lorem::paragraphs(paragraph_count).join('<br>')
end
end
end
<file_sep>/lib/ffaker/address.rb
module Faker
module Address
extend ModuleUtils
extend self
# Random ZIP code, '#####' or '#####-####' with the '#' placeholders
# filled in by Faker.numerify.
def zip_code
Faker.numerify ZIP_FORMATS.rand
end
# Random full US state name.
def us_state
STATE.rand
end
# Random two-letter US state/territory abbreviation.
def us_state_abbr
STATE_ABBR.rand
end
# Random city prefix: a compass direction, "New", "Lake" or "Port".
def city_prefix
CITY_PREFIXES.rand
end
# Random city suffix ("town", "ville", ...).
def city_suffix
CITY_SUFFIXES.rand
end
# Random US-style city name composed from prefixes, person names and
# suffixes; one of four equally likely patterns.
def city
  case rand(4)
  when 0 then "#{city_prefix} #{Name.first_name}#{city_suffix}"
  when 1 then "#{city_prefix} #{Name.first_name}"
  when 2 then "#{Name.first_name}#{city_suffix}"
  when 3 then "#{Name.last_name}#{city_suffix}"
  end
end
# Random street type ("Avenue", "Court", ...).
def street_suffix
STREET_SUFFIX.rand
end
# Random street name: a person name (last or first, 50/50) followed by
# a street type.
def street_name
  person = rand(2).zero? ? Name.last_name : Name.first_name
  "#{person} #{street_suffix}"
end
# Random street address: a 3-5 digit house number (rand(3) extra '#'
# placeholders plus '###', filled in by Faker.numerify) and a street
# name; optionally followed by a secondary part ("Apt. ...").
def street_address(include_secondary = false)
str = ( "#" * rand(3) ) << ('### %s' % street_name)
str << ' ' << secondary_address if include_secondary
Faker.numerify(str)
end
# Random secondary address part, e.g. "Apt. 123" or "Suite 456".
def secondary_address
Faker.numerify(SEC_ADDR.rand)
end
# UK Variants
# Random UK county name.
def uk_county
UK_COUNTY.rand
end
# Random UK constituent country.
def uk_country
UK_COUNTRY.rand
end
# Random UK-style postcode; letter/digit placeholders are filled in by
# Faker.bothify, then upcased.
def uk_postcode
Faker.bothify(UK_POSTCODE.rand).upcase
end
# Random neighborhood name.
def neighborhood
NEIGHBORHOOD.rand
end
ZIP_FORMATS = k ['#####', '#####-####']
STATE = k ['Alabama', 'Alaska', 'Arizona', 'Arkansas',
'California', 'Colorado', 'Connecticut', 'Delaware', 'Florida',
'Georgia', 'Hawaii', 'Idaho', 'Illinois', 'Indiana', 'Iowa', 'Kansas',
'Kentucky', 'Louisiana', 'Maine', 'Maryland', 'Massachusetts',
'Michigan', 'Minnesota', 'Mississippi', 'Missouri', 'Montana',
'Nebraska', 'Nevada', 'New Hampshire', 'New Jersey', 'New Mexico', 'New York',
'North Carolina', 'North Dakota', 'Ohio', 'Oklahoma', 'Oregon',
'Pennsylvania', 'Rhode Island', 'South Carolina', 'South Dakota',
'Tennessee', 'Texas', 'Utah', 'Vermont', 'Virginia', 'Washington',
'West Virginia', 'Wisconsin', 'Wyoming']
STATE_ABBR = k %w(AL AK AS AZ AR CA CO CT DE DC FM FL GA GU HI ID IL IN IA
KS KY LA ME MH MD MA MI MN MS MO MT NE NV NH NJ NM NY NC
ND MP OH OK OR PW PA PR RI SC SD TN TX UT VT VI VA WA WV
WI WY AE AA AP)
COMPASS_DIRECTIONS = k %w(North East West South)
CITY_PREFIXES = k(COMPASS_DIRECTIONS + %w(New Lake Port))
CITY_SUFFIXES = k %w(town ton land ville berg burgh borough bury view port
mouth stad furt chester mouth fort haven side shire)
STREET_SUFFIX = k %w(Alley Avenue Branch Bridge Brook Brooks
Burg Burgs Bypass Camp Canyon Cape Causeway Center Centers Circle Circles
Cliff Cliffs Club Common Corner Corners Course Court Courts Cove Coves
Creek Crescent Crest Crossing Crossroad Curve Dale Dam Divide Drive Drives
Estate Estates Expressway Extension Extensions Fall Falls Ferry
Field Fields Flat Flats Ford Fords Forest Forge Forges Fork Forks Fort
Freeway Garden Gardens Gateway Glen Glens Green Greens Grove Groves Harbor
Harbors Haven Heights Highway Hill Hills Hollow Inlet Island
Islands Isle Junction Junctions Key Keys Knoll Knolls Lake
Lakes Land Landing Lane Light Lights Loaf Lock Locks Lodge Loop
Mall Manor Manors Meadow Meadows Mews Mill Mills Mission Motorway
Mount Mountain Mountains Neck Orchard Oval Overpass Park
Parks Parkway Parkways Pass Passage Path Pike Pine Pines Place Plain Plains
Plaza Point Points Port Ports Prairie
Radial Ramp Ranch Rapid Rapids Rest Ridge Ridges River Road Roads
Route Row Rue Run Shoal Shoals Shore Shores Skyway Spring Springs
Spur Spurs Square Squares Station Stravenue
Stream Street Streets Summit Terrace
Throughway Trace Track Trafficway Trail Tunnel
Turnpike Underpass Union Unions Valley Valleys Via Viaduct View Views
Village Villages Ville Vista Walk Walks Wall Way Ways Well Wells)
SEC_ADDR = k ['Apt. ###', 'Suite ###']
UK_COUNTY = k ['Avon', 'Bedfordshire', 'Berkshire', 'Borders',
'Buckinghamshire', 'Cambridgeshire', 'Central', 'Cheshire', 'Cleveland',
'Clwyd', 'Cornwall', 'County Antrim', 'County Armagh', 'County Down',
'County Fermanagh', 'County Londonderry', 'County Tyrone', 'Cumbria',
'Derbyshire', 'Devon', 'Dorset', 'Dumfries and Galloway', 'Durham',
'Dyfed', 'East Sussex', 'Essex', 'Fife', 'Gloucestershire', 'Grampian',
'Greater Manchester', 'Gwent', 'Gwynedd County', 'Hampshire',
'Herefordshire', 'Hertfordshire', 'Highlands and Islands', 'Humberside',
'Isle of Wight', 'Kent', 'Lancashire', 'Leicestershire', 'Lincolnshire',
'Lothian', 'Merseyside', 'Mid Glamorgan', 'Norfolk', 'North Yorkshire',
'Northamptonshire', 'Northumberland', 'Nottinghamshire', 'Oxfordshire',
'Powys', 'Rutland', 'Shropshire', 'Somerset', 'South Glamorgan',
'South Yorkshire', 'Staffordshire', 'Strathclyde', 'Suffolk', 'Surrey',
'Tayside', 'Tyne and Wear', 'Warwickshire', 'West Glamorgan', 'West Midlands',
'West Sussex', 'West Yorkshire', 'Wiltshire', 'Worcestershire']
UK_COUNTRY = k ['England', 'Scotland', 'Wales', 'Northern Ireland']
UK_POSTCODE = k ['??# #??', '??## #??']
NEIGHBORHOOD = k ['East of Telegraph Road', 'North Norridge', 'Northwest Midlothian/Midlothian Country Club',
'Mott Haven/Port Morris', 'Kingsbridge Heights', 'Bronxdale', 'Pennypack', 'Bridesburg',
'Allegheny West', 'Bushwick South', 'Dyker Heights', 'Ocean Parkway South', 'Summerlin North',
'Seven Hills Area', 'Greater Las Vegas National', 'phoenix', 'Central Chandler', 'South of Bell Road',
'River Heights', 'White Plains Central', 'Mount Kisco West', 'Pound Ridge East', 'Babylon Bayside',
'Sagaponack Seaside', 'South of Lake Ave', 'Far Rockaway/Bayswater', 'Jamaica Estates/Holliswood',
'Murray Hill', 'East Renton', 'Renton West', 'Auburn North', 'Northwoods West', 'Florissant West',
'Ladue South', 'Candlewood Country Club', 'West Covina East', 'North East Irwindale', 'Sunshine-Gardens',
'Cipriani', 'Brentwood Central', 'Jupiter South/Abacoa', 'Sea Ranch Lakes', 'Schall Circle/Lakeside Green',
'Olmsted Falls Central', 'South of Lake Shore Blvd', 'Gates Mills North', 'White Oak South of Columbia Pike',
'Rockville East of Hungerford Dr', 'Cleveland Park']
end
end
<file_sep>/test/test_faker_name_ru.rb
# encoding: utf-8
require 'helper'
# Tests for the Russian name generator (Faker::NameRU).
class TestFakerNameRu < Test::Unit::TestCase
  def setup
    @tester = Faker::NameRU
  end

  def test_name
    @words = @tester.name.split
    assert [2,3].include?(@words.size)
    # Bug fix: the original asserted the return value of Array#each,
    # which is the (always truthy) array itself, so the regexp was never
    # actually checked. Assert each word individually instead.
    @words.each { |word| assert word.match(/[А-Я][а-я]+/) }
  end

  def test_name_sex
    @words = @tester.name.split
    assert same_sex?(@words)
  end

  def test_uniqueness
    unique_names = (1..10000).map { @tester.name }.uniq.size
    assert unique_names > 9850, "got only #{unique_names} unique names out of 10000"
  end

  def test_last_name
    assert @tester.last_name.match(/[А-Я][а-я]+/)
  end

  def test_male_last_name
    assert Faker::NameRU::LAST_NAMES[:male].include?(@tester.last_name(:male))
  end

  def test_first_name
    assert @tester.first_name.match(/[А-Я][а-я]+/)
  end

  def test_male_first_name
    assert Faker::NameRU::FIRST_NAMES[:male].include?(@tester.first_name(:male))
  end

  def test_patronymic
    assert @tester.patronymic.match(/[А-Я][а-я]+/)
  end

  def test_male_patronymic
    assert Faker::NameRU::PATRONYMICS[:male].include?(@tester.patronymic(:male))
  end

  def test_with_same_sex
    names = []
    @tester.with_same_sex do
      names << @tester.last_name
      names << @tester.first_name
      names << @tester.patronymic
    end
    assert same_sex?(names)
  end

  def test_with_same_sex_for_male
    names = []
    @tester.with_same_sex(:male) do
      names << @tester.last_name
      names << @tester.first_name
      names << @tester.patronymic
    end
    assert same_sex?(names, :male)
  end

  # True if every word belongs to the name lists of one single sex
  # (either sex qualifies when sex == :any).
  def same_sex?(words, sex = :any)
    (sex == :any ? [:male, :female] : [sex]).any? do |sex|
      words.all? do |word|
        [Faker::NameRU::LAST_NAMES, Faker::NameRU::FIRST_NAMES, Faker::NameRU::PATRONYMICS].any? do |names|
          names[sex].include?(word)
        end
      end
    end
  end
end
<file_sep>/test/test_phone_number.rb
require 'helper'
# Format sanity checks for Faker::PhoneNumber. (Class name typo
# "Numer" kept: Test::Unit discovers the class regardless.)
class TestPhoneNumer < Test::Unit::TestCase
  def test_phone_number
    # At least "ddd<sep>ddd" must appear somewhere in the number.
    assert_match(/\d{3}[. -]\d{3}/, Faker::PhoneNumber.phone_number)
  end

  def test_short_phone_number
    assert_match(/\d{3}-\d{3}-\d{4}/, Faker::PhoneNumber.short_phone_number)
  end
end
<file_sep>/test/test_address_de.rb
require 'helper'
# Minimal smoke tests for Faker::Address (the regexps only require some
# run of lowercase letters/spaces anywhere in the value).
class TestAddressDE < Test::Unit::TestCase
  def test_city
    assert_match(/[ a-z]+/, Faker::Address.city)
  end

  def test_state
    assert_match(/[ a-z]/, Faker::Address.us_state)
  end
end
<file_sep>/lib/ffaker/lorem.rb
module Faker
# Based on Perl's Text::Lorem
module Lorem
extend ModuleUtils
extend self
# One random lorem word.
def word
WORDS.rand
end
# An array of +num+ random lorem words (via WORDS.random_pick).
def words(num = 3)
WORDS.random_pick(num)
end
# A lorem sentence of word_count..word_count+5 words, capitalized and
# terminated with a period.
def sentence(word_count = 4)
  "#{words(word_count + rand(6)).join(' ').capitalize}."
end
# An array of +sentence_count+ lorem sentences.
def sentences(sentence_count = 3)
  1.upto(sentence_count).map { sentence }
end
# A lorem paragraph of sentence_count..sentence_count+2 sentences.
def paragraph(sentence_count = 3)
  extra = rand(3)
  sentences(sentence_count + extra).join(' ')
end
# An array of +paragraph_count+ lorem paragraphs.
def paragraphs(paragraph_count = 3)
  1.upto(paragraph_count).map { paragraph }
end
WORDS = k %w(alias consequatur aut perferendis sit voluptatem accusantium
doloremque aperiam eaque ipsa quae ab illo inventore veritatis
et quasi architecto beatae vitae dicta sunt explicabo aspernatur
aut odit aut fugit sed quia consequuntur magni dolores eos qui
ratione voluptatem sequi nesciunt neque dolorem ipsum quia dolor
sit amet consectetur adipisci velit sed quia non numquam eius
modi tempora incidunt ut labore et dolore magnam aliquam quaerat
voluptatem ut enim ad minima veniam quis nostrum exercitationem
ullam corporis nemo enim ipsam voluptatem quia voluptas sit
suscipit laboriosam nisi ut aliquid ex ea commodi consequatur
quis autem vel eum iure reprehenderit qui in ea voluptate velit
esse quam nihil molestiae et iusto odio dignissimos ducimus qui
blanditiis praesentium laudantium totam rem voluptatum deleniti
atque corrupti quos dolores et quas molestias excepturi sint
occaecati cupiditate non provident sed ut perspiciatis unde
omnis iste natus error similique sunt in culpa qui officia
deserunt mollitia animi id est laborum et dolorum fuga et harum
quidem rerum facilis est et expedita distinctio nam libero
tempore cum soluta nobis est eligendi optio cumque nihil impedit
quo porro quisquam est qui minus id quod maxime placeat facere
possimus omnis voluptas assumenda est omnis dolor repellendus
temporibus autem quibusdam et aut consequatur vel illum qui
dolorem eum fugiat quo voluptas nulla pariatur at vero eos et
accusamus officiis debitis aut rerum necessitatibus saepe
eveniet ut et voluptates repudiandae sint et molestiae non
recusandae itaque earum rerum hic tenetur a sapiente delectus ut
aut reiciendis voluptatibus maiores doloribus asperiores
repellat)
end
end
| 25b8536259217a9c7f6f946c928bcda10dd75ba1 | [
"RDoc",
"Ruby"
] | 22 | RDoc | chrisberkhout/ffaker | adc7960c9e5f96904cbf9cec3e828e421a9ed2bd | 47bd68d3db2b19974317bd1aee02225f7fb076f6 |
refs/heads/master | <file_sep>from django.urls import path
from basic_app import views
# URL routes for the basic_app application. ``app_name`` namespaces the
# routes so templates can reverse them as ``basic_app:register`` /
# ``basic_app:user_login``.
app_name = 'basic_app'
urlpatterns =[
path('register/',views.register,name='register'),
path('login/',views.user_login,name= 'user_login')
]
| 2a6474c6d2a129db1e58eae51a09d342a24eeb2e | [
"Python"
] | 1 | Python | rammandal/django-deployment | 7504fe5aca0c1af089eec25669a65e1138ceaf83 | c2e9f62fec4259d5c48a5d608766f4cae66e6f1e |
refs/heads/master | <repo_name>Beondel/LinearAlgebraLibrary<file_sep>/vector.py
# <NAME>
# Vector Algebra Function Library
# 5/2/17
from math import sqrt, acos, pi
class Vector(object):
    """An n-dimensional vector with basic linear-algebra operations.

    Coordinates are stored as a tuple, so instances are effectively
    immutable; every arithmetic operation returns a new Vector.
    """

    def __init__(self, coordinates):
        """Store *coordinates* (a non-empty iterable of numbers).

        Raises:
            ValueError: if coordinates is empty or falsy.
            TypeError: if coordinates is not iterable.
        """
        try:
            if not coordinates:
                raise ValueError
            self.coordinates = tuple(coordinates)
            # Measure the tuple, not the argument: the original called
            # len() on the raw iterable, which fails for generators.
            self.dimension = len(self.coordinates)
        except ValueError:
            raise ValueError('The coordinates must be nonempty')
        except TypeError:
            raise TypeError('The coordinates must be an iterable')

    def __str__(self):
        return 'Vector: {}'.format(self.coordinates)

    def __eq__(self, v):
        return self.coordinates == v.coordinates

    def plus(self, other):
        """Return the component-wise sum of self and other."""
        return Vector([x + y for x, y in zip(self.coordinates, other.coordinates)])

    def minus(self, other):
        """Return the component-wise difference self - other."""
        return Vector([x - y for x, y in zip(self.coordinates, other.coordinates)])

    def timesScalar(self, scalar):
        """Return self scaled by *scalar*."""
        return Vector([x * scalar for x in self.coordinates])

    def magnitude(self):
        """Return the Euclidean norm of the vector."""
        return sqrt(sum(x ** 2 for x in self.coordinates))

    def normalize(self):
        """Return a unit vector in the same direction.

        Raises:
            Exception: for the zero vector (magnitude 0). The bare
            ``except`` of the original is narrowed to ZeroDivisionError
            so unrelated errors are not masked.
        """
        try:
            return self.timesScalar(1.0 / self.magnitude())
        except ZeroDivisionError:
            raise Exception("can not normalize the zero vector")

    def dot(self, other):
        """Return the dot product of self and other."""
        return sum(x * y for x, y in zip(self.coordinates, other.coordinates))

    def angle(self, other):
        """Return the angle between self and other in radians.

        Also prints the angle in radians and degrees.

        Bug fix: the original computed
        ``acos(dot / |self| * |other|)`` -- operator precedence divided
        by |self| and then *multiplied* by |other|. The product of both
        magnitudes must be in the denominator.
        """
        result = acos(self.dot(other) / (self.magnitude() * other.magnitude()))
        print(str(result) + " radians")
        print(str(result * (180 / pi)) + " degrees")
        return result
# Quick manual check: the angle between the y- and x-axis unit vectors
# should be pi/2 radians (90 degrees). Runs (and prints) at import time.
v1 = Vector([0, 1, 0])
v2 = Vector([1, 0, 0])
v1.angle(v2)
| dfa1b9d5c681b414f30fc73cbc991ea01a5ba351 | [
"Python"
] | 1 | Python | Beondel/LinearAlgebraLibrary | 1b86177b9caba15f22ec0da4d5074097a422a602 | 8532a6822ccb8077a47156ac5d6b912427f066dd |
refs/heads/master | <file_sep>package encoder;
import java.awt.image.BufferedImage;
import java.awt.Color;
public class ThreeStepSearcher {
// Three step search
public static int[] ThreeStepSearch (int x, int y, BufferedImage left, BufferedImage right){
int[] t = new int[2];
int xCenter = x;
int yCenter = y;
//Coordinates of the moving orgin
int MovingX;
int MovingY;
// Creating pixel array with size macroblock(30X30) 30X30X3(RGB)
int[][][] pixelArrayLeft = new int[30][30][3];
int[][][] pixelArrayRight = new int[30][30][3];
double mseExtracted;
// Initializing Meansquare Error
double mseMin = 100000;
//System.out.println("extract real");
// Extracted Array extraction
pixelArrayLeft = extractPixelArray(x,y, left);
// Start of three step search with init step 64 and a convergence rate of 1/2
for (int step= 64 ; step > 1; step = step/2){
MovingX = xCenter - step;
MovingY = yCenter - step;
//System.out.println ("STEP" + step);
for(int i = 1 ; i <=3; i++){
//System.out.println ("coordinate " + i + "," + j);
MovingY = y - step;
if (MovingX < 0){
MovingX = MovingX + step;
continue;
}
// Limit check
if (MovingX >= 370){
continue;
}
for(int j = 1; j <=3 ; j++){
//System.out.println ("coordinate " + i + "," + j);
if (MovingY < 0){
MovingY = MovingY + step;
continue;
}
if (MovingY >= 570){
continue;
}
// Compare
//System.out.println("moving x = " + MovingX+" and moving y = "+ MovingY );
pixelArrayRight = extractPixelArray(MovingX,MovingY, right);
mseExtracted = MSE(pixelArrayLeft,pixelArrayRight);
// Checking if current MSE is less that the current minMSE and updating if yes
if (mseExtracted < mseMin){
mseMin = mseExtracted ;
xCenter = MovingX;
yCenter = MovingY;
}
//Changing step in Y-axis
MovingY = MovingY + step;
}
//Changing step in X-axis
MovingX = MovingX + step;
}
}
//System.out.println("Final min at " +xCenter+","+yCenter +" and min is "+ mseMin );
t[0] = xCenter;
t[1] = yCenter;
return t;
}
// Extracting pixels of a given coordinates in a buffered image
public static int[][][] extractPixelArray (int x, int y, BufferedImage image){
int pixelNum = 30;
int[][][] RGB = new int [30][30][3];
int bufferINT ;
Color temp;
for (int i = 0; i< pixelNum; i++){
//System.out.println("inside 1");
for (int j = 0; j< pixelNum; j++ ){
//System.out.println("pixel inside 2 ==> " + i +","+ j);
bufferINT = image.getRGB(x+i,y+j);
temp = new Color(bufferINT);
for (int k=0; k< 3 ; k++){
//System.out.println("inside 3" + k);
switch (k) {
case 0:
RGB[i][j][k] = temp.getRed();
break;
case 1:
RGB[i][j][k] = temp.getGreen();
break;
case 2:
RGB[i][j][k] = temp.getBlue();
break;
}
}
}
}
//System.out.println("extract Done");
return RGB;
}
// Calculating MSE between two macro-blocks
public static double MSE (int [][][]sourceImage,int [][][] ComparedImage) {
int sum_sq = 0;
//double []mse = new double[3];
// 8X8 block
int h= 30;
int w = 30;
int RGB = 3;
for (int k = 0 ; k<RGB ; ++k){
for (int i = 0; i < h; ++i)
{
for (int j = 0; j < w; ++j)
{
int p1 = sourceImage[i][j][k];
int p2 = ComparedImage[i][j][k];
int err = p2 - p1;
sum_sq += (err * err);
}
}
//mse[k] = (double)sum_sq / (h * w);
}
//double tmse = Math.pow((mse[0]*mse[0]),2) + Math.pow((mse[1]*mse[1]),2) + Math.pow((mse[2]*mse[2]),2);
//return (Math.sqrt(tmse));
//System.out.println("MSE is " + ((double)sum_sq / (h * w)));
return ((double)sum_sq / (h * w));
}
}
<file_sep># mu
Segmented and Gaze Controlled decompression for streaming displays such as VR
<file_sep>package decoder;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingConstants;
public class decode5 {

    static JFrame frame;
    static JLabel lbIm1;
    static JLabel lbIm2;
    // Frame currently being reconstructed from the compressed stream.
    static BufferedImage img;
    final static double pi = Math.PI;
    // Quantisation steps: q1 for background blocks (type 0), q2 for
    // foreground blocks (type 1).
    static int q1 = 1000;
    static int q2 = 10;
    static BufferedImage original;
    public static List<BufferedImage> outputVideo = new ArrayList<BufferedImage>();
    public static List<BufferedImage> originalVideo = new ArrayList<BufferedImage>();
    // Precomputed IDCT cosine tables: table[a][b] = cos((2a+1)*b*pi/16).
    // Both tables hold identical values; blockXU is indexed by (x, u) and
    // blockYV by (y, v) for readability. The original filled them with
    // four nested loops (recomputing each entry 64 times) and never read
    // blockYV.
    static double[][] blockXU = new double[8][8];
    static double[][] blockYV = new double[8][8];

    // Number of text lines (8x8 coefficient blocks) per 960x540 frame:
    // 60 x 34 macroblocks of 16x16, 4 blocks each (the last macroblock
    // row extends past 540 and is clipped when drawn).
    private static final int BLOCKS_PER_FRAME = 8160;

    /**
     * Decode a compressed (.cmp) file into {@link #outputVideo}.
     *
     * Each input line holds one 8x8 block: a block-type flag followed by
     * 64 R, 64 G and 64 B DCT coefficients. Every 8160 lines complete one
     * 960x540 frame. {@link #originalVideo} is filled with blank frames,
     * matching the original behaviour (nothing ever writes to
     * {@code original}).
     *
     * @param filename path of the compressed file to decode
     */
    public static void getVideo(String filename) {
        // Fill the cosine lookup tables once.
        for (int a = 0; a < 8; a++) {
            for (int b = 0; b < 8; b++) {
                blockXU[a][b] = Math.cos((2 * a + 1) * b * pi / 16.0);
                blockYV[a][b] = Math.cos((2 * a + 1) * b * pi / 16.0);
            }
        }

        outputVideo = new ArrayList<BufferedImage>();
        originalVideo = new ArrayList<BufferedImage>();
        img = new BufferedImage(960, 540, BufferedImage.TYPE_INT_RGB);
        original = new BufferedImage(960, 540, BufferedImage.TYPE_INT_RGB);

        try {
            BufferedReader in = new BufferedReader(new FileReader(filename));
            String line = in.readLine();
            int blockCount = 0;
            long startTime = System.currentTimeMillis();
            while (line != null) {
                idct(line, blockCount);
                blockCount++;
                if (blockCount == BLOCKS_PER_FRAME) {
                    // One full frame decoded: log the elapsed time and
                    // start a fresh pair of images.
                    long endTime = System.currentTimeMillis();
                    System.out.println("程序运行时间: " + (endTime - startTime) + "ms");
                    outputVideo.add(img);
                    originalVideo.add(original);
                    blockCount = 0;
                    img = new BufferedImage(960, 540, BufferedImage.TYPE_INT_RGB);
                    original = new BufferedImage(960, 540, BufferedImage.TYPE_INT_RGB);
                    startTime = System.currentTimeMillis();
                }
                line = in.readLine();
            }
            in.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Inverse-DCT one 8x8 block (one line of the file) and write its
     * pixels into {@link #img}.
     *
     * A line is "&lt;type&gt; r0..r63 g0..g63 b0..b63". Coefficients are
     * re-quantised with q1 (type 0) or q2 (type 1) before the transform.
     *
     * @param line       whitespace-separated block record
     * @param blockCount index of the block within the current frame; four
     *                   consecutive blocks form one 16x16 macroblock in the
     *                   order top-left, top-right, bottom-left, bottom-right
     */
    private static void idct(String line, int blockCount) {
        int blockth = blockCount % 4;          // quadrant within the macroblock
        int offsetx = blockCount / 4 / 34;     // macroblock column (34 rows per column)
        int offsety = blockCount / 4 % 34;     // macroblock row
        int actualx = offsetx * 16;
        int actualy = offsety * 16;

        String[] coe = line.split(" ");
        int blockType = Integer.valueOf(coe[0]);
        int q = (blockType == 0) ? q1 : q2;

        // Dequantise: truncate onto the quantisation grid, then scale back.
        // (Index 0 is the type flag; quantising it too is harmless and
        // matches the original.)
        int[] qcoe = new int[193];
        for (int i = 0; i < coe.length; i++) {
            qcoe[i] = (int) (Double.valueOf(coe[i]) / q) * q;
        }

        for (int x = 0; x < 8; x++) {
            for (int y = 0; y < 8; y++) {
                double fr = 0, fg = 0, fb = 0;
                for (int u = 0; u < 8; u++) {
                    double ci = (u == 0) ? 1.0 / Math.sqrt(2.0) : 1.0;
                    for (int v = 0; v < 8; v++) {
                        double cj = (v == 0) ? 1.0 / Math.sqrt(2.0) : 1.0;
                        // blockYV holds the same values the original read
                        // from blockXU[y][v]; output is numerically identical.
                        double basis = ci * cj * blockXU[x][u] * blockYV[y][v];
                        fr += qcoe[u * 8 + v + 1] * basis;
                        fg += qcoe[u * 8 + v + 65] * basis;
                        fb += qcoe[u * 8 + v + 129] * basis;
                    }
                }
                int r = (int) (0.25 * fr);
                int g = (int) (0.25 * fg);
                int b = (int) (0.25 * fb);
                int pix = 0xff000000 | ((r & 0xff) << 16) | ((g & 0xff) << 8) | (b & 0xff);

                // Place the pixel in the proper quadrant of the macroblock,
                // clipping at the frame border (34 * 16 = 544 > 540, so the
                // last macroblock row is only partially used).
                int px = actualx + x + ((blockth == 1 || blockth == 3) ? 8 : 0);
                int py = actualy + y + ((blockth == 2 || blockth == 3) ? 8 : 0);
                if (px < 960 && py < 540) {
                    img.setRGB(px, py, pix);
                }
            }
        }
    }
}
| ac28ffe7b6771ac3a2ebb4cb01823469ee21be32 | [
"Markdown",
"Java"
] | 3 | Java | cszongyang/mu | d7d0c4ae3c2a3a18fd0e2b936f965552ec973930 | 9a35b45eeba5182a7e51f3052f356d39fd9deb0d |
refs/heads/master | <repo_name>JustinLindhout/frontend-voor-designers-1920<file_sep>/opdracht2/README.md
# Frontend voor Designers - opdracht 2: Een interactie uitwerken voor verschillende gebruikers input
Werk een functionaliteit uit die je kunt bedienen met 'click' en nog een user interactie, zoals het toetsenbord, tab, dubbel click, swipe, long press, <del>force touch</del>, of iets anders ... Werk je ontwerp uit in HTML, CSS en Javascript om te kunnen testen in een [browser](https://en.m.wikipedia.org/wiki/List_of_web_browsers).
Lees hier de [opdrachtbeschrijving](./opdrachtbeschrijving.md).
# Project titel
Ik heb een carousel met foto's gemaakt die te bedienen is met pijltjes op het scherm. Het was ook de bedoeling om het carousel te bedienen met de pijltoetsen op het toetsenbord, dit is helaas niet gelukt.
https://justinlindhout.github.io/frontend-voor-designers-1920/opdracht2/demo/index.html
## interface
Leg de interface uit.
De gebruiker kan met de pijlen op het scherm door de foto's heen gaan, dit kan beide kanten op. Zo geef je gebruiker het gevoel dat zij in controle zijn van wat er gebeurd.
De pijlen zien er zo uit dat het aangeeft dat je kan sliden tussen meerdere foto's. Appearance follows behavior.
Het is niet me geulkt om principles 8 goed uit te voeren.
In de demo heb je meerdere [UI events](https://developer.mozilla.org/en-US/docs/Web/API/UIEvent) toegepast. Hoe heb je dat gedaan?
Ik heb de slider werkend gekregen doormiddel van buttons, met deze buttons kan je door de foto's sliden. Mijn idee was om ook tussen de foto's te kunnen sliden doormiddel van de pijltoetsen op het toetsenbord. Dit is mij helaas nog niet gelukt.
## code
De foto's in mijn carousel staan zonder css in een lange horizontale rijd= naast elkaar. Door de overflow van de slider op hidden te zetten zie je enkel 1 afbeelding. Over de foto's zit een container waar de slider in zit.
Met javascript en Jquery heb ik de slider laten bewegen. Iedere keer dat slider wordt geactiveerd worden de afbeeldingen met css opgeschoven.
(jQuery is een framework voor javascript.
Een framework is een code die het gemakkelijker maakt om in die programmeer- of scripttaal te programmeren.
De code wordt hierdoor vaak compacter, gemakkelijker te maken (je hebt minder code nodig om een functie te bereiken) en dus ook overzichtelijker.)
<file_sep>/opdracht1/demo/js/script.js
/*
Wat doe je ook alweer in Javascript voor een micro-interactie?
1. Gebruik de querySelector om een element in je html te selecteren
2. Koppel een evenListener aan het element om een mouse-event te detecteren
3. Gebruik het Classlist object om een css class aan een element toe te voegen of weg te halen.
*/
var blue = document.querySelectorAll("img.blue");
var green = document.querySelectorAll("img.green");
var yellow = document.querySelectorAll("img.yellow");
var red = document.querySelectorAll("img.red");
var button = document.querySelector("btnBlue");
btnBlue.addEventListener("click", function () {
blue.classList.toggle("greenhide", "yellowhide", "redhide");
}
| 179a4d35347bfd7b053aae1f601823ab69e148d0 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | JustinLindhout/frontend-voor-designers-1920 | 77c141cbfe9905c208fb16dc9977021773b03172 | 27b170e9668cdb07c5e72a478442148eabb29722 |
refs/heads/master | <file_sep>const questions = [
{
question: "What does HTML stand for?",
answers: [
{text: 'HyperSex Marker Language', correct: false},
{text: 'Hyper Markup Language', correct: false},
{text: 'HyperVeryText Markup Language', correct: false},
{text: 'HyperText Markup Language', correct: true}
]
},
{
question: "What does CSS stand for?",
answers: [
{text: 'Cheats Style Sleep', correct: false},
{text: 'Clown Style Sheets', correct: false},
{text: 'Cascading Style Sheets', correct: true},
{text: 'Cyber Style Sun', correct: false}
]
},
{
question: "When appeared JS?",
answers: [
{text: '1995', correct: true},
{text: '1895', correct: false},
{text: '2001', correct: false},
{text: '1990', correct: false}
]
},
{
question: "What is DOM?",
answers: [
{text: 'Document Object Master', correct: false},
{text: 'Document Object Model', correct: true},
{text: 'Document Oriented Model', correct: false},
{text: 'DotsOracleMuseum', correct: false}
]
},
]
| 174dfa8cef75d8ea4f64c7270400da53f63d7e72 | [
"JavaScript"
] | 1 | JavaScript | ohwoow/test-your-knowledge-js | 88d5872ad1f58921da5e2c2c3f55d8b565434b20 | 975ccf5c7c75600b51b58af7fed5b07a308d5e2e |
refs/heads/master | <repo_name>RitamChakraborty/Christmas_Tree<file_sep>/README.md
# Christmas Tree
Generate Christmas Tree like structure from a text. It creates a complete binary tree with the text and prints it by
adding edges to every alphabet.
## Demo
```
********** Christmas Tree **********
Enter a text:
MerryChristmas
r
/ \
/ \
/ \
/ \
/ \
r m
/ \ / \
/ \ / \
e C s s
/ \ / \ / \ /
M r y h i t a
```
## Credit
Thanks [Shuktika15](https://github.com/Shuktika15) for giving me the idea.<file_sep>/src/service/DepthMapService.java
package service;
import model.DepthMap;
import model.Node;
import java.util.*;
public class DepthMapService {
private final Map<Integer, List<Character>> map = new TreeMap<>();
private final Node tree;
public DepthMapService(Node tree) {
this.tree = tree;
}
public DepthMap getDepthMap() {
generateDepthMap(tree, 0);
return new DepthMap(map);
}
private void generateDepthMap(Node node, int depth) {
if (node != null) {
if (map.get(depth) == null) {
map.put(depth, new ArrayList<>(Collections.singletonList(node.getAlphabet())));
} else {
map.get(depth).add(node.getAlphabet());
}
generateDepthMap(node.getLeft(), depth + 1);
generateDepthMap(node.getRight(), depth + 1);
}
}
}
<file_sep>/src/service/TreePrinterService.java
package service;
import model.DepthMap;
import util.Utils;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class TreePrinterService {
public String getDiagram(DepthMap depthMap) {
StringBuilder treeDiagram = new StringBuilder();
Map<Integer, List<Character>> map = depthMap.getMap();
int maxDepth = Collections.max(map.keySet());
for (Integer depth : map.keySet()) {
StringBuilder stringBuilder = new StringBuilder();
List<Character> nodes = map.get(depth);
int d = (maxDepth - depth);
int n = Utils.spacerCount(d);
for (int i = 0; i < nodes.size(); i++) {
Character node = nodes.get(i);
if (d == 0) {
stringBuilder.append(node);
if (i + 1 != nodes.size()) {
if (i % 2 == 0) {
stringBuilder.append(" ");
} else {
stringBuilder.append(" ");
}
}
} else {
StringBuilder stringBuilder1 = new StringBuilder();
stringBuilder1.append(" ".repeat(n));
stringBuilder.append(stringBuilder1);
stringBuilder.append(node);
if (i != nodes.size() - 1) {
stringBuilder.append(stringBuilder1);
stringBuilder.append(" ");
}
}
}
if (d != 0) {
StringBuilder stringBuilder1 = new StringBuilder("\n");
int nextDepthNodesSize = map.get(depth + 1).size();
int p = nextDepthNodesSize % 2 == 0 ? nextDepthNodesSize / 2 : nextDepthNodesSize / 2 + 1;
int m = Utils.spacerCount(d - 1);
int j = 2 * m;
j = d == 1 ? j - 1 : j;
int tempJ = j;
for (int i = 0; i < m; ++i) {
for (int b = 0; b < p; ++b) {
j = tempJ;
String string = " ".repeat(2 * n + 2);
char[] chars = string.toCharArray();
int k = (j - i) + 2 * (i + 1);
chars[j - i] = '/';
if (nextDepthNodesSize % 2 == 0 || b != p - 1) {
chars[k] = '\\';
}
string = new String(chars);
stringBuilder1.append(string);
}
stringBuilder1.append("\n");
}
stringBuilder.append(stringBuilder1);
}
treeDiagram.append(stringBuilder);
}
return treeDiagram.toString();
}
}
| 2e866be24672a91412b610376481385daca72d75 | [
"Markdown",
"Java"
] | 3 | Markdown | RitamChakraborty/Christmas_Tree | e864dd03543c45aee34d6d2d7641d3005cef4286 | fa28ea7b0400ae75ddb3d9853a0c1f2484fa0bd6 |
refs/heads/master | <repo_name>ReinForce-II/fbtft<file_sep>/fb_ili9225.c
/*
* FB driver for the ILI9225 LCD Controller
*
* Copyright (C) 2018 Reinforce-II
* Based on codes by <NAME>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/gpio.h>
#include <linux/spi/spi.h>
#include <linux/delay.h>
#include "fbtft.h"
#define DRVNAME "fb_ili9225"
#define WIDTH 176
#define HEIGHT 220
static int init_display(struct fbtft_par *par)
{
fbtft_par_dbg(DEBUG_INIT_DISPLAY, par, "%s()\n", __func__);
par->fbtftops.reset(par);
/* Initialization sequence from ILI9225 Application Notes */
/* ***********Power On sequence *************** */
write_reg(par, 0x10, 0x0000); // Set SAP,DSTB,STB
write_reg(par, 0x11, 0x0000); // Set APON,PON,AON,VCI1EN,VC
write_reg(par, 0x12, 0x0000); // Set BT,DC1,DC2,DC3
write_reg(par, 0x13, 0x0000); // Set GVDD
write_reg(par, 0x14, 0x0000); // Set VCOMH/VCOML voltage
msleep(20); // Delay 20 ms
// Please follow this power on sequence
write_reg(par, 0x11, 0x0018); // Set APON,PON,AON,VCI1EN,VC
write_reg(par, 0x12, 0x1121); // Set BT,DC1,DC2,DC3
write_reg(par, 0x13, 0x0063); // Set GVDD
write_reg(par, 0x14, 0x3961); // Set VCOMH/VCOML voltage
write_reg(par, 0x10, 0x0800); // Set SAP,DSTB,STB
msleep(10); // Delay 10 ms
write_reg(par, 0x11, 0x1038); // Set APON,PON,AON,VCI1EN,VC
msleep(30); // Delay 30 ms
write_reg(par, 0x02, 0x0100); // set 1 line inversion
if ((par->info->var.rotate % 180) != 0) {
//R01H:SM=0,GS=0,SS=0 (for details,See the datasheet of ILI9225)
write_reg(par, 0x01, 0x001C); // set the display line number and display direction
//R03H:BGR=1,ID0=1,ID1=1,AM=1 (for details,See the datasheet of ILI9225)
write_reg(par, 0x03, 0x1038); // set GRAM write direction .
}
else {
//R01H:SM=0,GS=0,SS=1 (for details,See the datasheet of ILI9225)
write_reg(par, 0x01, 0x011C); // set the display line number and display direction
//R03H:BGR=1,ID0=1,ID1=1,AM=0 (for details,See the datasheet of ILI9225)
write_reg(par, 0x03, 0x1030); // set GRAM write direction.
}
write_reg(par, 0x07, 0x0000); // Display off
write_reg(par, 0x08, 0x0808); // set the back porch and front porch
write_reg(par, 0x0B, 0x1100); // set the clocks number per line
write_reg(par, 0x0C, 0x0000); // CPU interface
write_reg(par, 0x0F, 0x0501); // Set Osc
write_reg(par, 0x15, 0x0020); // Set VCI recycling
write_reg(par, 0x20, 0x0000); // RAM Address
write_reg(par, 0x21, 0x0000); // RAM Address
//------------------------ Set GRAM area --------------------------------//
write_reg(par, 0x30, 0x0000);
write_reg(par, 0x31, 0x00DB);
write_reg(par, 0x32, 0x0000);
write_reg(par, 0x33, 0x0000);
write_reg(par, 0x34, 0x00DB);
write_reg(par, 0x35, 0x0000);
write_reg(par, 0x36, 0x00AF);
write_reg(par, 0x37, 0x0000);
write_reg(par, 0x38, 0x00DB);
write_reg(par, 0x39, 0x0000);
// ---------- Adjust the Gamma 2.2 Curve -------------------//
write_reg(par, 0x50, 0x0603);
write_reg(par, 0x51, 0x080D);
write_reg(par, 0x52, 0x0D0C);
write_reg(par, 0x53, 0x0205);
write_reg(par, 0x54, 0x040A);
write_reg(par, 0x55, 0x0703);
write_reg(par, 0x56, 0x0300);
write_reg(par, 0x57, 0x0400);
write_reg(par, 0x58, 0x0B00);
write_reg(par, 0x59, 0x0017);
write_reg(par, 0x0F, 0x0701); // Vertical RAM Address Position
write_reg(par, 0x07, 0x0012); // Vertical RAM Address Position
msleep(50); // Delay 50 ms
write_reg(par, 0x07, 0x1017); // Vertical RAM Address Position
return 0;
}
static void set_addr_win(struct fbtft_par *par, int xs, int ys, int xe, int ye)
{
fbtft_par_dbg(DEBUG_SET_ADDR_WIN, par,
"%s(xs=%d, ys=%d, xe=%d, ye=%d)\n", __func__, xs, ys, xe, ye);
if ((par->info->var.rotate % 180) != 0) {
write_reg(par, 0x38, xe);
write_reg(par, 0x39, xs);
write_reg(par, 0x36, ye);
write_reg(par, 0x37, ys);
write_reg(par, 0x21, xs);
write_reg(par, 0x20, ys);
}
else {
write_reg(par, 0x36, xe);
write_reg(par, 0x37, xs);
write_reg(par, 0x38, ye);
write_reg(par, 0x39, ys);
write_reg(par, 0x20, xs);
write_reg(par, 0x21, ys);
}
write_reg(par, 0x0022); /* Write Data to GRAM */
}
static struct fbtft_display display = {
.regwidth = 16,
.width = WIDTH,
.height = HEIGHT,
.fbtftops = {
.init_display = init_display,
.set_addr_win = set_addr_win,
},
};
FBTFT_REGISTER_DRIVER(DRVNAME, "ilitek,ili9225", &display);
MODULE_ALIAS("spi:" DRVNAME);
MODULE_ALIAS("platform:" DRVNAME);
MODULE_ALIAS("spi:ili9225");
MODULE_ALIAS("platform:ili9225");
MODULE_DESCRIPTION("FB driver for the ILI9225 LCD Controller");
MODULE_AUTHOR("Reinforce-II");
MODULE_LICENSE("GPL");
| e77e019290b4d79540353938253b72dde4de6f10 | [
"C"
] | 1 | C | ReinForce-II/fbtft | 4e19aab0a2b122f2f5829ccc14d7dad7832deae3 | 93c36ecdd76423bbd4cfaeed9b019294cdade330 |
refs/heads/master | <file_sep>import React from "react";
class Subject extends React.Component {
render() {
return (
<header>
<h1>
<a href="/">{this.props.title}</a>
</h1>
<h3>{this.props.sub}</h3>
</header>
);
}
}
export default Subject;
<file_sep>import React from "react";
import Nav from "./components/Nav";
import Article from "./components/Article";
import Subject from "./components/Subject";
import "./App.css";
class App extends React.Component {
constructor(props) {
super(props);
this.state = {
mode: "read",
subject: { title: "WEB", sub: "world wide web!" }, //스테이트를 이용해서 초깃값 정해줌.
welcome: { title: "Welcome", desc: "Hello, React!" },
contents: [
{ id: 1, title: "HTML", desc: "HTML for Information " },
{ id: 2, title: "CSS", desc: "CSS for Design " },
{ id: 3, title: "JavaScript", desc: "JavaScript for interactive " },
],
};
}
render() {
var _title,
_desc = null;
if (this.state.mode === "welcome") {
_title = this.state.welcome.title;
_desc = this.state.welcome.desc;
} else if (this.state.mode === "read") {
_title = this.state.contents[1].title;
_desc = this.state.contents[1].desc;
}
return (
<div className="App">
{/* <Subject
title={this.state.subject.title} //처음 선언한 스테이트값을 Subject컴포넌트에 props로 전해줌.
sub={this.state.subject.sub}
/>*/}
<header>
<h1>
<a
href="/"
onClick={function (e) {
console.log(e);
e.preventDefault();
this.setState({
mode: "welcome",
});
}.bind(this)}
>
{this.state.subject.title}
</a>
</h1>
<h3>{this.state.subject.sub}</h3>
</header>
<Nav data={this.state.contents} />
<Article title={_title} desc={_desc} />
</div>
);
}
}
export default App;
//Constructor => 컴포넌트가 실행될때 render보다 먼저 실행이 되면서 컴포넌트를 초기화시켜주고싶다면 해당 코드는 constructor안에다가 작성한다.
//상위 컴포넌트인 App의 상태(state)를 하위 Component에 전달하고 싶을 때는 상위 컴포넌트의 state를 하위 컴포넌트에 props값으로 전달할 수 있다.
<file_sep>import React from "react";
class Nav extends React.Component {
render() {
var data = this.props.data; //내부적으로 data라는 props로부터 App의 state인 contents 값을 받음.
var i = 0;
var lists = [];
while (i < data.length) {
lists.push(
<li key={data[i].id}>
<a href={"/content/" + data[i].id}>{data[i].title}</a>
</li>
);
i = i + 1;
}
return <nav>{lists}</nav>;
}
}
export default Nav;
<file_sep># opentutorial_react
Opentutorial(생활코딩)_React 강의 학습 내용
| 2fdb3835da7bcde0ecb0c1fb93b8ff718825cb36 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | HwangWoong4708/opentutorial_react | f05d3f5591dab2c14a99cf1f28f630b6c5cef5ff | 8a1955b47d52272a1a9845ba0dd583384e98ee43 |
refs/heads/master | <repo_name>budiopl/ClientPlugin<file_sep>/clientplugin.php
<?php
/**
* Plugin Name: Hurtownia - Budio.pl
* Plugin URI: https://github.com/budiopl/ClientPlugin
* Description: Umieszcza link do twojej hurtowni w stopce strony.
* Version: 1.1.2
* Author: Budio.pl
* Author URI: https://budio.pl
* License: GPL2
*/
add_action('wp_head', 'cphb_add_style');
function cphb_add_style()
{
wp_enqueue_style( 'clientplugin-style', plugins_url( 'css/clientplugin.css', __FILE__ ), false );
}
add_action( 'wp_enqueue_scripts', 'cphb_add_google_fonts' );
function cphb_add_google_fonts()
{
wp_enqueue_style( 'wpb-google-fonts', '//fonts.googleapis.com/css?family=Roboto:300&display=swap&subset=latin-ext', false );
}
function cphb_get_itnavigator_data()
{
$domain = parse_url(get_site_url());
$response = wp_remote_get( 'https://itnavigator.budio.pl:444/wp-plugin-data?url='.$domain['host'] );
if(!is_wp_error($response))
{
$dataAPI = json_decode($response['body']);
update_option('clientplugin_data', $dataAPI);
return $dataAPI;
}
}
add_action( 'wp_footer', 'cphb_add_signature' );
function cphb_add_signature ()
{
$data = get_option('clientplugin_data') ?? cphb_get_itnavigator_data();
if(!empty($data))
{
echo '<div class="budio-partner-container">
<div class="budio-logo-container">
<div class="head">Jesteśmy częścią</div>
<a href="https://budio.pl/" target="_blank" class="ext-link grupa-link" rel="nofollow"><img src="'.plugins_url( 'image/budiopl-logo.svg', __FILE__ ).'" alt="Budio.pl"></a>';
if(!is_null($data->wholesale_link))
{
echo '<div class="links">
<a href="'.$data->wholesale_link.'" target="_blank">Zobacz naszą ofertę</a>
</div>';
}
echo '</div>
<div class="budio-apps-container">
<div class="head">Darmowa aplikacja mobilna dla Wykonawców</div>
<div class="badges">
<a href="'.$data->android_app_link.'" target="_blank" rel="nofollow" class="ext-link"><img src="'.plugins_url( 'image/googleplay.png', __FILE__ ).'"></a>
<a href="'.$data->ios_app_link.'" target="_blank" rel="nofollow" class="ext-link"><img src="'.plugins_url( 'image/appstore.png', __FILE__ ).'"></a>
</div>
</div>
</div>';
}
}
<file_sep>/README.md
# Hurtownia - Budio.pl - Plugin klienta
Plugin dla hurtowni będących w programie budio.pl
## Instalacja
Zainstaluj bezpośrednio z repozytorium WordPress lub pobierz plik zip, rozpakuj do katalog `/wp-content/plugins/clientplugin` i aktywuj wtyczkę!
## Użycie
Wtyczka nie wymaga dodatkowej konfiguracji. Wystarczy aktywować.
## Wsparcie
W razie problemów prosimy o kontakt: <EMAIL>
## Licencja
[MIT](https://choosealicense.com/licenses/mit/)
| 4a3304b8a3ad6d4a167e6754c2e80bf3e547926e | [
"Markdown",
"PHP"
] | 2 | PHP | budiopl/ClientPlugin | 72f9988b79ad51e0f0572f7d2b817cdcfe8ff748 | 8260dbf32e558fbf9e6c8b0b841f81732c1245f2 |
refs/heads/main | <repo_name>Irfanullahmunna/core-concepts<file_sep>/src/App.js
import React, {useEffect, useState} from 'react';
import logo from './logo.svg';
import './App.css';
function App() {
const nayoks = ['Razzak','Jasim', 'Anwar', 'Jafor', 'Alomgir', 'Salman']
const products = [
{name: 'Photoshop', price: '$90.99'},
{name: 'Illustrator', price: '$60.99'},
{name: 'PDF Reader', price: '$6.99'},
{name: 'Adobe', price: '$10.99'}
]
// const productNames = products.map(product => product.name)
// console.log(productNames);
const nayokNames = nayoks.map(nayok => nayok);
console.log(nayokNames);
var person = {
firstName : "<NAME>",
lastName : "Munna",
job: "Student",
}
var name = "<NAME>";
var style = {
color: "red",
backgroundColor: "black",
}
return (
<div className="App">
<header className="App-header">
<img src={logo} className="App-logo" alt="logo" />
<p>
Edit done <code>src/App.js</code> and save to reload.
</p>
<Counter></Counter>
<Users></Users>
<ul>
{
nayoks.map(nayok => <li>{nayok}</li>)
}
{
products.map(pd => <li>{pd.name}</li>)
}
{/* <li>{nayoks[0]}</li>
<li>{nayoks[1]}</li>
<li>{nayoks[2]}</li>
<li>{nayoks[3]}</li> */}
</ul>
<p className="" style={style}>{person.firstName} {person.lastName} {person.job}</p>
<p>Full Name: {name}</p>
<h1>My Heading: {(2+3*2+250)}</h1>
<p>My first React Paragraph</p>
{/* <Product name={products[0].name}></Product> */}
{
products.map(product => <Product product={product}></Product>)
}
<Product product={products[0]}></Product>
<Product product={products[1]}></Product>
<Person name="<NAME>" job="football"></Person>
<Person name="<NAME>" job="cricket"></Person>
<Person name="<NAME>" job="volyball"></Person>
{/* <a
className="App-link"
href="https://reactjs.org"
target="_blank"
rel="noopener noreferrer"
>
Learn React
</a> */}
</header>
</div>
);
}
function Counter(){
const [count, setCount] = useState(0);
const handleIncrease = () => {
// console.log('clicked');
// const newCount = count + 1;
// setCount(newCount);
setCount(count+1);
};
const handleDecrease = () => setCount(count-1);
return(
<div>
<h1>Count: {count}</h1>
<button onClick={handleDecrease}>Decrease</button>
<button onClick={handleIncrease}>Increase</button>
{/* <button onClick={ () => setCount(count+1)}>Increase</button> */}
</div>
)
}
function Users(){
const [users, setUsers] = useState([]);
useEffect( () => {
// console.log('Calling Effect');
fetch('https://jsonplaceholder.typicode.com/users')
.then(res => res.json())
.then(data => setUsers(data));
}, [])
return(
<div>
<h3>Dynamic Users: {users.length}</h3>
<ul>
{
console.log(users)
}
{
users.map(user => <li>{user.phone}</li>)
}
</ul>
{/* {
console.log(users)
} */}
</div>
)
}
function Product(props) {
const productStyle={
border: '1px solid gray',
borderRadius: '5px',
backgroundColor: 'lightgray',
height: '300px',
width: '200px',
float: 'left'
}
const {name, price} = props.product;
// console.log(name, price)
return(
<div style={productStyle}>
<h3>Name: {name}</h3>
<h5>{price}</h5>
<button>Buy Now</button>
</div>
// <div style={productStyle}>
// <h3>Name: {props.product.name}</h3>
// <h5>{props.product.price}</h5>
// <button>Buy Now</button>
// </div>
)
}
function Person(props) {
const personStyle={
border: '2px solid red',
margin: '10px'
}
return (
<div style={personStyle}>
<h1>Name: {props.name} Munna</h1>
<h3>Learner {props.job}</h3>
</div>
// <div style={{border: '2px solid red', margin: '10px'}}>
// <h1>Name: Irfan</h1>
// <h3>Student</h3>
// </div>
)
}
export default App;
| 47fca4dc2d2aa425c6f93b2e0a55758d7f066598 | [
"JavaScript"
] | 1 | JavaScript | Irfanullahmunna/core-concepts | dbea058766782ad5abfb31209c3be7eddccfb079 | 68297af55f0f0e27f9cfe50eed2aa24d07f3d9ba |
refs/heads/main | <repo_name>Md-Asaduzzaman-Muhid/edu-mcq<file_sep>/mcq-app/routes/web.php
<?php
use Illuminate\Support\Facades\Route;
use App\Http\Controllers\Auth\RegisterController;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
// Public landing page (renders a Blade view directly, no controller).
Route::view('/', 'home');
// Default Laravel auth scaffolding (login, register, password reset, logout).
Auth::routes();
// Dedicated login/registration endpoints for the "admin" guard.
Route::get('/login/admin', 'App\Http\Controllers\Auth\LoginController@showAdminLoginForm')->name('admin.login');
Route::post('/login/admin', 'App\Http\Controllers\Auth\LoginController@adminLogin');
Route::get('/register/admin', 'App\Http\Controllers\Auth\RegisterController@showAdminRegisterForm')->name('admin.register');
Route::post('/register/admin', 'App\Http\Controllers\Auth\RegisterController@createAdmin');
// Moderator login/registration. NOTE(review): these routes are unnamed, so
// views must reference them by URL; consider naming them like the admin ones.
Route::get('/login/moderator', 'App\Http\Controllers\Auth\LoginController@showModeratorLoginForm');
Route::get('/register/moderator', 'App\Http\Controllers\Auth\RegisterController@showModeratorRegisterForm');
Route::post('/login/moderator', 'App\Http\Controllers\Auth\LoginController@ModeratorLogin');
Route::post('/register/moderator', 'App\Http\Controllers\Auth\RegisterController@createModerator');
// Authenticated user area: dashboard plus the MCQ test flow and its results.
Route::get('home', 'App\Http\Controllers\HomeController@userHome')->middleware('auth')->name('home');
Route::get('/test', 'App\Http\Controllers\TestController@index')->middleware('auth')->name('test');
Route::get('/test/{slug}', 'App\Http\Controllers\TestController@category')->middleware('auth')->name('test.category');
Route::post('/test/take', 'App\Http\Controllers\TestController@testTake')->middleware('auth')->name('test.take');
Route::get('result', 'App\Http\Controllers\TestController@testResult')->middleware('auth')->name('test.result');
Route::get('answer', 'App\Http\Controllers\TestController@testAnswer')->middleware('auth')->name('test.answer');
// Question-bank browsing for authenticated users.
Route::get('/question', 'App\Http\Controllers\QuestionController@questionBankHome')->middleware('auth')->name('question.bank');
Route::get('/question/{slug}', 'App\Http\Controllers\QuestionController@questionBankCategory')->middleware('auth')->name('question.category');
// Admin panel: URL prefix /admin, route-name prefix "admin.", reachable when
// authenticated under either the default "web" guard or the "admin" guard.
Route::group(['prefix'=>'admin','as'=>'admin.','middleware'=>'auth:web,admin'], function(){
    Route::view('/', 'admin.pages.dashboard');
    Route::resource('category', 'App\Http\Controllers\CategoryController');
    Route::post('sub_category', 'App\Http\Controllers\CategoryController@storeSubCat')->name('sub_cat');
    // NOTE(review): the URI is "sub_category/destroy{id}" — no slash before
    // {id}, so generated URLs look like /admin/sub_category/destroy5. It works
    // through the named-route helper, but confirm no view hard-codes this path
    // before normalizing it to "sub_category/destroy/{id}".
    Route::post('sub_category/destroy{id}', 'App\Http\Controllers\CategoryController@destroySubCat')->name('sub_cat.destroy');
    Route::resource('question', 'App\Http\Controllers\QuestionController');
    Route::resource('concept', 'App\Http\Controllers\ConceptController');
});<file_sep>/mcq-app/database/seeders/QuestionSeeder.php
<?php
namespace Database\Seeders;
use Illuminate\Database\Seeder;
use App\Models\Question;
class QuestionSeeder extends Seeder
{
    /**
     * Seed the questions table.
     *
     * Deliberately a no-op: the previously commented-out sample-data loop has
     * been removed, and questions are created through the admin panel's
     * QuestionController instead. Add deterministic fixtures here only if the
     * test suite ever needs pre-seeded questions (remember that each question
     * also requires related option and answer rows).
     *
     * @return void
     */
    public function run()
    {
        // Intentionally empty — no questions are seeded automatically.
    }
}
<file_sep>/mcq-app/app/Http/Controllers/HomeController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Category;
use App\Models\Question;
use DB;
class HomeController extends Controller
{
    /**
     * Require an authenticated user for every action on this controller.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('auth');
    }
    /**
     * Render the default application dashboard.
     *
     * @return \Illuminate\Contracts\Support\Renderable
     */
    public function index()
    {
        return view('home');
    }
    /**
     * Render the user landing page together with site-wide totals.
     *
     * The view receives plain COUNT(*) results for the categories, questions
     * and users tables; the array keys must stay in sync with what the
     * user.home Blade template reads.
     *
     * @return \Illuminate\Contracts\Support\Renderable
     */
    public function userHome()
    {
        $stats = [
            'categories' => DB::table('categories')->count(),
            'questions'  => DB::table('questions')->count(),
            'total_user' => DB::table('users')->count(),
        ];
        return view('user.home', $stats);
    }
}
<file_sep>/mcq-app/app/Http/Controllers/QuestionController.php
<?php
namespace App\Http\Controllers;
use DB;
use Validator;
use App\Models\Question;
use App\Models\Category;
use App\Models\Option;
use App\Models\Answer;
use App\Models\SubCategory;
use Illuminate\Http\Request;
use Stevebauman\Purify\Facades\Purify;
class QuestionController extends Controller
{
    /**
     * Show the admin question list page, with all categories and
     * sub-categories available for filtering.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $categories = Category::all();
        $sub_categories = SubCategory::all();
        return view('admin.pages.question.list', compact(['categories', 'sub_categories']));
    }
    /**
     * Show the admin form for creating a new question.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        $categories = Category::all();
        $sub_categories = SubCategory::all();
        return view('admin.pages.question.add', compact(['categories', 'sub_categories']));
    }
    /**
     * Store a newly created question together with its four options, its
     * answer/explanation, and its category / sub-category pivot rows.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        // Sanitize user-supplied HTML before it is persisted or echoed back.
        $quest = Purify::clean($request->all());
        $validator = Validator::make($request->all(), [
            'question' => 'required|max:500',
            'option_1' => 'required|max:200',
            'option_2' => 'required|max:200',
            'option_3' => 'required|max:200',
            'option_4' => 'required|max:200',
            'explanation' => 'required|max:500',
        ]);
        if ($validator->fails()) {
            return back()->withErrors($validator->errors())->withInput();
        }
        $question = new Question();
        $question->question = $quest['question'];
        $question->save();
        // Link the question to the selected (sub-)categories. Both pivots fall
        // back to id 1 when nothing was selected, matching the old behavior.
        $this->insertPivotRows('category_question', 'category_id', $quest['category'] ?? null, $question->id);
        $this->insertPivotRows('question_sub_category', 'sub_category_id', $quest['sub_category'] ?? null, $question->id);
        $question->option()->create([
            'option_1' => $quest['option_1'],
            'option_2' => $quest['option_2'],
            'option_3' => $quest['option_3'],
            'option_4' => $quest['option_4'],
        ]);
        $question->answer()->create([
            'answer' => $quest['answer'] ?? 0, // defaults to 0 when no correct option was marked
            'explanation' => $quest['explanation'],
        ]);
        // Bug fix: the flash message previously said "Category" (copy-paste).
        return back()->with('success', 'Successfully Added Question');
    }
    /**
     * Insert pivot rows linking one question to the selected categories or
     * sub-categories. Replaces two near-identical inline blocks that used to
     * live in store().
     *
     * @param  string      $table       pivot table name
     * @param  string      $column      foreign-key column for the category side
     * @param  array|null  $selected    raw request values, or null/empty for none
     * @param  int         $questionId  id of the freshly created question
     * @return void
     */
    private function insertPivotRows($table, $column, $selected, $questionId)
    {
        $now = date('Y-m-d H:i:s');
        if (empty($selected)) {
            // No selection made — keep the legacy fallback of id 1.
            DB::table($table)->insert([
                $column => 1,
                'question_id' => $questionId,
                'created_at' => $now,
                'updated_at' => $now,
            ]);
            return;
        }
        foreach ($selected as $item) {
            // NOTE(review): $item[0] preserves the original behavior — the old
            // loop counter was reset to 0 on every iteration, so only the first
            // element (or first character, for a plain string) of each posted
            // value was ever used. Confirm the form's payload shape before
            // "fixing" this to use $item directly.
            DB::table($table)->insert([
                $column => $item[0],
                'question_id' => $questionId,
                'created_at' => $now,
                'updated_at' => $now,
            ]);
        }
    }
    /**
     * Display the specified question. (Not implemented yet.)
     *
     * @param  \App\Models\Question  $question
     * @return \Illuminate\Http\Response
     */
    public function show(Question $question)
    {
        //
    }
    /**
     * Show the form for editing the specified question. (Not implemented yet.)
     *
     * @param  \App\Models\Question  $question
     * @return \Illuminate\Http\Response
     */
    public function edit(Question $question)
    {
        //
    }
    /**
     * Update the specified question in storage. (Not implemented yet.)
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  \App\Models\Question  $question
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request, Question $question)
    {
        //
    }
    /**
     * Delete the given question.
     *
     * @param  \App\Models\Question  $question
     * @return \Illuminate\Http\Response
     */
    public function destroy(Question $question)
    {
        // Route-model binding already resolved the instance; delete it directly.
        $question->delete();
        // Bug fix: the flash message previously said "Successfully Added Category".
        return back()->with('success', 'Successfully Deleted Question');
    }
    /**
     * Authenticated question-bank landing page.
     *
     * @return \Illuminate\Http\Response
     */
    public function questionBankHome()
    {
        $categories = Category::all();
        $questions = Question::all();
        return view('user.pages.question_bank.home', compact(['categories', 'questions']));
    }
    /**
     * Paginated question list for one category of the question bank.
     *
     * @param  string  $slug  category slug taken from the URL
     * @return \Illuminate\Http\Response
     */
    public function questionBankCategory($slug)
    {
        $category = Category::where('slug', '=', $slug)->first();
        // NOTE(review): first() returns null for an unknown slug, which makes
        // the next line fatal — firstOrFail() would yield a proper 404 instead.
        $questions = $category->question()->paginate(25);
        // Running serial number of the first item on the current page, used by
        // the view to number questions continuously across pages.
        $rank = $questions->firstItem();
        return view('user.pages.question_bank.category_question', compact(['category', 'questions', 'rank']));
    }
}
<file_sep>/mcq-app/app/Http/Controllers/ConceptController.php
<?php
namespace App\Http\Controllers;
use App\Models\Category;
use App\Models\SubCategory;
use App\Models\Concept;
use Illuminate\Http\Request;
use Stevebauman\Purify\Facades\Purify;
class ConceptController extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * Renders the admin concept home view with every category and
     * sub-category available for selection.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $categories= Category::all();
        $sub_categories= SubCategory::all();
        return view('admin.pages.concept.home',compact(['categories','sub_categories']));
    }
    /**
     * Show the form for creating a new resource.
     *
     * Same data as index(), but rendered into the creation form view.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        $categories= Category::all();
        $sub_categories= SubCategory::all();
        return view('admin.pages.concept.add',compact(['categories','sub_categories']));
    }
    /**
     * Store a newly created resource in storage.
     *
     * TODO(review): work in progress — the sanitized input is only dumped via
     * dd(), nothing is persisted and no response is returned. The dd() call is
     * debugging residue and must be replaced with real persistence logic.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        // Sanitize user-supplied HTML, mirroring QuestionController::store().
        $quest = Purify::clean($request->all());
        // Debug dump — halts the request here.
        dd($quest);
    }
    /**
     * Display the specified resource. (Not implemented yet.)
     *
     * @param  \App\Models\Concept  $concept
     * @return \Illuminate\Http\Response
     */
    public function show(Concept $concept)
    {
        //
    }
    /**
     * Show the form for editing the specified resource. (Not implemented yet.)
     *
     * @param  \App\Models\Concept  $concept
     * @return \Illuminate\Http\Response
     */
    public function edit(Concept $concept)
    {
        //
    }
    /**
     * Update the specified resource in storage. (Not implemented yet.)
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  \App\Models\Concept  $concept
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request, Concept $concept)
    {
        //
    }
    /**
     * Remove the specified resource from storage. (Not implemented yet.)
     *
     * @param  \App\Models\Concept  $concept
     * @return \Illuminate\Http\Response
     */
    public function destroy(Concept $concept)
    {
        //
    }
}
<file_sep>/mcq-app/app/Models/Question.php
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Factories\HasFactory;
use Illuminate\Database\Eloquent\Model;
class Question extends Model
{
    // Mass-assignable columns.
    protected $fillable = ['question','sub_cat_id','option_id','answer_id'];
    /**
     * Option row attached to this question.
     * NOTE(review): declared hasOne although the questions table carries an
     * option_id foreign key -- verify the options table has a question_id
     * column, otherwise this should probably be belongsTo.
     */
    public function option()
    {
        return $this->hasOne(Option::class);
    }
    /**
     * Answer row attached to this question.
     * NOTE(review): same hasOne-vs-belongsTo concern as option().
     */
    public function answer()
    {
        return $this->hasOne(Answer::class);
    }
    // public function subcategory()
    // {
    //     return $this->belongsToMany(SubCategory::class, 'sub_category_question');
    // }
    // public function category()
    // {
    //     return $this->belongsToMany(Category::class, 'category_question');
    // }
}
<file_sep>/mcq-app/database/migrations/2021_03_27_195323_create_user_test_result_table.php
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class CreateUserTestResultTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates user_test_result: one row per finished test attempt.
     * NOTE(review): category_id, sub_category_id and result are CHAR(100)
     * although the controller writes integers into them -- consider integer
     * columns (schema change, so deliberately left untouched here).
     *
     * @return void
     */
    public function up()
    {
        Schema::create('user_test_result', function (Blueprint $table) {
            $table->id();
            $table->bigInteger('user_id');        // 0 when no user is logged in
            $table->char('category_id', 100);
            $table->char('sub_category_id', 100);
            $table->char('result', 100);          // number of correct answers
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('user_test_result');
    }
}
<file_sep>/mcq-app/app/Http/Controllers/TestController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Category;
use App\Models\Question;
use Carbon\Carbon;
use \Cache;
use Auth;
use DB;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class TestController extends Controller
{
    // Test landing page: every category plus the full question list.
    public function index()
    {
        $categories= Category::all();
        $questions= Question::all();
        // dd($categories);
        return view('user.pages.test.home',compact(['categories','questions']));
    }
    // Start a test for one category: up to 10 of its questions, shuffled.
    // NOTE(review): an unknown slug leaves $category null and the
    // $category->question() call below fatals -- confirm slugs are
    // validated upstream.
    public function category($slug)
    {
        // dd(Carbon::now()->toTimeString());
        $category = Category::where('slug', '=', $slug)->first();
        // $question = $category->question()->limit(10)->get()->shuffle();
        // dd($question);
        $questions = $category->question()->limit(10)->get()->shuffle();
        $rank = 1;
        return view('user.pages.test.category_test',compact(['category','questions','rank']));
    }
    // Grade a submitted test, persist the score, and cache the per-question
    // breakdown for the answer page.
    public function testTake(Request $request){
        $result= $request->all();
        // $user_id = Auth::user()->id;
        $category = Category::where('id', '=', $result['category'])->first();
        $questions = $category->question()->get();
        // dd($result);
        // Form field "question" arrives as question-id => [selected option].
        $question_answered = $request->get('question');
        // dd($question_answered);
        $point =0;
        // Template element; its fields are overwritten per answered question.
        $answer[] = array(
            "question" => "",
            "option1" => "",
            "option2" => "",
            "option3" => "",
            "option4" => "",
            "correct" => "",
            "selected" => "",
        );
        $i =0;
        if(isset($question_answered)&& !empty($question_answered)):
            // For every answered question, find its DB row, score it, and
            // record the full option/answer breakdown.
            foreach($question_answered as $qid=>$oneLeadOptions):
                foreach($oneLeadOptions as $opt):
                    foreach($questions as $dbquestion):
                        if($dbquestion->id == $qid):
                            if($dbquestion->answer->answer == $opt):
                                $point++;
                            endif;
                            $answer[$i]['question'] = $dbquestion->question;
                            $answer[$i]['option1'] = $dbquestion->option->option_1;
                            $answer[$i]['option2'] = $dbquestion->option->option_2;
                            $answer[$i]['option3'] = $dbquestion->option->option_3;
                            $answer[$i]['option4'] = $dbquestion->option->option_4;
                            $answer[$i]['correct'] = $dbquestion->answer->answer;
                            $answer[$i]['selected'] = $opt;
                            $i++;
                        endif;
                    endforeach;
                endforeach;
            endforeach;
            // Persist the attempt; guests are stored with user_id 0.
            DB::table('user_test_result')->insert(
                ['user_id' => Auth::user()->id ?? 0,
                'category_id' => $result['category'],
                'sub_category_id' => 0,
                'result' => $point,
                "created_at" => date('Y-m-d H:i:s'),
                "updated_at" => date('Y-m-d H:i:s'),]
            );
        else:
            return redirect()->back()->with('error', 'Please answer question and submit');
        endif;
        // The breakdown is handed to the answer page through the cache.
        // NOTE(review): the cache key "answer" is shared by all users, so
        // concurrent test takers could read each other's results -- confirm.
        Cache::put('answer', $answer);
        return redirect()->route('test.answer');
        // return redirect()->route('test.result');
        // if (!Schema::hasTable('user_test_'.$user_id.'')) :
        // Schema::create('user_test_'.$user_id.'', function (Blueprint $table) {
        // $table->increments('id');
        // $table->bigInteger('question_id');
        // $table->integer('selected_answer');
        // $table->integer('is_right');
        // $table->timestamps();
        // });
        // else:
        // DB::table('user_test_'.$user_id.'')->truncate();
        // endif;
        // Schema::dropIfExists('user_test_'.$user_id.'');
    }
    // Show the cached per-question breakdown of the last submitted test.
    public function testAnswer(){
        $answers = Cache::get('answer');
        $rank = 0;
        // $answers = (object) $ans;
        // dd($answers);
        return view('user.pages.test.answer',compact(['answers','rank']));
    }
    // List the logged-in user's past results, newest first.
    // NOTE(review): Auth::user() is dereferenced without a guest fallback
    // here (unlike testTake) -- confirm the route sits behind auth middleware.
    public function testResult(){
        $user_id = Auth::user()->id;
        $test_result = DB::table('user_test_result')->where('user_id', $user_id)->orderBy('id', 'DESC')->get();
        return view('user.pages.test.result',compact(['test_result']));
    }
}
| d03e4f34e3384cfecef54774d1e317fb047f3204 | [
"PHP"
] | 8 | PHP | Md-Asaduzzaman-Muhid/edu-mcq | 43d612dcc39afd4747da855ba46d8e3f3fcae4c0 | 46f48bd4ca5c584da11a9eed022d2f78b6de4f82 |
refs/heads/master | <repo_name>filipefrozza/angular-route-firebase<file_sep>/app/pagina/pessoas/controller.js
angular.module(APP)
.controller('pessoa', function($scope, $firebaseAuth, $firebaseArray, $firebaseObject, authenticate){
var callback = function(user){
$scope.user = user;
var ref = firebase.database().ref();
var pessoas = $firebaseArray(ref.child('perfil'));
$scope.pessoas = pessoas;
};
$scope.adicionarPessoa = function(pessoa){
var novaPessoa = angular.copy(pessoa);
delete novaPessoa.$id;
delete novaPessoa.$priority;
delete novaPessoa.$$hashKey;
console.log(novaPessoa);
var ref = firebase.database().ref();
ref.child('amigos').child($scope.user.uid).child(pessoa.uid).set(novaPessoa);
console.log(amigo);
};
authenticate.getAuth($firebaseAuth, callback);
});<file_sep>/app/modulo/header/directive.js
angular.module(APP)
.directive('appHeader', function(){
return {
restrict: 'AE',
templateUrl: 'app/modulo/header/template.html'
}
});<file_sep>/app/modulo/nav/directive.js
angular.module(APP)
.directive('appNav', function(){
return {
restrict: 'AE',
scope: true,
templateUrl: 'app/modulo/nav/template.html',
controller: 'nav'
}
});<file_sep>/app/modulo/nav/controller.js
angular.module(APP)
.controller('nav', function($scope, $cookies, $firebaseAuth){
$scope.auth = $firebaseAuth();
$scope.logout = function(){
$scope.auth.$signOut();
$cookies.remove('user');
window.location.reload();
};
});<file_sep>/app/modulo/chat/directive.js
angular.module(APP)
.directive('chat', function(){
return {
restrict: 'AE',
scope: true,
templateUrl: 'app/modulo/chat/template.html',
controller: 'chat'
}
});<file_sep>/app/service/authenticate.js
angular.module(APP)
    .factory('authenticate', function(){
        // Shared $firebaseAuth instance, assigned the first time getAuth runs.
        var auth;
        // Create the auth object and invoke `callback` on every
        // authentication state change (login, logout, initial restore).
        var getAuth = function(firebaseAuth, callback){
            auth = firebaseAuth();
            auth.$onAuthStateChanged(function(user) {
                if(callback){
                    callback(user);
                }
            });
        };
        // Sign in with e-mail/password, cache the user in a cookie, then
        // redirect to the app root. The '/angular-route-firebase' path
        // prefix is appended only when served from http://localhost.
        var logar = function(user, cookies){
            auth.$signInWithEmailAndPassword(
                user.email,
                user.senha
            ).then(function(user){
                alert('logou como :' + user.uid);
                cookies.putObject('user', user);
                document.location.href = window.location.origin+(window.location.origin=='http://localhost'?'/angular-route-firebase':'');
            }, function(error){
                // NOTE(review): Firebase rejections are typically Error
                // objects with a .code property; comparing the object itself
                // against 'INVALID_EMAIL'/'INVALID_PASSWORD' may never match,
                // leaving only the generic branch to run -- confirm against
                // the Firebase SDK version in use.
                if(error == 'INVALID_EMAIL') {
                    console.log('Email invalido ou não cadastrado');
                }else if(error == 'INVALID_PASSWORD') {
                    console.log('Senha inválida');
                }else{
                    console.log(error);
                }
            });
        };
        // Register a new e-mail/password account, then go to the login page.
        var cadastrar = function(user){
            auth.$createUserWithEmailAndPassword(
                user.email,
                user.senha
            ).then(function(firebaseUser) {
                alert("criado");
                console.log("User " + firebaseUser.uid + " created successfully!");
                window.location.href = window.location.origin+(window.location.origin=='http://localhost'?'/angular-route-firebase':'')+"/login";
            }).catch(function(error) {
                console.error("Error: ", error);
            });
        };
        return {
            getAuth: getAuth,
            logar: logar,
            cadastrar: cadastrar,
            // NOTE(review): this captures `auth` at factory-creation time,
            // i.e. undefined; the exported value never updates after
            // getAuth assigns it -- confirm whether any consumer reads it.
            auth: auth
        };
    });<file_sep>/README.md
# angular-route-firebase
AngularJS + Route + Firebase
| dc00d360e9b1ab8fa0ef7e42a3dcca65e2ae9914 | [
"JavaScript",
"Markdown"
] | 7 | JavaScript | filipefrozza/angular-route-firebase | 83df09d03d20b2293476b7fe5330d70c2d5592e1 | f48476778d304722e9bf0fa90071f349d4d803ac |
refs/heads/master | <repo_name>StormMaybin/baidu-coder<file_sep>/src/main/java/com/baidu/message/EmailMessage.java
package com.baidu.message;
import com.baidu.enums.MessageType;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public class EmailMessage extends Message {
    /**
     * Creates an e-mail message carrying the given body text.
     *
     * @param text the message body to deliver
     */
    public EmailMessage(String text) {
        super(MessageType.EMAIL, text);
    }
}
<file_sep>/src/main/java/com/baidu/enums/MessageType.java
package com.baidu.enums;
/**
* <p> Created by mayongbin01 on 2017/1/25.
* enum message's type
*
* @author mayongbin01
*/
public enum MessageType {
    HI,    // "Hi" chat message (channel semantics defined by its sender)
    EMAIL, // e-mail message
    SMS    // SMS text message
}
<file_sep>/src/main/java/com/baidu/center/MessageCenter.java
package com.baidu.center;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import com.baidu.enums.MessageType;
import com.baidu.exception.AddMessageSenderException;
import com.baidu.exception.MessageIsNullException;
import com.baidu.exception.UnSupportMessageException;
import com.baidu.message.Message;
import com.baidu.sender.interfance.MessageSender;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
@Component
public class MessageCenter {

    /** Class logger. */
    private static final Logger logger = Logger.getLogger(MessageCenter.class);

    /**
     * One FIFO queue of pending messages per message type. Declared as
     * ConcurrentHashMap (rather than plain Map) so putIfAbsent can create
     * queues atomically.
     */
    private ConcurrentHashMap<MessageType, LinkedBlockingQueue<Message>> messageQueueMap;

    /** Scheduler that periodically drains the queues, one task per sender. */
    private ScheduledExecutorService executorService;

    private static final int MAX_POOL_SIZE = 3;

    /** Senders registered via addSender, one per supported message type. */
    private List<MessageSender> messageSenderList;

    private static volatile MessageCenter instance;

    private MessageCenter() {
        this.messageQueueMap = new ConcurrentHashMap<>();
        this.executorService = Executors.newScheduledThreadPool(MAX_POOL_SIZE);
        this.messageSenderList = new ArrayList<>();
    }

    /**
     * Returns the process-wide instance (double-checked locking on a
     * volatile field).
     * NOTE(review): the class is also annotated @Component, so Spring may
     * create a second instance by reflection, distinct from the one
     * returned here -- confirm which instance callers should use.
     */
    public static MessageCenter getInstance() {
        if (instance == null) {
            synchronized (MessageCenter.class) {
                if (instance == null) {
                    instance = new MessageCenter();
                }
            }
        }
        return instance;
    }

    /**
     * Registers a sender and immediately starts its periodic drain task.
     *
     * @param messageSender the sender to register, must not be null
     * @return true when the sender was registered
     * @throws AddMessageSenderException if messageSender is null
     */
    public boolean addSender(MessageSender messageSender) throws AddMessageSenderException {
        if (messageSender == null) {
            logger.error("add sender to messageCenter error: sender == null !");
            throw new AddMessageSenderException("add sender to messageCenter error: sender == null !");
        }
        this.messageSenderList.add(messageSender);
        // Start draining this sender's queue right away.
        executeScheduleTask(messageSender);
        return true;
    }

    /**
     * Schedules a periodic task that peeks the head of the sender's queue,
     * attempts delivery, and removes the message only after a successful
     * send -- failed messages stay at the head and are retried.
     *
     * @param sender the sender whose queue should be drained
     * @return false when sender is null, true once the task is scheduled
     */
    public boolean executeScheduleTask(final MessageSender sender) {
        if (sender == null) {
            logger.error("sender is null! ");
            return false;
        }
        executorService.scheduleWithFixedDelay(new Runnable() {
            @Override
            public void run() {
                LinkedBlockingQueue<Message> messageQueue = messageQueueMap.get(sender.getMessageType());
                if (messageQueue == null || messageQueue.isEmpty()) {
                    return;
                }
                // Peek first so an unsuccessful send keeps the message queued.
                Message message = messageQueue.peek();
                if (sender.send(message)) {
                    messageQueue.poll();
                } else {
                    logger.info(message + "send failed!");
                }
            }
        }, 0, sender.getExecuteInterval(), TimeUnit.MILLISECONDS);
        return true;
    }

    /**
     * Enqueues a message for asynchronous delivery.
     *
     * @param message the message to enqueue, must not be null
     * @return true when enqueued, false when interrupted while enqueueing
     * @throws MessageIsNullException when message is null
     * @throws UnSupportMessageException when no registered sender handles
     *         the message's type
     */
    public boolean send(Message message) throws MessageIsNullException, UnSupportMessageException {
        if (message == null) {
            logger.error("send message but message == null !");
            throw new MessageIsNullException("send message but message == null !");
        }
        boolean isSupport = false;
        for (MessageSender messageSender : messageSenderList) {
            if (messageSender.getMessageType().equals(message.getType())) {
                isSupport = true;
            }
        }
        if (!isSupport) {
            throw new UnSupportMessageException("sorry, your Message type is unsupport!");
        }
        // Bug fix: the old get / null-check / new / put sequence was a
        // check-then-act race -- two threads could each create a queue for
        // the same type, and messages enqueued on the first queue were lost
        // when the second map.put overwrote it. putIfAbsent creates the
        // queue atomically instead.
        LinkedBlockingQueue<Message> messageQueue = messageQueueMap.get(message.getType());
        if (messageQueue == null) {
            LinkedBlockingQueue<Message> created = new LinkedBlockingQueue<>();
            messageQueue = messageQueueMap.putIfAbsent(message.getType(), created);
            if (messageQueue == null) {
                messageQueue = created;
            }
        }
        try {
            messageQueue.put(message);
            return true;
        } catch (InterruptedException e) {
            logger.error(e);
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Stops the drain scheduler. (Name kept as-is -- "destory" is a typo
     * for "destroy", but renaming it would break existing callers.)
     */
    public void destory() {
        executorService.shutdown();
    }
}
<file_sep>/src/main/java/com/baidu/exception/MessageIsNullException.java
package com.baidu.exception;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public class MessageIsNullException extends Exception {
    /**
     * @param message detail text describing the null-message violation
     */
    public MessageIsNullException(String message) {
        super(message);
    }
}
<file_sep>/src/main/java/com/baidu/client/ClientDemo.java
package com.baidu.client;
import org.apache.log4j.Logger;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import com.baidu.center.MessageCenter;
import com.baidu.config.MyConfig;
import com.baidu.enums.MessageType;
import com.baidu.exception.AddMessageSenderException;
import com.baidu.exception.MessageIsNullException;
import com.baidu.exception.NoSuchMessageTypeException;
import com.baidu.exception.UnSupportMessageException;
import com.baidu.message.EmailMessage;
import com.baidu.message.HiMessage;
import com.baidu.message.SmsMessage;
import com.baidu.sender.MessageSenderFactory;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public class ClientDemo {

    private static Logger logger = Logger.getLogger(ClientDemo.class);

    /**
     * Demo entry point: boots the Spring context, registers one sender per
     * message type, then enqueues 51 messages of each type.
     */
    public static void main(String[] args) {
        // Boot the annotation-driven Spring IoC container.
        AnnotationConfigApplicationContext context =
                new AnnotationConfigApplicationContext(MyConfig.class);
        MessageCenter messageCenter = context.getBean(MessageCenter.class);
        MessageSenderFactory messageSenderFactory = context.getBean(MessageSenderFactory.class);
        try {
            messageCenter.addSender(messageSenderFactory.getMessageSender(MessageType.EMAIL));
            messageCenter.addSender(messageSenderFactory.getMessageSender(MessageType.HI));
            messageCenter.addSender(messageSenderFactory.getMessageSender(MessageType.SMS));
        } catch (AddMessageSenderException | NoSuchMessageTypeException e) {
            // Improvement: route errors through the class logger instead of
            // printStackTrace, and collapse the two identical catch blocks
            // into a multi-catch.
            logger.error("failed to register message sender", e);
        }
        for (int i = 0; i <= 50; i++) {
            try {
                messageCenter.send(new HiMessage("I am a Hi Message"));
                messageCenter.send(new SmsMessage("I am a Sms Message"));
                messageCenter.send(new EmailMessage("I am a Email Message"));
            } catch (MessageIsNullException | UnSupportMessageException e) {
                logger.error("failed to enqueue message", e);
            }
        }
        // NOTE(review): neither messageCenter.destory() nor context.close()
        // is called, so the scheduler's non-daemon threads keep the JVM
        // alive after main returns -- confirm whether that is intended.
    }
}
<file_sep>/src/main/java/com/baidu/sender/impl/SmsMessageSender.java
package com.baidu.sender.impl;
import org.apache.log4j.Logger;
import com.baidu.enums.MessageType;
import com.baidu.message.Message;
import com.baidu.sender.interfance.MessageSender;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public class SmsMessageSender implements MessageSender {

    private static Logger logger = Logger.getLogger(SmsMessageSender.class);

    /**
     * Delivers an SMS message (simulated here by writing to the log).
     *
     * @param message the message to deliver; null is rejected
     * @return true when the message was "sent", false for null input
     */
    @Override
    public boolean send(Message message) {
        if (message == null) {
            logger.error("send message error: message == null");
            return false;
        }
        // Bug fix: the old log line concatenated message.getText() twice,
        // printing the body both in the headline and after "content:".
        logger.info("send Sms Message successfully! content:" + message.getText());
        return true;
    }

    /**
     * @return delay in milliseconds between two queue-drain attempts
     */
    @Override
    public long getExecuteInterval() {
        return 100L;
    }

    /**
     * @return the message type this sender handles (SMS)
     */
    @Override
    public MessageType getMessageType() {
        return MessageType.SMS;
    }
}
<file_sep>/src/main/java/com/baidu/config/MyConfig.java
package com.baidu.config;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
@Configuration
@ComponentScan("com.baidu")
public class MyConfig {
    // Intentionally empty: all beans are discovered via component scanning
    // of the com.baidu package; no explicit @Bean definitions are needed.
}
<file_sep>/src/main/java/com/baidu/message/Message.java
package com.baidu.message;
import com.baidu.enums.MessageType;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public class Message {

    /** Delivery channel for this message. */
    private MessageType type;

    /** Message body. */
    private String text;

    /**
     * Package-private base constructor; concrete subclasses (e.g.
     * EmailMessage) fix the type and pass the body through.
     *
     * @param type delivery channel
     * @param text message body
     */
    Message(MessageType type, String text) {
        this.type = type;
        this.text = text;
    }

    /** @return the message body */
    public String getText() {
        return text;
    }

    /** @param text new message body */
    public void setText(String text) {
        this.text = text;
    }

    /** @return the delivery channel */
    public MessageType getType() {
        return type;
    }

    /** @param type new delivery channel */
    public void setType(MessageType type) {
        this.type = type;
    }
}
<file_sep>/src/main/java/com/baidu/sender/interfance/MessageSender.java
package com.baidu.sender.interfance;
import com.baidu.enums.MessageType;
import com.baidu.message.Message;
/**
* <p>
* Created by mayongbin01 on 2017/1/25.
*
* @author mayongbin01
*/
public interface MessageSender {
    /**
     * Sends (delivers) the given message.
     *
     * @param message the message to deliver
     * @return true on successful delivery, false otherwise
     */
    boolean send(Message message);
    /**
     * Returns the message type this sender is able to deliver.
     *
     * @return the supported message type
     */
    MessageType getMessageType();
    /**
     * Returns the polling interval between delivery attempts.
     *
     * @return interval length; MessageCenter interprets it in milliseconds
     *         (it schedules with TimeUnit.MILLISECONDS)
     */
    long getExecuteInterval();
}
| 351c54b3a5a2590cae766c125f54ea218c24de45 | [
"Java"
] | 9 | Java | StormMaybin/baidu-coder | 435f10e09ac141560770f3e1ee3cd8e55a0ecab0 | 1373dea8494cbf13f36c8cddc9d7a3b0c02f3469 |
refs/heads/master | <file_sep>from flask import Flask, render_template, Response
from fulkod import arbs
import re
# begin IPv4 hack
#--------------------
# do this once at program startup
#--------------------
import socket
# Keep a reference to the real resolver so the wrapper can delegate to it.
origGetAddrInfo = socket.getaddrinfo
def getAddrInfoWrapper(host, port, family=0, socktype=0, proto=0, flags=0):
    # Force every lookup to IPv4: the caller-supplied `family` argument is
    # deliberately ignored and AF_INET is substituted.
    return origGetAddrInfo(host, port, socket.AF_INET, socktype, proto, flags)
# replace the original socket.getaddrinfo by our version
socket.getaddrinfo = getAddrInfoWrapper
# END IPv4 hack
# Flask app plus the course-id pattern: two word chars, dash, one digit,
# dash, three digits, with an optional "_<digit>" group suffix.
app = Flask(__name__)
re_ids = re.compile('(\w{2}\-\d\-\d{3})(_\d)?')
@app.route('/')
def index():
    """Serve the static landing page."""
    return render_template('index.html')
@app.route('/<path:cal_ids>')
def get_cal(cal_ids):
    """Build an iCalendar feed for every course id found in the URL path.

    Each match looks like "ab-1-234" with an optional "_<n>" group suffix;
    a missing suffix means group 0.
    """
    cal = arbs.Fulcalendar()
    for c_id, g_id in re_ids.findall(cal_ids):
        # findall yields the suffix as "" or e.g. "_2"; normalise to int.
        if '' == g_id:
            g_id = 0
        else:
            g_id = int(re.sub('\D', '', g_id))
        # Improvement: a "resp" debug string was also accumulated here but
        # never used anywhere -- removed as dead code.
        cal.addCourse(c_id, g_id)
    return Response(cal.getCal().to_ical(), mimetype="text/calendar")
if __name__ == '__main__':
    # app.run()
    # NOTE(review): debug mode combined with binding to 0.0.0.0 exposes the
    # Werkzeug debugger to the network -- confirm this never runs in
    # production.
    app.debug = True
    app.run(host='0.0.0.0', port=8000)
<file_sep>__author__ = 'slo'
import xmltodict
import icalendar
from datetime import datetime
import urllib
from pytz import timezone
class Fulcalendar():
    """Builds an iCalendar feed from the Arcada ARBS booking web service."""

    def __init__(self):
        self.cal = icalendar.Calendar()
        self.cal.add('prodid', '-//arbs by fik1//fik1.net//')
        self.cal.add('version', '0.91')
        self.BASE_URL_ARBS = "https://famnen.arcada.fi/arbs/ws/arbs_ws.php?service=1&code=%s&group=%s"
        self.tz = timezone('Europe/Helsinki')

    def addCourse(self, course, group=0):
        """Fetch ARBS bookings for one course/group and add them to the calendar."""
        # Bug fix: the module only does `import urllib`, which does not make
        # the `urllib.request` submodule reliably available on Python 3;
        # import it explicitly before use.
        import urllib.request
        url = self.getarbsurl(course, group)
        print(url)
        f = urllib.request.urlopen(url)
        # handle_booking is invoked once per element at depth 2 (<booking>).
        xmltodict.parse(f, item_depth=2, item_callback=self.handle_booking)
        f.close()

    def getarbsurl(self, course, group):
        """Return the ARBS web-service URL for the given course code and group."""
        return (self.BASE_URL_ARBS % (course, group))

    def handle_booking(self, a, booking):
        """xmltodict streaming callback: convert one booking dict to a VEVENT.

        Always returns True so xmltodict keeps streaming.
        """
        if type(booking) is str:
            return True
        now = datetime.now(tz=self.tz)
        start = datetime.strptime(booking['start']['@time'], "%Y-%m-%d %H:%M:%S")
        end = datetime.strptime(booking['end']['@time'], "%Y-%m-%d %H:%M:%S")
        event = icalendar.Event()
        # NOTE(review): assumes the path element at a[1][1] carries the
        # booking "id" attribute -- fragile against schema changes; confirm.
        event['uid'] = a[1][1]["id"]
        event.add('dtstamp', now)
        event.add('summary', booking['title']['#text'])
        event.add('dtstart', self.tz.localize(start))
        event.add('dtend', self.tz.localize(end))
        if "#text" in booking['room']:
            event['location'] = booking['room']['#text']
        info = ""
        if None is not booking["info"]:
            info = "%s\n" % booking["info"]
        teachers = "Teachers:"
        for t in booking['teachers'].items():
            # A single teacher arrives as a dict, several as a list of dicts.
            if isinstance(t[1], list):
                for te in t[1]:
                    teachers = "%s\n%s" % (teachers, te["#text"])
            else:
                teachers = "%s\n%s" % (teachers, t[1]["#text"])
        event['description'] = "%s%s" % (info, teachers)
        self.cal.add_component(event)
        return True

    def getCal(self):
        """Return the assembled icalendar.Calendar."""
        return self.cal
| 74b94256b24d1baa006bcabf2a2dbf2ec4956c9d | [
"Python"
] | 2 | Python | straend/arcadaical | f6f6d30f2aaeb6326cf36234484fe9d4e2cedf12 | 7eed17d1ec8412cbe874fa9e68128fdfbd27d5e7 |
refs/heads/master | <file_sep># PHP-PDO
Basic PHP PDO Guide
<file_sep><?php
// print_r(PDO::getAvailableDrivers());
// Connection settings. NOTE(review): credentials are hard-coded (root with
// an empty password) -- acceptable for a local tutorial, never for production.
$host = "LOCALHOST";
$user = "root";
$password = "";
$dbname = "ajax";
// Data Source Name
$dsn = "mysql:host=".$host.";dbname=".$dbname;
// Create a PDO instance
$connect = new PDO($dsn, $user, $password);
// Default fetch mode set once here, so the fetch() calls in the examples
// below do not need to pass FETCH_ASSOC/FETCH_OBJ explicitly.
$connect->setAttribute(PDO::ATTR_DEFAULT_FETCH_MODE, PDO::FETCH_ASSOC);
// PDO Query
// $stmt = $connect->query("SELECT * FROM tbl_customer");
// GETTING DATA
// Using Fetch Associate
// while ($row = $stmt->fetch(PDO::FETCH_ASSOC)) {
// echo $row['CustomerName']."<br>";
// }
// Using Fetch Object
// while ($row = $stmt->fetch(PDO::FETCH_OBJ)) {
// echo $row->CustomerName."<br>";
// }
// while ($row = $stmt->fetch()) {
// echo $row['CustomerName']."<br>";
// }
// Positional Parameters
// $sql = "SELECT * FROM tbl_customer WHERE Country = ?";
// $stmt = $connect->prepare($sql);
// $stmt->execute(['Philippines']);
// $results = $stmt->fetchAll();
// Named Parameters
// $sql = "SELECT * FROM tbl_customer WHERE Country = :varCountry";
// $stmt = $connect->prepare($sql);
// $stmt->execute(['varCountry' => 'Philippines']);
// $results = $stmt->fetchAll();
// foreach ($results as $result) {
// echo $result['CustomerName']." - ".$result['Country']."<br>";
// }
// Getting Specific Data
// $id = "1";
// $sql = "SELECT * FROM tbl_customer WHERE CustomerID = :id";
// $stmt = $connect->prepare($sql);
// $stmt->execute(['id' => $id]);
// $result = $stmt->fetch();
// echo $result['CustomerName']." - ".$result['Country']."<br>";
// Getting the total of rows
// $country = "Philippines";
// $sql = "SELECT * FROM tbl_customer WHERE Country = :country";
// $stmt = $connect->prepare($sql);
// $stmt->execute(['country' => $country]);
// $resultCount = $stmt->rowCount();
// if ($resultCount > 0) {
// $results = $stmt->fetchAll();
// foreach ($results as $result) {
// echo $result['CustomerName']." - ".$result['Country']."<br>";
// }
// } else {
// echo "No data found!";
// }
// Inserting Data
// $name = "<NAME>";
// $address = "Valley Golf";
// $city = "Cainta Rizal";
// $postalCode = "3213";
// $country = "Philippines";
// $sql = "INSERT INTO tbl_customer
// (CustomerName, Address, City, PostalCode, Country)
// VALUES
// (?, ?, ?, ?, ?)";
// $stmt = $connect->prepare($sql);
// $insert = $stmt->execute([$name, $address, $city, $postalCode, $country]);
// if ($insert) {
// echo "Inserted successfully";
// } else {
// echo "Insert failed";
// }
// Updating data
// $id = "26";
// $name = "<NAME>";
// $address = "Sunshine Fiesta";
// $city = "Cardona Rizal";
// $postalCode = "1111";
// $country = "Philippines";
// $sql = "UPDATE tbl_customer
// SET CustomerName = :name, Address = :address, City = :city, PostalCode = :postalcode, Country = :country WHERE CustomerID = :id";
// $stmt = $connect->prepare($sql);
// $update = $stmt->execute(['name' => $name, 'address' => $address, 'city' => $city, 'postalcode' => $postalCode, 'country' => $country, 'id' => $id]);
// if ($update) {
// echo "Updated successfully";
// } else {
// echo "Update failed";
// }
// Deleting data
// $id = "26";
// $sql = "DELETE FROM tbl_customer WHERE CustomerID = ?";
// $stmt = $connect->prepare($sql);
// $update = $stmt->execute([$id]);
// if ($update) {
// echo "Deleted successfully";
// } else {
// echo "Delete failed";
// }
| 2e22ffeb5cb48e367eabfbd693ff7cbce9c25062 | [
"Markdown",
"PHP"
] | 2 | Markdown | IamArjay99/PHP-PDO | 7ca283d882f410293f9d3fa67f15b6d5a2d06d21 | fdc129668f76c4ef00f420102289e3956e58b7f1 |
refs/heads/master | <file_sep><?php
namespace Admin;
use BaseController, View;
class HomeController extends BaseController {
//protected $layout = 'admin.layouts.main';
public function dashboard() {
/*$this->layout->title = "Dashboard";
$this->layout->content = View::make('admin.dashboard');*/
return View::make('admin.dashboard')
->with('title', 'Dashboard');
}
}
<file_sep><?php
namespace Admin;
use BaseController, View, Categories, Input, Redirect, Basehelper;
class CategoryController extends BaseController {
/**
* Display a listing of the resource.
*
* @return Response
*/
public function index()
{
$categories = Categories::paginate(5);
return View::make('admin.category.index', compact('categories'))
->nest('header_menu', 'admin.category.header');
}
/**
* Show the form for creating a new resource.
*
* @return Response
*/
public function create()
{
$categories = Categories::select('id', 'name', 'level', 'parent')->get()->toArray();
return View::make('admin.category.create', compact('categories'))
->nest('header_menu', 'admin.category.header');
}
/**
* Store a newly created resource in storage.
*
* @return Response
*/
public function store()
{
$input = Input::all();
$input = Input::except('_token');
Categories::create($input);
return Redirect::route('category.list')
->with('message', '<div class="alert alert-success">Created Successfully.</div>');
}
/**
* Display the specified resource.
*
* @param int $id
* @return Response
*/
public function show($id)
{
//
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return Response
*/
public function edit($id)
{
/*$d = DB::table('categories AS c')
->leftJoin('categories AS c2', 'c2.parent_id', '=', 'c.id')
->select('c.name', 'c2.name subname')
->get();*/
/*$da = DB::select('SELECT c.name, c2.name subname from `dms_categories` c left join `dms_categories` c2 on c2.parent_id=c.id');
$d = array();
foreach ($da as $value) {
$a = array();
$a['name'] = $value->name;
$a['subname'] = $value->subname;
array_push($d, $a);
}
array_count_values($d);
print_r($d);
exit();*/
$category = Categories::find($id);
if(is_null($category)) {
return Redirect::route('category.list');
}
return View::make('admin.category.edit', compact('category'))
->nest('header_menu', 'admin.category.header');
}
/**
* Update the specified resource in storage.
*
* @param int $id
* @return Response
*/
public function update($id)
{
$input = Input::except('_token');
$category = Categories::find($id);
if(is_null($category)) {
return Redirect::route('category.list');
}
$category->update($input);
return Redirect::route('category.edit', array($id))
->with('message', '<div class="alert alert-success">Categories updated successfully.</div>');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return Response
*/
public function destroy($id)
{
Categories::find($id)->delete();
return Redirect::route('category.list')
->with('message', '<div class="alert alert-success">Categories deleted successfully.</div>');
}
}
<file_sep><?php
class Basehelper {
public static function getCategoryDetails($id, $return) {
if($id==0)
return '';
else
return DB::table('categories')
->where('id', $id)
->select($return)
->first()
->$return;
}
}
<file_sep>Laravel-Zantrik
===============
Projects created by Zantrik on Laravel Framework
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Application Routes
|--------------------------------------------------------------------------
|
| Here is where you can register all of the routes for an application.
| It's a breeze. Simply tell Laravel the URIs it should respond to
| and give it the Closure to execute when that URI is requested.
|
*/
Route::get('/', 'LoginController@index');
Route::get('login', 'LoginController@showLogin');
Route::post('login', array('as'=>'users.login', 'uses'=>'LoginController@doLogin'));
Route::get('logout', array('as'=>'users.logout', 'uses'=>'LoginController@doLogout'));
Route::group(array('before'=>array('auth'), 'prefix'=>'admin'), function() {
//Dashboard
Route::get('/', array('as'=>'admin.dashboard', 'uses'=>'admin\HomeController@dashboard'));
//Category
Route::get('category', array('as'=>'category.list', 'uses'=>'admin\CategoryController@index'));
Route::get('category/create', array('as'=>'category.create', 'uses'=>'admin\CategoryController@create'));
Route::post('category/create', array('as'=>'category.store', 'uses'=>'admin\CategoryController@store'));
Route::get('category/edit/{id}', array('as'=>'category.edit', 'uses'=>'admin\CategoryController@edit'))->where('id', '[0-9]+');
Route::put('category/update/{id}', array('as'=>'category.update', 'uses'=>'admin\CategoryController@update'))->where('id', '[0-9]+');
Route::delete('category/delete/{id}', array('as'=>'category.delete', 'uses'=>'admin\CategoryController@destroy'))->where('id', '[0-9]+');
//Profile
Route::get('profile', array('as'=>'profile', 'uses'=>'admin\ProfileController@index'));
});
| ac77cd5a28c23954ee3ec5f7903f2cd16924fc03 | [
"Markdown",
"PHP"
] | 5 | PHP | shanzantrik/Laravel-Zantrik | aeb59983acfd83f128cc3f45abafa8b5269a114b | 4a4f563270b38ba3a85bfc35ebd8a4b8dd853358 |
refs/heads/master | <file_sep>/*
File: Generic.cpp
Author: <NAME>
Created on October 18, 2017, 2:14am
*/
#include <SDL2/SDL.h>
#include <SDL2/SDL_ttf.h>
#include <iostream>
#include <cstring>
#include <string>
using namespace std;
/*
Makes a window in SDL2 for every and outputs either a 1 or 0 pertaining to the user either clicking on yes or no
Input: Two strings, one for the title of the window and the other for the message displayed
Output: Boolean value that the person who called the function can use.
*/
bool main(int argc, char* args[]) {
bool quit = false;
bool ans;
int x,y;
SDL_Init(SDL_INIT_EVERYTHING);
TTF_Init();
SDL_Event event;
SDL_Window *window;
window = SDL_CreateWindow(
"Death",
SDL_WINDOWPOS_UNDEFINED, //Starting x coordinate for window
SDL_WINDOWPOS_UNDEFINED, //Starting y coordinate for window
640, //Width of the window
480, //Height of the window
0 //Flags
);
SDL_Renderer * renderer = SDL_CreateRenderer(window, -1, 0);
SDL_SetRenderDrawColor(renderer, 0,0,255,255);
SDL_RenderClear(renderer);
SDL_RenderPresent(renderer);
TTF_Font * font = TTF_OpenFont("arial.ttf", 50);
SDL_Color black = {0, 0, 0};
SDL_Color white = {255,255,255};
SDL_SetRenderDrawColor(renderer,0,0,0,255);
SDL_Surface * surface = TTF_RenderText_Solid(font, "Sup", white);
SDL_Texture* Message = SDL_CreateTextureFromSurface(renderer, surface);
/*
Create a text box rectangle for the question
*/
SDL_Rect Message_rect;
Message_rect.x = 0; //controls the rect's x coordinate
Message_rect.y = 220; //controls the rect's y coordinte
Message_rect.w = 640; //controls the width of the rect
Message_rect.h = 50; //controls the height of the rect
SDL_RenderCopy(renderer, Message, NULL, &Message_rect);
surface = TTF_RenderText_Solid(font, "yes", white);
Message = SDL_CreateTextureFromSurface(renderer, surface);
/*
Create a Yes button to return a 1 to the call
*/
SDL_Rect Yes_rect; //create a rectangle to hold a message
Yes_rect.x = 80; //controls the rect's x coordinate
Yes_rect.y = 400; //controls the rect's y coordinte
Yes_rect.w = 50; //controls the width of the rect
Yes_rect.h = 20; //controls the height of the rect
SDL_RenderCopy(renderer, Message, NULL, &Yes_rect);
//SDL_RenderFillRect(renderer, &Yes_rect);
/*
Create a no button to return a 0
*/
SDL_Rect No_rect; //create a rectangle to hold a message
No_rect.x = 500; //controls the rect's x coordinate
No_rect.y = 400; //controls the rect's y coordinte
No_rect.w = 50; //controls the width of the rect
No_rect.h = 20; //controls the height of the rect
SDL_RenderCopy(renderer, Message, NULL, &No_rect);
SDL_RenderPresent(renderer);
while (!quit){
SDL_WaitEvent(&event);
SDL_GetMouseState(&x, &y);//set x and y to where the mouses coordinates are
//SDL_RenderPresent(renderer);
if (x >= Yes_rect.x && x <= Yes_rect.x + Yes_rect.w && y >= Yes_rect.y && y <= Yes_rect.y + Yes_rect.h)
/*
Checks if the pointer is inside the area of the rect,
Checks if the mouse's x-coordinate is greater than the origin but less than the origin plus
width
*/
{
//SDL_SetTextureColorMod(Message, 255, 0, 0);//Changes the color if the mouse is over the button
//SDL_RenderPresent(renderer);
if (event.type == SDL_MOUSEBUTTONUP)//on mouse release
if (event.button.button == SDL_BUTTON_LEFT) {
ans = true;
quit = true;
}
}
else if (x >= No_rect.x && x <= No_rect.x + No_rect.w && y >= No_rect.y && y <= No_rect.y + No_rect.h)
/*
Checks if the pointer is inside the area of the rect,
Checks if the mouse's x-coordinate is greater than the origin but less than the origin plus
width
*/
{
//SDL_SetTextureColorMod(Message, 255, 0, 128);//Changes the color if the mouse is over the button
if (event.type == SDL_MOUSEBUTTONUP)//on mouse release
if (event.button.button == SDL_BUTTON_LEFT) {
ans = false;
quit = true;
}
}
else if (event.type == SDL_QUIT)
quit = true;
}
SDL_Quit();
return 0;
} <file_sep>/**
* This code is to create a menu
* Name: <NAME>
* Date: January 30, 2018
**/
#include <SDL2/SDL.h>
#include <SDL2/SDL_image.h>
#include <SDL2/SDL_mixer.h>
#include <SDL2/SDL_ttf.h>
#include <cstring>
#include <iostream>
#include <sstream>
//Screen dimension constants
const int SWIDTH = 640;
const int SHEIGHT = 480;
using namespace std;
bool init();
bool loadMedia();
void close();
//SDL_window/surface...
SDL_Window* window = NULL;
Mix_Chunk* sound = NULL;
SDL_Renderer* render = NULL;
SDL_Surface* surface = NULL;
SDL_Texture* texture = NULL;
SDL_Texture* overlap = NULL;
// Initialise SDL, the window, the renderer, SDL_image and SDL_mixer.
// Returns false (after logging) if any subsystem fails; callers bail out.
bool init()
{
    bool success = true;
    TTF_Init();
    if( SDL_Init(SDL_INIT_EVERYTHING) < 0 )
    {
        std::cout << "SDL_Init Error: " << SDL_GetError() << std::endl;
        success = false;
    }
    else
    {
        // FIX: this used to declare a *local* `SDL_Window *window`, shadowing
        // the file-level global.  The global stayed NULL, so close() destroyed
        // nothing and the NULL check below inspected the wrong variable.
        window = SDL_CreateWindow("Things&Stuff", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SWIDTH, SHEIGHT, 0);
        if (window == NULL)
        {
            std::cout << "SDL_CreateWindow Error: " << SDL_GetError() << std::endl;
            success = false;
        }
        else
        {
            int imgFlags = IMG_INIT_PNG;
            if( !( IMG_Init( imgFlags ) & imgFlags ) )
            {
                std::cout << "SDL_image Error:" << IMG_GetError() << std::endl;
                success = false;
            }
            // Renderer for all drawing; -1 picks the first suitable driver.
            render = SDL_CreateRenderer(window, -1, 0);
            if (render == NULL)
            {
                std::cout << "SDL_CreateRenderer Error: " << SDL_GetError() << std::endl;
                success = false;
            }
            // 44.1 kHz audio for the click sound effect.
            if( Mix_OpenAudio( 44100, MIX_DEFAULT_FORMAT, 0, 2048 ) < 0 )
            {
                std::cout << "SDL_mixer Error:" << Mix_GetError() << std::endl;
                success = false;
            }
        }
    }
    return success;
}
// Load the click sound effect into the global `sound`.
// Returns false (after logging the mixer error) when the WAV cannot load.
bool loadMedia()
{
    sound = Mix_LoadWAV( "music/high.wav" );
    if (sound != NULL)
        return true;
    std::cout << "SDL_Mixer Error: " << Mix_GetError() << std::endl;
    return false;
}
void close()
{
SDL_DestroyTexture(texture);
SDL_DestroyRenderer(render);
SDL_DestroyWindow(window);
Mix_FreeChunk( sound );
Mix_Quit();
IMG_Quit();
SDL_Quit();
}
// Entry point: draws two clickable "Click!" boxes.  The left one plays a
// sound, tears everything down via close() and exits; the right one just
// plays the sound.  Afterwards it prompts for a "robots file" name on the
// console (the handoff marked "CALL his code" below is unimplemented).
int main(int argc, char* args[])
{
if( !init() )
{
std::cout << "Failed to initialize!" << std::endl;
}
else
{
if( !loadMedia() )
{
std::cout << "Failed to load media!" << std::endl;
}
else
{
bool quit = false;
SDL_Event e;
int x = 0;
int y = 0;
// Blue background.
SDL_SetRenderDrawColor(render, 0,0,255,255);
SDL_RenderClear(render);
SDL_RenderPresent(render);
TTF_Font * font = TTF_OpenFont("arial.ttf", 50);
SDL_Color black = {0, 0, 0};
SDL_Color white = {255,255,255};
SDL_SetRenderDrawColor(render,255,255,255,255);
surface = TTF_RenderText_Solid(font, "Click!", white);
texture = SDL_CreateTextureFromSurface(render, surface);
// Left button: quits the program when clicked.
SDL_Rect click;
click.x = (SWIDTH/4)-40;
click.y = (SHEIGHT/2)+40;
click.w = 80;
click.h = 80;
SDL_RenderDrawRect(render, &click);
SDL_RenderCopy(render, texture, NULL, &click);
// NOTE(review): surface/texture are reassigned without freeing the
// previous ones — leaked until close().
surface = TTF_RenderText_Solid(font, "Click!!", white);
texture = SDL_CreateTextureFromSurface(render, surface);
// Right button: only plays the sound.
SDL_Rect animate;
animate.x = (SWIDTH/1.5);
animate.y = (SHEIGHT/2)+40;
animate.w = 80;
animate.h = 80;
SDL_RenderDrawRect(render, &animate);
SDL_RenderCopy(render, texture, NULL, &animate);
SDL_RenderPresent(render);
while( !quit )
{
// Drain the event queue; hit-test the pointer against both buttons.
while( SDL_PollEvent( &e ) != 0 )
{
SDL_GetMouseState(&x, &y);
if( e.type == SDL_QUIT )
{
quit = true;
}
else if (x >= click.x && x <= click.x + click.w && y >= click.y && y <= click.y + click.h)
{
if (e.type == SDL_MOUSEBUTTONUP)
{
if (e.button.button == SDL_BUTTON_LEFT)
{
Mix_PlayChannel(-1, sound, 0 );
SDL_Delay(100);
close();
quit = true;
}
}
}
else if (x >= animate.x && x <= animate.x + animate.w && y >= animate.y && y <= animate.y + animate.h)
{
if (e.type == SDL_MOUSEBUTTONUP)
{
if (e.button.button == SDL_BUTTON_LEFT)
{
Mix_PlayChannel(-1, sound, 0 );
}
}
}
}
}
}
}
string stuff = "";
std::cout << "\nInput the name of the robots file." << std::endl;
std::cin >> stuff;
// CALL his code
std::cout << stuff << std::endl;
return 0;
}
"C++"
] | 2 | C++ | cynthia5610/BasicSDLProject | b2d7cf3d7559f0cfca54f84afc14d785ff15f996 | 23ececb89527eb20a3f7dbc7f2cdcfedf3e4b2f1 |
refs/heads/master | <repo_name>t-taira/rails5-docker-compose<file_sep>/README.md
# rails5-docker-compose
<file_sep>/script/web-start
#!/bin/bash
# Boot script for the Rails web container.
# Fail fast if any step errors instead of starting a half-configured server.
set -e
# FIX: install gems BEFORE asset precompilation — `rake assets:precompile`
# needs the bundle to be present, so the original ordering could fail on a
# fresh container.
bundle check || bundle install
bundle exec rake assets:precompile
export SECRET_KEY_BASE=`bundle exec rake secret`
bundle exec puma -C config/puma.rb
<file_sep>/Dockerfile
FROM ruby:2.3.3-alpine
ENV BUNDLE_PATH /bundle
ENV BUILD_PACKAGES="curl-dev ruby-dev build-base bash git" \
DEV_PACKAGES="zlib-dev libxml2-dev libxslt-dev tzdata yaml-dev mysql-dev imagemagick" \
RUBY_PACKAGES="ruby-json yaml nodejs"
RUN apk update && \
apk upgrade && \
apk add --update $BUILD_PACKAGES $DEV_PACKAGES $RUBY_PACKAGES && \
cp /usr/share/zoneinfo/Asia/Tokyo /etc/localtime && \
rm -rf /var/cache/apk/*
WORKDIR /app
COPY . /app
EXPOSE 3000
#RUN gem install bundler
#RUN mkdir -p /usr/src/app && \
# mkdir -p /app/tmp/sockets
#RUN bundle check || bundle install
# Expose volumes to frontend
VOLUME /app/public
VOLUME /app/tmp
VOLUME /app/log
<file_sep>/script/sidekiq-start
#!/bin/bash
bundle exec puma -C config/puma.rb
| 50a4d94701f2b608898b04fdb07df591c6f2bce5 | [
"Markdown",
"Dockerfile",
"Shell"
] | 4 | Markdown | t-taira/rails5-docker-compose | 1d0b51782584807044b8bc6142882bf0acc8304c | 9040bcafed5a78afc1b090246a225941a8743282 |
refs/heads/master | <repo_name>washoe/pbscast<file_sep>/public/js/main.js
// main.js - client code
// (requires jQuery)
$(document).ready(function(){
$('#programlist a.getpodcast').click(function(event){
// request podcast xml doc from server using itpc protocol
var protocol = 'itpc';
//protocol = 'http';// for testing
var getpodcastUrl = protocol + '://'+location.hostname+(location.port ? ':'+location.port : '')+'/getpodcast/'+$(event.target).data('programId');
window.open(getpodcastUrl);
})
});<file_sep>/routes/getpodcast.js
/*
* GET podcast
*/
var MONGO_URL = process.env.MONGOHQ_URL || 'mongodb://localhost:27017/pbscast'; // use local mongodb in dev
var $ = require('node-jquery');
var http = require('http');
var Q = require('q');
var jade = require('jade');
var fs = require('fs');
var programDataCollection = require('monk')(MONGO_URL).get('programData');
var PBS_HOST = 'pbsfm.org.au';
var AUDIO = '/audio';
var SAVE_PATH = './cache/';
var TEMPLATE_PATH = './views/podcast.jade';
var PROGRAM_LIST = '/programlist';
var jadeTemplate = fs.readFileSync(TEMPLATE_PATH);
// handle request for podcast by retrieving data, rendering xml an serving the result
exports.get = function(req, res) {
var programId = req.params.id; // e.g. 'acrossthetracks'
retrievePodcast(programId).then(function(podcastData){
var xml = ''; // rendered xml
if (podcastData) {
xml = renderPodcast(podcastData);
}
else {
xml = 'not found';
}
res.send(xml);
})
}
// scrape program list and persist in db
// http://stackoverflow.com/questions/18153410/how-to-use-q-all-with-complex-array-of-promises was some help
exports.buildAll = function() {
console.log('building all podcasts');
httpGet(PBS_HOST, PROGRAM_LIST)
.then(function(htmlString){
var $html = $(htmlString);
var selector = '.view-programs-active-list td';
var descriptionSelector = 'div.views-field-field-presenter-value span';
var $programList = $html.find(selector);
var programPromises = [];
$programList.each(function() {
var program = {};
program.href = $(this).find('a').attr('href');
program.id = program.href.replace(/^\/|\/$/g, '');// remove slash(es)
program.name = $(this).find('a').html();
program.description = $(this).find(descriptionSelector).html();
// only include if there is an href
if (undefined != program.href) {
var podcastPromise = getPodCast(program).then (function(podcastData) {
if (podcastData.items.length >0) {
persistPodcast(program.id, podcastData);
}
else {
console.info('no episodes found for '+program.id)
}
});
}
});
Q.all(programPromises).then(function(){
console.log('got all available podcasts ');
})
})
}
// get all podcasts in db as array of objects
exports.getIndex = function(req, res) {
var db = require('monk')(MONGO_URL);
db.get('programData').find({}, function(err, data) {
if (err) {
console.error('Error getting podcast data: '+err);
}
// render results into jade template
console.log('*********found podcast data')
console.log(data);
db.close;
});
}
// assemble podcast for a given program. return promise that returns rendered xml
var getPodCast = function(program) {
var programId = program.href;
var deferred = Q.defer();
var podcastData = {};
var episodes;
console.log('getting audio for: '+PBS_HOST+programId);
httpGet(PBS_HOST, '/'+programId)
.then(function(htmlString) {
podcastData = extractProgramDetails(htmlString);
podcastData.language = 'en-au';
podcastData.shortDescription = program.description; // short description to be used on index page
}).then (httpGet(PBS_HOST, '/'+programId+ AUDIO)
.then(function(htmlString){
episodes = extractEpisodeData(htmlString);
console.log('extracted data for '+ episodes.length+ ' episodes of program '+programId);
var episodePromises = [];
episodes.forEach(function(episode) {
var episodePromise = httpGet(PBS_HOST, episode.pageUrl).then(function(htmlString) {
episode.url = extractUrl(htmlString);
episode.link = PBS_HOST + episode.pageUrl;//"<![CDATA[" and ends with "]]>"
episode.description = $(htmlString).find('#block-views-playlists_pgm_audio-block_1 .field-content').text();
episode.duration = '0';
return(episode);
});
episodePromises.push(episodePromise);
});
Q.all(episodePromises).then(function(episodeResults) {
// all the episode promises have been fulfilled
podcastData.items = episodeResults;
deferred.resolve(podcastData);
});
}));
return deferred.promise;
}
// return episode data from program page html
var extractEpisodeData = function(htmlString) {
var $html = $(htmlString);
var selector = '.node.node-teaser.node-type-story a';
var $episodeList = $html.find(selector);
var episodes = [];
$episodeList.each(function(){
var episode = {};
episode.title = $(this).html();
episode.pageUrl = $(this).attr('href');
episode.pubDate = episode.title .split('for ')[1];// yes i know
episodes.push(episode);
});
episodes.reverse();
return episodes;
}
// Extract the program details from the progrmam list page
var extractProgramDetails = function(htmlString) {
var result = {};
var emailSelector = 'a[href^="mailto:"]';
var descriptionSelector = '#content-area .field-content';
var imageSelector = '#content-area .field-content img';
var titleSelector = '#content h1.title';
result.email = $(htmlString).find(emailSelector).html();
result.title = $(htmlString).find(titleSelector).text();
result.description = $(htmlString).find(descriptionSelector).text();
result.imageUrl = $(htmlString).find(imageSelector).attr('src');
return result;
}
// Extract the actual episode audio url from the page
var extractUrl = function(htmlString) {
var drupalSettings = JSON.parse(htmlString.split('jQuery.extend(Drupal.settings, ')[2].split(');')[0]); // extremely fragile way to get this info - a regExp would be better
return drupalSettings.jwplayer.files['jwplayer-2'].file;
}
// httpGetPromise - takes a host+path, returns a promise
// adapted from http://veebdev.wordpress.com/2012/02/26/node-js-http-get-example-does-not-work-here-is-fix/
// promise stuff from http://runnable.com/Uld6VcWt6UEaAAHR/combine-promises-with-q-for-node-js
var httpGet = function(host, path) {
var deferred = Q.defer();
http.get({ host: PBS_HOST, path: path+'?'+new Date().getTime()}, function(response) {
var htmlString = '';
if (response.statusCode === 302) {
var newLocation = url.parse(response.headers.location).host;
console.info('We have to make new request ' + newLocation);
request(newLocation);
} else {
console.info("Response: %d", response.statusCode);
response.on('data', function(data) {
htmlString += data;
});
response.on('end', function() {
deferred.resolve(htmlString);
});
}
}).on('error', function(err) {
console.error('Error %s', err.message);
});
return deferred.promise;
}
// render podcast data as xml
var renderPodcast = function(podcastData) {
    // Render podcast data as RSS XML via the compiled jade template.
    var options = {pretty: true};
    var jadeFunction = jade.compile(jadeTemplate, options);
    // FIX: the template was previously rendered twice (duplicated statement);
    // render once.
    var podCastString = jadeFunction(podcastData);
    // Post-processing hacks: the template writes "lynk" because jade treats
    // <link> specially, and wraps guid values in quotes; undo both here.
    podCastString = podCastString.replace(/lynk/g, 'link');
    podCastString = podCastString.replace(/<guid>"/g, '<guid>');
    podCastString = podCastString.replace(/"<\/guid>/g, '</guid>');
    return podCastString;
}
// persist data in mongo db
var persistPodcast = function(programId, podcastData) {
console.log('persistPodcast ' + programId);
var deferred = Q.defer();
var db = require('monk')(MONGO_URL);
var query = {programId:programId};
//update
db.get('programData').update(query, {programId:programId, podcastData:podcastData}, {upsert:true}).on('success', function(data) {
console.log('success');
db.close();
deferred.resolve(data);
}).on('error', function(err) {
console.error('error '+err);
db.close();
deferred.resolve(null);
});
return deferred.promise;
}
// retrieve podcast data from mongo db
var retrievePodcast = function(programId) {
    // Fetch the stored podcast data for one program from mongo.
    // Resolves with the podcastData document, or null when absent.
    var deferred = Q.defer();
    var db = require('monk')(MONGO_URL);
    var query = {programId:programId};
    db.get('programData').findOne(query, function(err, data){
        if (err) {
            console.error('Error getting program data for '+programId+': '+err);//etc
        };
        console.log('success');
        db.close(); // FIX: was `db.close` (property access, never invoked) — leaked the connection
        deferred.resolve(data ? data.podcastData : null);
    });
    return deferred.promise;
}
// pbscast module<file_sep>/routes/index.js
/*
* GET home page.
*/
var MONGO_URL = process.env.MONGOHQ_URL || 'mongodb://localhost:27017/pbscast'; // use local mongodb in dev
var TEMPLATE_PATH = './views/index.jade';
var Q = require('q');
var fs = require('fs');
exports.index = function(req, res){
retrieveAllPodcasts().then(function(data){
res.render('index', {
pbsRoot: 'http://pbsfm.org.au',
programData:data})
});
};
// retrieve all podcast data from mongo db
var retrieveAllPodcasts = function() {
console.log('retrieveAllPodcasts');
var deferred = Q.defer();
var db = require('monk')(MONGO_URL);
var query = {};
db.get('programData').find(query).on('success', function(data) {
console.log('success');
db.close();
deferred.resolve(data);
}).on('error', function(err) {
console.error('error '+err);
db.close();
deferred.resolve(null);
});
return deferred.promise;
}
| 73a92c4bddd8b98eabec86136c3022a51e178aa7 | [
"JavaScript"
] | 4 | JavaScript | washoe/pbscast | d4786850b1dd4a78e8d23173df56fbcce936e6a0 | 9e0ef509956476ca82b897fdbad55bcb98010616 |
refs/heads/master | <file_sep>import images from './gallery-items.js';
const refs = {
gallery: document.querySelector('.gallery'),
modal: document.querySelector('.js-lightbox'),
modalImage: document.querySelector('.lightbox__image'),
button: document.querySelector('button[data-action="close-lightbox"]'),
overlay: document.querySelector('.lightbox__content'),
};
refs.gallery.append(...createGallery());
refs.gallery.addEventListener('click', handleShowImage);
refs.button.addEventListener('click', handleCloseModal);
refs.modal.addEventListener('click', overlayCloseModal);
function createGallery() {
return images.map(({ preview, original, description }) => {
const li = document.createElement('li');
li.insertAdjacentHTML(
'afterbegin',
`<li class="gallery__item">
<img
class="gallery__image"
src="${preview}"
data-source="${original}"
alt="${description}"
/>
<span class="gallery__icon">
<i class="material-icons">zoom_out_map</i>
</span>
</a>
</li>`,
);
return li;
});
}
function handleShowImage({ target }) {
if (target.tagName !== 'IMG') return;
const url = target.dataset.source;
refs.modal.classList.add('is-open');
refs.modalImage.src = url;
window.addEventListener('keydown', handleKeyPress);
window.addEventListener('keyup', handleNextImage);
window.addEventListener('keyup', handlePrevImage);
}
function handleCloseModal() {
refs.modal.classList.remove('is-open');
refs.modalImage.src = '';
window.removeEventListener('keydown', handleKeyPress);
}
function overlayCloseModal(event) {
if (event.target !== refs.overlay) return;
handleCloseModal();
}
function handleKeyPress(event) {
if (event.code !== 'Escape') return;
handleCloseModal();
}
const links = images.map(({ original }) => original);
let index = 0;
function slider(v) {
index += 1 * v;
if (index > links.length - 1) {
index = 0;
} else if (index < 0) {
index = links.length - 1;
}
refs.modalImage.src = links[index];
}
function handleNextImage(event) {
if (event.code !== 'ArrowRight') return;
slider(1);
}
function handlePrevImage(event) {
if (event.code !== 'ArrowLeft') return;
slider(-1);
}
| b3d79a60153aa9e567126a65f98a8a051dae7fa6 | [
"JavaScript"
] | 1 | JavaScript | SolodkayaT/goit-js-hw-08-gallery | 2029a29b622850fa38d933d9414f145f1be4e678 | 3ff0b1dc2d03ce27f47198890a74b5c8fc5a4190 |
refs/heads/master | <repo_name>paulfer/ProyectosSwift<file_sep>/Minireto.playground/Pages/Untitled Page.xcplaygroundpage/Contents.swift
//: Playground - noun: a place where people can play
/* Generar un rango de 0 a 100, incluye el número 100 en el rango.
Iterar el rango completo, utilizando la sentencia for para obtener cada número del rango y aplicar las siguientes reglas de impresión.
Al evaluar cada número debe aplicar las siguientes reglas:
- Si el número es divisible entre 5, imprimir: # el número + “Bingo!!!”
- Si el número es par, imprimir: # el número + “par!!!”
- Si el número es impar, imprimir: # el número + “impar!!!”
- Si el número se encuentra dentro de un rango del 30 al 40, imprimir: # el número + “Viva Swift!!!”
Usar la interpolación de variables para realizar la impresión de cada número.
*/
import UIKit

// Walk 0...100 (inclusive) and classify every number:
//   - multiple of 5  -> "Bingo"
//   - even           -> "Par"
//   - otherwise      -> "Impar"
// Numbers in 30...40 additionally get a tab-indented "Viva Swift" line;
// all other numbers get an empty line, exactly as the original switch did.
for value in 0...100 {
    if value % 5 == 0 {
        print("\(value) Bingo")
    } else if value % 2 == 0 {
        print("\(value) Par")
    } else {
        print("\(value) Impar")
    }
    if (30...40).contains(value) {
        print("\t \(value) Viva Swift")
    } else {
        print("")
    }
}
| 3ccebca01169a31eed0c1a8c5d130a9b9c8e5f2d | [
"Swift"
] | 1 | Swift | paulfer/ProyectosSwift | 31e34125c6d64ea9508bfebc7babb21ec23fc500 | bb655f484b835b555b50b9ff7aa28a82794bfcac |
refs/heads/master | <repo_name>sajibtariq/uva_solve_code<file_sep>/SMS Typing.c
#include<stdio.h>
#include<string.h>
/*
 * UVA 11530 - SMS Typing.  For each message, sum the number of keypresses
 * on a classic phone keypad: 1 press for the first letter on a key (and
 * space on '0'), 2/3/4 for the later letters.
 */
int main()
{
char a[101];
int i,j,c,t;
scanf("%d",&t);
getchar();
/* NOTE(review): gets() is unsafe; input lines must be < 101 chars. */
for(i=1; i<=t; i++)
{
gets(a);
c=0;
for(j=0; j<strlen(a); j++)
{
/* first letter on a key, or space: 1 press */
if((a[j]=='a')||(a[j]=='d')||(a[j]=='g')||(a[j]=='j')||(a[j]=='m')||(a[j]=='p')||(a[j]=='t')||(a[j]=='w')||(a[j]==' '))
c+=1;
else if((a[j]=='b')||(a[j]=='e')||(a[j]=='h')||(a[j]=='k')||(a[j]=='n')||(a[j]=='q')||(a[j]=='u')||(a[j]=='x'))
c+=2;
else if((a[j]=='c')||(a[j]=='f')||(a[j]=='i')||(a[j]=='l')||(a[j]=='o')||(a[j]=='r')||(a[j]=='v')||(a[j]=='y'))
c+=3;
else if((a[j]=='s')||(a[j]=='z'))
c+=4;
}
printf("Case #%d: %d\n",i,c);
}
return 0;
}
<file_sep>/913.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 913 - Joana and the Odd Numbers.  For odd row n of the odd-number
// triangle, print the sum of the last three numbers of that row, using a
// closed form: y is the last odd number of the row, the answer is
// y + (y-2) + (y-4).
int main()
{
long long i,j,k,l,m,n,x,y;
while(cin>>n)
{
long long sum;
x=(n+1)/2;                      // index of the (odd) row
y=(2*((x*(x+1))-x))-1;          // last odd number in row n
sum=y+(y-2)+(y-4);              // sum of the last three entries
cout<<sum<<endl;
}
return 0;
}
<file_sep>/10931.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
// UVA 10931 - Parity.  Print each number's binary representation and the
// count of 1 bits ("parity"), e.g. "The parity of 1101 is 3 (mod 2).".
int main()
{
long i,j,k,l,m,n,o,p,b,cnt;
long a[1000];
while(cin>>m && m!=0 )
{ n=m;
i=0;
cnt=0;
// Collect binary digits least-significant first; count set bits.
while(n>0)
{
b=n %2;
n/=2;
a[i]=b;
if(a[i]==1)
cnt++;
i++;
}
printf("The parity of ");
// Emit digits most-significant first.
for(j=i-1;j>=0;j--)
cout<<a[j];
/* NOTE(review): cnt is `long` printed with %d — fine while long is
   32-bit / cnt is tiny, but %ld would be strictly correct. */
printf(" is %d (mod 2).\n",cnt);
}
return 0;
}
<file_sep>/530.cpp
#include<bits/stdc++.h>
using namespace std;
#define ll long long
int main()
{
ll n,r,ans,a,b,c,i,k;
while(cin>>n>>r)
{ ans=1;
//memset(dp,-1,sizeof(dp));
if(n==0 && r==0)
break;
k=min(r,n-r);
cout<<k<<endl;
for(i=1;i<=k;i++)
{
ans=ans*((((n-i)+1))/i);
cout<<ans<<endl;
}
printf("%lld\n",ans);
}
return 0;
}
<file_sep>/11900biledeegg.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 11900 - Boiled Eggs.  Greedily take eggs in the given order while
// neither the egg-count limit p nor the weight limit q is exceeded; print
// how many eggs fit.
// NOTE(review): the eggs are not sorted here — this is only correct if the
// input weights already arrive in non-decreasing order; confirm against
// the problem statement.
int main()
{
long long i,j,t,k,l,n,p,q,sum,x;
cin>>t;
int a[50];
for( j=1;j<=t;j++)
{
cin>>n>>p>>q;
for(k=0; k<n; k++)
cin>>a[k];
int c=0;
sum=0;
// Stop at the first egg that would break either limit.
for(i=0; i<n; i++)
{ if(i>=p || sum+a[i]>q)
break;
else
sum=sum+a[i];
}
cout<<"Case"<<' '<<j<<':'<<' '<<i<<endl;
}
return 0;
}
<file_sep>/hajj e akbar.c
#include<stdio.h>
#include<string.h>
/*
 * UVA 12577 - Hajj-e-Akbar.  Read words until a lone '*' (ASCII 42);
 * print "Hajj-e-Akbar" for "Hajj" and "Hajj-e-Asghar" for "Umrah".
 */
int main()
{
char nl[6];
int ln,i,z=1,c;
while(scanf("%s",nl)==1)
{
c=0;
ln=strlen(nl);
/* A single-character token: sum its chars and stop on '*' (== 42). */
if(ln==1)
{
for(i=0; i<ln; i++)
c=c+nl[i];
if(c==42)
break;
}
if((strcmp(nl,"Hajj"))==0)
printf("Case %d: Hajj-e-Akbar\n",z);
if((strcmp(nl,"Umrah"))==0)
printf("Case %d: Hajj-e-Asghar\n",z);
z++;
}
return 0;
}
<file_sep>/11799horrosdash.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 11799 - Horror Dash.  For each test case, print the speed of the
// fastest creature (the running maximum of the n speeds).
int main()
{
long i,j,k,l,m,n,t,result;
long a[3000];
long b[3000];
cin>>t;
for(i=1;i<=t;i++)
{
cin>>n;
for(j=0;j<n;j++)
{
cin>>a[j];
}
m=0;
result=0;
// Running maximum over the speeds.
for(k=0;k<n;k++)
{
result=max(m,a[k]);
m=result;
//cout<<result<<" "<<a[k];
}
printf("Case %ld: %ld\n",i,result);
}
return 0;
}
<file_sep>/10340.cpp
#include<bits/stdc++.h>
using namespace std ;
int main()
{
int lt,ls,i,j;
string s,t;
while( cin>>s>>t)
{
ls=s.length();
lt=t.length();
if(ls>lt)
{
cout<<"No"<<endl;
continue;
}
int c=0;
int x=0;
for(i=0; i<ls; i++)
{
for(j=x; j<lt; j++)
{
if(s[i]==t[j])
{
c++;
x=j+1;
break;}
}
}
if(c==ls)
cout<<"Yes"<<endl;
else
cout<<"No"<<endl;
}
return 0;
}
<file_sep>/11547.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 11547 - Automatic Answer.  Apply the fixed arithmetic pipeline to n
// and print the tens digit of the (possibly negative) result.
int main()
{
long i,j,k,l,m,n,x,y,t;
cin>>t;
for(i=1; i<=t; i++)
{
cin>>n;
m=(((((n*567)/ 9)+7452)*235/47)-498);
// Strip two digits: after the loop x holds the tens digit.
l=0;
while(l<2)
{
x=m%10;
m=m/10;
l++;
}
// abs() handles the negative-remainder case when m was negative.
cout<<abs(x)<<endl;
}
return 0;
}
<file_sep>/12541.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long long a ,b,q, c, d ,e,f,g,h,k,l,i,ans [110],mmax,mmin,t;
char s[100][100];
mmin=2147483647;
mmax=0;
cin>>t;
for(q=0;q<t;q++)
{
cin>>s[q];
cin>>a>>b>>c;
ans[q]=(c*1000)+(b*100)+a;
if(ans[q]>mmax)
e=q;
//cout<<ans[q]<<endl;
//out<<mmax<<endl;
mmax=max(mmax,ans[q]);
//if(a[q]>)
if(ans[q]<mmin)
//
f=q;
mmin=min(mmin,ans[q]);
//cout<<mmin<<endl;
//cout<<mmax<<endl;
}
cout<<s[e]<<endl;
cout<<s[f]<<endl;
return 0;}
<file_sep>/374.cpp
#include<bits/stdc++.h>
using namespace std;
#define ll long

// square(s): s*s.  Kept for source compatibility; note it can overflow ll
// for |s| > ~46340 when long is 32 bits.
ll square(ll s)
{
    return s * s;
}

// bigmod(b, p, m): (b^p) mod m by iterative binary exponentiation.
// Returns 1 when p == 0, matching the original recursive version.
//
// FIX: the original computed square(residue) % m in ll (== long, which is
// 32-bit on some platforms), so (m-1)^2 overflowed for m > ~46341.
// Intermediate products now use long long so they cannot overflow for any
// 32-bit modulus.
ll bigmod(ll b, ll p, ll m)
{
    long long base = b % m;
    long long acc = 1;
    while (p > 0) {
        if (p & 1)
            acc = (acc * base) % m;
        base = (base * base) % m;
        p >>= 1;
    }
    return (ll)acc;
}
// UVA 374 - Big Mod.  Read triples b p m and print (b^p) mod m.
int main()
{ll b,p,m,mod;
while(scanf("%ld %ld %ld",&b,&p,&m)==3)
{
mod=bigmod(b,p,m);
cout<<mod<<endl;}
return 0;
}
<file_sep>/uva10523.java
import java.math.BigInteger;
import java.util.Scanner;
// UVA 10523 - Very Easy!!!  For each pair (a, b) print
// sum_{i=1..a} i * b^i, using BigInteger since the result can be huge.
public class uva10523 {
public static void main(String args[])
{
Scanner sc= new Scanner(System.in);
while(sc.hasNext())
{ int a,b,i;
// NOTE(review): `ans` is declared but never used.
BigInteger ans,r,c;
BigInteger sum=BigInteger.ZERO;
a=sc.nextInt();
b=sc.nextInt();
c= BigInteger.valueOf(b);
for(i=1;i<=a;i++)
{
// Accumulate the i-th term: i * b^i.
r = BigInteger.valueOf(i);
// t =BigInteger.valueOf(ans.multiply(r));
// sum=sum.add(ans.multiply(r));
sum = sum.add(r.multiply(c.pow(i)));
}
System.out.println(sum);
}
}
}
<file_sep>/10221.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 10221 - Satellites.  Given altitude (km) and an angle (degrees or
// "min" = arc minutes), print the arc length and chord length between two
// satellite positions on an orbit of radius 6440 + altitude (6440 km is
// the Earth radius used by the problem).
int main()
{
double a,b,c,d,arc,chord;
char s[10];
while(cin>>a>>b>>s)
{
//scanf("%s",s);
getchar();
// Arc minutes -> degrees.
if(s[0]=='m')
{
b=b/60.0;
}
// Angles beyond 180 wrap to the shorter side of the circle.
if(b>180.0)
{b=360.0-b;
} arc=(6440.0+a)*((b*acos(-1))/180);
d=((b*acos(-1))/180);
chord=2*(6440.0+a)*sin(d/2);
printf("%.6lf %.6lf\n",arc,chord);
}
return 0;
}
<file_sep>/496 - Simply Subsets.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int l,k,q,r,a,b;
string line,line2;
//freopen ("input.txt","r",stdin);
//freopen ("output.txt","w",stdout);
while( getline(cin, line ) )
{
getline(cin,line2);
stringstream ss( line );
stringstream tt( line2 );
// initialize kortesi
int num,num1;
set< int > s;
set< int > t;
vector< int > x;
vector<int > y;
vector< int > w;
vector<int > z;
map<int, int>mp;
while( ss >> num )
{
s.insert( num );
w.push_back(num);
}
while( tt >> num1 )
{
t.insert( num1 );
z.push_back(num1);
}
set< int > :: iterator it;
set< int > :: iterator kt;
for(it = s.begin(); it != s.end(); it++)
{
x.push_back(*it);
}
for(kt = t.begin(); kt != t.end(); kt++)
{
y.push_back(*kt);
}
//sort(w.begin(),w.end());
//sort(z.begin(),z.end());
a=x.size();
b=y.size();
//cout<<a<<" "<<b<<endl;
int flag=0;
for(int i=0; i<a; i++)
{
mp[x[i]]++;
if(mp[x[i]]>1)
{
flag=1;
}
}
for(int i=0; i<b; i++)
{
mp[y[i]]++;
if(mp[y[i]]>1)
{
flag=1;
}
}
int ll=mp.size();
int f1=0,f2=0,f3=0,f4=0,f5=0,f6=0,df=0;
if(a==b)
{
for(int i=0; i<a; i++)
for(int j=0;j<a;j++)
{
if(x[i]==y[j])
f1++;
}
if(f1==a)
printf("A equals B\n");
else
{
if(flag==0)
printf("A and B are disjoint\n");
else
printf("I'm confused!\n");
}
}
else if(a<b)
{
for(int i=0; i<a; i++)
for(int j=0;j<b;j++)
{
if(x[i]==y[j])
f3++;
}
//cout<<f3<<endl;
if(f3==a)
printf("A is a proper subset of B\n");
else
{
{
if(flag==0)
printf("A and B are disjoint\n");
else
printf("I'm confused!\n");
}
}
}
else if(b<a)
{
for(int i=0; i<b; i++)
for(int j=0;j<a;j++)
{
if(y[i]==x[j])
f5++;
}
if(f5==b)
printf("B is a proper subset of A\n");
else
{
{
if(flag==0)
printf("A and B are disjoint\n");
else
printf("I'm confused!\n");
}
}
}
}
return 0;
}
<file_sep>/maxproduct.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 787/11059-style maximum product: for each case of k numbers, try every
// contiguous subsequence and print the largest product found (0 when every
// product is negative).  k is small (num[20]) so the O(k^2) scan is fine.
int main()
{
long long i,j,t,k,num[20],p,m,max,d=1;
while(cin>>k)
{
for(t=1; t<=k; t++)
cin>>num[t];
max=0;
// Every start index i ...
for(i=1; i<=k; i++)
{
p=1;
// ... extended one element at a time; track the best product seen.
for(j=i; j<=k; j++)
{
p=p*num[j];
if(p>max)
max=p;
}
}
printf("Case #%lld: The maximum product is %lld.\n\n",d,max);
d++;
}
return 0;
}
<file_sep>/uva10341.cpp
#include<bits/stdc++.h>
using namespace std;
#define ESP 1e-7
double p,q,r,s,t,u,mid,x,k,l,h,m,n;
// f(x) = p*e^-x + q*sin x + r*cos x + s*tan x + t*x^2 + u, using the global
// coefficients read in main().  The parameter x shadows the global x.
double eqn(double x)
{
return (p*exp(-x)+q*sin(x)+r*cos(x)+s*tan(x)+t*x*x+u);
}
// Bisection root-finding on [0, 1] to precision ESP.  Precondition (checked
// by the caller): eqn(0) and eqn(1) have opposite signs.  Keeps the half
// interval whose endpoints still bracket the sign change.
double bisection()
{
l=0;h=1;
while(h-l>ESP)
{m=(l+h)/2;
if(eqn(m)*eqn(h)<=0)
l=m;
else
h=m;
}
return m;
}
// UVA 10341 - Solve It.  For each coefficient set, print the root of
// eqn(x) in [0, 1] to 4 decimals, or "No solution" when the endpoints do
// not bracket a sign change (the function is monotonic on [0,1]).
int main()
{
while(cin>>p>>q>>r>>s>>t>>u)
{
if(eqn(0)*eqn(1)<=0)
{
printf("%.4lf\n",bisection());
}
else
printf("No solution\n");
}
return 0;}
<file_sep>/11389.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long long i,j,k,l,n,m,d,r,x,y,sum1,sum2;
while(cin>>n>>d>>r)
{ if(n==0&&d==0&&r==0)
break;
y=0;
sum1=0;
sum2=0;
long long a[110];
long long b[110];
for(i=0; i<n; i++)
{
cin>>a[i];
}
sort(a,a+n);
for(i=0; i<n; i++)
{
cin>>b[i];
//sum2+=b[i];
}
sort(b,b+n);
for(i=0; i<n; i++)
{
if(a[i]+b[n-1-i]>d)
{
x=(a[i]+b[i])-d;
y=y+x*r;
}}
cout<<y<<endl;
}
return 0;
}
<file_sep>/102.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 102 - Ecological Bin Packing.  Nine counts: bottles of Brown, Green,
// Clear glass already in bins 1..3 (a..i).  For each of the six colour
// orderings, ar[k] holds the number of bottles that must MOVE; print the
// alphabetically-first ordering with the minimum moves (the if-chain below
// is checked in alphabetical order, so ties resolve correctly).
int main()
{
int a,b,c,d,e,f,g,h,i,ar[10],mn,mx;
//freopen("in.txt","r",stdin);
//freopen("o.txt","w",stdout);
while(cin>>a>>b>>c>>d>>e>>f>>g>>h>>i)
{
// Moves for each ordering = everything not already in its target bin.
ar[0]=b+c+d+f+g+h;
ar[1]=b+c+d+e+g+i;
ar[2]=a+b+e+f+g+i;
ar[3]=a+b+d+f+h+i;
ar[4]=a+c+e+f+g+h;
ar[5]=a+c+d+e+h+i;
mn=2147483647;
for(int k=0;k<6;k++)
{
// cout<<ar[k]<<endl;
mn=min(mn,ar[k]);
}
if(mn==ar[1])
printf("BCG %d\n",mn);
else if(mn==ar[0])
printf("BGC %d\n",mn);
else if(mn==ar[2])
printf("CBG %d\n",mn);
else if(mn==ar[3])
printf("CGB %d\n",mn);
else if(mn==ar[4])
printf("GBC %d\n",mn);
else if(mn==ar[5])
printf("GCB %d\n",mn);
}
return 0;}
<file_sep>/11799.c
#include<stdio.h>
/*
 * Read groups of "count v1 .. vcount" and print the maximum of each group.
 * Removed the unused locals `sum` and `min` from the original.
 *
 * NOTE(review): the sibling 11799horrosdash.cpp prints "Case k: max" —
 * this version prints only the maximum, which does not match the UVA 11799
 * output format; confirm which behaviour is wanted before submitting.
 */
int main()
{
    int n[120], i, x, max;
    while (scanf("%d", &x) == 1)
    {
        max = 0;
        for (i = 0; i < x; i++)
        {
            scanf("%d", &n[i]);
            if (n[i] > max)
                max = n[i];
        }
        printf("%d\n", max);
    }
    return 0;
}
<file_sep>/11934.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 11934 - Magic Formula.  Count the x in [0, l] for which
// a*x^2 + b*x + c is divisible by d.  Terminates on the all-zero line.
int main()
{
int l,b,a,c,d,i,k;
while(cin>>a>>b>>c>>d>>l)
{
if(a==0&&b==0&&c==0&&d==0&&l==0)
break;
else
{
k=0;
for(i=0; i<=l; i++)
{
// NOTE(review): pow() returns double; exact for these small values,
// but i*i would avoid any floating-point rounding concern.
int sum=a*pow(i,2)+(b*i)+c;
if(sum%d == 0)
k++;
}
cout<<k<<endl;
}
}
return 0;
}
<file_sep>/490n.c
#include<stdio.h>
#include<string.h>
/*
 * UVA 490 - Rotating Sentences.  Read up to 110 lines, then print the text
 * rotated 90 degrees clockwise: column i of the output is input line i read
 * bottom-to-top.  The grid is pre-filled with spaces so short lines pad out.
 */
int main()
{
char s[110][110];
int i,j,l,d,ln,max=0;
d=0;
/* Blank the whole grid so missing characters print as spaces. */
for( i=0;i<110;i++)
for(j=0;j<110;j++)
s[i][j]=' ';
/* NOTE(review): gets() is unsafe; lines must fit in 110 chars. */
while(gets(s[d])){
d++;
}
/* Replace each line's NUL terminator with a space and track the longest. */
for(i=0;i<d;i++)
{ln=strlen(s[i]);
s[i][ln]=' ';
if(ln>max)
max=ln;}
/* Emit column i of every line, last input line first. */
for(i=0;i<max;i++)
{
for(j=d-1;j>=0;j--)
printf("%c",s[j][i]);
printf("\n");
}
return 0;
}
<file_sep>/11462.cpp
#include<bits/stdc++.h>
//#include<vector>
using namespace std;
// UVA 11462 - Age Sort.  Read n ages, sort ascending, print them on one
// line.  Loops until n == 0.
// NOTE(review): values are joined with '.' here — the UVA judge expects a
// single space between ages; confirm the intended separator before
// submitting.
int main()
{
int x,n;
while((cin >> n))
{
if(n==0)
break;
vector<int>vt;
for(int i = 0; i < n; i++) {
cin >> x;
vt.push_back(x);
}
sort(vt.begin(), vt.end());
for(int i = 0; i < vt.size(); i++) {
{cout << vt[i];
if(i<(vt.size()-1))
cout<<'.';}
}
cout<<endl;
}
return 0;}
<file_sep>/900.cpp
#include<bits/stdc++.h>
using namespace std;
// Fibonacci table: bee[i] = F(i) with F(0)=0, F(1)=1.
// FIX: the table was declared as bee[5000] while beeanc() wrote bee[5000] —
// one element past the end (undefined behaviour).  5001 slots make every
// index 0..5000 valid.  (NOTE: long long overflows past F(92); entries
// beyond that wrap, as in the original.)
long long bee[5001];

// Fill the table iteratively.  (Dead commented-out recursive/rolling
// variants from the original were removed.)
void beeanc()
{
    bee[0] = 0;
    bee[1] = 1;
    for (int i = 2; i <= 5000; i++)
        bee[i] = bee[i - 1] + bee[i - 2];
}
// UVA 900 - Brick Wall Patterns.  The number of wall patterns of length m
// is the Fibonacci number F(m+1), looked up from the precomputed table.
int main()
{
beeanc();
long long i,j,k,l,m;
while(cin>>m&&m!=0)
{
k=bee[m+1];
cout<<k<<endl;
}
return 0;
}
<file_sep>/11877.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 11877 - The Coco-Cola Store: 3 empty bottles trade for 1 full one
// (which yields another empty).  z accumulates bottles drunk; y adds one
// when the leftover count reaches exactly 2 during trading (borrow one,
// trade, drink, return the empty).
// NOTE(review): when m is 2 at the very start the loop never runs and the
// answer is 0 — verify against the judge whether borrowing should also
// apply to an initial count of 2.
int main()
{
int i,j,k,l,m,n,y,t,x,z,w;
while(cin>>m)
{
if(m==0)
break;
z=0;
y=0;
while(m>=3)
{
z=z+m/3;           // full bottles bought this round
m=(m/3)+(m%3);     // empties carried into the next round
if(m==2)
y++;               // borrow-one trick once we are left with 2
}
w=z+y;
cout<<w<<endl;
}
return 0;
}
<file_sep>/10325lottery.cpp
#include<bits/stdc++.h>
using namespace std;
// Greatest common divisor via the iterative Euclidean algorithm
// (same results as the original recursive form, including gcd(a, 0) == a).
long long gcd(long long a, long long b)
{
    while (b != 0)
    {
        long long rem = a % b;
        a = b;
        b = rem;
    }
    return a;
}
// Least common multiple; dividing before multiplying keeps the
// intermediate value small.
long long LCM(long long a, long long b)
{
    return (a / gcd(a, b)) * b;
}
// UVA 10325 - The Lottery.  Count numbers in [1, n] divisible by NONE of
// the m given values, via inclusion-exclusion over all 2^m - 1 non-empty
// subsets: add/subtract n / lcm(subset) depending on subset parity, which
// yields `rest` = count divisible by at least one value; answer n - rest.
int main()
{
long long i,j,k,l,n,m;
while(cin>>n>>m)
{
long long a[100];
long long rest=0;
for(i=0; i<m; i++)
{
cin>>a[i];
}
k=pow(2,m);
// Each bitmask j selects one non-empty subset of the m divisors.
for(j=1; j<=k-1; j++)
{
long long lcm=1;
long long cnt=0;
for(l=0; l<m; l++)
{
if (j &(1<<l))
{
lcm=LCM(lcm,a[l]);
cnt++;
}
}
// Odd-sized subsets add, even-sized subtract (inclusion-exclusion).
if(cnt%2==1)
rest +=n/lcm;
else
rest -=n/lcm;
}
cout<<n-rest<<endl;
}
return 0;
}
<file_sep>/11192.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
// UVA 11192 - Group Reverse.  Read a group count t and a string whose
// length is a multiple of t, then print the string with each of its t
// equal-sized groups reversed in place.
int main()
{
char c[200];
char b[200];
int t,i,j,k,l,s,e,n;;
while(cin>>t && t!=0)
{
getchar();
// NOTE(review): gets() is unsafe; the line must fit in 200 chars.
gets(c);
l=strlen(c);
n=l/t;       // characters per group
e=n;         // exclusive end of the current group
s=0;         // start of the current group
while(e<=l)
{
// Emit the current group back-to-front.
for(j=e-1; j>=s; j--)
{
cout<<c[j];
}
e=e+n;
s=s+n;
}
cout<<endl;
}
return 0;
}
<file_sep>/squre number.c
#include<stdio.h>
#include<math.h>
/*
 * Count perfect squares in [x, y] for each input pair; "0 0" terminates.
 * z = (int)sqrt(i); i is a perfect square exactly when z*z == i.
 */
int main()
{
int i,j,x,c,y,z;
while(scanf("%d %d",&x,&y)==2)
{
if(x==0&&y==0)
break;
c=0;
for(i=x; i<=y; i++)
{
z=sqrt(i);
if(i==z*z)
c+=1;
}
printf("%d\n",c);
}
return 0;
}
<file_sep>/10300.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 10300 - Ecological Premium.  For each farm read size s, animal count
// a and environment-friendliness f; the premium is s * f summed over the
// farms (the animal count cancels out of the official formula, so `a` is
// read but deliberately unused).
int main()
{
long i,j,k,t,a,s,f,sum;
double m,n,l;
while(cin>>k)
{
for(i=1; i<=k; i++)
{
cin>>t;
sum=0;
for(j=1; j<=t; j++)
{
cin>>s;
cin>>a;
cin>>f;
sum=sum+(s*f);
}
cout<<sum<<endl;
}
}
return 0;
}
<file_sep>/10347.cpp
#include<bits/stdc++.h>
using namespace std;
// UVA 10347 - Medians.  Given the three medians of a triangle, its area is
// 4/3 times the area of the triangle formed by the medians (Heron's
// formula on s).  An impossible triangle makes the sqrt argument negative,
// producing NaN; NaN > 0 is false, so "-1.000" is printed.
int main()
{
double a,b,c,s,d,area;
while(cin>>a>>b>>c)
{
s=(a + b + c)/2;
area = (4.0/3.0) * sqrt(s*(s-a)*(s-b)*(s-c));
if(area>0)
printf("%.3lf\n",area);
else
printf("-1.000\n");
}
return 0;}
<file_sep>/12578.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,n;
double w,r,R,g;
cin>>n;
for(i=0;i<n;i++)
{
cin>>l;
w=0.6*(double)l;
R=0.2*(double)l;
r=acos(- 1)*R*R;
g=(w*(double)l)-r;
printf("%.2lf %.2lf\n",r,g);
}
return 0;
}
<file_sep>/10611.cpp
#include<bits/stdc++.h>
using namespace std;
int a[50005],b[50005],x,i,j,k,l,m,h,n,q,cnt;
void b_search(int low,int high,int t)
{
int mid=0,f=0,tl=0,th=0,tmid=0;
tmid=(low+high)/2;
while(low<high)
{
mid=(low+high)/2;
//cout<<mid;
if(t==b[mid])
{
low=mid-1;
//tl=low;
high=mid+1;
//th=high;
f=1;
break;
}
if(t>b[mid])
low=mid+1;
if(t<b[mid])
high=mid-1;
//printf("%d %d\n",low,high);
}
if(f==1)
{
printf("%d %d\n",b[low],b[high]);
//cout<<low<<" "<<high<<endl;
}
else
{
if((low==high))
{
if((t>b[low]))
printf("%d %d\n",b[low],b[high+1]);
if((t<b[low]))
printf("%d %d\n",b[low-1],b[high]);
if (t==b[low])
printf("%d %d\n",b[low-1],b[high+1]);
}//*/
else
printf("%d %d\n",b[low-1],b[high+1]);
}
}
int main()
{
cnt=1;
scanf("%d",&n);
//cin>>n;
{
for(i=0; i<n; i++)
scanf("%d",&a[i]);
//cin>>a[i];
b[0]=a[0];
for(k=1; k<n; k++)
{
if(a[k]!=a[k-1])
{
b[cnt]=a[k];
cnt++;
}
else
continue;
}
// for(int s=0;s<cnt;s++)
//cout<<b[s]<<endl;
//cout<<cnt;
cin>>q;
for(j=0; j<q; j++)
{
//cin>>x;
scanf("%d",&x);
if(cnt==1 && x==b[0])
printf("X X\n");
else if(x==b[0])
printf("X %d\n",b[1]);
else if(x<b[0])
printf("X %d\n",a[0]);
else if(x==b[cnt-1])
printf("%d X\n",b[cnt-2]);
else if(x>b[cnt-1])
printf("%d X\n",b[cnt-1]);
else
{
b_search(0,cnt-1,x);
}
}
return 0;
}
}
<file_sep>/11559.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,a,b,c,p,w,d,e,f,g,h,cost,costh,mn,bd,total;
while(cin>>p>>b>>h>>w)
{ mn=500001;
for(j=1; j<=h; j++)
{
costh=0;
cin>>cost;
for(i=1; i<=w; i++)
{
cin>>bd;
if(bd>=p)
{
total=cost*p;
mn=min(mn,total);
costh=mn;
break;
}
}
}
if(costh<=b && costh!=0)
cout<<costh<<endl;
else
cout<<"stay home"<<endl;
}
return 0;
}
<file_sep>/10924.c
#include<stdio.h>
#include<string.h>
#include <math.h>
int main()
{
char n1[100];
int ln1,ln2,t1,t2,sum,sum2,i,r,c,d,j,l;
while(gets(n1))
{
ln1=strlen(n1);
c=0;
for(i=0; i<ln1; i++)
{
if(n1[i]>='a' && n1[i]<='z')
c+=n1[i]-96;
else if(n1[i]>='A' && n1[i]<='Z')
c+=n1[i]-38;
}
d=0;
for(i=2;i<=sqrt(c);i++)
{if(c%i==0)
d=1;
}
if(d==0||c==1)
printf("It is a prime word.\n");
else
printf("It is not a prime word.\n");
}
return 0;
}
<file_sep>/10302.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long double i,j,k,l,m,n;
while(cin>>n)
{
m=((n*(n+1)/2));
l=m*m;
printf("%.0Lf\n",l);
}
return 0;
}
<file_sep>/leap year (2).c
#include<stdio.h>
#include<string.h>
int jaj(char y[],int z,int p)
{
int c=0,i;
for(i=0; i<z; i++)
{
c=((c*10)+ (y[i]-48))% p;
}
if(c==0)
return 0;
else
return 1;
}
int main()
{
char year[100000];
int flag,ln,leap;
int print=0;
while(gets(year))
{
if(print!=0)
printf("\n");
print=1;
leap = flag = 0;
ln=strlen(year);
if((jaj(year,ln,4)==0 && jaj(year,ln,100) !=0)||(jaj(year,ln,400)==0))
{
leap=1;
flag=1;
printf("This is leap year.\n");
}
if(jaj(year,ln,15)==0)
{
flag=1;
printf("This is huluculu festival year.\n");
}
if( leap==1 && jaj(year,ln,55)==0)
{
flag=1;
printf("This is bulukulu festival year.\n");
}
if(flag==0)
printf("This is an ordinary year.\n");
}
return 0;
}
<file_sep>/113powerof cryptography.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
double n,a,i,j,k,l,y,p;
double x,result;
while(cin>>n>>p)
{ x= (log(p)/n);
result= exp(x);
printf("%.0lf",result);
}
return 0;
}
<file_sep>/474.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
/*
number of digit in a number=floor(log10(number)+1) here log10=log and log=ln
*/
int main()
{
long i,j,k,l,m,t;
double n,e,x,y;
while(cin>>n)
{
y=floor(n*log10(2)+1);
e=((y-n)*log(2))+(y*log(5));
x=exp(e);
printf("2^-%.0lf = %lfe-%.0lf\n",n,x,y);
}
return 0;
}
<file_sep>/Hashmot.c
#include<stdio.h>
int main()
{
long int h,o;
while(scanf("%ld %ld",&h,&o)==2)
{
if(h>o)
printf("%ld\n",h-o);
else
printf("%ld\n",o-h);
}
return 0;
}
<file_sep>/12614earnforfuture.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long long i,j,k,l,m,n,t,x,y,max;
cin>>t;
for(i=1;i<=t;i++)
{max=0;
cin>>n;
for(j=1;j<=n;j++)
{
cin>>m;
if(m>max)
max=m;
}
printf("Case %lld: %lld\n",i,max);
}
return 0;}
<file_sep>/max.c
#include<stdio.h>
int main()
{
long a[100];
long sum,max,i,j;
sum=0;
max=-10000000;
for(i=1;i<=5;i++)
scanf("%ld",&a[i]);
for(j=1;j<=5;j++)
{
sum=sum+a[j];
if(sum<0)
sum=0;
if(max<sum)
max=sum;
}
printf("%ld",max);
return 0;
}
<file_sep>/11044.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
double i,j,l;
int m,n,x;
cin>>x;
for(int k=0; k<x; k++)
{
cin>>i>>j;
m=ceil((i-2)/3);
n=ceil((j-2)/3);
cout<<m*n<<endl;
}
return 0;
}
<file_sep>/bafana bafana.c
#include<bits/stdc++.h>
using namespace std;
int main()
{
int x,i,n,k,p,r;
scanf("%d",&x);
for(i=1; i<=x; i++)
{
scanf("%d %d %d",&n,&k,&p);
r=k+p;
while(r>n)
{
r=r-n;
}
printf("Case %d: %d\n",i,r);
}
return 0;
}
<file_sep>/12502three families.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int t,k,l,i,j,q,d,a,b,c,s;
cin>>t;
for(i=1;i<=t;i++)
{
cin>>a>>b>>c;
s=c*(2*a-b)/(a+b);
cout<<s<<endl;
}
return 0;
}
<file_sep>/abstract name.c
#include<stdio.h>
#include<string.h>
int main()
{
int i,j,k,ln1,ln2,a,b,flag;
char sajib[30];
char jaj[30];
scanf("%d",&k);
for(j=1; j<=k; j++)
{
scanf("%s%s",sajib,jaj);
ln1=strlen(sajib);
ln2=strlen(jaj);
if(ln1!=ln2)
printf("No\n");
else
{
for(i=0; i<ln1; i++)
{
if(sajib[i]==jaj[i])
{
flag=1;
}
else if((sajib[i]=='a' || sajib[i]=='e' || sajib[i]=='i' || sajib[i]=='o' || sajib[i]=='u' ) && (jaj[i]=='a' || jaj[i]=='e' || jaj[i]=='i' || jaj[i]=='o' || jaj[i]=='u'))
{
flag=1;
}
else if(sajib[i]!=jaj[i])
{flag=0;
break;}
}
if(flag==0)
printf("No\n");
else
printf("Yes\n");
}
}
return 0;
}
<file_sep>/458.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
char s;
int i,l=0;
while(1)
{
while((s=getchar())!='\n')
{
printf("%c",s-7);}
printf("\n");
}
return 0;
}
<file_sep>/A Change in Thermal Unit.c
#include<stdio.h>
int main()
{
int x,i;
float f,c,d,e;
scanf("%d",&x);
for(i=1;i<=x;i++)
{
scanf("%f %f",&c,&d);
e=(d*5)/9;
f=(e+c);
printf("Case %d: %.2f\n",i,f);
}
return 0;}
<file_sep>/11219.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,a,b,c,t,year,month,date,cd,cm,cy,bd,bm,by;
cin>>t;
for(i=1;i<=t;i++)
{
//cout<<endl;
scanf("%d/%d/%d",&cd,&cm,&cy);
scanf("%d/%d/%d",&bd,&bm,&by);
if(cd<bd)
{
cd=cd+30;
cm=cm-1;
}
date=cd-bd;
if(cm<bm)
{
cm=cm+12;
cy=cy-1;
}
month=cm-bm;
year=cy-by;
if(year<0)
printf("Case #%d: Invalid birth date\n",i);
else if(year>130)
printf("Case #%d: Check birth date\n",i);
else
printf("Case #%d: %d\n",i,year);
}
return 0;
}
<file_sep>/10286.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define Ra(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define Dg(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
int main()
{
double i, j, k, l, m;
while(cin>>m)
{
// l= m*1.0673956817111818692592637626711;
l= m*sin(Ra(108))/sin(Ra(63));
printf("%.10lf\n",l);
}
/*
*/
return 0;
}
<file_sep>/The Lazy Lumberjacks.c
#include<stdio.h>
int main()
{
int i,a,b,c,d,e;
scanf("%d",&e);
for(i=1;i<=e;i++)
{scanf("%d%d%d",&a,&b,&c);
if(((a+b)>c)&&((b+c)>a)&&((c+a)>b))
printf("OK\n");
else
printf("Wrong!!\n");
}
return 0;
}
<file_sep>/11057.c
#include<stdio.h>
int main()
{
int a[100];
int x,i,j ,m,k,l;
while(scanf("%d",&x)==1)
{
{
for(i=0; i<x; i++)
scanf("%d",&a[i]);
}
scanf("%d",&m);
for(j=0; j<x-1; j++)
{
{
for(k=j+2; k<=x; k++)
if(a[j]+a[k-1]!=m)
{
printf("Peter should buy books whose prices are 0 and 0.\n");
break;
}
else
printf("Peter should buy books whose prices are %d and %d.\n",a[j],a[k-1]);
}
}
printf("\n");
}
return 0;
}
<file_sep>/10346.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,x,y,t;
while(scanf("%d %d",&m,&n)==2&&n>1)
{
y=m;
while(m>=n)
{
y= y+m/n;
m=(m/n)+(m%n);
}
cout<<y<<endl;
}
return 0;
}
<file_sep>/Main.java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
public class Main {
public static void main(String[] args) throws IOException {
InputStreamReader isr = new InputStreamReader(System.in);
BufferedReader br = new BufferedReader(isr);
StringBuffer sb = new StringBuffer("");
String m = "";
while ((m = br.readLine()) != null) {
StringTokenizer st=new StringTokenizer(m);
String[] str = new String[st.countTokens()];
for(int i=0;i<str.length;i++){
str[i]=st.nextToken();
}
int baseFrom=Integer.parseInt(str[1]);
int baseTo=Integer.parseInt(str[2]);
int temp=Integer.parseInt(str[0],baseFrom);
StringBuilder result=new StringBuilder(Integer.toString(temp, baseTo));
for(int i=result.length();i<7;i++){
result.insert(0, ' ');
}
if(result.length()==7)
sb.append(result.toString().toUpperCase()).append("\n");
else
sb.append(" ERROR\n");
}
System.out.print(sb);
}
}<file_sep>/12405.cpp
#include<bits/stdc++.h>
using namespace std;
int main(){
int i,j,k,l,m,answer,answer1,ans,ans1;
double an,bn;
char a[110];
cin>>m;
freopen("o.txt","w",stdout);
for(i=1;i<=m;i++)
{ans=0;
ans1=0;
answer=0;
cin>>l;
for(j=0;j<l;j++)
{cin>>a[j];
}
k=0;
while(k<l)
{
if(a[k]=='.')
{ans=ans+1;
k=k+3;
}
else
k++;
}
// cout<<ans<<endl;
//an=(float)ans/2;
//bn=(float)ans1/3;
// answer1=ceil(bn);
// cout<<an<<endl;
//answer=floor(an);
//if(answer==0)
//printf("Case %d: 0\n",i);
//else
printf("Case %d: %d\n",i,ans);
//cout<<answer<<endl
}
return 0;}
<file_sep>/272.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
char s[1000];
long i,p=0,l;
while(cin>>s)
{
l=strlen(s);
for(i=0;i<l;i++)
{
if(s[i]=='"')
{
p=p+1;
if(p%2==1)
printf("``");
else
printf("''");
}
else
cout<<s[i];
}
cout<<endl;
}
return 0;
}
<file_sep>/11000.cpp
#include<bits/stdc++.h>
using namespace std;
long bee[60];
void beeanc()
{
bee[0]=0;
bee[1]=1;
for(int i=2; i<=60; i++)
bee[i]=bee[i-1]+bee[i-2]+1;
}
/*
long long bee(long long n)
{
if(n==0||n==1)
return n;
else
return bee(n-1)+bee(n-2)+1;
}
long long a=0,b=1, c;
for( i=2;i<=m+1;i++)
{
c=a+b+1;
a=b;
b=c;
}
cout<<a<<" "<<b<<endl;
*/
int main()
{
beeanc();
long i,j,k,l,m;
while(cin>>m && m!=-1)
{
k=bee[m];
l=bee[m+1];
cout<<k<<" "<<l<<endl;
}
return 0;
}
<file_sep>/you can say 11.c
#include<bits/stdc++.h>
using namespace std;
int main()
{
char num[1000];
int i,c,ln;
while(gets(num)!=NULL && (num[0]!='0' || strlen(num)>1))
{
ln=strlen(num);
c=0;
for(i=0; i<ln; i++)
{
c = (num[i]-'0' + c * 10)%11;
}
if(c==0)
printf("%s is a multiple of 11.\n",num);
else
printf("%s is not a multiple of 11.\n",num);
}
return 0;
}
<file_sep>/12279.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,w,c,d;
int a[1010];
w=1;
while(cin>>n)
{
if(n==0)
break;
c=0;
d=0;
for(i=0; i<n; i++)
cin>>a[i];
for(i=0; i<n; i++)
{
if(a[i]>0)
c++;
else
d++;
}
m=c-d;
cout<<"Case"<<' '<<w<<":"<<' '<<m<<endl;
w++;
}
return 0;
}
<file_sep>/369combination.cpp.cpp
#include<bits/stdc++.h>
using namespace std;
#define ll long
ll dp[1000][1000];
ll nCr(ll n,ll r)
{
if(r==1)
return n;
if(n==r)
return 1;
if(dp[n][r]!=-1)
return dp[n][r]; //ভ্যালু টেবিলে থাকলে নতুন করে হিসাব করা দরকার নেই,ভ্যালুটা রিটার্ণ করে দাও
else
{
dp[n][r]=nCr(n-1,r)+nCr(n-1,r-1); //ভ্যালু টেবিলে সেভ করে রাখো
return dp[n][r];
}
}
int main()
{
//init dp table with -1
//for(int i=0; i<1000; i++)
//for(int j=0; j < 1000; j++)
//dp[i][j]=-1;
ll n,r,ans,a,b,c;
while(cin>>n>>r)
{
memset(dp,-1,sizeof(dp));
if(n==0 && r==0)
break;
ans=nCr(n,r);
printf("%ld things taken %ld at a time is %ld exactly.\n",n,r,ans);
}
return 0;
}
<file_sep>/flipsort.c
#include<stdio.h>
int main()
{
int a[1000];
int j,n,i,k,m,temp,c,l,t,flag,s,q;
while(scanf("%d",&q)==1)
for(s=1;s<=t;s++)
{
while(scanf("%d",&n)==1)
{
c=0;
for(i=1; i<=n; i++)
scanf("%d",&a[i]);
for(j=1; j<=n-1; j++)
{
flag=0;
for(k=1; k<=n-j; k++)
if(a[k]>a[k+1])
{
temp=a[k];
a[k]=a[k+1];
a[k+1]=temp;
c=c+1;
flag=1;
}
if(flag==0)
break;
}
printf("Minimum exchange operations : %d\n",c);
}}
return 0;
}
<file_sep>/modular.c
#include<stdio.h>
int main()
{
int fact=1,i;
for( i=1;i<=100;i++)
{
fact=((fact%97)*(i%97))%97;
}
printf("%d\n",fact);
return 0;}
<file_sep>/12015.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
char a[101][101];
int b[50];
int t,i,j,k,l,n;
cin>>t;
for(i=1;i<=t;i++)
{ int m=0;
for(j=0;j<10;j++)
{
scanf("%s %d",a[j],&b[j]);
m=max(m,b[j]);
}
printf("Case #%d:\n",i);
for(k=0;k<10;k++)
if(m==b[k])
printf("%s\n",a[k]);
}
return 0;
}
<file_sep>/10550.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,a,b,c,d,ans1,ans2,ans3,ans,ans4;
while(cin>>i>>a>>b>>c)
{
if(i==0&&a==0&&b==0&c==0)
break;
ans=1080;
if(i<a)
{
ans1=((40+i)-a)*9;
// cout<<ans1<<endl;
}
else
ans1=(i-a)*9;
if(a>b)
ans2=((b+40)-a)*9;
else
ans2=(b-a)*9;
// cout<<ans2<<endl;
if(b<c)
ans3=((40+b)-c)*9;
//cout<<ans3<<endl;
else
ans3=(b-c)*9;
ans4=ans+ans1+ans2+ans3;
cout<<ans4<<endl;
}
return 0;
}
<file_sep>/12403.cpp
#include <bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,c=0,x,s=0;
char ch[10];
char cd[12];
cin>>n;
for(i=1; i<=n; i++)
{
scanf("%s",ch);
x=strcmp(ch,"donate");
if (x==0)
{ cin>>j;
c=c+j;
}
else
{
cout<<c<<endl;
}
}
return 0;
}
<file_sep>/11609.cpp
#include<bits/stdc++.h>
using namespace std;
#define MOD 1000000007
long long bigmod(long long b,long long p,long long m)
{
long long c;
if(p==0)
return 1;
else if(p%2==0)
{
c=bigmod(b,p/2,m);
return (c*c) %m ;
}
else
return ((b%m)*bigmod(b,p-1,m))%m;
}
int main()
{
long long i,j,k,l,m,n,o,p,h,fh,fm;
long long x1,x2,y1,y2,t;
cin>>t;
for(i=1;i<=t;i++)
{
cin>>n;
m=bigmod(2,(n-1),MOD);
p=((n%MOD)*m)%MOD;
printf("Case #%lld: %lld\n",i,p);
}
return 0;
}
<file_sep>/11388gcdlcm.cpp
#include<bits/stdc++.h>
using namespace std;
#define ull unsigned long long
ull gcd(ull a, ull b) {
return b == 0 ? a : gcd(b, a % b);
}
int main()
{
int t,k,i,j,q,d;
ull a,b,g,l;
cin>>t;
for(i=1;i<=t;i++)
{
cin>>a>>b;
g=gcd(a,b);
l=(a*b)/g;
if((a==g)&&(b==l))
cout<<g<<' '<<l<<endl;
else
cout<<"-1"<<endl;
}
return 0;
}
<file_sep>/3n+1.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int m,n;
while(scanf("%d %d",&n,&m)==2)
{int a=n;
int b=m;
int s=0;
if(n>m)
swap(n,m);
for(int i=n; i<=m; i++)
{
int c=1;
unsigned long int j=i;
while(j!=1)
{
if(j%2==0)
j=j/2;
else
j=(3*j)+1;
c++;
}
if(c>s)
s=c;
}
printf("%d %d %d\n",a,b,s);
}
return 0;
}
<file_sep>/12439.cpp
#include<bits/stdc++.h>
using namespace std;
int year(char a[])
{
if(strcmp(a,"January")==0)
return 1;
if(strcmp(a,"February")==0)
return 2;
if(strcmp(a,"March")==0)
return 3;
if(strcmp(a,"April")==0)
return 4;
if(strcmp(a,"May")==0)
return 5;
if(strcmp(a,"June")==0)
return 6;
if(strcmp(a,"July")==0)
return 7;
if(strcmp(a,"August")==0)
return 8;
if(strcmp(a,"September")==0)
return 9;
if(strcmp(a,"October")==0)
return 10;
if(strcmp(a,"November")==0)
return 11;
if(strcmp(a,"December")==0)
return 12;
}
int main()
{
char mm[20];
char nn[20];
int i,j,k,l,n,t,d,m,D,y,Y;
int ans,ans4,ans100,ans400,answer;
cin>>t;
for(i=1;i<=t;i++)
{ getchar();
cin>>mm;
scanf("%d,%d",&d,&y);
getchar();
cin>>nn;
scanf("%d,%d",&D,&Y);
if(year(mm)>2)
y++;
if((year(nn)==1)||(year(nn)==2&&D<29))
Y--;
// cout<<y<<endl;
//cout<<Y<<endl;
ans4= Y/4-(y-1)/4;
ans100= Y/100-(y-1)/100;
ans400= Y/400-(y-1)/400;
//cout<<ans4<<" "<<ans100;
answer=(ans4-ans100)+ans400;
printf("Case %d: %d\n", i, answer);
// printf("%d\n",answer);
}
return 0;
}
<file_sep>/11716.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
int main()
{
char s[100000];
int t,i,j,k,l,e,n,z;
cin>>t;
for(i=1; i<=t; i++)
{ getchar();
scanf ("%[^\n]", s);
l= strlen(s);
z=sqrt(l);
if(l==(z*z))
{
for(j=0;j<z;j++)
for(k=j;k<l;k=k+z)
cout<<s[k];
}
else
cout<<"INVALID";
cout<<endl;
}
return 0;
}
<file_sep>/11152.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
int main()
{
double x,y,z,s,r,t,aic,aoc,at,k,sr,br;
while(cin>>x>>y>>z)
{
k=(x+y+z)/2;
s=sqrt(k*(k-x)*(k-y)*(k-z));
sr=s/k;
aic=sr*sr*Pi;
br=(x*y*z)/(sqrt((x+y+z)*(x+y-z)*(x+z-y)*(y+z-x)));
at=(s-aic);
aoc=(Pi*br*br)-s;
printf("%.4lf %.4lf %.4lf\n",aoc,at,aic);
}
return 0;
}
<file_sep>/10006.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int a[]= {561,1105,1729,2465,2821,6601,8911,10585,15841,29341,41041,46657,52633,62745,63973};
int i,j,k,l,m,n,f;
while(cin>>n && n!=0)
{
f=0;
for( i=0; i<15; i++)
{
if(a[i]==n)
{
f=1;
break;
}
}
if(f==1)
printf("The number %d is a Carmichael number.\n",n);
else
printf("%d is normal.\n",n);
}
return 0;
}
<file_sep>/12250.c
#include<stdio.h>
#include<string.h>
int main()
{
char a[30];
int x,y,i,j,k=1,l;
i=1;
while(gets(a))
{
if(a[0]=='#')
break;
if(strcmp(a,"HELLO")==0)
printf("Case %d: ENGLISH\n",i);
else if(strcmp(a,"HOLA")==0)
printf("Case %d: SPANISH\n",i);
else if(strcmp(a,"HALLO")==0)
printf("Case %d: GERMAN\n",i);
else if(strcmp(a,"BONJOUR")==0)
printf("Case %d: FRENCH\n",i);
else if(strcmp(a,"CIAO")==0)
printf("Case %d: ITALIAN\n",i);
else if(strcmp(a,"ZDRAVSTVUJTE")==0)
printf("Case %d: RUSSIAN\n",i);
else
printf("Case %d: UNKNOWN\n",i);
i++;
}
return 0;
}
<file_sep>/10323.cpp
#include<bits/stdc++.h>
using namespace std;
long long fac(int n)
{
int i,res=1;
for(i=1;i<=n;i++)\
res*=i;
return res;
}
int main()
{ long long i,j,k,l,m;
while(cin>>m)
{
if(m<0)
{
long long g=(-1)*m;
if(g%2==0)
cout<<"Underflow!"<<endl;
else
cout<<"Overflow!"<<endl;
}
else
{
if(m>13)
cout<<"Overflow!"<<endl;
else if(m<8)
cout<<"Underflow!"<<endl;
else if(m==13)
cout<<"6227020800"<<endl;
else
{
k=fac(m);
cout<<k<<endl;
}
}
}
return 0;
}
<file_sep>/10696.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long i,j,k,l,m,n;
while(cin>>n&&n!=0)
{
if(n>=101)
{
printf("f91(%ld) = %ld\n",n,n-10);
}
else
printf("f91(%ld) = 91\n",n);
}
return 0;
}
<file_sep>/skewbinary.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
char ch[100000];
while(gets(ch))
{
long long l=strlen(ch);
long long i, j,r ,b=1,e=0,d,x;
for(i=l-1; i>=0; i--)
{
x= ch[i]-'0';
d=x*(pow(2,b)-1);
b++;
e=e+d;
}
if(e==0)
break;
cout<<e<<endl;
}
return 0;
}
<file_sep>/11498.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long i,j,k,l,m,n,sum,sum2,x,y;
while(cin>>k&&k!=0)
{
cin>>m>>n;
for(i=1;i<=k;i++)
{
cin>>x>>y;
if(m==x||n==y)
{cout<<"divisa"<<endl;
continue;
}
if(x>m&&y>n)
cout<<"NE"<<endl;
if(x>m&&y<n)
cout<<"SE"<<endl;
if(x<m&&y>n)
cout<<"NO"<<endl;
if(x<m&&y<n)
cout<<"SO"<<endl;
}
}
return 0;
}
<file_sep>/uva-10281.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
string ss;
double pre,lst=0.0,update,speed=0.0,dis=0.0;
while(getline(cin,ss))
{
pre=((ss[0]-48)*10+(ss[1]-48)*1)*3600+((ss[3]-48)*10+(ss[4]-48)*1)*60+((ss[6]-48)*10+(ss[7]-48)*1)*1;
update=pre-lst;
int l=ss.length();
if(l>=8)
{
dis=dis+(update*(speed/3600.00));
if(l>8)
{
speed=0.0;
for(int i=9; i<l; i++)
{
speed=speed*10.0+((ss[i]-48)*1.0);
}
}
else
{
for(int i=0; i<=7; i++)
cout<<ss[i];
cout<<" ";
printf("%.2lf km\n",dis);
}
}
lst=pre;
}
return 0;
}
<file_sep>/11689.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,x,y,t;
cin>>t;
for(i=1; i<=t; i++)
{
scanf("%d %d %d",&m,&n,&x);
l=m+n;
y=0;
while(l>=x)
{
y= y+l/x;
l=(l/x)+(l%x);
}
cout<<y<<endl;
}
return 0;
}
<file_sep>/10222.cpp
#include<bits/stdc++.h>
using namespace std;
int main(){
char s[1000];
char a[79]="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890-=[];',. ";
char c[79]="aczaqsdfyghjbvuiqwsetxwxrzaczaqsdfyghjbvuiqwsetxwxrz121234567890opklnm ";
int i,j,k,l,m,n;
while(gets(s)){
l=strlen(s);
k=strlen(a);
for(i=0;i<l;i++)
{for(j=0;j<k;j++)
if(a[j]==s[i])
cout<<c[j];}
cout<<endl;
}
return 0;}
<file_sep>/1230.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define RADIANS(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define DEGREES(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
long bigmod(long b,long p,long m)
{
long c;
if(p==0)
return 1;
else if(p%2==0)
{
c=bigmod(b,p/2,m);
return (c*c) %m ;
}
else
return ((b%m)*bigmod(b,p-1,m))%m;
}
int main()
{ long t,i,j,k,l,m,b,p;
while(cin>>t && t!=0)
{
for(i=0;i<t;i++)
{
cin>>b>>p>>m;
l=bigmod(b,p,m);
cout<<l<<endl;
}
}
return 0;
}
<file_sep>/uva343.java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
import java.util.StringTokenizer;
/**
* Created by Sajib on 6/24/15.
*/
public class uva343 {
public static void main(String args[]) throws IOException
{
InputStreamReader ir =new InputStreamReader(System.in);
BufferedReader br =new BufferedReader(ir);
String t="";
while(( t =br.readLine())!=null)
{
StringTokenizer tr= new StringTokenizer(t);
String[] st= new String[tr.countTokens()];
for(int u=0;u <(st.length);u++) {
st[u] = tr.nextToken();
//System.out.println(st[u]);
}
//System.out.println(st[0]+" "+st[1]);
boolean f=false;
BigInteger m,n;
String a,b;
int k,l,h,j;
for( h=1;h<=36;h++)
{ try{
m= new BigInteger(st[0],h);
}
catch (Exception e)
{
continue;
}
//System.out.println(h);
for( j=1;j<=36;j++)
{
//
try{
n= new BigInteger(st[1],j);
}
catch (Exception e)
{
continue;
}
//System.out.printf("%d",j);
//n= new BigInteger(st[1],j);
if( n.compareTo(m)==0)
{ f= true;
System.out.printf("%s (base %d) = %s (base %d)\n",st[0],h,st[1],j);
break;
}
}
if(f==true)
break;
}
if(f!=true)
{
System.out.printf("%s is not equal to %s in any base 2..36\n",st[0],st[1]);
}
}
}
}
<file_sep>/egept.c
#include<stdio.h>
int main()
{
int x,y,z,u,v;
while(scanf("%d %d %d",&x,&y,&z)==3)
{
if(x==0&&y==0&&z==0)
break;
u=(x*x)+(y*y);
v=(z*z);
{if(v==u)
printf("right\n");
else
printf("wrong\n");
}
}
return 0;}
<file_sep>/egypt.c
#include<stdio.h>
int jaj(int a,int b,int c)
{
if(a>b)
{
if(a>c)
return a;
else
return c;
}
else
{if(b>c)
return b;
else
return c;}
}
int main(){
int x,y,z,u,v,m;
while(scanf("%d %d %d",&x,&y,&z)==3)
{
if(x==0&&y==0&&z==0)
break;
m=jaj(x,y,z);
if((m*m==x*x+y*y)||(m*m==x*x+z*z)||(m*m==z*z+y*y))
printf("right\n");
else
printf("wrong\n");
}
return 0;}
<file_sep>/12592.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
map<string,string>slogan;
string part1,part2,part3;
int t,i,j,k,l,m;
while(cin>>t)
{ //cin.ignore();
getchar();
for(i=0;i<t;i++)
{
getline(cin,part1);
getline(cin,part2);
slogan[part1]=part2;
}
cin>>m;
// cin.ignore();
getchar();
{
for(j=0;j<m;j++)
{getline(cin,part3);
cout<<slogan[part3]<<endl;}
}
}
return 0;
}
<file_sep>/10970.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,x,y,t;
while(scanf("%d %d",&m,&n)==2)
{
l=(m*n)-1;
cout<<l<<endl;
}
return 0;
}
<file_sep>/10469 (2).cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int n,a,i,j,k,l,x,y,result;
while(cin>>n>>a)
{
result=n^a;
cout<<result<<endl;
}
return 0;
}
<file_sep>/cost sutting.c
#include<stdio.h>
int jaj(int a,int b,int c)
{
if(a<b)
{
if(a<c)
{
if(b<c)
return b;
else
return c;
}
else
return a;
}
else if(a>b)
{
if(a>c)
{
if(b>c)
return b;
else
return c;
}
else
return a;
}
else if(c<a)
{
if(c<b)
{
if(a>b)
return b;
else
return a;
}
return c;
}
}
int main()
{
int i,j,t,x,y,z;
scanf("%d",&t);
for(i=1; i<=t; i++)
{
scanf("%d %d %d",&x,&y,&z);
{
j=jaj(x,y,z);
printf("Case %d: %d\n",i,j);
}
}
return 0;
}
<file_sep>/12751.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,z;
int n,j;
int k,l,m,b,w,a,t,x,sum;
cin>>z;
for(m=1; m<=z; m++)
{
sum=0;
l=0;
cin>>n>>k>>x;
sum=0;
for(j=1; j<=n; j++)
{
sum+=j;
}
for(i=x; i<=(x+k-1); i++)
{
l=l+i;
}
printf("Case %d: %d\n",m,sum-l);
}
return 0;
}
<file_sep>/495.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int n,i,j,k,l;
char a[10];
cin>>n;
getchar();
for(i=0; i<n; i++)
{
gets(a);
l=strlen(a);
if(l==3 &&((a[1]=='n'&&a[2]=='e')||(a[0]=='0'&&a[1]=='n')||(a[0]=='o'&&a[2]=='e')))
cout<<'1'<<endl;
if(((a[0]=='o'&&a[1]=='o'&&a[2]=='w')||((a[0]=='t'&&a[1]=='o')||a[2]=='w')||((a[0]=='t'&&a[2]=='w')||a[1]=='o')))
cout<<'2'<<endl;
if(l==5 )
cout<<'3'<<endl;
}
return 0;
}
<file_sep>/add all.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int x,n,j,sum,s;
while(cin>>n){
if(n==0)
break;
vector <int> vt;
vector <int> t;
for(int i=0; i<n; i++)
{
cin>>x;
vt.push_back(x);
}
sort( vt.begin(), vt.end() );
j=0;
sum=0;
s=0;
for(int i=0; i<n; i++)
{
sum=sum+vt[i];
t.push_back(sum);
j++;
}
for(int i=1; i<j; i++)
{
s=s+t[i];
}
cout<<s<<endl;
}
return 0;
}
<file_sep>/hello world.c
#include<stdio.h>
int main()
{
int i,x,z,N,c=1;
while(scanf("%d",&N)==1)
{
if(N<1)
break;
x=1;
for(i=1; i<=N; i++)
{
z=x+i;
if(z>=N)
break;
else
x=z;
}
printf("Case %d: %d\n",c,z-x);
c++;
}
return 0;
}
<file_sep>/uva 11057.cpp
#include<bits/stdc++.h>
using namespace std;
int main(){
int a[10010];
int i,j,k,l,m,n,o,t,p;
freopen("in.txt","r",stdin);
freopen("o.txt","w",stdout);
while(cin>>t){
m=0;
n=0;
for(i=0;i<t;i++)
cin>>a[i];
cin>>p;
sort(a,a+t);
for(j=0;j<t-1;j++)
{
for(k=j+1;k<t;k++)
{
if(a[j]+a[k]==p)
{
m=a[j];
n=a[k];
}
}
}
printf("Peter should buy books whose prices are %ld and %ld.\n\n",m,n);
}
return 0;}
<file_sep>/10038.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long i,j,k,l,m,n;
long a[3000];
long b[3000];
while(cin>>n)
{
int f=1;
for(i=0;i<n;i++)
cin>>a[i];
for(j=0;j<n-1;j++)
{
l=abs(a[j+1]-a[j]);
b[j]=l;
//cout<<b[j]<<j;
}
//sort(b,b+(n-1));
sort(b,b+(n-1));
for(l=1; l<n; l++)
{
//cout<<b[l];
if(b[l-1]!=l)
{
f=0;
break;
}
}
if(f==1)
cout<<"Jolly"<<endl;
else
cout<<"Not jolly"<<endl;
}
return 0;
}
<file_sep>/12149.cpp
#include<bits/stdc++.h>
using namespace std;
#define Pi acos(-1)
#define Ra(x) (((1.0 * x * Pi) / 180.0)) // degree to radian
#define Dg(x) (((x * 180.0) / (1.0 * pi))) //radian to degree
int main()
{
long i,j,k,l,m,n;
while(cin>>n && n!=0)
{
l=((n*((2*n)+1)*(n+1))/6);
cout<<l<<endl;
}
/*
*/
return 0;
}
<file_sep>/11764.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int t,k;
int a[100];
cin>>t;
for(int i=1;i<=t;i++)
{ int h=0,l=0;
cin>>k;
for(int j=0;j<k;j++)
cin>>a[j];
for(int m=0;m<k-1;m++)
{if(a[m]>a[m+1])
l++;
else if(a[m]<a[m+1])
h++;
}
printf("Case %d: %d %d\n",i,h,l);
}
return 0;
}
<file_sep>/banglawash.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,z;
cin>>z;
for(i=1; i<=z; i++)
{
int n,j;
int k,l,m,b,w,a,t;
char g[30];
a=0;
b=0;
w=0;
t=0;
cin>>n;
cin>>g;
for(j=0; j<n; j++)
{
if(g[j]=='B')
b++;
else if(g[j]=='W')
w++;
else if(g[j]=='A')
a++;
else if(g[j]=='T')
t++;
}
{
if((b+a) ==n &&b !=0)
printf("Case %d: BANGLAWASH\n",i);
else if( w+a == n&&w!=0)
printf("Case %d: WHITEWASH\n",i);
else if(a==n && a!=0)
printf("Case %d: ABANDONED\n",i);
else if(b>w)
printf("Case %d: BANGLADESH %d - %d\n",i,b,w);
else if(w>b)
printf("Case %d: WWW %d - %d\n",i,w,b);
else if(b==w)
printf("Case %d: DRAW %d %d\n",i,b,w);
}
}
return 0;
}
<file_sep>/logical operator.c
#include<stdio.h>
int main()
{
long int i,n,x,y;
scanf("%d",&n);
for(i=1; i<=n; i++)
{
scanf("%ld %ld",&x,&y);
if(x>y)
printf(">\n");
else if(x<y)
printf("<\n");
else
printf("=\n");
}
return 0;
}
<file_sep>/591.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,x,y,t,z;
int a[1000];
l=1;
while(scanf("%d",&n)==1)
{
if(n==0)
break;
m=0;
z=0;
for(i=0; i<n; i++)
cin>>a[i];
{
for(i=0; i<n; i++)
m=m+a[i];
}
x=m/n;
for(i=0; i<n; i++)
{
if(a[i]>x)
{ y=0;
y=a[i]-x;
z=z+y;}
}
printf("Set #%d\n",l);
cout<<"The minimum number of moves is "<<z<<'.'<<endl<<endl;
l++;
}
return 0;
}
<file_sep>/12157.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,x,c,y,z,t,mile,juice,m,n;
cin>>t;
for(i=1;i<=t;i++)
{ cin>>n;
mile=0,juice=0;
for(y=1;y<=n;y++)
{
cin>>x;
mile+=(x+30)/30;
juice+=(x+60)/60;
}
m=mile*10;
j=juice*15;
if(m<j)
printf("Case %d: Mile %d\n",i,m);
else if(m>j)
printf("Case %d: Juice %d\n",i,j);
else
printf("Case %d: Mile Juice %d\n",i,m);
}
return 0;
}
<file_sep>/uva401.cpp
#include<bits/stdc++.h>
using namespace std;
bool mirror(string test)
{
bool flag=1;
char st[1000];
st['A']='A';
st['E']='3';
st['H']='H';
st['I']='I';
st['J']='L';
st['L']='J';
st['M']='M';
st['O']='O';
st['S']='2';
st['T']='T';
st['U']='U';
st['V']='V';
st['W']='W';
st['X']='X';
st['Y']='Y';
st['Z']='5';
st['1']='1';
st['2']='S';
st['5']='Z';
st['3']='E';
st['8']='8';
int l;
l=test.length();
for(int i=0; i<l; i++)
{
//cout<<st[test[i]]<<endl;
//cout<<test[l-i-1]<<endl;
if(st[test[i]]==test[l-i-1])
flag=1;
else
{
flag=0;
break;
}
}
return flag;
}
int main()
{
string s;
string t;
bool f;
freopen("input.txt","r",stdin);
freopen("output.txt","w",stdout);
while(cin>>s)
{
f=mirror(s);
t=string(s.rbegin(),s.rend());
if(f==0 && t!=s)
cout<<s<<" -- is not a palindrome."<<endl<<endl;
else if(f==0 && t==s)
cout<<s<<" -- is a regular palindrome."<<endl<<endl;
else if(f==1 && t!=s)
cout<<s<<" -- is a mirrored string."<<endl<<endl;
else if(f==1 && t==s)
cout<<s<<" -- is a mirrored palindrome."<<endl<<endl;
}
}
<file_sep>/11875.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{ int t ,i,j,k,l,m,n,x;
int p[11];
cin>>t;
for(i=1;i<=t;i++)
{
cin>>m;
for(j=1;j<=m;j++)
cin>>p[j];
x=(m/2)+1;
printf("Case %d: %d\n",i,p[x]);
}
return 0;
}
<file_sep>/12646.c
#include<stdio.h>
int main()
{
int i,j,k,l,m,n,s,c;
while(scanf("%d %d %d",&n,&m,&k)==3)
{
{
if(n==0&&m==0&&k==0)
break;
}
if((n*n)+(m*m)==(k*k))
printf("right\n");
else if(n*n+k*k==m*m)
printf("right\n");
else if(k*k+m*m==n*n)
printf("right\n");
else
printf("wrong\n");
}
return 0;
}
<file_sep>/pizza cutting.c
#include<stdio.h>
int main()
{
long i,n,j,m,k,sum;
while(scanf("%ld",&n)==1)
{if(n<0)
break;
{
sum=0;
for(i=1;i<=n;i++)
sum=(sum+i);
printf("%ld\n",sum+1);}
}
return 0;
}
<file_sep>/11150.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
int i,j,k,l,m,n,y,t,x,z,w;
while(cin>>m)
{
l=m;
z=0;
y=0;
if(m==2){
w=3;
cout<<w<<endl;}
else{
while(m>=3)
{
z=z+m/3;
m=(m/3)+(m%3);
if(m==2)
y++;
}
w=(z+y)+l;
cout<<w<<endl;
}
}
return 0;
}
<file_sep>/README.md
# UVA_solve_code
Due to my laptop hard drive crash, I had not recovered some of my uva solve codes. So here some of the files are corrupted
<file_sep>/uva gcd.c
#include<stdio.h>
int gcd(a,b)
{
int temp;
while(a!=0)
{
temp=a;
a=b%a;
b=temp;
}
return b;
}
int main()
{
int i,j,k,l,m,n;
for(i=1; i<=100; i++)
{
scanf("%d",&n);
if(n==0)
break;
int g=0;
for(j=1; j<n; j++)
for(k=j+1; k<=n; k++)
{g+=gcd(j,k);
}
printf("%d\n",g);
}
return 0;
}
<file_sep>/uva 856.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
// age khatai simulation kore nite hobe.....koto difference er jonno koto step ashe
int i,j,k,l,m,n,t,dif,a;
float x,y,z;
//freopen ("input.txt","r",stdin);
//freopen ("output.txt","w",stdout);
cin>>t;
for(i=1;i<=t;i++)
{
cin>>n>>m;
dif=m-n;
x=sqrt(dif);
y=floor(x);
z=x-y;
if(z>=0.5)
{
a=ceil(x);
cout<<((a*2)-1)<<endl;
}
else
{
a=floor(x);
cout<<a*2<<endl;
}
}
return 0;
}
<file_sep>/11479.cpp
#include<bits/stdc++.h>
//#include<vector>
using namespace std;
int main()
{
int x,n,a,b,c,i;
cin >> n;
for(i=1;i<=n;i++){
scanf("%d%d%d",&a,&b,&c);
if((a+b)<=c||(b+c)<=a||(c+a)<=b)
printf("Case %d: Invalid\n",i);
else if(a<=0||b<=0||c<=0)
printf("Case %d: Invalid\n",i);
else if(a==b && b==c)
printf("Case d: Equilateral\n",i);
else if(a==b||b==c||c==a)
printf("Case %d: Isosceles\n",i);
else
printf("Case %d: Scalene\n",i );
}
return 0;}
<file_sep>/10773.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long i,j,k,l,m,n,t;
double d,v,u,s_time,s_path;
cin>>t;
for(i=1; i<=t; i++)
{
cin>>d>>v>>u;
if((v>=u)||(v<=0)||(u<=0)||(d<=0))
printf("Case %ld: can't determine\n",i);
else
{
s_time=(d/u);
s_path=abs(d/sqrt((u*u)-(v*v)));
double result;
result= abs(s_path-s_time);
printf("Case %ld: %.3lf\n",i,result);
}
}
return 0;
}
<file_sep>/11614.cpp
#include<bits/stdc++.h>
using namespace std;
int main()
{
long long t,i,j,k,l,m,n;
cin>>t;
for(i=1;i<=t;i++)
{
cin>>n;
m=sqrt(1+(8*n));
l=(-1+m)/2;
printf("%lld\n",l);
}
return 0;
}
<file_sep>/579.cpp
#include<bits/stdc++.h>
using namespace std;
/* UVa 579 -- ClockHands: for each "h:m" time, print the smaller angle
 * between the hour and minute hands with three decimals. Input ends at
 * the sentinel 0:00. */
int main()
{
double i ,j,k,l,m,n;
while(scanf("%lf:%lf",&i,&j))
{
/* Sentinel check. NOTE(review): scanf returns EOF (truthy here) at end
 * of input, so a file without the 0:00 sentinel would loop forever --
 * the judge's input guarantees the sentinel. */
if(i==0&&j==00)
break;
k=.5*((60*i)+j);   /* hour hand: 0.5 degrees per minute of the day */
l=6*j;             /* minute hand: 6 degrees per minute */
m= abs(k-l);
if(m>180)
n=360-m;   /* report the smaller of the two angles */
else
n=m;
printf("%.3lf\n",n);
}
return 0;
}
<file_sep>/12468.c
#include<stdio.h>
/* UVa 12468 -- Zapping: channels 0..99 wrap around; each button press
 * moves one channel up or down. Print the fewer presses needed to go
 * from channel x to channel y. Input ends at "-1 -1". */
int main()
{
int x,y,i,j,k,l;
while(scanf("%d%d",&x,&y)==2)
{
if(x==-1&&y==-1)
break;
/* k = direct distance, j = wrap-around distance (k + j == 100). */
{if(x>y)
{k=x-y;
j=(100-x)+y;}
else
{k=y-x;
j=(100-y)+x;}}
if(k>j)
printf("%d\n",j);
else
printf("%d\n",k);
}
return 0;
}
<file_sep>/11879 - Multiple of 17.c
#include<stdio.h>
#include<string.h>
/* UVa 11879 -- Multiple of 17: one digit string per line (terminated by
 * a line containing just "0"); print 1 if the number is divisible by 17,
 * else 0.
 * Fix: replaced gets(), which performs no bounds checking and was
 * removed from the language in C11, with fgets() plus newline stripping. */
int main()
{
    char num[1000000];
    int i, c, ln;
    while (fgets(num, sizeof num, stdin) != NULL)
    {
        num[strcspn(num, "\r\n")] = '\0';   /* strip the trailing newline */
        if (num[0] == '0' && strlen(num) == 1)
            break;   /* sentinel: a lone "0" ends the input */
        ln = strlen(num);
        c = 0;
        for (i = 0; i < ln; i++)
        {
            /* Horner-style scan: running remainder of the number mod 17. */
            c = (num[i] - '0' + c * 10) % 17;
        }
        if (c == 0)
            printf("1\n");
        else
            printf("0\n");
    }
    return 0;
}
| c67a513df0a686e2df00253b843bbf529c22e0d5 | [
"Java",
"C",
"C++",
"Markdown"
] | 112 | C | sajibtariq/uva_solve_code | e2ff0a55967bac0add768842c7446df2f4f89812 | 02e436d1162d646c10dafeb10116864dea5f69c3 |
refs/heads/master | <file_sep>package com.accolite.kafka.producers;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
/**
 * Thin wrapper around a single shared {@link KafkaProducer}, configured
 * once in a static initializer: three localhost brokers, string
 * key/value serialization and a custom partitioner class.
 */
public class MessageProducer {

    static Properties clusterProps = new Properties();
    static KafkaProducer<String, String> producer = null;

    static {
        clusterProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094");
        // NOTE(review): "serializer.class" and "partitioner.class" are keys
        // from the legacy Scala producer; confirm the modern client honors
        // them rather than just logging an "unknown config" warning.
        clusterProps.put("serializer.class","kafka.serializer.StringEncoder");
        clusterProps.put("partitioner.class", "com.accolite.kafka.partitoners.SimplePartitioner");
        clusterProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        clusterProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        producer = new KafkaProducer<String, String>(clusterProps);
    }

    /** Fire-and-forget send of one record through the shared producer. */
    public void send(ProducerRecord<String, String> record) {
        producer.send(record);
    }
}
<file_sep>package com.accolite.kafka.app;
import java.util.Date;
import java.util.Random;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.accolite.kafka.producers.MessageProducer;
/**
 * Demo driver: floods the "user_activity" topic with synthetic events of
 * the form "&lt;timestamp&gt;,www.accolite.com,&lt;random ip&gt;", always
 * targeting partition 4 with the IP as the record key.
 */
public class AppLauncher {

    //TOPIC NAME
    private static final String USER_ACTIVITY = "user_activity";

    /**
     * @param args unused
     */
    public static void main(String[] args) {
        MessageProducer producer = new MessageProducer();
        // NOTE(review): infinite loop with no throttling and no
        // producer.close(); Random is also re-created every iteration.
        while (true) {
            Random rnd = new Random();
            long runtime = new Date().getTime();
            String ip = "192.168.2." + rnd.nextInt(255);
            String msg = runtime + ",www.accolite.com," + ip;
            ProducerRecord<String, String> record = new ProducerRecord<String, String>(USER_ACTIVITY, 4, ip, msg);
            producer.send(record);
        }
    }
}
| e919d3b894df45d20a9a8da76765c58532ce3ff3 | [
"Java"
] | 2 | Java | jsaurav/kafka-client | 1d9bd98cc02bc1a330bb198518c8a65a10d33cdc | 48333496a59dd55e3808ccd5517d31b76866bb2e |
refs/heads/master | <repo_name>giovgiac/sonmap-matching<file_sep>/main.py
# main.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import logging
from datasets.matching_dataset import MatchingDataset
from loggers.logger import Logger
from models.dizygotic_net import DizygoticNet
from trainers.matching_trainer import MatchingTrainer
from utils.config import process_config
import tensorflow as tf
def main(argv) -> None:
    """Entry point: build datasets, model, logger and trainer from flags.

    Training runs for the "train" and "restore" modes; "evaluate" aborts
    because evaluation is not implemented yet.
    """
    del argv  # absl passes leftover argv; this program does not use it.
    # Process the configuration from flags.
    config = process_config()
    if config.mode != "evaluate":
        # Define the datasets.
        train_dataset = MatchingDataset(batch_size=config.batch_size,
                                        folder="datasets/aracati/train",
                                        x_shape=config.input_shape,
                                        y_shape=config.output_shape)
        valid_dataset = MatchingDataset(batch_size=config.batch_size,
                                        folder="datasets/aracati/validation",
                                        x_shape=config.input_shape,
                                        y_shape=config.output_shape)
        # Define the model.
        loss = tf.keras.losses.BinaryCrossentropy()
        optimizer = tf.keras.optimizers.Adam(learning_rate=config.learning_rate)
        model = DizygoticNet(filters=config.filters, loss=loss, optimizer=optimizer)
        if config.mode == "restore":
            # Resume from the newest checkpoint under config.checkpoint_dir.
            model.load_checkpoint()
        # Define the logger.
        logger = Logger()
        # Define the trainer.
        trainer = MatchingTrainer(model=model, logger=logger, train_dataset=train_dataset, valid_dataset=valid_dataset)
        trainer.train()
    else:
        logging.fatal("Evaluation mode is not yet implemented.")
        exit(1)
if __name__ == '__main__':
    # Raise absl log verbosity before app.run parses flags and calls main().
    logging.set_verbosity(logging.INFO)
    app.run(main)
<file_sep>/models/dizygotic_net.py
# dizygotic_net.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from base.base_model import BaseModel
from layers.encode import Encode
import tensorflow as tf
class DizygoticNet(BaseModel):
    """Two-tower ("dizygotic twin") matching network.

    A sonar image and a satellite image are encoded by two independent
    stacks of five Encode blocks, flattened, concatenated, and scored by
    a sigmoid MLP head (output in [0, 1]; higher means "same place").
    """
    def __init__(self, filters: int, loss: tf.keras.losses.Loss, optimizer: tf.keras.optimizers.Optimizer):
        """Build both encoder towers and the MLP head.

        Args:
            filters: channel count used by every Encode block (both towers).
            loss: Keras loss object the trainer will apply.
            optimizer: Keras optimizer the trainer will apply.
        """
        # Invoke parent class constructor.
        super(DizygoticNet, self).__init__(loss, optimizer, name="DizygoticNet")
        # Store network architecture hyperparameters.
        self.filters = filters
        # Define sublayers of the Dizygotic Network.
        # Sonar encoding layers.
        self.son_e1 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.son_e2 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.son_e3 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.son_e4 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.son_e5 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.son_f = tf.keras.layers.Flatten()
        # Satellite encoding layers.
        self.sat_e1 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.sat_e2 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.sat_e3 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.sat_e4 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.sat_e5 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.sat_f = tf.keras.layers.Flatten()
        self.son_sat_cat = tf.keras.layers.Concatenate()
        # Multilayer perceptron to match encodings.
        self.dense_1 = tf.keras.layers.Dense(units=2048, activation='relu')
        self.dense_2 = tf.keras.layers.Dense(units=512, activation='relu')
        self.dense_3 = tf.keras.layers.Dense(units=64, activation='relu')
        self.dense_4 = tf.keras.layers.Dense(units=1, activation='sigmoid')
        # Generate random fake data.
        x = tf.random.uniform(shape=(self.config.batch_size,) + self.config.input_shape, minval=0.0, maxval=1.0,
                              dtype=tf.float32)
        # Build model and print summary. The throwaway forward pass forces
        # Keras to build every layer's weights so summary() can report shapes.
        self([x, x], training=False)
        self.summary()
    def call(self, inputs, training=None, mask=None) -> tf.Tensor:
        """Forward pass. `inputs` is [sonar_batch, satellite_batch]."""
        [x, y] = inputs
        # Sonar
        son = self.son_e1(x, training=training)
        son = self.son_e2(son, training=training)
        son = self.son_e3(son, training=training)
        son = self.son_e4(son, training=training)
        son = self.son_e5(son, training=training)
        son = self.son_f(son)
        # Satellite
        sat = self.sat_e1(y, training=training)
        sat = self.sat_e2(sat, training=training)
        sat = self.sat_e3(sat, training=training)
        sat = self.sat_e4(sat, training=training)
        sat = self.sat_e5(sat, training=training)
        sat = self.sat_f(sat)
        # Concatenate
        z = self.son_sat_cat([son, sat])
        # MLP
        z = self.dense_1(z)
        z = self.dense_2(z)
        z = self.dense_3(z)
        return self.dense_4(z)
<file_sep>/README.md
# sonmap-matching
<file_sep>/base/base_model.py
# base_model.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl import logging
import abc
import os
import tensorflow as tf
class BaseModel(abc.ABC, tf.keras.Model):
    """Base class for Keras models with flag-driven config and checkpointing.

    Tracks the training epoch and global step as non-trainable tf.Variables
    so that they are saved and restored together with the weights.
    """
    def __init__(self, loss: tf.keras.losses.Loss, optimizer: tf.keras.optimizers.Optimizer, name: str) -> None:
        super(BaseModel, self).__init__(name=name)
        # Initialize configuration and state variables.
        self.config = flags.FLAGS
        self.epoch = tf.Variable(initial_value=0, shape=(), dtype=tf.int32, trainable=False, name="epoch")
        self.global_step = tf.Variable(initial_value=0, shape=(), dtype=tf.int64, trainable=False, name="global_step")
        self.loss = loss
        self.optimizer = optimizer
        self.saver = tf.train.Checkpoint(optimizer=self.optimizer, model=self)
    @abc.abstractmethod
    def call(self, inputs, training=None, mask=None) -> tf.Tensor:
        """Forward pass; concrete models must implement this."""
        raise NotImplementedError
    def load_checkpoint(self) -> None:
        """Restore the newest checkpoint from config.checkpoint_dir, if any."""
        latest_checkpoint = tf.train.latest_checkpoint(checkpoint_dir=self.config.checkpoint_dir)
        # Fix: removed a leftover debug `print(self.variables)` that dumped
        # every model variable to stdout on each restore attempt.
        if latest_checkpoint:
            logging.info("Restoring Model Checkpoint {}...".format(latest_checkpoint))
            self.saver.restore(save_path=latest_checkpoint)
            logging.info("[Epoch {}] Model Restored...".format(int(self.epoch)))
    def save_checkpoint(self) -> None:
        """Write a checkpoint (weights, optimizer state, counters) to disk."""
        logging.info("[Epoch {}] Saving Model...".format(int(self.epoch)))
        self.saver.save(file_prefix=os.path.join(self.config.checkpoint_dir, "ckpt"))
        logging.info("[Epoch {}] Model Saved...".format(int(self.epoch)))
<file_sep>/utils/config.py
# config.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl import logging
from typing import Tuple
import namegenerator
import os
import time
# Core entries
flags.DEFINE_enum("mode", "train", ["evaluate", "restore", "train"], "The modes that are available.")
flags.DEFINE_string("name", "auto", "Name of the folder to store the files of the running experiment.")
# Network entries
flags.DEFINE_float("learning_rate", 2e-4, "Initial learning rate for the chosen optimizer")
flags.DEFINE_integer("batch_size", 16, "The size of the batch to use while training the network.", lower_bound=1)
flags.DEFINE_integer("filters", 128, "A parameter that scales the depth of the neural network.", lower_bound=1)
flags.DEFINE_integer("num_epochs", 64, "Number of epochs to train the network for.", lower_bound=1)
# Data entries
# NOTE: list flags arrive as lists; process_config() converts both shapes
# to tuples before they are used.
flags.DEFINE_list("input_shape", [128, 256, 1], "The shape of the data to input in the neural network.")
flags.DEFINE_list("output_shape", [128, 256, 1], "The shape of the data that will be output from the neural network.")
# Non-configurable entries (derived in process_config from --name).
flags.DEFINE_string("checkpoint_dir", "", "Location to save the training checkpoints. (Do not edit).")
flags.DEFINE_string("evaluate_dir", "", "(Do not edit).")
flags.DEFINE_string("execution_dir", "", "(Do not edit).")
flags.DEFINE_string("presentation_dir", "", "(Do not edit).")
flags.DEFINE_string("summary_dir", "", "(Do not edit).")
def create_directories(config: flags.FlagValues, directories: Tuple[str, ...]) -> None:
    """Ensure every experiment directory exists.

    Creates each missing directory. If one is already present while running
    in "train" mode, abort: a fresh training run must never reuse (and risk
    overwriting) a previous experiment with the same name.
    """
    for directory in directories:
        if os.path.exists(directory):
            # Guard clause: refuse to train into an existing experiment folder.
            if config.mode == "train":
                logging.fatal("Cannot train with chosen name because directory already exists.")
                exit(1)
        else:
            os.makedirs(directory)
def process_config() -> flags.FlagValues:
    """Validate flags, derive the experiment directory layout, return FLAGS.

    Side effects: may synthesize an experiment name, creates the output
    directories, and prints the TensorBoard launch command.
    """
    config = flags.FLAGS
    # Process running experiment name.
    if (config.mode == "evaluate" or config.mode == "restore") and config.name == "auto":
        # These modes must point at an existing experiment; abort otherwise.
        logging.fatal("Cannot automatically generate a name for the chosen mode.")
    elif config.mode == "train" and config.name == "auto":
        # e.g. "2020-01-31/random-slug": date folder plus generated name.
        config.name = os.path.join(time.strftime("%Y-%m-%d"), namegenerator.gen())
    logging.info("Experiment Name: {}".format(config.name))
    # Convert shapes to tuples.
    config.input_shape = tuple(config.input_shape)
    config.output_shape = tuple(config.output_shape)
    # Set directories to their appropriate paths.
    config.execution_dir = os.path.join(os.curdir, "executions", config.name)
    config.checkpoint_dir = os.path.join(config.execution_dir, "checkpoint")
    config.evaluate_dir = os.path.join(config.execution_dir, "result")
    # NOTE(review): unlike the other directory entries, "log_dir" is never
    # DEFINE'd above -- confirm absl allows assigning an undeclared name here.
    config.log_dir = os.path.join(config.execution_dir, "log")
    config.presentation_dir = os.path.join(config.execution_dir, "presentation")
    config.summary_dir = os.path.join(config.execution_dir, "summary")
    create_directories(config, directories=(config.checkpoint_dir, config.evaluate_dir, config.log_dir,
                                            config.presentation_dir, config.summary_dir))
    # Log out the command for using TensorBoard.
    print('tensorboard --logdir="{}" --host 127.0.0.1 --port 8080'.format(os.path.abspath(config.summary_dir)))
    return config
<file_sep>/base/base_trainer.py
# base_trainer.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
from absl import logging
from base.base_dataset import BaseDataset
from base.base_logger import BaseLogger
from base.base_model import BaseModel
from typing import Optional
import abc
class BaseTrainer(abc.ABC):
    """Abstract training loop: train N epochs, optionally validating and
    checkpointing after each one. Subclasses supply the per-epoch logic."""
    def __init__(self, model: BaseModel, logger: BaseLogger, train_dataset: BaseDataset,
                 valid_dataset: Optional[BaseDataset]) -> None:
        self.config = flags.FLAGS
        self.model = model
        self.logger = logger
        self.train_dataset = train_dataset
        self.valid_dataset = valid_dataset  # None disables validation
        # Build and print model summary.
        # TODO: Verify the reason why model summary is failing with weird internal TensorFlow errors.
        # self.model.summary(print_fn=logging.info)
    def train(self) -> None:
        """Run epochs from the model's persisted epoch up to num_epochs,
        saving a checkpoint and bumping the epoch counter after each."""
        for _ in range(int(self.model.epoch), self.config.num_epochs + 1):
            self.train_epoch()
            if self.valid_dataset:
                self.validate_epoch()
            self.model.save_checkpoint()
            self.model.epoch.assign_add(delta=1)
    @abc.abstractmethod
    def train_epoch(self) -> None:
        # NOTE(review): this abstract body is `pass` while validate_epoch
        # raises NotImplementedError -- consider making them consistent.
        pass
    @abc.abstractmethod
    def validate_epoch(self) -> None:
        raise NotImplementedError
<file_sep>/datasets/aracati/split.py
# split.py
import glob
import pandas as pd
import numpy as np
import random as rand
PERCENT_TRAINING = 0.7
PERCENT_VALIDATION = 0.2
PERCENT_TESTING = 0.1


def _take_split(frame, pool, count, out_path):
    """Randomly draw `count` sonar IDs from `pool` (mutating it in place so
    successive splits stay disjoint) and write every row of `frame` whose
    first column matches a drawn ID, sorted by sonar ID, to `out_path`.
    """
    split = pd.DataFrame()
    for _ in range(count):
        index = rand.choice(pool)
        # pd.concat replaces DataFrame.append, which was removed in pandas 2.0.
        split = pd.concat([split, frame.iloc[:, :][frame.iloc[:, 0] == index]], ignore_index=True)
        pool.remove(index)
    split = split.sort_values(by=['#Sonar ID'])
    split.to_csv(path_or_buf=out_path, index=False, header=True, float_format="%.6f")


data = pd.read_csv("./data.csv")
son = sorted(glob.glob("./images/input/*.png"))
sat = sorted(glob.glob("./images/gt/*.png"))
indices = list(np.unique(data.iloc[:, 0]))
indices_size = len(indices)
# Every sonar frame needs both an input image and a ground-truth image.
assert(len(son) == len(sat) == len(indices))
# Acquire training data.
_take_split(data, indices, round(PERCENT_TRAINING * indices_size), "./train/data.csv")
# Acquire validation data.
_take_split(data, indices, round(PERCENT_VALIDATION * indices_size), "./validation/data.csv")
# Acquire testing data.
_take_split(data, indices, round(PERCENT_TESTING * indices_size), "./test/data.csv")
<file_sep>/layers/decode.py
# decode.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Tuple, Union
import tensorflow as tf
class Decode(tf.keras.layers.Layer):
    """U-Net style decoder block: 2x nearest-neighbour upsample, a 4x4 conv,
    concatenation with the encoder skip tensor, two convolutions with their
    activations, and optional dropout."""
    def __init__(self, filters: int, kernel_size: Union[int, Tuple[int, int]],
                 activation_fn: type(tf.keras.layers.Layer), with_dropout=True) -> None:
        super(Decode, self).__init__(trainable=True, name=None)
        # Save parameters to class.
        self.activation_fn = activation_fn
        self.filters = filters
        self.kernel_size = kernel_size
        self.with_dropout = with_dropout
        # Create decode sublayers.
        self.upsample = tf.keras.layers.UpSampling2D(size=2, interpolation='nearest')
        self.upconv = tf.keras.layers.Conv2D(filters=filters, kernel_size=4, padding='same')
        self.conv_1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, padding='same')
        self.activation_1 = activation_fn()
        self.conv_2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, padding='same')
        self.activation_2 = activation_fn()
        self.concatenate = tf.keras.layers.Concatenate()
        if with_dropout:
            self.dropout = tf.keras.layers.Dropout(rate=0.2)
    def call(self, inputs, training=False) -> tf.Tensor:
        """`inputs` is [x, s]: x from the previous decoder stage, s the
        matching encoder skip tensor."""
        [x, s] = inputs
        x = self.upsample(x)
        x = self.upconv(x)
        x = self.concatenate([x, s])
        x = self.conv_1(x)
        x = self.activation_1(x)
        x = self.conv_2(x)
        x = self.activation_2(x)
        if self.with_dropout:
            # Dropout only fires when training=True.
            x = self.dropout(x, training=training)
        return x
<file_sep>/trainers/trainer.py
# trainer.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from base.base_dataset import BaseDataset
from base.base_logger import BaseLogger
from base.base_model import BaseModel
from base.base_trainer import BaseTrainer
from tqdm import tqdm
from typing import Optional
from typing import Tuple
import numpy as np
import random as rand
import tensorflow as tf
class Trainer(BaseTrainer):
    """Generic single-input trainer: per-epoch gradient descent plus a
    validation pass that logs the loss and one random example batch."""
    def __init__(self, model: BaseModel, logger: BaseLogger, train_dataset: BaseDataset,
                 valid_dataset: Optional[BaseDataset]) -> None:
        super(Trainer, self).__init__(model, logger, train_dataset, valid_dataset)
    def train_epoch(self) -> None:
        """One pass over the training set; logs the mean loss under "train"."""
        loop = tqdm(range(len(self.train_dataset)))
        loop.set_description("Training Epoch [{}/{}]".format(int(self.model.epoch),
                                                            self.config.num_epochs))
        errs = []
        for data, _ in zip(self.train_dataset.data, loop):
            err, grad = self.train_step(data)
            self.model.optimizer.apply_gradients(zip(grad, self.model.trainable_variables))
            # Append step data to epoch data list.
            errs.append(err)
            # Increment global step counter.
            self.model.global_step.assign_add(delta=1)
        self.logger.summarize(self.model.global_step, summarizer="train", summaries_dict={
            "total_loss": np.mean(errs)
        })
    def train_step(self, data: Tuple[tf.Tensor, ...]) -> Tuple[tf.Tensor, ...]:
        """Forward + backward on one (x, y) batch; returns (loss, gradients)."""
        x, y = data
        with tf.GradientTape() as tape:
            # Evaluate results on training data.
            prediction = self.model(x, training=True)
            loss = self.model.loss(y, prediction)
        grad = tape.gradient(loss, self.model.trainable_variables)
        return loss, grad
    def validate_epoch(self) -> None:
        """One pass over the validation set; logs the mean loss plus the
        prediction/target of one randomly chosen batch."""
        loop = tqdm(range(len(self.valid_dataset)))
        loop.set_description("Validating Epoch {}".format(int(self.model.epoch)))
        errs = []
        predictions = []
        targets = []
        for data, _ in zip(self.valid_dataset.data, loop):
            err, prediction, target = self.validate_step(data)
            # Append step data to epoch data list.
            errs.append(err)
            predictions.append(prediction)
            targets.append(target)
        batch = rand.choice(range(len(predictions)))
        self.logger.summarize(self.model.global_step, summarizer="validation", summaries_dict={
            "prediction": predictions[batch],
            "target": targets[batch],
            "total_loss": np.mean(errs)
        })
    def validate_step(self, data: Tuple[tf.Tensor, ...]) -> Tuple[tf.Tensor, ...]:
        """Forward-only evaluation of one batch; returns (loss, prediction, y)."""
        x, y = data
        # Evaluate results on validation data.
        prediction = self.model(x, training=False)
        loss = self.model.loss(y, prediction)
        return loss, prediction, y
<file_sep>/trainers/matching_trainer.py
# trainer.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from base.base_dataset import BaseDataset
from base.base_logger import BaseLogger
from base.base_model import BaseModel
from base.base_trainer import BaseTrainer
from tqdm import tqdm
from typing import Optional
from typing import Tuple
import numpy as np
import os
import random as rand
import tensorflow as tf
class MatchingTrainer(BaseTrainer):
    """Trainer for the two-input matching task: each batch is
    (sonar, satellite, label) with label in {0, 1}. Accuracy is computed by
    thresholding both label and prediction at 0.5."""
    def __init__(self, model: BaseModel, logger: BaseLogger, train_dataset: BaseDataset,
                 valid_dataset: Optional[BaseDataset]) -> None:
        super(MatchingTrainer, self).__init__(model, logger, train_dataset, valid_dataset)
    def train_epoch(self) -> None:
        """One pass over the training data; logs mean accuracy and loss."""
        loop = tqdm(range(len(self.train_dataset)))
        loop.set_description("Training Epoch [{}/{}]".format(int(self.model.epoch),
                                                            self.config.num_epochs))
        accs = []
        errs = []
        for data, _ in zip(self.train_dataset.data, loop):
            err, grad, acc = self.train_step(data)
            self.model.optimizer.apply_gradients(zip(grad, self.model.trainable_variables))
            # Append step data to epoch data list.
            accs.append(acc)
            errs.append(err)
            # Increment global step counter.
            self.model.global_step.assign_add(delta=1)
        self.logger.summarize(self.model.global_step, summarizer="train", summaries_dict={
            "accuracy": np.mean(accs),
            "total_loss": np.mean(errs)
        })
    @tf.function
    def train_step(self, data: Tuple[tf.Tensor, ...]) -> Tuple[tf.Tensor, ...]:
        """One graph-compiled step on (x, y, z); returns (loss, grads, acc)."""
        x, y, z = data
        z = tf.expand_dims(z, axis=-1)  # match the model's (batch, 1) output
        with tf.GradientTape() as tape:
            # Evaluate results on training data.
            prediction = self.model([x, y], training=True)
            loss = self.model.loss(z, prediction)
            # Fraction of examples where label and prediction agree at 0.5.
            acc = tf.reduce_mean(
                tf.cast(tf.equal(tf.greater_equal(z, 0.5), tf.greater_equal(prediction, 0.5)), dtype=tf.float32))
        grad = tape.gradient(loss, self.model.trainable_variables)
        return loss, grad, acc
    def validate_epoch(self) -> None:
        """One pass over the validation data. Besides logging mean accuracy
        and loss, dumps one random batch (metrics text + sonar/satellite PNGs)
        under evaluate_dir/<epoch>/ for manual inspection."""
        loop = tqdm(range(len(self.valid_dataset)))
        loop.set_description("Validating Epoch {}".format(int(self.model.epoch)))
        accs = []
        preds = []
        errs = []
        targets = []
        sats = []
        sons = []
        for data, _ in zip(self.valid_dataset.data, loop):
            err, pred, acc, target, sat, son = self.validate_step(data)
            # Append step data to epoch data list.
            accs.append(acc)
            errs.append(err)
            preds.append(pred)
            targets.append(target)
            sats.append(sat)
            sons.append(son)
        # Save an example batch.
        batch = rand.choice(range(len(preds)))
        path = os.path.join(self.config.evaluate_dir, str(int(self.model.epoch)))
        os.makedirs(path)
        for i in range(len(preds[batch])):
            # Save metrics.
            file = open(os.path.join(path, "%d.txt" % i), 'w')
            file.write("Prediction: %.5f\n" % float(preds[batch][i]))
            file.write("Target: %.5f\n" % float(targets[batch][i]))
            file.close()
            # Save sonar image (inputs assumed to be in [0, 1]).
            son = tf.image.encode_png(tf.cast(sons[batch][i] * 255.0, dtype=tf.uint8))
            tf.io.write_file(os.path.join(path, "son_%d.png" % i), son)
            # Save satellite image.
            sat = tf.image.encode_png(tf.cast(sats[batch][i] * 255.0, dtype=tf.uint8))
            tf.io.write_file(os.path.join(path, "sat_%d.png" % i), sat)
        self.logger.summarize(self.model.global_step, summarizer="validation", summaries_dict={
            "accuracy": np.mean(accs),
            "total_loss": np.mean(errs)
        })
    @tf.function
    def validate_step(self, data: Tuple[tf.Tensor, ...]) -> Tuple[tf.Tensor, ...]:
        """Forward-only evaluation; returns (loss, pred, acc, z, y, x)."""
        x, y, z = data
        z = tf.expand_dims(z, axis=-1)
        # Evaluate results on validation data.
        prediction = self.model([x, y], training=False)
        loss = self.model.loss(z, prediction)
        acc = tf.reduce_mean(
            tf.cast(tf.equal(tf.greater_equal(z, 0.5), tf.greater_equal(prediction, 0.5)), dtype=tf.float32))
        return loss, prediction, acc, z, y, x
<file_sep>/layers/encode.py
# encode.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Tuple, Union
import tensorflow as tf
class Encode(tf.keras.layers.Layer):
    """Encoder block: two convolutions, each followed by an activation,
    with an optional 2x max-pool that halves the spatial resolution."""
    def __init__(self, filters: int, kernel_size: Union[int, Tuple[int, int]],
                 activation_fn: type(tf.keras.layers.Layer), with_pool=True) -> None:
        super(Encode, self).__init__(trainable=True, name=None)
        # Save parameters to class.
        self.activation_fn = activation_fn
        self.filters = filters
        self.kernel_size = kernel_size
        self.with_pool = with_pool
        # Create encode sublayers.
        self.conv_1 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, padding='same')
        self.activation_1 = activation_fn()
        self.conv_2 = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, padding='same')
        self.activation_2 = activation_fn()
        if with_pool:
            self.max_pool = tf.keras.layers.MaxPool2D(pool_size=2, padding='same')
    def call(self, inputs, training=False) -> tf.Tensor:
        """conv -> act -> conv -> act, then the optional max-pool."""
        x = self.conv_1(inputs)
        x = self.activation_1(x)
        x = self.conv_2(x)
        x = self.activation_2(x)
        if self.with_pool:
            x = self.max_pool(x)
        return x
<file_sep>/models/unet.py
# unet.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from base.base_model import BaseModel
from layers.decode import Decode
from layers.encode import Encode
import tensorflow as tf
class UNet(BaseModel):
    """Classic U-Net: five encoder stages (the first without pooling) whose
    outputs feed four decoder stages through skip connections, ending in a
    1x1 softmax conv with config.output_shape[2] channels."""
    def __init__(self, filters: int, loss: tf.keras.losses.Loss, optimizer: tf.keras.optimizers.Optimizer):
        # Invoke parent class constructor.
        super(UNet, self).__init__(loss, optimizer, name="U-Net")
        # Store network architecture hyperparameters.
        self.filters = filters
        # Define sublayers of the U-Net. Channel width doubles per encoder
        # stage (filters .. filters*16) and halves back down the decoder.
        self.encode_1 = Encode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=False)
        self.encode_2 = Encode(filters=filters * 2, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.encode_3 = Encode(filters=filters * 4, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.encode_4 = Encode(filters=filters * 8, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.encode_5 = Encode(filters=filters * 16, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_pool=True)
        self.decode_1 = Decode(filters=filters * 8, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_dropout=True)
        self.decode_2 = Decode(filters=filters * 4, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_dropout=True)
        self.decode_3 = Decode(filters=filters * 2, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_dropout=False)
        self.decode_4 = Decode(filters=filters, kernel_size=3, activation_fn=tf.keras.layers.ReLU, with_dropout=False)
        self.final = tf.keras.layers.Conv2D(filters=self.config.output_shape[2], kernel_size=1,
                                            padding='same', activation='softmax')
    def call(self, inputs, training=None, mask=None) -> tf.Tensor:
        """Forward pass; e1..e4 are kept as decoder skip connections."""
        e1 = self.encode_1(inputs)
        e2 = self.encode_2(e1, training=training)
        e3 = self.encode_3(e2, training=training)
        e4 = self.encode_4(e3, training=training)
        x = self.encode_5(e4, training=training)
        x = self.decode_1([x, e4], training=training)
        x = self.decode_2([x, e3], training=training)
        x = self.decode_3([x, e2], training=training)
        x = self.decode_4([x, e1], training=training)
        return self.final(x)
| c201d4b1c9e01693d8df71ec9c8e353e95bc99e5 | [
"Markdown",
"Python"
] | 12 | Python | giovgiac/sonmap-matching | c974c157c8a9817789b8fe343a9115a4dbf88ab5 | 249964203dc9cdd48f335ac01ef7fe34df7fcbed |
refs/heads/master | <file_sep>/*
* Assignment 1: Callback Function
Complete the function below to show a delayed result in the console.
*/
function delayedResult(n1, n2, delayTime, callback) {
// your code here
const result = n1 + n2;
setTimeout(() => callback(result), delayTime);
}
delayedResult(4, 5, 3000, function(result) {
console.log(result);
}); // 9 (4+5) will be shown in the console after 3 seconds
delayedResult(-5, 10, 2000, function(result){
window.alert(result);
}); // 5 (-5+10) will be shown in an alert dialog after 2 seconds<file_sep>function twoSum(nums, target) {
let len = nums.length;
for (let i = 0; i < len-1 ; i++) {
for (let j = len-1; j > i; j--) {
if (nums[i] + nums[j] === target) {
return `[${i}, ${j}]`;
}
}
}
return `target error!`;
}
console.log( twoSum([2, 7, 11, 15], 9) );
console.log( twoSum([2, 7, 11, 15], 13) );
console.log( twoSum([2, 7, 11, 15], 26) );
console.log( twoSum([2, 7, 11, 15], 30) );<file_sep>/*
* Route Handlers
*/
/* Import Express and set up `express.Router` */
const express = require('express');
const router = express.Router();
/* Home route */
router.get('/', (req, res) => {
// Log statement to indicate that this function is running
console.log('Handleing request to root or "home" route, "/"');
// Send gretting to the page
res.send('<h1>Hello, My Server!</h1>');
});
/*
* Assignment 2: Build Backend API for Front-End
Even if you are heading to be a front-end engineer, it’s much better to have some basic
experience in the backend field. Now, try to modify your code executed on the server side to
build a simple API. Your server should fulfill following client requests:
1. When a user enters http://localhost:3000/getData in a browser’s address bar,
show the "Lack of Parameter" message in the page.
2. When a user enters http://localhost:3000/getData?number=xyz in a browser’s
address bar, show the "Wrong Parameter" message in the page.
3. When a user enters http://localhost:3000/getData?number=5, they should get the
result of 1+2+....+5 in the page.
4. Generally speaking, when a user enters http://localhost:3000/getData?number=正整數,
they can get result of 1+2+....+正整數 in the page.
Hint: handle HTTP GET method and parameters with Express on the server side.
*/
router.get('/getData', (req, res) => {
const num = req.query.number;
if (!num) {
res.send('Lack of Parameter');
} else if (Number.isInteger(+num) && +num > 0) {
let result;
result = ((1 + +num) * +num) /2;
res.send(`<strong>${result}</strong>`);
} else {
res.send('Wrong Parameter');
}
});
/*
* Assignment 4: HTTP Cookie (Advanced Optional)
Cookie is an important mechanism for storing small pieces of data in the browser.
Modify your code executed in the backend to use cookies for user tracking.
1. Serve a URL http://localhost:3000/myName by your server.
2. When the user connects to http://localhost:3000/myName, check cookies for the
user's name in the backend.
a. If you can get the user's name from cookies, show it on the web page. Done.
b. If you cannot get the user's name from cookies, show a HTML form including
a text input and a button in the web page. Go to step 3.
3. User can enter his name in the text input, and then click button to submit form to a
URL http://localhost:3000/trackName?name=使用者的輸入 which should be served
from your server, too.
4. When user submits form to http://localhost:3000/trackName?name=使用者的輸入,
you should get user’s name from HTTP parameter and store it in the cookies.
5. Redirect user to http://localhost:3000/myName, user can see his name, finally. Done.
*/
router.get('/myName', (req, res) => {
const name = req.cookies.name;
res.render('myName', { name: name });
});
router.get('/trackName', (req, res) => {
const name = req.query.name;
if (name) {
res.cookie('name', name);
}
res.redirect('/myName');
});
/* Custom error route */
router.get('/error', (req, res) => {
// Log statement to indicate that this function is running
console.log('Handling request to custom "error" route, "/error"');
// Create custom error and print error message to the page
const err = new Error('err');
err.message = 'Oops, it looks like an error occurred.';
throw err;
});
/* Export router */
module.exports = router;<file_sep>function calculate(args) {
let result;
if (args.op === "+") {
result = args.n1 + args.n2;
} else if (args.op === "-") {
result = args.n1 - args.n2;
} else {
result = "Not supported";
}
return result;
}
// Solution 1
console.log( calculate(
{
op: '-',
n1: 10,
n2: 5
}
) );
// Solution 2
class args {
constructor(op, n1, n2) {
this.op = op;
this.n1 = n1;
this.n2 = n2;
}
}
console.log( calculate(new args('-', 10, 5)) );<file_sep>function ajax(src, callback) {
let xhr = new XMLHttpRequest();
xhr.onreadystatechange = () => {
if (xhr.readyState === 4 && xhr.status === 200) {
const products = JSON.parse(xhr.responseText);
callback(products);
};
}
xhr.open('GET', src);
xhr.send();
}
function render(data) {
for (let i = 0; i < data.length; i++) {
// Create "div" for products
let color = ["lightsalmon", "gray", "lightskyblue"];
let div = document.createElement('div');
div.setAttribute('id',`product${i}`);
document.getElementsByClassName('products')[0].appendChild(div);
document.getElementById(`product${i}`).style.background = `${color[i]}`;
// Create "content" for each product
for (let j = 0; j < 3; j++) {
let tag = ["h2", "h3", "p"];
let key = ["name", "price", "description"];
let node = document.createElement(tag[j]);
let content = document.createTextNode(data[i][key[j]]);
node.appendChild(content);
document.getElementById(`product${i}`).appendChild(node);
};
};
}
ajax("https://cwpeng.github.io/live-records-samples/data/products.json", function(response){
render(response);
}); | bcebde8afedcd5757b5346e538ff65495a2c1ab0 | [
"JavaScript"
] | 5 | JavaScript | angela30916/remote-assignments | ea2815612b61889a712ece951cebcd80dffa55d8 | 1b7f697c6b8ce0d450f95733b79dba1387392f16 |
refs/heads/master | <file_sep># Sys.setenv(RENV_PATHS_CACHE = "~/.rpkgcache")
# Work from the home directory so the relative paths below resolve.
setwd("~")
library(tidyverse)
library(magrittr)
# Install the in-repo `webperf` helper package without upgrading deps.
devtools::install_local("./webperf/", upgrade="never")
library(webperf)
# Change the following directory to switch to your results directory
setwd("results/")
# Change the vector of `www` and `dev` below to have all the prefix names from
# your lighthouse run.
specs <- webperf::read_lighthouse_json(c("www", "dev"), 3)
# Analyze key performance metrics
specs %>% webperf::analyze_change(largestContentfulPaint)
specs %>% webperf::analyze_change(cumulativeLayoutShift)
specs %>% webperf::analyze_change(totalBlockingTime)
# Show most interesting statistics
# (one boxplot facet per Lighthouse metric, split by environment).
specs %>%
  pivot_longer(firstContentfulPaint:observedSpeedIndexTs) %>%
  ggplot(aes(env, value)) +
  facet_wrap(vars(name), scales="free") +
  geom_boxplot() +
  expand_limits(y=0)
# Demo of grabbing network performance information out of the results
img_transfers <- specs %>%
  unnest(network) %>%
  unnest_wider(value) %>%
  mutate(diff=endTime-startTime)
# Frame visualization
frames <- specs %>%
  unnest(frames) %>%
  unnest_wider(value)
<file_sep>import { LighthouseReport, getUrl } from "./Lighthouse";
import { DateTime } from "luxon";
import { sortBy } from "lodash";
export interface Differential {
current: LighthouseReport;
previous: LighthouseReport;
section: string;
page: string;
current_url: string;
previous_url: string | null;
current_html_url: string;
}
export function getParam(name) {
  // See demo in https://developer.mozilla.org/en-US/docs/Web/API/URL/searchParams
  // Read a single query-string parameter from the current page URL.
  return new URL(window.location.href as string).searchParams.get(name);
}
// Load the top-level scan index, pick the "current" and "previous" scans
// (either by regex selectors or by recency), and return per-page report
// pairs with both payloads fetched.
export async function getData(
  previous_param: null | string,
  current_param: null | string
): Promise<Differential[]> {
  const index = await dataFetch("index.json");
  // All known scan records, sorted oldest -> newest.
  const parsed: Record[] = sortBy(index.map(parseRecordDate), (r) =>
    r.date.toMillis()
  );
  if (parsed.length === 0) {
    throw new Error("No records");
  }
  // "Current" defaults to the newest scan unless a selector was supplied.
  const current_record = current_param
    ? selectRecord(current_param, parsed)
    : parsed[parsed.length - 1];
  if (!current_record) {
    throw new InvalidSelectorError("Current record selector invalid");
  }
  // "Previous" is selector-driven, or the newest scan at least a week older.
  const previous_record = choosePreviousRecord(
    previous_param,
    current_record,
    parsed
  );
  if (!previous_record) {
    throw new InvalidSelectorError("Previous record selector invalid");
  }
  const current = await getFromRecord(current_record);
  const previous = await getFromRecord(previous_record);
  // Pair up the per-page reports and fetch them all in parallel.
  const pairs = indexesToDifferentials(previous, current);
  return await Promise.all(pairs);
}
interface Record {
date: DateTime;
path: string;
}
interface IndexReport {
data: any;
date: DateTime;
path: string;
}
// Fetch `path` (resolved through getUrl) and return its parsed JSON body.
async function dataFetch(path) {
  const response = await fetch(getUrl(path));
  if (response.ok) {
    return response.json();
  }
  throw new Error(`Fetch failed: ${response.status}`);
}
// Convert a raw index entry's ISO date string into a luxon DateTime.
function parseRecordDate(record): Record {
  const { path, date } = record;
  return { path, date: DateTime.fromISO(date) };
}
// Return the newest record whose path matches the regex `param`, or null.
function selectRecord(param: string, records: Record[]): Record | null {
  const pattern = new RegExp(param);
  for (let i = records.length - 1; i >= 0; i--) {
    if (pattern.test(records[i].path)) {
      return records[i];
    }
  }
  return null;
}
function choosePreviousRecord(
  param: string | null,
  current: Record,
  records: Record[]
): Record | null {
  // An explicit selector always wins.
  if (param) {
    return selectRecord(param, records);
  }
  // Otherwise prefer the newest record at least a week older than `current`.
  const cutoff = current.date.minus({ days: 7 });
  const old_enough = records.filter((r) => r.date < cutoff);
  if (old_enough.length > 0) {
    return old_enough[old_enough.length - 1];
  }
  // If we don't have an old enough record, use the oldest thing we do have.
  return records[0];
}
// Load the per-run index.json living inside the record's directory.
async function getFromRecord(record: Record): Promise<IndexReport> {
  const dir = record.path + "/";
  const data = await dataFetch(dir + "index.json");
  return { data, date: record.date, path: dir };
}
// For every section/page in the current run, pair its report path with the
// matching report from the previous run (null when the page is new), then
// kick off fetches for both payloads.
// Fixed: removed a leftover debug `console.log(previous, current)` that ran
// once per page and spammed the console.
function indexesToDifferentials(
  previous: IndexReport,
  current: IndexReport
): Array<Promise<Differential>> {
  const current_data: { [key: string]: string[] } = current.data;
  return Object.entries(current_data)
    .flatMap(([key, values]) =>
      values.map((value) => {
        const path = `${key}/${value}.report.json`;
        return {
          section: key,
          page: value,
          current: current.path + path,
          // Only reference the previous report if that run covered the page.
          previous: previous.data[key]?.includes(value)
            ? previous.path + path
            : null,
        };
      })
    )
    .map(async ({ current, previous, section, page }) => {
      const current_data = await dataFetch(current);
      const previous_data = previous ? await dataFetch(previous) : null;
      return {
        current: current_data,
        previous: previous_data,
        section,
        page,
        current_url: current,
        current_html_url: current.replace(/\.json$/, ".html"),
        previous_url: previous,
      };
    });
}
export class InvalidSelectorError extends Error {}
<file_sep>#!/usr/bin/env ruby
require 'docopt'
require 'pathname'
require 'fileutils'
require 'timeout'
require 'uri'
require 'cgi'
require 'English'
require 'logger'
require_relative '../lib/utils'
Log = Logger.new($stderr)
ROOT = Pathname(__dir__)
CONFIG_FILE_NAME = 'lighthouse-config.js'
CONFIG_FILE = ROOT / '..' / CONFIG_FILE_NAME
# Run the Dockerised Lighthouse against +url+, writing JSON output to
# +target+ using the repo's shared lighthouse-config.js.
def lighthouse(url, target)
  # The config file must sit in the working directory that gets bind-mounted
  # into the container (see Lighthouse.run), so copy it there first.
  FileUtils.cp CONFIG_FILE, CONFIG_FILE_NAME
  raise "Lighthouse failed five times for #{target}; was trying to load #{url}" unless
    Lighthouse.run(
      Dir.pwd,
      target,
      url.to_s,
      '--output', 'json',
      "--config-path=/var/lighthouse/#{CONFIG_FILE_NAME}"
    )
end
# Global counter folded into cache-buster values so every substitution is
# unique even within the same clock tick.
$uniq_id = 0

# One Lighthouse invocation: the URI to test, the property names labelling it
# (environment plus any buster markers) and a 0-indexed run number.
Run = Struct.new(:uri, :props, :number, keyword_init: true) do
  ##
  # Generate a distinguishing name for this run
  # Used for naming the resulting output files.
  def env
    # `number` is 0-indexed in the struct, so we need to add 1 to get nice
    # human-readable run numbers.
    "#{props.join}_#{number + 1}"
  end

  # URL with every %3Cbust%3E placeholder replaced by a value unique to this
  # run (env + wall-clock time + global counter), defeating caches.
  def url
    busted = uri.to_s.gsub(/%3Cbust%3E/) do |_|
      $uniq_id += 1
      CGI.escape("#{env}#{Time.now.to_f}_id#{$uniq_id}")
    end
    URI(busted)
  end
end
# Parse CLI options, build the run schedule (optionally with cache/varnish
# busting variants) and either print it (--dry-run) or execute it.
def main
  opt = Docopt.docopt <<~DOCOPT
    Usage:
      run [options] <env=url>...

    Options:
      --count=<count>         Number of runs to do [default: 3]
      --start=<start>         Run number to begin at [default: 1]
      --cache-buster=<alt>    Add a unique-valued `cacheBuster` param to every URL. Providing an <alt> value of "both" will generate results for both versions: with and without cache busting.
      --varnish-buster=<alt>  Add a unique-valued `varnishBuster` param to every URL. Providing an <alt> value of "both" will generate results for both versions: with and without varnish busting.
      --dry-run               Print list of requests which would be made
  DOCOPT
  count = opt['--count'].to_i
  start = opt['--start'].to_i
  # Each positional argument is "env=url"; split only on the first '='.
  pairs = opt['<env=url>'].map { |p| p.split('=', 2) }
  cache_buster = opt['--cache-buster']
  varnish_buster = opt['--varnish-buster']
  runs = generate_runs start, count, pairs
  runs = add_cache_buster(runs, cache_buster == 'both') if cache_buster
  runs = add_varnish_buster(runs, varnish_buster == 'both') if varnish_buster
  if opt['--dry-run']
    runs.each do |run|
      pp({ url: run.url, env: run.env })
    end
    exit 0
  end
  run_scan pairs, runs
rescue Docopt::Exit => e
  puts e.message
  exit 1
end
# Build the full schedule: `count` passes, each visiting every env/url pair
# once in a freshly shuffled order.
def generate_runs(start, count, pairs)
  Log.info "Running #{count} times starting at #{start}"
  (0...count).flat_map do |pass|
    pairs.shuffle.map do |env, url|
      Run.new(uri: URI(url), props: [env], number: start + pass - 1)
    end
  end
end
# Warm up every environment first (so caches and app servers are hot), then
# execute the pre-shuffled runs with a short pause between them.
def run_scan(pairs, requests)
  # Try up to ten times, waiting up to four minutes for a reply from the server
  pairs.shuffle.each do |_env, url|
    Log.info "Checking if #{url} is ready"
    ExecHelpers.run 'curl', '--max-time', (4 * 60).to_s, '--retry', '10', url.to_s, out: '/dev/null'
    # A throwaway Lighthouse run to warm the page itself.
    lighthouse url, 'warmup.json'
  end
  sleep 5
  requests.each do |run|
    Log.info "Testing #{run.env} at #{run.url}"
    lighthouse run.url, "#{run.env}.json"
    sleep 5
  end
end
def add_cache_buster(runs, both)
add_buster_to_runs 'cacheBuster', 'Bust', runs, both
end
def add_varnish_buster(runs, both)
add_buster_to_runs 'varnishBuster', 'NoVarnish', runs, both
end
# Expand each run with a buster variant; keep the unbusted original too
# when +both+ is true.
def add_buster_to_runs(name, prop, runs, both)
  runs.flat_map do |run|
    variant = add_buster_param(name, prop, run)
    both ? [run, variant] : [variant]
  end
end
# Return a copy of +run+ whose URI carries an extra `name=<bust>` query
# parameter and whose props are tagged with +prop+.
def add_buster_param(name, prop, run)
  extra = "#{name}=<bust>"
  uri = run.uri.clone
  uri.query = uri.query ? "#{uri.query}&#{extra}" : extra
  Run.new uri: uri, props: run.props + [prop], number: run.number
end
main
<file_sep>datadog==0.45.0
python-dotenv==1.0.0
<file_sep>require 'date'
require 'open-uri'
require 'json'
require 'scriptster'
require_relative 'config'
##
# Computes a unique, descriptive directory name for one scan
# (hostname + timestamp + deployed commit + branch) and creates
# per-framework subdirectories beneath it.
class ScanOutput
  attr_reader :scan_dir

  def initialize(output_dir, hostname)
    @output_dir = output_dir
    scan_time = DateTime.now.strftime('%Y%m%d-%H%M%S')
    @scan_name = Dir.chdir(File.dirname(__FILE__)) do
      # generate_deploy_url comes from config.rb (required by this file) —
      # presumably it builds the site's deployment-info endpoint URL.
      deploy_url = generate_deploy_url(hostname)
      Scriptster.log :debug, "Fetching deployment information from '#{deploy_url}'"
      current_hash = URI.open(deploy_url) do |fi|
        blob = JSON.load(fi)
        blob['id']
      end
      # Resolve the deployed commit to a branch name; '/' would break the
      # directory name, so it is replaced with '-'.
      current_branch =
        `git name-rev --name-only #{current_hash}`.strip.gsub(%r{/}, '-')
      branchname = current_branch == '' ? 'missing' : current_branch
      "#{hostname}-#{scan_time}-#{current_hash}-#{branchname}"
    end
    Scriptster.log :debug, "Using name for scan: #{@scan_name}"
    @scan_dir = @output_dir / @scan_name
  end

  # Create (if needed) and return the output subdirectory for +framework+.
  def get_framework_dir(framework)
    framework_subdir = @scan_dir / framework
    Scriptster.log :debug, "Creating framework dir #{framework_subdir}"
    framework_subdir.mkpath
    framework_subdir
  end
end
<file_sep>#' Read a set of Lighthouse report files as a data frame
#' @param files A character vector of basenames for files.
#' @param count The number of files of each basename.
#' @param bust Whether to also generate the names for cache-busted results
#'
#' @importFrom magrittr %>%
#' @importFrom rlang .data
#' @export
read_lighthouse_json <- function(files, count, bust=FALSE) {
  # One element per (basename, run number, bust-suffix) combination.
  patterns <- tibble::tibble(
    files=purrr::cross3(files, seq(count), if(bust) c("", "Bust") else c(""))
  )
  # Derive the on-disk filename (e.g. "www_1.json" / "wwwBust_1.json") and a
  # treatment label, then read each report from disk.
  data <- patterns %>%
    hoist_parts(bust) %>%
    dplyr::mutate(
      filename=purrr::map_chr(files, ~stringr::str_c(.x[[1]], .x[[3]], "_", .x[[2]], ".json")),
      treatment=purrr::map_chr(files, ~stringr::str_c(.x[[1]], .x[[3]])),
    ) %>%
    dplyr::mutate(files=purrr::map(.data$filename, ~jsonlite::read_json(.x)))
  # Pull out the audits of interest, then spread the scalar metrics into one
  # column per metric name.
  data %>%
    dplyr::mutate(
      metrics=purrr::map(files, ~.x$audits$metrics$details$items[[1]] %>% tibble::enframe()),
      network=purrr::map(files, ~.x$audits[["network-requests"]]$details$items %>% tibble::enframe()),
      frames=purrr::map(files, ~.x$audits[["screenshot-thumbnails"]]$details$items %>% tibble::enframe())
    ) %>%
    tidyr::unnest(.data$metrics) %>%
    dplyr::mutate(value=purrr::flatten_dbl(.data$value)) %>%
    tidyr::pivot_wider(names_from = .data$name, values_from=.data$value)
}
# Split the cross3() triples in `files` into their own columns.
# When `bust` (the flag) is TRUE a third, bust-suffix element exists, and
# that suffix column is converted into a logical `bust` column.
hoist_parts <- function(patterns, bust) {
  if(bust) {
    patterns %>%
      tidyr::hoist(.data$files, env=1, run=2, bust=3, .remove=FALSE) %>%
      # Fixed: `mutate` was unqualified, which fails inside a package that
      # never attaches dplyr; namespace it like every other dplyr call here.
      dplyr::mutate(bust=.data$bust == "Bust")
  } else {
    patterns %>% tidyr::hoist(.data$files, env=1, run=2, .remove=FALSE)
  }
}
<file_sep>#' Analyze a tibble of Lighthouse data
#' @param specs A tibble of data, such as the one loaded by `read_lighthouse_json`.
#' @param variable The name of a column from `specs` to analyze
#'
#' @importFrom magrittr %>%
#' @importFrom rlang .data
#' @export
analyze_change <- function(specs, variable) {
  variable <- rlang::ensym(variable)
  # Model the chosen metric as a function of treatment group.
  formula <- rlang::new_formula(variable, quote(treatment))
  plot <- specs %>%
    ggplot2::ggplot(ggplot2::aes(!!variable, .data$treatment)) + ggplot2::geom_boxplot() + ggplot2::expand_limits(y=0)
  print(plot)
  # Two-sample t-test between the treatments...
  test <- stats::t.test(formula, data=specs)
  sd_lcp <- effectsize::sd_pooled(formula, data=specs)
  # ...and the sample size needed to detect the observed difference at
  # sig.level = .05 with power = .95.
  power <- stats::power.t.test(NULL, diff(test$estimate)[[1]], sd_lcp, .05, .95,
    type = "two.sample",
    alternative = "two.sided",
  )
  print(test)
  print(power)
}
<file_sep>export interface LighthouseReport {
fetchTime: string;
audits: Audits;
categories: Categories;
}
interface Categories {
performance: Score;
}
interface Audits {}
interface Score {
score: number;
}
export function getUrl(path) {
return path
}
<file_sep>require 'English'
require 'logger'
require 'pathname'
require 'timeout'
##
# Wrapper for invoking the Dockerised `lighthouse` image with retries.
module Lighthouse
  Log = Logger.new($stderr, progname: 'Lighthouse')
  # Directory inside the container where +bind_path+ is mounted.
  InternalPath = Pathname('/var/lighthouse/')

  # Run Lighthouse with up to five attempts; returns true on success,
  # false once all attempts fail. Output lands in +bind_path+/+target+.
  def self.run(
    bind_path,
    target,
    *args,
    chrome_flags: '--headless --no-sandbox'
  )
    5.times do
      # Exit the `lighthouse` function if the command succeeds
      return true if run_once(
        bind_path,
        target,
        *args,
        "--chrome-flags=#{chrome_flags}"
      )
      Log.error "Error running command. Exit status was #{$CHILD_STATUS}."
    end
    false
  end

  # Single `docker run` attempt, bounded by ExecHelpers' timeout.
  def self.run_once(
    bind_path,
    target,
    *args
  )
    docker_args = [
      'docker', 'run',
      '--rm',
      '-v', "#{bind_path}:#{InternalPath}:z",
      'lighthouse',
      '--output-path', (InternalPath / target).to_path,
      *args
    ]
    Log.info docker_args.join(' ')
    ExecHelpers.limited_system(*docker_args)
  end
end
##
# Small wrappers around Kernel#system / Process.spawn used by the scan
# scripts.
module ExecHelpers
  Log = Logger.new($stderr, progname: 'Lighthouse')

  # Run a command, raising (with the exit status) if it fails.
  def self.run(*args)
    raise "Command exited with exit status of #{$CHILD_STATUS}" unless system(*args)
  end

  # For testing, this is a variable
  @lighthouse_timeout = 4 * 60

  def self.lighthouse_timeout=(val)
    @lighthouse_timeout = val
  end

  # Run a command but SIGTERM it after the configured timeout.
  # Returns true when the command finished successfully, false on failure
  # or timeout.
  def self.limited_system(*args)
    pid = spawn(*args)
    begin
      Timeout.timeout(@lighthouse_timeout) do
        _pid, status = Process.wait2(pid)
        return status.success?
      end
    rescue Timeout::Error
      Log.warn "Timeout; killing pid #{pid}"
      begin
        Process.kill(15, pid)
        # Fixed: reap the killed child so it doesn't linger as a zombie.
        Process.wait(pid)
      rescue Errno::ESRCH, Errno::ECHILD
        # Process already exited and/or was reaped; nothing to do.
      end
      false
    end
  end
end
<file_sep>import datadog
import os, time
class DataDogApiClient:
    """Thin wrapper around the datadog client for submitting gauge metrics."""

    def __init__(self):
        # Credentials come from the environment (.env loaded by callers).
        options = {
            "api_key": os.getenv('DD_API_KEY'),
            "app_key": os.getenv('DD_APPLICATION_KEY')
        }
        datadog.initialize(**options)

    def submit_metric(self, metric_name, value, tags=None):
        """Send one gauge point (timestamped now) to Datadog.

        tags: optional list of "key:value" strings.
        Fixed: the default was a mutable [] shared between calls; use the
        None sentinel instead.
        """
        metric = [
            {
                'metric': metric_name,
                'type': 'gauge',
                'points': [
                    (int(time.time()), value)
                ],
                'tags': tags if tags is not None else []
            }
        ]
        datadog.api.Metric.send(metrics=metric)
if __name__ == '__main__':
    # Smoke test: push a few hard-coded metrics for www.ifixit.com.
    tags = {'url': 'https://www.ifixit.com', 'test': 'test'}
    tags = [f'{k}:{v}' for k, v in tags.items()]
    # One client is enough — re-creating it per metric only re-runs
    # datadog.initialize() for no benefit.
    dd_client = DataDogApiClient()
    dd_client.submit_metric('lighthouse.cumulative_layout_shift', 0.5, tags)
    dd_client.submit_metric('lighthouse.largest_contentful_paint', 0.3, tags)
    dd_client.submit_metric('lighthouse.total_blocking_time', 1.231, tags)
dd_client = DataDogApiClient()
dd_client.submit_metric('lighthouse.page_size', 155129, tags)<file_sep>#!/usr/bin/env ruby
# frozen_string_literal: true
require 'json'
require 'pathname'
require 'date'
require 'optparse'
##
# Tools to convert a Lighthouse report into Prometheus metrics format
class LighthousePrometheus
Metric = Struct.new(:url, :timestamp, :name, :value, :help, keyword_init: true)
AUDITS = %w[
first-contentful-paint
speed-index
largest-contentful-paint
interactive
total-blocking-time
cumulative-layout-shift
].freeze
def main(lighthouse_run)
pages = lighthouse_run.glob('*/*.json')
metrics = pages.flat_map { |page| to_metrics page }
lines = metrics.map { |metric| to_prom_entry metric }
print lines.join("\n")
end
private
def to_metrics(page)
data = JSON.parse page.read
url = data['requestedUrl']
timestamp = DateTime.iso8601 data['fetchTime']
page_metrics = [
perf_score_metric(url, timestamp, data),
total_weight_metric(url, timestamp, data)
]
page_metrics + mime_metrics(url, timestamp, data) +
audit_metrics(url, timestamp, data)
end
def perf_score_metric(url, timestamp, data)
Metric.new(
url: url,
timestamp: timestamp,
name: "lighthouse_performance_score",
value: data['categories']['performance']['score'].to_f,
help: 'The overall performance score'
)
end
def total_weight_metric(url, timestamp, data)
Metric.new(
url: url,
timestamp: timestamp,
name: "lighthouse_total_page_weight",
value: data['audits']['total-byte-weight']['numericValue'].to_f,
help: 'The total number of bytes to download for the page'
)
end
def mime_metrics(url, timestamp, data)
requests = data['audits']['network-requests']['details']['items']
mime_metrics = mime_metrics_group url, timestamp, requests, 'total'
dcl_time = data['audits']['metrics']['details']['items'][0]\
['observedDomContentLoaded']
pre_dcl_requests = requests.select { |r| r['endTime'] <= dcl_time }
pre_dcl_mime_metrics = mime_metrics_group(url, timestamp, pre_dcl_requests,
'pre-DCL')
post_dcl_requests = requests.select { |r| r['endTime'] > dcl_time }
post_dcl_mime_metrics = mime_metrics_group(url, timestamp,
post_dcl_requests, 'post-DCL')
mime_metrics + pre_dcl_mime_metrics + post_dcl_mime_metrics
end
def audit_metrics(url, timestamp, data)
AUDITS.map do |audit|
result = data['audits'][audit] or raise "No #{audit} key"
Metric.new(
url: url,
timestamp: timestamp,
name: 'lighthouse_' + audit.gsub('-', '_'),
value: result['score'].to_f,
help: result['title']
)
end
end
def mime_metrics_group(url, timestamp, network_requests, note)
normalized_mime_types = network_requests.map { |r| normalize_mime(r['mimeType']) }.uniq
normalized_mime_types.flat_map do |normalized_mime|
%w[transferSize resourceSize].map do |byte_type|
mime_weight_metric(url, timestamp, network_requests, normalized_mime, byte_type, note)
end
end
end
def normalize_mime(mime_type)
case mime_type
when /javascript/i
'javascript'
when /css/i
'css'
when /image/i
'image'
when /video/i
'video'
when /font/i
'font'
when /html/i
'html'
else
mime_type
end
end
def mime_weight_metric(url, timestamp, network_requests, normalized_mime, byte_type, note)
matches = network_requests.select{|i| normalize_mime(i['mimeType']) == normalized_mime }
total = matches.sum{|i| i[byte_type]}
Metric.new(
url: url,
timestamp: timestamp,
name: "lighthouse_mime_weight_#{normalized_mime}_#{byte_type}_#{note}",
value: total.to_f,
help: "The number of #{byte_type} bytes of #{normalized_mime} downloaded for the page (#{note})"
)
end
def to_prom_entry(metric)
unixtime = metric.timestamp.to_time.to_i
timestamp = 1000 * unixtime
# Convert metric name to be valid
name = metric.name.gsub(/[^a-zA-Z0-9:_]/, "_")
<<~PROM_METRIC
# HELP #{name} #{metric.help}
# TYPE #{name} gauge
#{name}{url="#{metric.url}"} #{metric.value.to_f}
PROM_METRIC
end
end
# Use `OptionParser` for nice -h/--help handling
OptionParser.new do |opts|
opts.banner = 'Usage: summarize.rb <lighthouse_output_path>'
end.parse!
LighthousePrometheus.new.main Pathname.new(ARGV[0])
<file_sep>FROM node:18-bullseye-slim
WORKDIR /opt/lighthouse
RUN apt-get update --fix-missing && apt-get -y upgrade \
&& apt-get install -y chromium \
&& npm install --global lighthouse@10.3.0 \
&& apt-get clean
ENV CHROME_PATH=/usr/bin/chromium
ENV CHROMIUM_FLAGS="--headless --no-sandbox --disable-dev-shm-usage"
ENTRYPOINT ["lighthouse"]<file_sep>target=$(file <target)
.PHONY: deploy
deploy: build
rsync --chmod=D0755,F0644 --perms -rP build/ $(target)
.PHONY: build
build:
yarn build
<file_sep>import { Differential } from "./data_fetch";
export interface RenderInterface {
data: Differential[];
}
export interface ReportSequenceInterface {
data: Differential[];
names: Array<string>;
}
<file_sep>import json, os
from lighthouse import Lighthouse
from datadog_api import DataDogApiClient
from urllib.parse import urlparse
from dotenv import load_dotenv
load_dotenv()
def retrieve_values_for_audits(json_results, audits):
    """Map each requested audit name to its numeric value.

    json_results: parsed Lighthouse JSON report.
    audits: iterable of audit names to extract.
    Returns a dict {audit_name: numeric_value}.
    """
    # Dict comprehension replaces the manual build-and-assign loop.
    return {audit: get_audits_value(json_results, audit) for audit in audits}
def get_audits_value(json_results, audit_name):
    """Return the numericValue of one audit, or 0 when it is absent.

    Fixed: the previous unconditional ``.get(...).get(...)`` chain raised
    AttributeError on None whenever the 'audits' section or the audit
    itself was missing from the report.
    """
    audit = (json_results.get('audits') or {}).get(audit_name) or {}
    return audit.get('numericValue') or 0
def send_metrics_to_datadog(metrics, tags=None):
    """Submit each metric as ``lighthouse.<name>`` with the given tags.

    metrics: {metric_name: value} dict.
    tags: optional {key: value} dict, converted to Datadog "k:v" strings.
    Fixed: the default was a mutable {} (shared between calls in Python);
    use the None sentinel instead.
    """
    tags = tags or {}
    tags = [f'{k}:{v}' for k, v in tags.items()]
    dd_client = DataDogApiClient()
    for metric_name, value in metrics.items():
        dd_client.submit_metric(f'lighthouse.{metric_name}', value, tags)
def capture_lighthouse_metrics(page_type, url, audits, lighthouse_options=None):
    """Run Lighthouse for one URL and push the requested audits to Datadog.

    page_type: label attached as a Datadog tag.
    url: page to test.
    audits: audit names whose numericValue should be reported.
    lighthouse_options: extra Lighthouse CLI flags. Fixed: the default was
        a mutable [] — replaced with the None sentinel.
    """
    lighthouse_options = lighthouse_options or []
    lighthouse = Lighthouse()
    # The desktop preset flag is the only desktop marker; default is mobile.
    form_factor = 'mobile'
    if '--preset=desktop' in lighthouse_options:
        form_factor = 'desktop'
    print(f'Running lighthouse for {url} with {form_factor} options\n')
    json_results = lighthouse.run(url, lighthouse_options)
    print(f'Finished running lighthouse for {url}\n')
    print(f'Parsing lighthouse results for {url}\n')
    metrics = retrieve_values_for_audits(json_results, audits)
    print(f'Sending metrics to datadog for {url}')
    send_metrics_to_datadog(metrics, tags={
        'url': url,
        'page_type': page_type,
        'lighthouse_version': json_results.get('lighthouseVersion'),
        'form_factor': form_factor,
    })
    print(f'Finished sending metrics to datadog for {url}\n')
    print('=' * 80)
def main():
    """Load URL and metric config from the project root, then run Lighthouse
    (desktop + mobile) on every configured URL, pushing results to Datadog."""
    scripts_path = os.path.abspath(__file__)
    project_root = os.path.dirname(os.path.dirname(scripts_path))

    urls_path = os.path.join(project_root, 'urls.json')
    with open(urls_path) as f:
        urls = json.load(f)

    metrics_config_path = os.path.join(project_root, 'metrics-config.json')
    with open(metrics_config_path) as f:
        metrics_config = json.load(f)
    audits = metrics_config.get('audits')

    for page_type, url_list in urls.items():
        print(f'Running Lighthouse for {page_type} pages\n')
        for url in url_list:
            try:
                capture_lighthouse_metrics(page_type, url, audits, ['--preset=desktop'])
                capture_lighthouse_metrics(page_type, url, audits, ['--form-factor=mobile'])
            except Exception as e:
                error_message = f'Failed to run lighthouse for {url}: {e}'
                print(error_message)
                # Fixed: chain the original exception so its traceback is
                # preserved instead of being replaced by a bare Exception.
                raise Exception(error_message) from e
        print(f'Finished running lighthouse for all urls of {page_type} pages\n')
    print('Finished running lighthouse for all urls\n')
set -exuo pipefail
docker build -t lighthouse lighthouse-docker
gem install bundler
bundle install
pip install -r requirements.txt<file_sep>#!/usr/bin/env ruby
require 'pathname'
require 'docopt'
require_relative '../lib/lighthouse_runner'
# Parse CLI arguments (docopt), then hand off to LighthouseRunner.
begin
  args = Docopt.docopt <<~DOCOPT
    Run an automated Lighthouse scan. Emit results into a target directory.

    Usage:
      #{File.basename __FILE__} [--hostname=<hostname>] <config_file> <output_dir> [-h]

    Options:
      -h, --help             Show this message.
      --hostname=<hostname>  Replace the hostname specified in the config file [default: www.ifixit.com]
                             with the provided hostname
  DOCOPT
rescue Docopt::Exit => e
  puts e.message
  exit
end

# Positional arguments become Pathnames; the hostname (defaulted above)
# replaces whatever host each configured URL specifies.
config_file_path = Pathname.new args['<config_file>']
output_dir = Pathname.new args['<output_dir>']
hostname = args['--hostname']

LighthouseRunner.new(config_file_path, output_dir, hostname).run
<file_sep>import json
import subprocess
class Lighthouse:
    """Runs Google Lighthouse through the local `lighthouse` Docker image."""

    IMAGE_NAME = 'lighthouse'

    def __init__(self):
        # Check if the lighthouse image is locally saved
        result = subprocess.run(['docker', 'images', self.IMAGE_NAME], stdout=subprocess.PIPE)
        if result.returncode != 0 or self.IMAGE_NAME not in result.stdout.decode():
            raise Exception(f'{self.IMAGE_NAME} image not found.\nTry running: `docker build -t {self.IMAGE_NAME} ./lighthouse-docker`')
        # Base `docker run` invocation; the URL and per-run flags are
        # appended in run().
        self.command = [
            'docker', 'run',
            '--rm',
            '--name', 'lighthouse',
            self.IMAGE_NAME
        ]
        # Always-on Lighthouse flags: machine-readable, quiet, no telemetry.
        self.lighthouse_options = [
            '--no-enable-error-reporting',
            '--output=json',
            '--quiet',
        ]

    def run(self, url, options=None):
        """Run Lighthouse against `url` and return the parsed JSON report.

        options: extra CLI flags (e.g. ['--preset=desktop']). Fixed: the
            default was a mutable [] — replaced with the None sentinel.
        Raises Exception when the Lighthouse container exits non-zero.
        """
        options = (options or []) + self.lighthouse_options
        command = self.command + [url] + options
        result = subprocess.run(command, stdout=subprocess.PIPE)
        if result.returncode != 0:
            # Surface the container's output before failing.
            print(result.stdout.decode('utf-8'))
            raise Exception('Lighthouse failed')
        return json.loads(result.stdout)
if __name__ == '__main__':
lighthouse = Lighthouse()
json_result = lighthouse.run('https://www.ifixit.com', ['--preset=desktop'])
# Save the json_result to a file for debugging
with open('lighthouse.json', 'w') as f:
json.dump(json_result, f, indent=4)<file_sep>#!/usr/bin/env ruby
require 'docopt'
##
# Launches an RStudio container (rocker/verse by default) with the current
# working directory mounted as the rstudio user's home.
class DockerRunner
  def initialize(image: 'rocker/verse')
    @image = image
  end

  # Run the container, publishing +port+ (a "host:container" spec) and
  # passing any extra `docker run` arguments through.
  def docker(port, *args)
    system('docker', 'run', '--rm', '-p', port, '-v',
           "#{Dir.getwd}:/home/rstudio", *args, @image) or raise 'Docker run error'
  end
end
# Parse CLI options, then start RStudio either password-protected or with
# auth disabled (opening a browser tab in the latter case).
opts = Docopt.docopt <<~DOCS
  Usage:
    R-docker [options]

  Options:
    --use-pass       Use a randomly-generated password
    --port=<port>    Expose on specified port [default: 8787]
    --image=<image>  Use this Docker image [default: rocker/verse]
DOCS

port = opts['--port']
runner = DockerRunner.new(
  image: opts['--image']
)

if opts['--use-pass']
  # NOTE(review): `<PASSWORD>` looks like a redacted shell command (the
  # backticks execute it, e.g. `pwgen`) — restore the real generator.
  password = `<PASSWORD>`
  puts "Password\n#{password}"
  runner.docker("#{port}:8787", "-ePASSWORD=#{password}")
else
  # Open the browser shortly after RStudio has had time to start listening.
  Thread.new do
    sleep 2
    system 'xdg-open', "http://localhost:#{port}"
  end
  runner.docker("127.0.0.1:#{port}:8787", '-eDISABLE_AUTH=true')
end
<file_sep># Lighthouse Page Analysis Tools
This repo contains tooling to analyze the performance of a webpage using [Lighthouse](https://github.com/GoogleChrome/lighthouse).
It provides a Docker container which wraps up Lighthouse, a couple scripts for running the Dockerized Lighthouse against various pages, and tooling to analyze the results in R.
## Setup
```sh
./install.sh
```
should install the Ruby and Python dependencies and build the Docker container.
## Usage
The Docker container built by the `install.sh` script is tagged with `lighthouse`. You can use it in place of an `npm` install of lighthouse by doing `docker run --rm lighthouse` and passing all the same arguments you'd pass to `npx lighthouse`. The container has the advantage of not being dependent on your system environment for its functionality.
### Lighthouse Performance Analysis
For simplicity, the following examples all assume the `bin` directory is in the user's `PATH`:
```sh
export PATH="$(readlink -e bin):$PATH"
```
Use the `run` script to run a series of Lighthouse runs against a group of pages:
```sh
mkdir -p analysis/results
cd analysis/results
run 'www=https://your-site.com' 'dev=https://your-dev-version:8000'
```
By default it runs three Lighthouse runs against each configuration (three against `your-site.com` and three against `your-dev-version:8000`) and outputs the results to sequentially-numbered JSON files prefixed with the configuration name (`www` and `dev` in this case, so we'd expect files named things like `www_1.json`, `dev_1.json`, etc.). See the help for more on the options it accepts.
Once you've got your runs, boot `RStudio` using the `R-docker` script:
```sh
cd ..
R-docker
```
This will start a Docker container running an instance of RStudio which has access to all the files in the current working directory.
In RStudio, open the `analysis.R` script. Change the `results` line to point at your results directory (if you're following along with our example, it should already be correct). Change the vector of names being passed to `webperf::read_lighthouse_json` to include all the configurations you gathered results for. If needed, change the `3` to be the number of runs you ran (the default for `bin/run` is three, so if you're using the defaults you can leave it alone).
Run all the lines of `analysis.R` through line 15. Then run any of the lines that call `webperf::analyze_change` to analyze the change in the provided metric across your configurations.
### Lighthouse Scanning
The `bin/scan` script accepts a config file which specifies groups of URLs to run Lighthouse against. It runs Lighthouse against all of them and creates a directory structure of the outputs which reflects the structure of the input file. A demo config file is provided in `scan_config.json`. All the keys are arbitrary; the two-level structure will be replicated in the output directory structure. The inner values are the URLs to scan.
<file_sep>require 'json'
require 'logger'
require 'uri'
require_relative 'scan_output'
require_relative 'utils'
##
# Sets up runs of Lighthouse on multiple URLs
##
# Sets up runs of Lighthouse on multiple URLs
class LighthouseRunner
  # Fixed: Logger.new's second positional argument is the log-rotation
  # shift_age, not the program name; pass progname: as a keyword, matching
  # the other loggers in this project.
  Log = Logger.new($stderr, progname: 'LighthouseRunner')

  def initialize(config_file_path, output_dir, hostname)
    @config_file_path = config_file_path
    @hostname = hostname
    @output = ScanOutput.new(output_dir, hostname)
  end

  # Scan every configured page, then write an index of what was produced.
  def run
    Log.info('Running Lighthouse scan')
    pages = pages_from_config(@config_file_path)
    pages.each do |page|
      run_scan page
    end
    generate_index pages
  end

  # Run Lighthouse (HTML + JSON output) for one page; abort the whole
  # process with EX_SOFTWARE (70) when it ultimately fails.
  def run_scan(page)
    framework_dir = @output.get_framework_dir(page.framework_name)
    exit(70) unless Lighthouse.run(
      framework_dir.realpath.to_path,
      page.name,
      page.url.to_s,
      '--output', 'html',
      '--output', 'json'
    )
  end

  # Flatten the two-level config {framework => {name => url}} into Pages.
  def pages_from_config(config_file_path)
    config_contents = JSON.load(config_file_path)
    Log.debug("Read config file: '#{config_file_path}'")
    config_contents.flat_map do |framework, pages|
      pages.map do |name, url|
        Page.new framework, name, get_uri(url)
      end
    end
  end

  # Parse the URL, overriding its host with the CLI-provided hostname.
  def get_uri(url)
    uri = URI.parse url
    uri.host = @hostname if @hostname
    uri
  end

  # Write index.json mapping each framework to its page names.
  def generate_index(pages)
    framework_groups = pages.group_by(&:framework_name)
    index = framework_groups.transform_values { |group| group.map(&:name) }
    write_index index
  end

  def write_index(index)
    Log.info('Generating index file')
    File.write(@output.scan_dir / 'index.json', JSON.dump(index))
  end
end
Page = Struct.new :framework_name, :name, :url
<file_sep>require 'utils'
# Behavioural spec for ExecHelpers.limited_system.
RSpec.describe ExecHelpers, '#limited_system' do
  it 'returns false if the command fails' do
    expect(ExecHelpers.limited_system('false')).to be false
  end

  it 'returns true if the command succeeds' do
    expect(ExecHelpers.limited_system('true')).to be true
  end

  it 'times out slow commands' do
    # Silence the expected timeout warning and shrink the timeout so the
    # test completes quickly.
    ExecHelpers::Log.level = Logger::ERROR
    ExecHelpers.lighthouse_timeout = 0.1
    expect(ExecHelpers.limited_system('sleep', '10')).to be false
  end
end
<file_sep>// Colors from https://www.nature.com/articles/nmeth.1618
export function scoreColor(value: number): string {
const safeRed = "rgb(213, 94, 0)";
const safeGreen = "rgb(53, 155, 115)";
const safeYellow = "rgb(230, 159, 0)";
if (value < 50) {
return safeRed;
}
if (value < 90) {
return safeYellow;
}
return safeGreen;
}
// Colors from https://www.nature.com/articles/nmeth.1618
export function diffColor(value: number): string {
const opacity = Math.abs(value / 100);
if (value < -5) {
return `radial-gradient(closest-side, rgba(213, 94, 0, ${opacity}), white)`;
}
if (value > 5) {
return `radial-gradient(closest-side, rgba(0, 158, 115, ${opacity}), white)`;
}
return "white";
}
| 59077e4c35419d3edb47cc0f39f99ebd133c6702 | [
"Ruby",
"Markdown",
"Makefile",
"Python",
"Text",
"TypeScript",
"R",
"Dockerfile",
"Shell"
] | 23 | R | iFixit/lighthouse-docker | 99e5548030b08507f363f80763fc0e09d4defa09 | c3cab7b1d51f24ed8b697aba0e6f4e238b378e6b |
refs/heads/master | <repo_name>kanavsethi19/hackathon<file_sep>/src/main/java/uk/co/vodafone/hackathon/service/ESIndexing.java
package uk.co.vodafone.hackathon.service;
import java.io.IOException;
import java.util.Map;
/**
 * Contract for loading documents into Elasticsearch and for matching stored
 * user "requirement" queries against indexed documents.
 */
public interface ESIndexing {
    /** Bulk-load the source data set into the ES index. */
    public void loadDataToES();

    /** Match current document(s) against every stored user query. */
    public void checkDocumentAgainstQueries() throws IOException;

    /** Store one user's requirement (plus contact details) as a query document. */
    public void indexQuery(String userName, String phoneNumber, String emaildId, String id, String requirement) throws IOException;

    /** Create the backing index and return the ES response/name. */
    public String createIndex() throws IOException;

    /** @return whether an index called {@code name} already exists. */
    public Boolean checkIfIndexExists(String name) throws Exception;

    /**
     * Parse a free-text requirement into key/value criteria.
     * NOTE(review): the exact output keys depend on the implementation and
     * are not visible from this interface.
     */
    public Map<String, String> traverseRequirement(String requirement);
}
<file_sep>/src/main/java/uk/co/vodafone/hackathon/service/ESQuerying.java
package uk.co.vodafone.hackathon.service;
/**
 * Read-side Elasticsearch operations used by the hackathon service.
 *
 * @author kanav.sethi
 *
 */
public interface ESQuerying {
    /** Fetch documents by a set of document ids (see implementation). */
    public void getDataFromESById(String query);

    /** Fuzzy full-text search over the product-name fields. */
    public void getDataForQueryString(String query);
}
<file_sep>/src/main/java/uk/co/vodafone/hackathon/service/ESQueryingImpl.java
package uk.co.vodafone.hackathon.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
/**
 * {@link ESQuerying} implementation backed by the Elasticsearch high-level
 * REST client. Both methods log their hits rather than returning them.
 *
 * @author kanav.sethi
 */
@Service
public class ESQueryingImpl implements ESQuerying {

    private static final Logger logger = LoggerFactory.getLogger(ESQueryingImpl.class);

    /** Shared Elasticsearch REST client. */
    @Autowired
    private RestHighLevelClient client;

    /** Supplies the index and type names from application properties. */
    @Autowired
    private Environment environment;

    /**
     * Runs a fuzzy multi-match query over the product name fields and logs
     * every matching document.
     *
     * @param query free-text search string
     */
    @Override
    public void getDataForQueryString(String query) {
        String indexName = environment.getProperty("Index");
        String type = environment.getProperty("Type");
        try {
            // Match either field; AND + AUTO fuzziness tolerates small typos.
            String[] fieldNames = {"productName", "displayName"};
            QueryBuilder builder = QueryBuilders.multiMatchQuery(query, fieldNames).fuzziness("AUTO").operator(Operator.AND);
            SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
            sourceBuilder.query(builder);
            sourceBuilder.from(0);
            sourceBuilder.size(10);
            SearchRequest request = new SearchRequest(indexName);
            request.types(type);
            request.source(sourceBuilder);
            SearchResponse searchResponse = client.search(request);
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                logger.info(hit.getSourceAsString());
            }
        }
        catch(Exception e) {
            // Log through SLF4J instead of printStackTrace so failures reach the app logs.
            logger.error("Query-string search failed for '{}'", query, e);
        }
    }

    /**
     * Fetches a fixed set of bundle documents by id, restricting the returned
     * source to the id and device-specific pricing fields, and logs each hit.
     *
     * @param query currently unused; the bundle ids are hard-coded
     */
    @Override
    public void getDataFromESById(String query) {
        String indexName = environment.getProperty("Index");
        String type = environment.getProperty("Type");
        try {
            // NOTE(review): ids are hard-coded and `query` is ignored — confirm intent.
            List<String> bundleIds = Arrays.asList("opt_product_200327");
            String[] arr = bundleIds.toArray(new String[bundleIds.size()]);
            logger.info("Total Bundles in CSV FILE: " + arr.length);
            QueryBuilder builder = QueryBuilders.idsQuery().addIds(arr);
            SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
            sourceBuilder.query(builder);
            sourceBuilder.from(0);
            // Only return the id and pricing sub-document, not the full source.
            String[] includeFields = new String[] { "id", "deviceSpecificPricing.*" };
            sourceBuilder.fetchSource(includeFields, null);
            sourceBuilder.size(bundleIds.size());
            SearchRequest request = new SearchRequest(indexName);
            request.types(type);
            request.source(sourceBuilder);
            SearchResponse searchResponse = client.search(request);
            logger.info("Total Documents in response from elasticsearch: " + searchResponse.getHits().totalHits);
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                logger.info(hit.getSourceAsString());
            }
        }
        catch(Exception e) {
            logger.error("Search by id failed", e);
        }
    }
}
| 37f979ef8f28a3d95a7e2ab41e3bc28ae055e24c | [
"Java"
] | 3 | Java | kanavsethi19/hackathon | 6b27e53855c855f36e9cb0ce67d41cb61c86daa9 | 04420f674222b9bd4529f53c6ac42788bd28a58c |
refs/heads/master | <repo_name>codingfishman/anyproxy-package-demo<file_sep>/src/rule/rule.js
/**
 * AnyProxy rule used by the demo plugin. Attaches `pluginData` to every
 * request/response so the plugin's web UI can display it in the record
 * detail panel.
 */
module.exports = {
  summary: 'The rule to demo AnyProxy plugin',

  // Front-end assets AnyProxy should load for this plugin's detail view.
  getWebFiles() {
    return [
      './web.js',
      './web.css'
    ]
  },

  // Intercept (MITM) every HTTPS request so the hooks below can run.
  *beforeDealHttpsRequest(requestDetail) {
    return true;
  },

  // Tag the outgoing request with demo plugin data.
  *beforeSendRequest(requestDetail) {
    requestDetail.pluginData = {
      requestPluginData: 'The data you set in before sending request'
    }
    return requestDetail;
  },

  /**
   * Main entry point for the response phase: tags the response with demo
   * plugin data before it is sent back to the client.
   * @param requestDetail
   * @param responseDetail
   */
  *beforeSendResponse(requestDetail, responseDetail) {
    responseDetail.pluginData = {
      responsePluginData: 'The data you set in before sending response'
    }
    return responseDetail;
  }
};
<file_sep>/src/web/app.jsx
import React, { PropTypes } from 'react';
const Style = require('./app.less');
/**
 * Detail panel rendered inside the AnyProxy web UI for a single traffic
 * record. Shows the record URL plus the `pluginData` attached by the
 * companion rule (beforeSendRequest / beforeSendResponse).
 */
class App extends React.Component {
  constructor() {
    super();
    this.state = {
    }
  }

  // `record` is the traffic record object injected by AnyProxy's detail view.
  static propTypes = {
    record: PropTypes.object
  }

  render() {
    // Default to empty objects so the panel renders safely before data arrives.
    const { record = {} } = this.props;
    const { pluginData = {} } = record;
    return (
      <div className={Style.wrapper}>
        <div className={Style.detailItem}>
          <div className={Style.label} >URL:</div>
          <div className={Style.content} >{' ' + record.protocol}://{record.host + record.path} </div>
        </div>
        <div className={Style.detailItem}>
          <div className={Style.label} >Request PluginData:</div>
          <div className={Style.content} >{pluginData.requestPluginData} </div>
        </div>
        <div className={Style.detailItem}>
          <div className={Style.label} >Response PluginData:</div>
          <div className={Style.content} >{pluginData.responsePluginData} </div>
        </div>
      </div>
    )
  }
}
export default App;
<file_sep>/README.md
# Intro
This is a starter of AnyProxy Plugin, it contains both the rule and a web folder, which helps to create the UI you need when handler your rule.
The *web* part is based on React, but you are free to try any other frameworks which a purely front-end rendering, such as Vue and Angular.<file_sep>/webpack.config.js
const webpack = require('webpack');
const path = require('path');
const autoprefixer = require('autoprefixer');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const UglifyJSPlugin = require('uglifyjs-webpack-plugin');
const CopyWebpackPlugin = require('copy-webpack-plugin');
// Extract all CSS into a single web.css bundle (allChunks also covers
// styles pulled in by async chunks).
const extractCss = new ExtractTextPlugin('web.css', {
  disable: false,
  allChunks: true
});

// a plugin to set the environment
const defineProperty = new webpack.DefinePlugin({
  'process.env': {
    NODE_ENV: JSON.stringify(process.env.NODE_ENV || 'test')
  }
});

const plugins = [
  new webpack.LoaderOptionsPlugin({
    options: {
      postcss: [autoprefixer]
    }
  }),
  extractCss,
  defineProperty,
  // Copy the AnyProxy rule files verbatim into the dist root.
  new CopyWebpackPlugin([{
    from: './src/rule/',
    to: './'
  }], {
    toType: 'dir'
  })
];

// Minify only for production builds.
if (process.env.NODE_ENV === 'production') {
  plugins.push(new UglifyJSPlugin());
}
// Webpack build configuration for the plugin's web UI bundle.
const config = {
  entry: [
    path.join(__dirname, './src/web/index.js')
  ],
  // Everything is emitted into dist/ as web.js (+ web.css via the extract plugin).
  output: {
    path: path.join(__dirname, 'dist'),
    filename: 'web.js'
  },
  resolve: {
    extensions: ['.js', '.jsx', '.json'],
    modules: [
      path.join(__dirname, 'web'),
      path.join(__dirname, 'node_modules')
    ]
  },
  // React is provided globally by the AnyProxy host page, so don't bundle it.
  externals: {
    react: 'React',
    'react-dom': 'ReactDOM'
  },
  module: {
    rules: [
      // Plain JS: transpile with Babel.
      {
        test: /\.js$/,
        exclude: /node_modules/,
        use: [{
          loader: 'babel-loader',
          options: {
            presets: ['es2015', 'stage-0']
          }
        }]
      },
      // JSX: Babel with the React preset plus antd on-demand imports.
      {
        test: /\.jsx$/,
        exclude: /node_modules/,
        use: [{
          loader: 'babel-loader',
          options: {
            presets: ['es2015', 'stage-0', 'react'],
            plugins: ['transform-runtime', ['import', {
              libraryName: 'antd',
              style: 'css'
            }]]
          }
        }]
      },
      // LESS: CSS modules with readable generated class names, then extracted.
      {
        test: /\.less$/,
        use: ExtractTextPlugin.extract({
          fallback: 'style-loader',
          use: [
            {
              loader: 'css-loader',
              options: {
                modules: true,
                localIdentName: '[local]___[hash:base64:5]'
              }
            },
            {
              loader: 'postcss-loader'
            },
            {
              loader: 'less-loader'
            }
          ]
        })
      },
      // Plain CSS: same pipeline as LESS.
      {
        test: /\.css$/,
        use: ExtractTextPlugin.extract({
          fallback: 'style-loader',
          use: [
            {
              loader: 'css-loader',
              options: {
                modules: true,
                localIdentName: '[local]___[hash:base64:5]'
              }
            },
            {
              loader: 'postcss-loader'
            },
            {
              loader: 'less-loader'
            }
          ]
        })
      }
    ]
  },
  plugins: plugins
};

module.exports = config;
<file_sep>/demo.config.js
/*
 * The demo.config.js file will be used as an instruction.
 * When a user is editing the configuration file, they can take this as a reference.
 * AnyProxy will read this file and display it in an easy-to-get way.
 */
module.exports = {
  // URLs whose traffic this plugin should intercept.
  interceptUrls: [
    'http://domain.your.preference.com'
  ]
};
| ccf19cc05dfcb0c81cf7570fd4fd83a10ec56920 | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | codingfishman/anyproxy-package-demo | 0ef096a91fc858c81409c4d557cd615ff82e8193 | bf17e6aba83b73c7dc8236dcc94c1cfb4c7d9867 |
refs/heads/master | <file_sep>{
"name": "markdown-it-sanitizer",
"version": "0.4.3",
"description": "sanitizer for markdown-it.",
"keywords": [
"markdown-it-plugin",
"markdown-it",
"markdown",
"sanitizer"
],
"homepage": "https://github.com/svbergerem/markdown-it-sanitizer",
"repository": {
"type": "git",
"url": "git://github.com/svbergerem/markdown-it-sanitizer.git"
},
"bugs": {
"url": "https://github.com/svbergerem/markdown-it-sanitizer/issues"
},
"license": "MIT",
"main": "index.js",
"scripts": {
"test": "make test"
},
"devDependencies": {
"browserify": "^13.1.0",
"chai": "^3.5.0",
"coveralls": "^2.11.13",
"eslint": "2.11.1",
"istanbul": "^0.4.5",
"markdown-it": "^8.0.0",
"markdown-it-diaspora-mention": "^0.4.0",
"markdown-it-for-inline": "^0.1.1",
"markdown-it-hashtag": "^0.4.0",
"markdown-it-sub": "^1.0.0",
"markdown-it-sup": "^1.0.0",
"markdown-it-testgen": "~0.1.4",
"markdown-it-video": "^0.4.0",
"mocha": "^3.0.2",
"uglify-js": "^2.7.3"
},
"dependencies": {},
"directories": {
"test": "test"
}
}
<file_sep># markdown-it-sanitizer
[](https://travis-ci.org/svbergerem/markdown-it-sanitizer)
[](https://coveralls.io/r/svbergerem/markdown-it-sanitizer?branch=master)
[](https://npmjs.com/package/markdown-it-sanitizer)
> sanitizer plugin for [markdown-it](https://github.com/markdown-it/markdown-it) markdown parser.
## Accepted tags
All tags are parsed case insensitive.
### Balanced
`<b>`, `<blockquote>`, `<code>`, `<em>`, `<h1>`, ..., `<h6>`, `<li>`, `<ol>`, `<ol start="42">`, `<p>`, `<pre>`, `<sub>`, `<sup>`, `<strong>`, `<strike>`, `<ul>`
### Standalone
`<br>`, `<hr>`
### Links
`<a href="http://example.com" title="link">text</a>`
The `title` attribute is optional.
### Images
`<img src="http://example.com" alt="cat" title="image">`
The `alt` and `title` attributes are optional.
## Install
node.js, bower:
```bash
npm install markdown-it-sanitizer --save
bower install markdown-it-sanitizer --save
```
## Use
#### Basic
```js
var md = require('markdown-it')({ html: true })
.use(require('markdown-it-sanitizer'));
md.render('<b>test<p></b>'); // => '<p><b>test</b></p>'
```
#### Advanced
For tags that are not whitelisted, and for tags without a matching opening/closing tag, you can choose whether to remove or escape them. You can also define a class attribute that will be added to image tags. Here is an example with the default values:
```js
var md = require('markdown-it')({ html: true })
.use(require('markdown-it-sanitizer'), {
imageClass: '',
removeUnbalanced: false,
removeUnknown: false
});
// unknown tag
md.render('<u>test</u>'); // => '<p><u>test</u></p>'
// unknown tag with removeUnknown: true
md.render('<u>test</u>'); // => '<p>test</p>'
// unbalanced tags
md.render('<b>test</em>'); // => '<p><b>test</em></p>'
// unbalanced tags with removeUnbalanced: true
md.render('<b>test</em>'); // => '<p>test</p>'
// imageClass: 'img-responsive'
md.render('<img src="http://example.com/image.png" alt="image" title="example">'); // => '<p><img src="http://example.com/image.png" alt="image" title="example" class="img-responsive"></p>'
```
_Differences in the browser._ If you load the script directly into the page, without
package system, the module will add itself globally as `window.markdownitSanitizer`.
## License
[MIT](https://github.com/svbergerem/markdown-it-sanitizer/blob/master/LICENSE)
<file_sep># 0.4.3
* Allow arbitrary attributes for a tags but only keep `href` and `title` attributes.
# 0.4.2
* Allow image and link urls starting with //
# 0.4.1
* Allow arbitrary attributes for image tags but only keep `src`, `alt` and `title` attributes.
# 0.4.0
* Update markdown-it to 5.0.0
# 0.3.2
* Added option imageClass
# 0.3.1
* Remove url-regex dependency
* Fix sanitizer freeze [#3](https://github.com/svbergerem/markdown-it-sanitizer/issues/3)
# 0.3.0
* Use markdown-it 4
# 0.2.2
* Only run balance for HTML input
# 0.2.1
* Refactor sanitizer, should be now one of the last rules
# 0.2.0
* Added options for removing or escaping unbalanced/unknown tags
* Check URL for allowed protocols: http and https for images; http, https, ftp, mailto and xmpp for links
# 0.1.0
* Initial release
<file_sep>'use strict';
var path = require('path');
var generate = require('markdown-it-testgen');
var markdownIt = require('markdown-it'),
inline = require('markdown-it-for-inline'),
sub = require('markdown-it-sub'),
sup = require('markdown-it-sup'),
hashtag = require('markdown-it-hashtag'),
mention = require('markdown-it-diaspora-mention');
describe('markdown-it-sanitizer', function () {
var md;
beforeEach(function () {
md = markdownIt({
html: true,
langPrefix: '',
typographer: true,
linkify: true
});
md.linkify.add('xmpp:', 'mailto:');
});
it('sanitizes the input with default values (both false)', function () {
md.use(require('../'));
generate(path.join(__dirname, 'fixtures/sanitizer/base.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/imagesBase.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/keepAll.txt'), md);
});
it('accepts removeUnknown as an option', function () {
md.use(require('../'), { removeUnbalanced: false, removeUnknown: true });
generate(path.join(__dirname, 'fixtures/sanitizer/base.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/imagesBase.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/removeUnknown.txt'), md);
});
it('accepts removeUnbalanced as an option', function () {
md.use(require('../'), { removeUnbalanced: true, removeUnknown: false });
generate(path.join(__dirname, 'fixtures/sanitizer/base.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/imagesBase.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/removeUnbalanced.txt'), md);
});
it('accepts removeUnknown and removeUnbalanced as options', function () {
md.use(require('../'), { removeUnbalanced: true, removeUnknown: true });
generate(path.join(__dirname, 'fixtures/sanitizer/base.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/imagesBase.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/removeBoth.txt'), md);
});
it('accepts imageClass as an option', function () {
md.use(require('../'), { imageClass: 'img-responsive' });
generate(path.join(__dirname, 'fixtures/sanitizer/base.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/imagesClass.txt'), md);
generate(path.join(__dirname, 'fixtures/sanitizer/keepAll.txt'), md);
});
it('only accepts a few protocolls for urls', function () {
md.use(require('../'));
generate(path.join(__dirname, 'fixtures/sanitizer/urls.txt'), md);
});
it('keeps other commonmark features', function () {
md.use(require('../'));
generate(path.join(__dirname, 'fixtures/sanitizer/notags.txt'), md);
});
it('works with other plugins on real world examples', function () {
md.use(require('../'), { removeUnbalanced: false, removeUnknown: false })
.set({ breaks: true })
.use(inline, 'utf8_symbols', 'text', function (tokens, idx) {
tokens[idx].content = tokens[idx].content.replace(/<->/g, '↔')
.replace(/<-/g, '←')
.replace(/->/g, '→')
.replace(/<3/g, '♥');
})
.use(sub)
.use(sup)
.use(inline, 'link_new_window', 'link_open', function (tokens, idx) {
tokens[idx].attrPush([ 'target', '_blank' ]);
})
.use(hashtag, {
hashtagRegExp: '[\\u0080-\\uFFFF\\w\\-]+|<3',
preceding: '^|\\s'
})
.use(mention, {
mentions: [
{
diaspora_id: '<EMAIL>@pod.tld',
guid: 1337
},
{
diaspora_id: '<EMAIL>',
guid: 666
},
{
handle: '<EMAIL>',
url: '/my/awesome/url',
guid: 42
}
]
});
// Bootstrap table markup
md.renderer.rules.table_open = function () { return '<table class="table table-striped">\n'; };
generate(path.join(__dirname, 'fixtures/examples'), md);
});
});
<file_sep>'use strict';
var path = require('path');
var generate = require('markdown-it-testgen');
var video = require('markdown-it-video');
describe('sanitzier + markdown-it-video', function () {
var md = require('markdown-it')({
html: true,
langPrefix: '',
typographer: true,
linkify: true
});
md.use(video);
it('works with default values (both false)', function () {
md.use(require('../'));
generate(path.join(__dirname, 'fixtures/vendor/markdown-it-video'), md);
});
it('works with removeUnknown as an option', function () {
md.use(require('../'), { removeUnbalanced: false, removeUnknown: true });
generate(path.join(__dirname, 'fixtures/vendor/markdown-it-video'), md);
});
it('works with removeUnbalanced as an option', function () {
md.use(require('../'), { removeUnbalanced: true, removeUnknown: false });
generate(path.join(__dirname, 'fixtures/vendor/markdown-it-video'), md);
});
it('works with removeUnknown and removeUnbalanced as options', function () {
md.use(require('../'), { removeUnbalanced: true, removeUnknown: true });
generate(path.join(__dirname, 'fixtures/vendor/markdown-it-video'), md);
});
});
| 766f1eef73881656e23b7622c0b9711eb13e01a7 | [
"Markdown",
"JSON",
"JavaScript"
] | 5 | JSON | rebornix/markdown-it-sanitizer | 10d930d08236ef4609ad10a6087745461d91cc7c | 351d5098686c0dd2b9e611f572c254d1502c3497 |
refs/heads/master | <repo_name>StewartAtkins/node-event-hook<file_sep>/index.js
var EventEmitter = require('events').EventEmitter;
var self = {};
module.exports = exports = self;
/*
 * EventShim provides a proxy object to intercept and process events emitted by an object.
 * Processors can be registered to modify the event arguments before they're emitted by the proxy.
 * Only registered events will be emitted, though events do not need to have a processor to be registered.
 * forwardEvent and addEventProcessor both return the shim itself to allow for chained calls.
 *
 * Returns the shim object for binding event listeners, and adding processors or additional events.
 *
 * TODO: Allow for removal of event processors
 */
self.EventShim = function(obj){
	// Passing a shim straight back in is a no-op.
	if(obj.__isEventShim){
		return obj;
	}
	var ret;
	// Re-shimming an already-shimmed object returns the existing shim (idempotent).
	if(obj.__eventHookShim){
		ret = obj.__eventHookShim;
		return ret;
	}else{
		ret = new EventEmitter();
		// Non-enumerable back/forward references tying obj and its shim together.
		Object.defineProperty(obj, "__eventHookShim", {"value": ret});
		Object.defineProperty(ret, "__isEventShim", {"value": true});
	}

	var eventProcessors = {};   // event name -> ordered list of processor functions
	var registeredEvents = [];  // events already forwarded from obj to the shim
	// Keep the original `on` so forwarding still works after EventHook replaces obj.on.
	var origOn = obj.on;
	ret.getObject = function(){ return obj; };

	// Runs processor `pid` of the chain for `evt`. Each processor receives a
	// callback that advances the chain; calling it with no arguments keeps the
	// current argument list, calling it with arguments replaces the list, and
	// it may be invoked zero, one, or many times (suppress / pass / duplicate).
	var executeProcessor = function(evt, arglist, pid){
		var newArgList = [null];
		for(var i=0;i<arglist.length;i++){
			newArgList.push(arglist[i]);
		}
		if(!(evt in eventProcessors) || pid >= eventProcessors[evt].length){
			// End of the chain: emit the (possibly modified) event on the shim.
			newArgList[0] = evt;
			ret.emit.apply(ret, newArgList);
		}else{
			var func = eventProcessors[evt][pid];
			newArgList[0] = function(){
				var newArgs = arguments;
				if(!arguments.length)
					newArgs = arglist;
				executeProcessor(evt, newArgs, pid+1);
			};
			func.apply(func, newArgList);
		}
	};

	// Subscribe (once) on the original object so `evt` enters the processor chain.
	var forwardEvent = function(evt){
		if(registeredEvents.indexOf(evt) < 0){
			origOn.apply(obj, [evt, function(){
				executeProcessor(evt, arguments, 0);
			}]);
			registeredEvents.push(evt);
		}
		return ret;
	};

	// Listening on the shim implicitly registers that event for forwarding.
	var oldOn = ret.on;
	ret.on = ret.addEventListener = function(){
		forwardEvent(arguments[0]);
		return oldOn.apply(ret, arguments);
	};

	ret.addEventProcessor = function(evt, processor){
		if(!(evt in eventProcessors))
			eventProcessors[evt] = [];
		eventProcessors[evt].push(processor);
		return ret;
	};

	return ret;
};
/*
 * EventHook extends EventShim by also replacing the addEventListener and on
 * methods of the provided object. As a result the original object itself
 * should emit events that have been processed by the shim.
 *
 * Returns the event shim for adding of processors or future event forwards.
 */
// TODO: apply this to all eventEmitter methods in target object
self.EventHook = function(obj){
	var ret;
	// Reuse an existing shim so hooking the same object twice is safe.
	if(self.IsHooked(obj))
		ret = self.GetShim(obj);
	else
		ret = self.EventShim(obj);
	// Redirect every EventEmitter prototype method on obj (except emit) to the shim.
	Object.keys(EventEmitter.prototype).forEach(function(funcName){
		// Don't hook emit otherwise what the hell are we doing here...
		// Also skip non-function properties and methods already hooked (marker flag).
		if(funcName.toLowerCase() == "emit" || typeof(obj[funcName]) != "function" || obj[funcName].__evtHookMarker)
			return;
		obj[funcName] = function(){
			// Bind any callback arguments to obj so listeners see the original
			// object as `this`, not the shim.
			for(var i=0;i<arguments.length;i++){
				if(arguments[i] instanceof Function){
					arguments[i] = arguments[i].bind(obj);
				}
			}
			return ret[funcName].apply(ret, arguments);
		};
		obj[funcName].__evtHookMarker = true;
	});
	return ret;
};
/*
 * Whether `obj` already has an event shim/hook attached.
 */
self.IsHooked = function(obj){
	var shim = self.GetShim(obj);
	return typeof shim !== "undefined";
};
/*
 * Returns the shim object if one has been attached, undefined otherwise
 */
self.GetShim = function(obj){
	if(obj.__eventHookShim)
		return obj.__eventHookShim;
};
var evtTools = require('../');
var events = require('events');
// Verifies that the testEvent1 event is forwarded correctly to the shim
exports.testShimForwarding = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
shim.on("testEvent1", function(magicNo){
test.equal(magicNo, 42, "Arguments were not propagated correctly");
test.equal(this, shim, "Incorrect 'this' context in shim mode");
});
testObj.emit("testEvent1",42);
test.done();
};
exports.testShimReapplication = function(test){
test.expect(1);
var testObj = new events.EventEmitter();
var shim1 = evtTools.EventShim(testObj);
shim1.addEventProcessor("testEvent1", function(cb, magicNo){
cb(magicNo + 1);
});
var shim2 = evtTools.EventShim(testObj);
shim2.addEventProcessor("testEvent1", function(cb, magicNo){
cb(magicNo + 2);
});
shim2.on("testEvent1", function(magicNo){
test.equal(magicNo, 45, "Both shim events were not processed");
});
testObj.emit("testEvent1", 42);
test.done();
};
exports.testShimHookShim = function(test){
test.expect(1);
var testObj = new events.EventEmitter();
var shim1 = evtTools.EventShim(testObj);
shim1.addEventProcessor("testEvent1", function(cb, magicNo){
cb(magicNo + 1);
});
var shim2 = evtTools.EventShim(shim1);
shim2.addEventProcessor("testEvent1", function(cb, magicNo){
cb(magicNo + 2);
});
shim2.on("testEvent1", function(magicNo){
test.equal(magicNo, 45, "Both shim events were not processed");
});
testObj.emit("testEvent1", 42);
test.done();
};
//As the previous test, but verifies behaviour when forwardEvent is called instead of passing the event name in the pseudo-constructor
//Removed since forwarding doesn't need to be exposed anymore, and therefore isn't
/*
exports.testShimForwardingCall = function(test){
test.expect(1);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj, []);
shim.forwardEvent("testEvent1");
shim.on("testEvent1", function(magicNo){
test.equal(magicNo, 42, "Arguments were not propagated correctly");
});
testObj.emit("testEvent1",42);
test.done();
};
*/
//Verifies that an event which is not registered will not be forwarded
//Removed since non-forwarding isn't part of the contract anymore
/*exports.testShimNonForwarding = function(test){
test.expect(0);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj, []);
shim.on("testEvent1", function(magicNo){
test.equal(1,2, "Event called when unexpected");
});
testObj.emit("testEvent1",42);
test.done();
};*/
//Verifies that testEvent2 will be dispatched, and with the right argument and to the right handler, but testEvent1 won't
exports.testShimMultiEvent = function(test){
test.expect(3);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
/*shim.on("testEvent1", function(magicNo){
test.ok(false, "Unregistered event was forwarded");
});*/
shim.on("testEvent2", function(magicNo){
test.notEqual(magicNo, 42, "Incorrect event was dispatched to registered handler");
test.equal(magicNo, 43, "Arguments were not propagated correctly, or incorrect event was dispatched");
});
testObj.emit("testEvent1",42);
testObj.emit("testEvent2",43);
shim.on("testEvent1", function(magicNo){
test.equal(magicNo, 42, "Incorrect argument");
});
testObj.emit("testEvent1",42);
test.done();
};
// Ensure only the correct event processor is being called on the event and the argument matches up with the processed result
exports.testShimWithProcessor = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
shim.addEventProcessor("testEvent1", function(cb, magicNo){
test.ok(false, "Incorrect processor was invoked");
cb(magicNo - 10);
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(true, "Correct processor was invoked");
cb(magicNo + 10);
});
shim.on("testEvent2", function(magicNo){
test.equal(magicNo, 52, "Processed argument was not dispatched");
});
testObj.emit("testEvent2",42);
test.done();
};
//Ensures multiple processors on the same event are run and
exports.testShimWithMultipleProcessors = function(test){
test.expect(3);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(true, "Processor 1 was invoked");
cb(magicNo + 10);
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(true, "Processor 2 was invoked");
cb(magicNo * 2);
});
shim.on("testEvent2", function(magicNo){
test.equal(magicNo, 104, "Processors executed in wrong order");
});
testObj.emit("testEvent2",42);
test.done();
};
//Ensures that if a processor indicates an abort that the event isn't propagated
//Also ensures that a processor which returns neither an abort nor an array will still result in the correct arguments to the next
exports.testShimProcessorAbort = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(true, "Processor 1 was invoked");
cb();
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.equals(magicNo, 42, "Correct argument was dispatched to second processor");
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(false, "Event was propagated to next processor");
});
shim.on("testEvent2", function(magicNo){
test.ok(false, "Event was propagated to handler");
});
testObj.emit("testEvent2",42);
test.done();
};
//Ensures when using a processor which emits an event multiple times that:
// 1. the previous processors in the chain aren't affected - this one will fail assertion if called multiple times
// 2. that multiple emissions work, even when async (two sync emissions from second processor with async then tested via setTimeout)
// 3. that subsequent processors are called for each emission (number of assertions tested)
// 4. that the event listener gets called with the correct argument and the correct number of times (number of assertions tested, plus comparison assertion for agument)
exports.testShimProcessorAsyncAndMultiEmit = function(test){
test.expect(7);
var testObj = new events.EventEmitter();
var shim = evtTools.EventShim(testObj);
var firstProcessorCalled = false;
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(!firstProcessorCalled, "First processor was called more than once");
firstProcessorCalled = true;
cb();
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
cb(magicNo);
cb(magicNo+1);
setTimeout(function(){ cb(magicNo+2); },10);
});
shim.addEventProcessor("testEvent2", function(cb, magicNo){
test.ok(true, "Marker assertion to verify number of subsequent processor calls");
cb();
});
var expectedNumber = 42;
shim.on("testEvent2", function(magicNo){
test.equals(magicNo, expectedNumber, "Incorrect argument provided");
expectedNumber++;
});
testObj.emit("testEvent2",42);
setTimeout(function(){ test.done(); }, 20);
//test.done();
};
//Verifies that hook causes the object itself to emit the event and the arguments are processed
//Also ensures that 'this' holds the correct value when called
exports.testHook = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var shim = evtTools.EventHook(testObj);
shim.addEventProcessor("testEvent2", function(cb, magicNo){
cb(magicNo + 10);
});
testObj.on("testEvent2", function(magicNo){
test.equal(magicNo, 52, "Incorrect argument was propagated");
test.equal(this, testObj, "Incorrect 'this' context in hook mode")
});
testObj.emit("testEvent2",42);
test.done();
};
exports.testHookCheck = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
test.ok(!evtTools.IsHooked(testObj), "Hook check returning true incorrectly");
var shim = evtTools.EventHook(testObj);
test.ok(evtTools.IsHooked(testObj), "Hook check returning false incorrectly");
test.done();
};
exports.testShimRetrievabl = function(test){
test.expect(1);
var testObj = new events.EventEmitter();
var shim = evtTools.EventHook(testObj);
test.equal(evtTools.GetShim(testObj), shim, "Hook retrieval returned incorrect value");
test.done();
};
exports.testHookAsync = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var shim = evtTools.EventHook(testObj);
shim.addEventProcessor("testEvent2", function(cb, magicNo){
cb(magicNo + 10);
});
testObj.on("testEvent2", function(magicNo){
test.equal(magicNo, 52, "Incorrect argument was propagated");
test.equal(this, testObj, "Incorrect 'this' context in hook mode")
});
process.nextTick(function(){
testObj.emit("testEvent2",42);
test.done();
});
};
exports.testHookReapply = function(test){
test.expect(2);
var testObj = new events.EventEmitter();
var origOn = testObj.on;
var shim = evtTools.EventHook(testObj);
testObj.on = origOn;
evtTools.EventHook(testObj);
shim.addEventProcessor("testEvent2", function(cb, magicNo){
cb(magicNo + 10);
});
testObj.on("testEvent2", function(magicNo){
test.equal(magicNo, 52, "Incorrect argument was propagated");
test.equal(this, testObj, "Incorrect 'this' context in hook mode")
});
testObj.emit("testEvent2",42);
test.done();
};
<file_sep>/README.md
event-hook
==========
This library provides a mechanism to intercept and perform intermediate processing and mutation on an object's events before they're dispatched out to the object's listeners. For an example or two of how this might be used please see my framed-tcp module on npm or github.
##Usage
The module exports two functions that can be used to perform the event management; let's begin with the EventShim function. Simply pass in an object which is an EventEmitter and it will return a 'shim' object. This object should be used to bind event listeners to instead of the original object, in the usual style of shim.on('eventname', function(...){...}). Events that are emitted on the original object will then be dispatched to listeners on the shim object. The shim object has a method addEventProcessor(event, processorFunc) which inserts an intermediate processing function between the object and the shim for that particular event type. The processor will receive a callback function as its first argument, and subsequent arguments will be the same as the ones passed to emit (excluding the event name). The processor function may do whatever it likes in the meantime, but should execute the callback function to propagate the event out to the shim's listeners. The callback may be executed multiple times if desired, and each call will result in a separate event being emitted on the shim. The callback may be executed with no arguments, meaning that the event arguments will be passed on unmodified, or it may be executed with one or more arguments which will replace the event arguments. Multiple processors may be registered and they will be executed in the order they are registered. If a previous processor modifies the argument list, prevents propagation of the event, or causes multiple events to be emitted, then subsequent processors in the chain will see these effects as well as the listeners.
The EventHook function extends that of EventShim but takes it a step further, replacing the EventEmitter-related functions on the object with those of the shim (except for emit). The result of this is that when listeners are attached to the object they will behave as if they were attached to the shim, that is to say the events will have been processed, and possibly modified/suppressed/duplicated along the way. This will not affect any existing listeners attached to the object. | a6bcc30c5e48a8705d4955a68b155bed97021d00 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | StewartAtkins/node-event-hook | 4395c8351c3a0745b7e6ba66b0ebe48ad0260069 | 0a305aa753f17f3d6eda476540cabb94406a1c80 |
refs/heads/main | <file_sep># employee_tracker
Employee Management System that uses node and MySQL
## Description
Track and change your employee data with this CLI.
## Links
https://github.com/penguinorange/employee_tracker
## Resources
node
inquirer
MySQL
## Credits
Y'all
<file_sep>const util = require("util");
const mysql = require("mysql");
const inquirer = require("inquirer");
// Single shared MySQL connection used for the whole CLI session.
const connection = mysql.createConnection({
  host: "localhost",
  port: 3306,
  // Your username
  user: "root",
  // Your password
  password: "<PASSWORD>",
  database: "employees"
});

connection.connect();

// Setting up connection.query to use promises instead of callbacks
// This allows us to use the async/await syntax
connection.query = util.promisify(connection.query);
// Displays the main menu and dispatches the selected action.
// Every action returns to this menu when it finishes; "Exit" (or any
// unknown value) terminates the process.
function mainPrompts() {
    const menuChoices = [
        { name: "View Departments", value: "VIEW_DEPARTMENTS" },
        { name: "Add Departments", value: "ADD_DEPARTMENTS" },
        { name: "View Roles", value: "VIEW_ROLES" },
        { name: "Add Roles", value: "ADD_ROLES" },
        { name: "View Employees", value: "VIEW_EMPLOYEES" },
        { name: "Add Employees", value: "ADD_EMPLOYEES" },
        { name: "Update Employee Role", value: "UPDATE_EMPLOY_ROLE" },
        { name: "Exit", value: "EXIT" }
    ];
    // Dispatch table replaces the switch statement; missing entries
    // (including "EXIT") fall through to process.exit(), matching the
    // original default branch.
    const actions = {
        VIEW_DEPARTMENTS: displayDepartments,
        ADD_DEPARTMENTS: addDepartments,
        VIEW_ROLES: displayRoles,
        ADD_ROLES: addRoles,
        VIEW_EMPLOYEES: displayEmployees,
        ADD_EMPLOYEES: addEmployees,
        UPDATE_EMPLOY_ROLE: updateEmployRole
    };
    inquirer.prompt([{
        type: "list",
        name: "choice",
        message: "What would you like to do?",
        choices: menuChoices
    }]).then(({ choice }) => {
        const action = actions[choice];
        if (action) {
            action();
        } else {
            process.exit();
        }
    })
}
// Prints every row of the department table, then returns to the main menu.
function displayDepartments() {
    console.log("\n");
    connection.query("SELECT * FROM department").then(rows => {
        console.table(rows);
        mainPrompts();
    });
}
// Prompts for a department name and inserts it into the database
// (async/await style).
async function addDepartments() {
    const department = await inquirer.prompt([
        { name: "name", message: "What is the name of the department?" }
    ]);
    await connection.query("INSERT INTO department SET ?", department);
    console.log(`Added ${department.name} to the database`);
    mainPrompts();
}
// Prints every row of the role table, then returns to the main menu.
function displayRoles() {
    console.log("\n");
    connection.query("SELECT * FROM role").then(rows => {
        console.table(rows);
        mainPrompts();
    });
}
// Prompts for role details and inserts the new role into the database
// (async/await style).
async function addRoles() {
    // inquirer validator contract: return true on success, or an error string.
    const numericOnly = answer =>
        /^[1-9]\d*$/.test(answer) ? true : "You must enter a number";
    const role = await inquirer.prompt([
        {
            type: "input",
            name: "title",
            message: "What is the title of the role?"
        },
        {
            type: "input",
            name: "salary",
            message: "What is the salary for this role?"
        },
        {
            type: "input",
            name: "department_id",
            message: "Please enter the department ID for this role",
            validate: numericOnly
        }
    ]);
    await connection.query("INSERT INTO role SET ?", role);
    console.log(`Added ${role.title} to the database`);
    mainPrompts();
}
// Prints every row of the employee table, then returns to the main menu.
function displayEmployees() {
    console.log("\n");
    connection.query("SELECT * FROM employee").then(rows => {
        console.table(rows);
        mainPrompts();
    });
}
// Prompts for employee details and inserts the new employee into the database
// (async/await style). role_id / manager_id are collected as strings; the
// validator restricts them to positive integers and MySQL coerces them.
async function addEmployees() {
    // inquirer validator contract: return true on success, or an error string.
    const numericOnly = answer =>
        /^[1-9]\d*$/.test(answer) ? true : "You must enter a number";
    const employee = await inquirer.prompt([
        {
            name: "first_name",
            message: "What is the first name of this employee?"
        },
        {
            name: "last_name",
            message: "What is the last name of this employee?"
        },
        {
            name: "role_id",
            message: "What is this employee's ID number?",
            validate: numericOnly
        },
        {
            name: "manager_id",
            message: "What is this employee's manager ID number?",
            validate: numericOnly
        }
    ]);
    await connection.query("INSERT INTO employee SET ?", employee);
    // BUG FIX: the answers object has no `name` property (it was logging
    // "Added undefined"); use the collected first and last name instead.
    console.log(`Added ${employee.first_name} ${employee.last_name} to the database`);
    mainPrompts();
}
// Prompts for an employee id and a new role id, then persists the change.
async function updateEmployRole() {
    // inquirer validator contract: return true on success, or an error string.
    const numericOnly = answer =>
        /^[1-9]\d*$/.test(answer) ? true : "You must enter a number";
    const employeeUpdate = await inquirer.prompt([
        {
            name: "id",
            message: "Please select an employee by ID number",
            validate: numericOnly
        },
        {
            name: "role_id",
            message: "Please enter the new role ID number for their update",
            validate: numericOnly
        }
    ]);
    // BUG FIX: the UPDATE statement was commented out and the answers object
    // was iterated as if it were an array (`employeeUpdate.length` is
    // undefined), so no change was ever written. Run the query with the two
    // answers as placeholder values, parsed to integers.
    await connection.query(
        "UPDATE employee SET role_id = ? WHERE id = ?",
        [parseInt(employeeUpdate.role_id, 10), parseInt(employeeUpdate.id, 10)]
    );
    console.log(`Updated employee #${employeeUpdate.id} to the database`);
    mainPrompts();
};
mainPrompts();
| 0f656a17016af868ffe56e306836625b193f82c0 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | penguinorange/employee_tracker | 59ff70874b2be3d083856fd9b1e45389ddb3a45f | 3a8f26cf86a857a791a76b74ab7db094feaf9cf2 |
refs/heads/master | <file_sep>public class Dollar extends Money{
Dollar(int amount, String currency){
super(amount, currency);
}
// Money times (int multiplier){
//
// return new Money(amount * multiplier, currency );
// }
String currency(){
return currency;
}
//todo Co z zaokrąglaniem
//todo hashCode()
//todo porownanie z nullem
// todo porownanie z obiektem
//todo 5USD + 10CHF = 10 USD przy kursie USD:CHF 2:1
}
| e5cec93755f4f9a257a0e755f53dcb13b1648bd8 | [
"Java"
] | 1 | Java | TomaszSidor/BeckTDD | 040446ec6c5095227767ebd58abc306d167b75e2 | 707841f89f603459d539a78f9d46c2594de1e30c |
refs/heads/master | <repo_name>syzygymsu/crawler-demo<file_sep>/simple_crawler.h
#ifndef SIMPLECRAWLER_H_
#define SIMPLECRAWLER_H_
#include <queue>
#include <set>
#include <string>
#include "repository.h"
#include "crawler_job.h"
#include "curl_easy_downloader.h"
#include "libxml2_parser.h"
// Simple crawler with a single download queue; each downloaded file is
// parsed immediately. The crawler itself implements the feedback interfaces
// for both the parser and the downloader.
class SimpleCrawler:
    virtual public DownloadFeedbackInteraface,
    virtual public ParseFeedbackInterface {
public:
    // Constructor wiring up the dependencies.
    SimpleCrawler(Repository &repository, CrawlerJob &job);
    // Runs the crawler until the download queue drains.
    void Execute();
private:
    // DownloadFeedbackInteraface implementation.
    void AddRedirect(DownloadJob job, std::string url) override;
    void AddDocument(DownloadJob job, RepositoryDocument document) override;
    // ParseFeedbackInterface implementation.
    void AddHyperlink(ParseJob job, std::string url) override;
    // Adds a URL to the queue after deduplication/filter checks.
    void AddUrl(std::string url, int depth);
    // Repository the downloaded files are saved into.
    Repository &repository_;
    // The original crawl job.
    CrawlerJob &crawler_job_;
    // Downloader.
    CurlEasyDownloader downloader_;
    // Parser.
    Libxml2Parser parser_;
    // Already-seen URLs (prevents downloading the same URL twice).
    std::set<std::string> known_urls_;
    // Queue of pending download jobs.
    std::queue<DownloadJob> download_queue_;
};
#endif /* SIMPLECRAWLER_H_ */
<file_sep>/curl_easy_downloader.h
#ifndef CURLEASYDOWNLOADER_H_
#define CURLEASYDOWNLOADER_H_
#include "download_interface.h"
// Simple blocking downloader built on the curl-easy API.
class CurlEasyDownloader: public DownloaderBase {
public:
    // Fetches job.url synchronously; on HTTP 200 stores the body in the
    // repository and reports it, on 3xx reports the redirect target instead.
    void Download(DownloadJob job) override;
};
#endif /* CURLEASYDOWNLOADER_H_ */
<file_sep>/threaded_crawler.h
#ifndef THREADEDCRAWLER_H_
#define THREADEDCRAWLER_H_
#include <queue>
#include <set>
#include <string>
#include <atomic>
#include <mutex>
#include <condition_variable>
#include "repository.h"
#include "crawler_job.h"
#include "curl_multi_downloader.h"
#include "libxml2_parser.h"
// Improved crawler. Uses the curl-multi based downloader and runs separate
// threads for downloading and parsing.
class ThreadedCrawler:
    virtual public DownloadFeedbackInteraface,
    virtual public ParseFeedbackInterface {
public:
    // Constructor wiring up the dependencies.
    ThreadedCrawler(Repository &repository, CrawlerJob &job);
    // Runs the crawler until all queues drain and the workers stop.
    void Execute();
private:
    // DownloadFeedbackInteraface implementation.
    void AddRedirect(DownloadJob job, std::string url) override;
    void AddDocument(DownloadJob job, RepositoryDocument document) override;
    // ParseFeedbackInterface implementation.
    void AddHyperlink(ParseJob job, std::string url) override;
    // Adds a URL to the download queue after deduplication/filter checks.
    void AddUrl(std::string url, int depth);
    // Entry function of the parsing thread.
    void ParseThread();
    // Entry function of the download thread.
    void DownloadThread();
    // Current size of the parse queue.
    int ParseQueueSize();
    // Current size of the download queue (including active transfers).
    int DownloadQueueSize();
    // Current number of discovered URLs.
    int KnownUrlsSize();
    // Repository the downloaded files are saved into.
    Repository &repository_;
    // The original crawl job.
    CrawlerJob &crawler_job_;
    // Downloader.
    CurlMultiDownloader downloader_;
    // Parser.
    Libxml2Parser parser_;
    // Already-seen URLs (prevents downloading the same URL twice).
    std::set<std::string> known_urls_;
    // Queue of pending download jobs.
    std::queue<DownloadJob> download_queue_;
    // Queue of pending parse jobs.
    std::queue<ParseJob> parse_queue_;
    // true when all threads must exit.
    std::atomic<bool> quit_;
    // Number of workers currently running a job that may enqueue new work.
    std::atomic<int> live_threads_;
    // Mutex guarding the download queue and the set of known URLs.
    std::mutex download_mutex_;
    // Mutex guarding the parse queue.
    std::mutex parse_mutex_;
    // Condition used to wait for parse jobs.
    std::condition_variable parse_condition_;
};
#endif /* THREADEDCRAWLER_H_ */
<file_sep>/libxml2_parser.cpp
#include "libxml2_parser.h"
#define LIBXML_HTML_ENABLED
#include <libxml/HTMLparser.h>
#include <libxml/xpath.h>
#include <libxml/uri.h>
#include "destruction_stack.h"
// Parses the downloaded HTML file at job.document.save_path, extracts every
// <a href=...> element, resolves each href against the document's origin URL
// and reports the absolute URL through the feedback interface.
// libxml2 resources are released in reverse order via DestructionStack.
void Libxml2Parser::Parse(ParseJob job) {
    DestructionStack destructors;
    // Parse the document (tolerant HTML mode, warnings/errors suppressed).
    htmlDocPtr html_document = htmlReadFile(
        job.document.save_path.c_str(),
        nullptr, // encoding
        HTML_PARSE_RECOVER | HTML_PARSE_NOWARNING | HTML_PARSE_NOERROR
    );
    if(!html_document) {
        return;
    }
    destructors.push([html_document](){
        xmlFreeDoc(html_document);
    });
    // Create the XPath context.
    xmlXPathContextPtr xpath_context = xmlXPathNewContext(html_document);
    if(!xpath_context) {
        return;
    }
    destructors.push([xpath_context](){
        xmlXPathFreeContext(xpath_context);
    });
    // Evaluate the XPath query for all anchor elements.
    xmlXPathObjectPtr xpath_object = xmlXPathEvalExpression(
        (const xmlChar*)"//a",
        xpath_context
    );
    if(!xpath_object) {
        return;
    }
    destructors.push([xpath_object](){
        xmlXPathFreeObject(xpath_object);
    });
    // Walk the result set.
    xmlNodeSetPtr nodes = xpath_object->nodesetval;
    int size = (nodes) ? nodes->nodeNr : 0;
    for(int i=0; i<size; ++i) {
        xmlNodePtr node = nodes->nodeTab[i];
        xmlChar *href = xmlGetProp(node, (const xmlChar*)"href");
        if(href) {
            // Resolve relative paths into absolute URLs.
            xmlChar *url = xmlBuildURI(
                href,
                (const xmlChar*)job.document.origin_url.c_str()
            );
            if(url) {
                feedback().AddHyperlink(job, (const char*)url);
                xmlFree(url);
            }
            xmlFree(href);
        }
    }
}
<file_sep>/curl_easy_downloader.cpp
#include "curl_easy_downloader.h"
#include <fstream>
#include <curl/curl.h>
namespace {
// Local helper passed to the curl write callback as user data.
struct CurlUserData {
    std::ofstream write_stream;
};
// Write callback: appends the received bytes to the output file stream.
// Returns the number of bytes consumed, as libcurl requires.
size_t CurlWriteCallback(char *buffer, size_t block_size, size_t blocks_count, void *user_data_ptr) {
    size_t total_bytes = block_size*blocks_count;
    if(total_bytes) {
        CurlUserData &user_data = *static_cast<CurlUserData*>(user_data_ptr);
        user_data.write_stream.write(buffer, total_bytes);
    }
    return total_bytes;
}
} // anonymous namespace
// Downloads job.url synchronously into a freshly created repository
// document. HTTP 200 -> AddDocument, 301/302/303/307 -> AddRedirect;
// other response codes and transfer errors are silently ignored (the
// partially written file is left on disk).
void CurlEasyDownloader::Download(DownloadJob job) {
    CURLcode result;
    RepositoryDocument document = repository().CreateDocument(job.url);
    CurlUserData user_data;
    user_data.write_stream.open(document.save_path);
    CURL *handle = curl_easy_init();
    curl_easy_setopt(handle, CURLOPT_URL, job.url.c_str());
    curl_easy_setopt(handle, CURLOPT_WRITEDATA, static_cast<void*>(&user_data));
    curl_easy_setopt(handle, CURLOPT_WRITEFUNCTION, &CurlWriteCallback);
    result = curl_easy_perform(handle);
    user_data.write_stream.close();
    if(CURLE_OK == result) {
        long response_code = 0;
        result = curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &response_code);
        if(CURLE_OK == result) {
            switch(response_code) {
            case 200: // OK
                feedback().AddDocument(job, document);
                break;
            case 301: // Moved Permanently
            case 302: // Moved Temporarily
            case 303: // See Other
            case 307: // Temporary Redirect
                char *redirect_url;
                result = curl_easy_getinfo(handle, CURLINFO_REDIRECT_URL, &redirect_url);
                if(CURLE_OK == result) {
                    feedback().AddRedirect(job, redirect_url);
                }
                break;
            default:
                // Ignore other response codes.
                break;
            }
        }
    } else { // curl_easy_perform failed
        // TODO: add handling of download errors.
    }
    curl_easy_cleanup(handle);
}
<file_sep>/README.md
## О проекте
Автор: **<NAME>**
Цель данного проекта - продемонстрировать мои навыки программирования,
построения архитектуры приложений, умение разбираться в сторонних библиотеках
и вообще думать головой. Можно считать, что это "пример кода".
В проекте используются следующие сторонние библиотеки:
* cURL
* libxml2
* boost_filesystem
* boost_program_options
На одном из собеседований мне была поставлена следующая тестовая задача (дословно):
> Рабочая версия web-crawler'а: умеет обходить веб-страницы (начиная с какой-то
> стартовой (+ удобнее задать ему фильтр "обходи только страницы, содержащие
> определенную подстроку", чтобы он не разветвлялся слишком сильно))
> до определенной глубины и сохранять их в файловую систему.
При этом на выполнение давалось 8 часов.
За 8 часов мне удалось написать рабочую версию краулера (первый коммит).
Но из-за большой спешки и отсутствия свежего опыта работы как с выбранными сторонними библиотеками, так и с C++ в целом,
пострадали стиль кода, комментирование, не удалось реализовать некоторый интересный функционал.
В данном проекте я привел весь код к единому стилю (опираясь на [рекомендации Google](http://google-styleguide.googlecode.com/svn/trunk/cppguide.html)),
добавил комментарии и доделал желаемый функционал (загрузка в несколько потоков через curl-multi, обработка в несколько тредов).
Это заняло у меня еще порядка 8 часов, но результат получился заметно лучше.
Я уверен, что в проект можно внести еще множество исправлений и улучшений, но в рамках тестового задания решил остановиться на данной версии.
<file_sep>/curl_multi_downloader.cpp
#include "curl_multi_downloader.h"
#include <fstream>
#include "download_interface.h"
namespace {
// Local helper holding per-transfer state, passed to the write callback and
// stored in the easy handle's CURLOPT_PRIVATE slot.
struct CurlUserData {
    std::ofstream write_stream;
    DownloadJob job;
    RepositoryDocument document;
};
// Write callback: lazily opens the output file on the first chunk, then
// appends the received bytes. Returns the byte count, as libcurl requires.
size_t CurlWriteCallback(char *buffer, size_t block_size, size_t blocks_count, void *user_data_ptr) {
    size_t total_bytes = block_size*blocks_count;
    if(total_bytes) {
        CurlUserData &user_data = *static_cast<CurlUserData*>(user_data_ptr);
        if(!user_data.write_stream.is_open()) {
            user_data.write_stream.open(user_data.document.save_path);
        }
        user_data.write_stream.write(buffer, total_bytes);
    }
    return total_bytes;
}
} // anonymous namespace
// Creates the underlying curl-multi handle.
CurlMultiDownloader::CurlMultiDownloader() {
    curl_multi_handle_ = curl_multi_init();
}

// Releases the curl-multi handle.
CurlMultiDownloader::~CurlMultiDownloader() {
    // TODO: free any easy handles still attached. That requires keeping a
    // separate list of them; on a clean shutdown the list would be empty.
    curl_multi_cleanup(curl_multi_handle_);
}
// Queues job.url for asynchronous download. The per-transfer state is
// heap-allocated here and released in PerformOne() once the transfer is
// reported as done.
void CurlMultiDownloader::Download(DownloadJob job) {
    CurlUserData &user_data = *new CurlUserData();
    user_data.job = job;
    user_data.document = repository().CreateDocument(job.url);
    CURL *handle = curl_easy_init();
    curl_easy_setopt(handle, CURLOPT_URL, job.url.c_str());
    // Stash the per-transfer state so PerformOne() can retrieve it.
    curl_easy_setopt(handle, CURLOPT_PRIVATE, static_cast<void*>(&user_data));
    curl_easy_setopt(handle, CURLOPT_WRITEDATA, static_cast<void*>(&user_data));
    curl_easy_setopt(handle, CURLOPT_WRITEFUNCTION, &CurlWriteCallback);
    curl_multi_add_handle(curl_multi_handle_, handle);
    ++count_;
}
void CurlMultiDownloader::PerformOne() {
int running_handles;
curl_multi_perform(curl_multi_handle_, &running_handles);
// Обрабатываем сообщения от отдельных загрузок
int messages_left;
CURLMsg *message;
while(message = curl_multi_info_read(curl_multi_handle_, &messages_left)) {
if(CURLMSG_DONE == message->msg) {
void *user_data_ptr;
curl_easy_getinfo(message->easy_handle, CURLINFO_PRIVATE, &user_data_ptr);
CurlUserData &user_data = *static_cast<CurlUserData*>(user_data_ptr);
if(user_data.write_stream.is_open()) {
user_data.write_stream.close();
}
if(CURLE_OK == message->data.result) {
long response_code = 0;
CURLcode result = curl_easy_getinfo(message->easy_handle, CURLINFO_RESPONSE_CODE, &response_code);
if(CURLE_OK == result) {
switch(response_code) {
case 200: // OK
feedback().AddDocument(user_data.job, user_data.document);
break;
case 301: // Moved Permanently
case 302: // Moved Temporarily
case 303: // See Other
case 307: // Temporary Redirect
char *redirect_url;
result = curl_easy_getinfo(message->easy_handle, CURLINFO_REDIRECT_URL, &redirect_url);
if(CURLE_OK == result) {
feedback().AddRedirect(user_data.job, redirect_url);
}
break;
default:
// игнорируем неизвестные коды ответа
break;
}
}
} else {
// TODO: добавить обработку ошибок скачивания
}
curl_multi_remove_handle(curl_multi_handle_, message->easy_handle);
delete &user_data;
--count_;
}
}
}
<file_sep>/destruction_stack.h
#ifndef DESTRUCTIONSTACK_H_
#define DESTRUCTIONSTACK_H_
#include <stack>
#include <functional>
// Helper for automatic release of resources acquired in C style.
// Destructors run in reverse (LIFO) order, which suits hierarchies of
// dependent resources.
// Example:
//   DestructionStack destructors;
//   void *p1 = malloc(...);
//   if(!p1) return;
//   destructors.push([p1](){ free(p1); });
//   void *p2 = malloc(...);
//   if(!p2) return;
//   destructors.push([p2](){ free(p2); });
class DestructionStack {
public:
    // Pushes a destructor onto the stack.
    void push(std::function<void()> destructor);
    // Invokes all registered destructors in reverse order.
    ~DestructionStack();
private:
    // Stack of pending destructors.
    std::stack<std::function<void()> > destructors_;
};
#endif /* DESTRUCTIONSTACK_H_ */
<file_sep>/crawler_job.h
#ifndef CRAWLERJOB_H_
#define CRAWLERJOB_H_
#include <vector>
#include <string>
// Data describing a crawl job.
struct CrawlerJob {
    // List of starting URLs.
    std::vector<std::string> initial_urls;
    // Filter list: a URL is downloaded only if it contains at least one of
    // these substrings. An empty list disables filtering.
    std::vector<std::string> must_contain;
    // Maximum total number of downloaded files. -1 means no limit.
    int max_count = -1;
    // Maximum crawl depth. -1 means no limit.
    int max_depth = -1;
    // Maximum number of parallel downloads.
    int max_parallel = 10;
};
#endif /* CRAWLERJOB_H_ */
<file_sep>/curl_multi_downloader.h
#ifndef CURLMULTIDOWNLOADER_H_
#define CURLMULTIDOWNLOADER_H_
#include <curl/curl.h>
#include "download_interface.h"
// Downloader implementation built on the curl-multi API; runs multiple
// concurrent transfers driven by repeated PerformOne() calls.
class CurlMultiDownloader: public DownloaderBase {
public:
    // Constructor: creates the curl-multi handle.
    CurlMultiDownloader();
    // Destructor: releases the curl-multi handle.
    ~CurlMultiDownloader();
    // Queues a URL for asynchronous download.
    virtual void Download(DownloadJob job);
    // Number of currently active transfers.
    inline size_t count() const { return count_; }
    // Drives the transfers one step and dispatches completion events.
    void PerformOne();
private:
    // The main curl-multi handle.
    CURLM *curl_multi_handle_;
    // Number of currently active transfers.
    size_t count_ = 0;
};
#endif /* CURLMULTIDOWNLOADER_H_ */
<file_sep>/download_interface.h
// Классы для взаимодействия загрузчика с вызывающим кодом.
#ifndef DOWNLOAD_INTERFACE_H_
#define DOWNLOAD_INTERFACE_H_
#include <string>
#include "repository.h"
// Data describing a single download job.
struct DownloadJob {
    // URL to download.
    std::string url;
    // Crawl depth of this job.
    int depth;
};

// Feedback interface through which the downloader reports its results back
// to the calling code.
class DownloadFeedbackInteraface {
public:
    virtual ~DownloadFeedbackInteraface() {}
    // Called when a redirect is encountered.
    virtual void AddRedirect(
        DownloadJob job, // the original job
        std::string url  // redirect target address
    ) = 0;
    // Called when a document is downloaded successfully.
    virtual void AddDocument(
        DownloadJob job,             // the original job
        RepositoryDocument document  // the downloaded document
    ) = 0;
};

// Common downloader interface.
class DownloaderInterface {
public:
    virtual ~DownloaderInterface() {}
    // Sets the required dependencies. The caller must keep the dependency
    // objects alive while Download() runs.
    virtual void init(
        Repository &repository, // repository the downloaded documents
                                // are saved into
        DownloadFeedbackInteraface &feedback // feedback interface
    ) = 0;
    // Executes a download job.
    virtual void Download(
        DownloadJob job // the job to run
    ) = 0;
};

// Abstract downloader base class; implements dependency storage.
class DownloaderBase: virtual public DownloaderInterface {
public:
    inline void init(
        Repository &repository,
        DownloadFeedbackInteraface &feedback ) override {
        repository_ = &repository;
        feedback_ = &feedback;
    }
protected:
    inline Repository &repository() {
        return *repository_;
    }
    inline DownloadFeedbackInteraface &feedback() {
        return *feedback_;
    }
private:
    Repository *repository_;
    DownloadFeedbackInteraface *feedback_;
};
#endif /* DOWNLOAD_INTERFACE_H_ */
<file_sep>/simple_crawler.cpp
#include "simple_crawler.h"
#include <algorithm>
#include <iostream>
#include "url_info.h"
// Wires up the dependencies and seeds the download queue with the job's
// initial URLs at depth 0.
SimpleCrawler::SimpleCrawler(Repository& repository, CrawlerJob& job) :
    repository_(repository), crawler_job_(job) {
    downloader_.init(repository, *this);
    parser_.init(*this);
    for(auto url: job.initial_urls) {
        AddUrl(url, 0);
    }
}
// Adds a URL to the download queue at the given depth, unless it was already
// seen, the total-count limit is reached, it fails the substring filters, or
// its scheme is not http/https.
void SimpleCrawler::AddUrl(std::string url, int depth) {
    if(known_urls_.end() != known_urls_.find(url)) {
        // URL already processed.
        return;
    }
    if(crawler_job_.max_count>0 && known_urls_.size()>=crawler_job_.max_count) {
        // Total request limit reached.
        return;
    }
    if(!crawler_job_.must_contain.empty()) {
        bool match = std::any_of(
            crawler_job_.must_contain.begin(), crawler_job_.must_contain.end(),
            [url](const std::string &substring) {
                return std::string::npos != url.find(substring);
            }
        );
        if(!match) {
            // URL matches none of the filters.
            return;
        }
    }
    UrlInfo url_info = ParseUrl(url);
    std::transform(url_info.schema.begin(), url_info.schema.end(), url_info.schema.begin(), tolower);
    if(url_info.schema!="http" && url_info.schema!="https") {
        // Unknown protocol.
        return;
    }
    known_urls_.insert(url);
    DownloadJob downloadJob;
    downloadJob.url = url;
    downloadJob.depth = depth;
    download_queue_.push(downloadJob);
}
// Main loop: downloads queued URLs one at a time until the queue is empty.
// New links discovered while parsing are pushed onto the same queue via the
// feedback callbacks, so the loop also drains that new work.
void SimpleCrawler::Execute() {
    while(!download_queue_.empty()) {
        DownloadJob download_job = download_queue_.front();
        download_queue_.pop();
        std::cout << "Downloading: " << download_job.url << std::endl;
        downloader_.Download(download_job);
    }
}
// Redirect target discovered: enqueue it one level deeper.
void SimpleCrawler::AddRedirect(DownloadJob job, std::string url) {
    AddUrl(url, 1+job.depth);
}

// Document downloaded: parse it immediately, unless the depth limit is hit.
void SimpleCrawler::AddDocument(DownloadJob download_job, RepositoryDocument document) {
    if(crawler_job_.max_depth>=0 && download_job.depth>=crawler_job_.max_depth) {
        // Crawl depth exceeded; do not look for further links.
        return ;
    }
    ParseJob parseJob;
    parseJob.document = document;
    parseJob.depth = download_job.depth;
    std::cout << "Parsing: " << parseJob.document.origin_url << std::endl;
    parser_.Parse(parseJob);
}

// Hyperlink discovered: enqueue it one level deeper.
void SimpleCrawler::AddHyperlink(ParseJob job, std::string url) {
    AddUrl(url, 1+job.depth);
}
<file_sep>/url_info.cpp
#include "url_info.h"
#include <libxml/uri.h>
// Default URL-splitting entry point; currently delegates to the libxml2
// implementation.
UrlInfo ParseUrl(const std::string &url) {
    return ParseUrlLibxml2(url);
}
// Splits a URL into its components using libxml2's xmlParseURI.
// Returns a default-constructed (all-empty) UrlInfo when the URL cannot be
// parsed; empty fields also appear for components absent from the URL.
UrlInfo ParseUrlLibxml2(const std::string &url) {
    UrlInfo url_info;
    xmlURIPtr uriData = xmlParseURI(url.c_str());
    if(!uriData) {
        // BUG FIX: xmlParseURI returns NULL for a malformed URI; previously
        // this caused a null-pointer dereference below.
        return url_info;
    }
    if(uriData->scheme) url_info.schema = uriData->scheme;
    if(uriData->server) url_info.domain = uriData->server;
    if(uriData->path) url_info.path = uriData->path;
    if(uriData->query_raw) url_info.query = uriData->query_raw;
    if(uriData->fragment) url_info.hash = uriData->fragment;
    xmlFreeURI(uriData);
    return url_info;
}
<file_sep>/threaded_crawler.cpp
#include "threaded_crawler.h"
#include <algorithm>
#include <thread>
#include <sys/unistd.h>
#include "url_info.h"
// Wires up the dependencies and seeds the download queue with the job's
// initial URLs at depth 0.
ThreadedCrawler::ThreadedCrawler(Repository &repository, CrawlerJob &job):
    repository_(repository), crawler_job_(job) {
    downloader_.init(repository, *this);
    parser_.init(*this);
    for(auto url: job.initial_urls) {
        AddUrl(url, 0);
    }
}
// Starts the download and parse worker threads, prints progress once per
// second until the crawl completes (quit_ set by the download thread), then
// joins both workers.
void ThreadedCrawler::Execute() {
    quit_.store(false);
    live_threads_.store(0);
    std::thread download_thread(&ThreadedCrawler::DownloadThread, this);
    std::thread parse_thread(&ThreadedCrawler::ParseThread, this);
    while(!quit_) {
        fprintf( stdout, "Found: %6d, Download queue: %6d, Parse queue: %6d\n",
            KnownUrlsSize(), DownloadQueueSize(), ParseQueueSize() );
        sleep(1);
    }
    download_thread.join();
    parse_thread.join();
}
// Adds a URL to the download queue at the given depth, unless it was already
// seen, the total-count limit is reached, it fails the substring filters, or
// its scheme is not http/https. The whole check-and-insert sequence runs
// under download_mutex_, so it is atomic with respect to other callers.
void ThreadedCrawler::AddUrl(std::string url, int depth) {
    std::lock_guard<std::mutex> lock(download_mutex_);
    if(known_urls_.end() != known_urls_.find(url)) {
        // URL already processed.
        return;
    }
    if(crawler_job_.max_count>0 && known_urls_.size()>=crawler_job_.max_count) {
        // Total request limit reached.
        return;
    }
    if(!crawler_job_.must_contain.empty()) {
        bool match = std::any_of(
            crawler_job_.must_contain.begin(), crawler_job_.must_contain.end(),
            [url](const std::string &substring) {
                return std::string::npos != url.find(substring);
            }
        );
        if(!match) {
            // URL matches none of the filters.
            return;
        }
    }
    UrlInfo url_info = ParseUrl(url);
    std::transform(url_info.schema.begin(), url_info.schema.end(), url_info.schema.begin(), tolower);
    if(url_info.schema!="http" && url_info.schema!="https") {
        // Unknown protocol.
        return;
    }
    known_urls_.insert(url);
    DownloadJob downloadJob;
    downloadJob.url = url;
    downloadJob.depth = depth;
    download_queue_.push(downloadJob);
}
// Hyperlink discovered: enqueue it one level deeper.
void ThreadedCrawler::AddHyperlink(ParseJob job, std::string url) {
    AddUrl(url, 1+job.depth);
}

// Redirect target discovered: enqueue it one level deeper.
void ThreadedCrawler::AddRedirect(DownloadJob job, std::string url) {
    AddUrl(url, 1+job.depth);
}
// Document downloaded: hand it to the parse thread, unless the depth limit
// is reached.
void ThreadedCrawler::AddDocument(DownloadJob job, RepositoryDocument document) {
    // BUG FIX: unlike SimpleCrawler, the threaded crawler never enforced
    // CrawlerJob::max_depth, so the -d option was silently ignored. Stop
    // looking for further links once the configured depth is reached.
    if(crawler_job_.max_depth>=0 && job.depth>=crawler_job_.max_depth) {
        return;
    }
    ParseJob parse_job;
    parse_job.depth = job.depth;
    parse_job.document = document;
    std::lock_guard<std::mutex> lock(parse_mutex_);
    parse_queue_.push(parse_job);
    parse_condition_.notify_one();
}
// Download thread: keeps up to max_parallel transfers active, pumps the
// curl-multi downloader, and detects overall completion.
// NOTE(review): this loop never blocks, so it busy-spins while idle —
// consider adding a short wait when there is no work.
void ThreadedCrawler::DownloadThread() {
    while(!quit_) {
        // Promote queued jobs into active transfers, up to the parallel limit.
        if(downloader_.count() < crawler_job_.max_parallel) {
            std::lock_guard<std::mutex> lock(download_mutex_);
            while(!download_queue_.empty() &&
                downloader_.count() < crawler_job_.max_parallel) {
                downloader_.Download(download_queue_.front());
                download_queue_.pop();
            }
        }
        // Process download events.
        downloader_.PerformOne();
        // Termination check: no active transfers, both queues empty, and no
        // worker still running a job that could enqueue new work.
        if(!downloader_.count()) {
            std::lock(download_mutex_, parse_mutex_);
            if(download_queue_.empty() && parse_queue_.empty() && 0==live_threads_) {
                quit_ = true;
                parse_condition_.notify_all();
            }
            download_mutex_.unlock();
            parse_mutex_.unlock();
        }
    }
}
// Parse thread: waits for parse jobs and runs the parser on each, releasing
// the queue mutex while the parser works. live_threads_ marks a job in
// flight so the download thread does not declare completion prematurely.
void ThreadedCrawler::ParseThread() {
    std::unique_lock<std::mutex> lock(parse_mutex_);
    while(!quit_) {
        while(!quit_ && parse_queue_.empty()) {
            // Block the thread while there is nothing to do.
            parse_condition_.wait(lock);
        }
        if(!parse_queue_.empty()) {
            // Take the next job.
            ParseJob parse_job = parse_queue_.front();
            parse_queue_.pop();
            ++live_threads_;
            // Release the mutex while the job executes.
            lock.unlock();
            parser_.Parse(parse_job);
            lock.lock();
            --live_threads_;
        }
    }
}
// Current size of the download queue, including active transfers.
int ThreadedCrawler::DownloadQueueSize() {
    std::lock_guard<std::mutex> lock(download_mutex_);
    return download_queue_.size() + downloader_.count();
}

// Current size of the parse queue.
int ThreadedCrawler::ParseQueueSize() {
    std::lock_guard<std::mutex> lock(parse_mutex_);
    return parse_queue_.size();
}

// Total number of URLs discovered so far.
int ThreadedCrawler::KnownUrlsSize() {
    std::lock_guard<std::mutex> lock(download_mutex_);
    return known_urls_.size();
}
<file_sep>/url_info.h
#ifndef URLINFO_H_
#define URLINFO_H_
#include <string>
// Holds a URL split into its components.
// General URL form: schema://domain/path?query#hash
struct UrlInfo {
    std::string schema; // URL components
    std::string domain; //
    std::string path;   //
    std::string query;  //
    std::string hash;   //
};

// Default URL-splitting function; forwards to one of the implementations.
UrlInfo ParseUrl(const std::string &url);

// URL splitting implemented with libxml2.
UrlInfo ParseUrlLibxml2(const std::string &url);
#endif /* URLINFO_H_ */
<file_sep>/libxml2_parser.h
#ifndef LIBXML2PARSER_H_
#define LIBXML2PARSER_H_
#include "parse_interface.h"
// HTML parser built on libxml2.
class Libxml2Parser: public ParserBase {
public:
    // Parses job.document and reports each absolute <a href> URL found.
    void Parse(ParseJob job) override;
};
#endif /* LIBXML2PARSER_H_ */
<file_sep>/repository.h
#ifndef REPOSITORY_H_
#define REPOSITORY_H_
#include <string>
// A single saved file.
class RepositoryDocument {
public:
    static RepositoryDocument create(
        const std::string& origin_url,
        const std::string& save_path
    );
    // URL the document was downloaded from.
    std::string origin_url;
    // Filesystem path the document is saved to.
    std::string save_path;
};

// Repository in which downloaded files are stored.
class Repository {
public:
    // Constructor setting the base directory for saved files.
    explicit Repository(const std::string &base_path);
    // Derives a filesystem save path from a URL.
    std::string PathFromUrl(std::string url);
    // Creates a document record (and its directories) for a URL.
    RepositoryDocument CreateDocument(std::string url);
    // Escapes a single element of a file path.
    std::string SanitizePath(const std::string &path);
private:
    // Base directory for saved files.
    std::string base_path_;
};
#endif /* REPOSITORY_H_ */
<file_sep>/destruction_stack.cpp
#include "destruction_stack.h"
// Registers a destructor to be run on stack unwind.
void DestructionStack::push(std::function<void()> destructor) {
    destructors_.push(destructor);
}

// Runs all registered destructors in LIFO order.
DestructionStack::~DestructionStack() {
    while(!destructors_.empty()) {
        std::function<void()> next_destructor = destructors_.top();
        destructors_.pop();
        next_destructor();
    }
}
<file_sep>/repository.cpp
#include "repository.h"
#include "url_info.h"
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
// Factory helper building a RepositoryDocument value.
RepositoryDocument RepositoryDocument::create(
    const std::string& origin_url,
    const std::string& save_path) {
    RepositoryDocument document;
    document.origin_url = origin_url;
    document.save_path = save_path;
    return document;
}

// Ensures the base directory exists; throws if the path exists but is not
// a directory.
// NOTE(review): throwing std::string is unusual; consider std::runtime_error
// after checking the catch sites.
Repository::Repository(const std::string& base_path):
    base_path_(base_path) {
    fs::path path(base_path);
    if(fs::exists(path)) {
        if(!fs::is_directory(path)) {
            throw std::string("Base path is not a directory");
        }
    } else {
        fs::create_directories(path);
    }
}
// Builds a filesystem path for a URL under the base directory, roughly:
// <base>/<schema>_<domain>/<path...>[_<query>][.html]
std::string Repository::PathFromUrl(std::string url) {
    fs::path path(base_path_);
    UrlInfo url_info = ParseUrl(url);
    // Append a top-level directory combining the scheme and domain.
    {
        std::string schema = url_info.schema;
        std::transform(schema.begin(), schema.end(), schema.begin(), tolower);
        std::string top_dir;
        if(schema == std::string("https")) {
            top_dir += "https_";
        } else {
            top_dir += "http_";
        }
        top_dir += url_info.domain;
        path /= SanitizePath(top_dir);
    }
    // Append the directories of the URL path.
    path /= SanitizePath(url_info.path);
    if(path.filename()=="." || url_info.path.empty()) {
        // URL path ends with '/' or is empty (domain only).
        path /= "_";
    }
    // Append the query string.
    // NOTE(review): the query is appended without SanitizePath; a query
    // containing path separators would alter the layout — confirm intent.
    if(!url_info.query.empty()) {
        path += '_';
        path += url_info.query;
    }
    // Append a file extension when missing.
    if(path.extension().empty()) {
        path += ".html";
    }
    return path.native();
}
// Builds a document record for the URL and creates its parent directories
// on disk so the downloader can open the file immediately.
RepositoryDocument Repository::CreateDocument(std::string url) {
    std::string save_path = PathFromUrl(url);
    fs::path path(save_path);
    fs::create_directories(path.parent_path());
    return RepositoryDocument::create(url, save_path);
}

// Escapes a single path element before it is used on the filesystem.
// NOTE(review): currently a no-op — unsafe components such as `..` pass
// through unchanged (see TODO below).
std::string Repository::SanitizePath(const std::string &path) {
    // TODO: escape or strip potentially unsafe values, e.g. `..`.
    return path;
}
<file_sep>/parse_interface.h
// Классы для взаимодействия парсера с вызывающим кодом.
#ifndef PARSE_INTERFACE_H_
#define PARSE_INTERFACE_H_
#include "repository.h"
// Данные, определяющие отдельное задание для парсера
struct ParseJob {
// Документ для парсинга
RepositoryDocument document;
// Глубина вложенности данного задания
int depth;
};
// Интерфейс обратной связи, через который парсер должен уведомлять вызывающий
// код о результатах работы
class ParseFeedbackInterface {
public:
virtual ~ParseFeedbackInterface() {}
// Вызывается при обнаружении гиперссылки
virtual void AddHyperlink(
ParseJob job, // Оригинальное задание
std::string url // Адрес найденной гиперссылки
) = 0;
};
// Common interface for a parser implementation.
class ParserInterface {
public:
    virtual ~ParserInterface() {}
    // Injects the required dependencies. The caller must keep the
    // dependency objects alive for the whole duration of Parse().
    virtual void init(
        ParseFeedbackInterface &feedback // Feedback sink
    ) = 0;
    // Executes one parse job.
    virtual void Parse(
        ParseJob job // The job to parse
    ) = 0;
};
// Abstract parser base class; stores the feedback dependency for subclasses.
class ParserBase: virtual public ParserInterface {
public:
    // Remembers the feedback sink. The caller must keep `feedback` alive
    // for the whole duration of Parse().
    inline void init(ParseFeedbackInterface &feedback) override {
        feedback_ = &feedback;
    }
protected:
    // Access to the feedback sink; only valid after init() has been called.
    inline ParseFeedbackInterface &feedback() {
        return *feedback_;
    }
private:
    // Initialized to nullptr so forgetting init() fails predictably instead
    // of dereferencing an indeterminate pointer (undefined behavior in the
    // original, which left this member uninitialized).
    ParseFeedbackInterface *feedback_ = nullptr;
};
#endif /* PARSE_INTERFACE_H_ */
<file_sep>/main.cpp
#include <cstdlib>
#include <iostream>
#include <vector>
#include <string>
#include <stdexcept>
#include <curl/curl.h>
#include <libxml/parser.h>
#include <boost/program_options.hpp>
#include "repository.h"
#include "crawler_job.h"
#include "simple_crawler.h"
#include "threaded_crawler.h"
namespace po = boost::program_options;
// One-time global initialization of libcurl and libxml2; must run before
// any crawler/parser work starts.
void boot() {
    curl_global_init(CURL_GLOBAL_DEFAULT);
    xmlInitParser();
}
// Releases the global libcurl and libxml2 state acquired in boot().
void shutdown() {
    curl_global_cleanup();
    xmlCleanupParser();
}
// Entry point: parses command-line options into a CrawlerJob and runs the
// threaded crawler. Returns 0 on normal completion, 1 when --help is shown.
int main(int arguments_count, char** arguments) {
    boot();
    try {
        std::string base_path;
        CrawlerJob job;
        po::options_description description("Allowed options");
        description.add_options()
            ("help,h", "show help message")
            ("init,i", po::value<std::vector<std::string> >(&job.initial_urls), "initial URLs")
            ("contains,c", po::value<std::vector<std::string> >(&job.must_contain), "download only URLs, containing one of this substrings")
            ("depth,d", po::value<int>(&job.max_depth)->default_value(5), "Maximum depth, -1 for no limit")
            ("max,m", po::value<int>(&job.max_count)->default_value(100), "Maximum number of URLs, -1 for no limit")
            ("parallel,p", po::value<int>(&job.max_parallel)->default_value(10), "Maximum parallel downloads")
            ("save,s", po::value<std::string>(&base_path)->default_value("sites"), "Save path")
        ;
        po::variables_map variables;
        po::store(po::parse_command_line(arguments_count, arguments, description), variables);
        po::notify(variables);
        if (variables.count("help")) {
            std::cout << description << std::endl;
            shutdown(); // release libcurl/libxml even on the early-exit path
            return 1;
        }
        // actually run the crawl
        Repository repo(base_path);
        ThreadedCrawler crawler(repo, job);
        crawler.Execute();
    } catch(const std::string &s) { // catch by const reference: no copy
        std::cout << "Exception: " << s << std::endl;
    } catch(const std::runtime_error &e) { // const ref: avoid slicing/copy
        std::cout << "Runtime error: " << e.what() << std::endl;
    } catch(const std::exception &e) {
        // Boost.Program_options errors derive from std::logic_error and
        // were previously uncaught, terminating without cleanup.
        std::cout << "Error: " << e.what() << std::endl;
    }
    shutdown();
    return 0;
}
| fdb2d38d6e362c14dcef44fe6fe431ebad4bcc34 | [
"Markdown",
"C++"
] | 21 | C++ | syzygymsu/crawler-demo | ea9d6d26ac4b28994dd75a8fb7db686b1b12300f | 8c7254abf71450735ebadd7b2787f445c43033c0 |
refs/heads/master | <file_sep>//Wee, <NAME>. 201701042 WFQR
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#define MAX 53
typedef struct node{ // Huffman tree node
    char data;          // symbol at this node ('{' sentinel for internal nodes)
    int frequency;      // occurrence count / combined subtree frequency
    struct node *left;
    struct node *right;
    int queued;         // FIFO insertion stamp, breaks equal-frequency ties
}Node;
Node* pQueue[MAX]; // 1-indexed binary min-heap backing the priority queue
int n;             // number of elements currently in pQueue
int fifo = 0;      // monotonically increasing insertion stamp for tie-breaks
void HEAPIFY(int r){ // sift-down to restore the min-heap property at index r
    // Ordering: smaller frequency first; on equal frequency the node with
    // the smaller `queued` stamp (inserted earlier) has higher priority.
    Node* key = pQueue[r];
    int i = r;
    int j = 2*i;
    while(j<=n){ // pick the higher-priority of the two children
        if(j<n && (pQueue[j+1]->frequency < pQueue[j]->frequency || (pQueue[j+1]->frequency == pQueue[j]->frequency && pQueue[j+1]->queued < pQueue[j]->queued)))
            j+=1;
        // move the child up while it outranks the key being sifted
        if(pQueue[j]->frequency < key->frequency || (pQueue[j]->frequency == key->frequency && pQueue[j]->queued < key->queued)){
            pQueue[i] = pQueue[j];
            i = j;
            j = 2*i;
        }
        else
            break;
    }
    pQueue[i] = key;
}
void P_insert(Node* node){ // priority enqueue (min-heap sift-up)
    if(n==MAX)
        printf("KILL ME NOW\n");
    else{
        // Stamp the FIFO order BEFORE comparing: the original compared
        // against node->queued while it was still uninitialized memory.
        node->queued = fifo++;
        n++;
        int i = n;
        int j = i/2;
        // Move parents down while they have LOWER priority than the new
        // node: greater frequency, or equal frequency but a LATER stamp.
        // (The original used `<` here, inverting the tie-break relative
        // to HEAPIFY's "smaller `queued` wins" ordering.)
        while(i>1 && (pQueue[j]->frequency > node->frequency ||
                      (pQueue[j]->frequency == node->frequency &&
                       pQueue[j]->queued > node->queued))){
            pQueue[i] = pQueue[j];
            i = j;
            j = i/2;
        }
        pQueue[i] = node;
    }
}
Node* P_extract(){ // priority dequeue: removes and returns the minimum node
    if(n==0){
        printf("I just wanted to sleep\n");
        return NULL;
    }else{
        // Take the root directly; the original malloc'd a Node here and
        // immediately overwrote the pointer, leaking the allocation on
        // every extraction.
        Node* temp = pQueue[1];
        pQueue[1] = pQueue[n--]; // move last element to root, then sift down
        HEAPIFY(1);
        return temp;
    }
}
int isEmpty(){ // nonzero when the priority queue holds no elements
    return (n==0);
}
int isLeaf(Node* node){ // nonzero when the node has no children (tree leaf)
    return (node->left==NULL && node->right==NULL);
}
Node* huffman(char *carr, int *farr, int sz){ // builds the Huffman tree
    // carr/farr: parallel arrays of symbols and their frequencies (sz
    // entries). Returns the root of the resulting code tree.
    n = 0; // reset the priority queue
    Node* temp;
    int arrsize = sz;
    int c;
    for(c = 0; c < arrsize; c++){ // enqueue one leaf per symbol
        temp = (Node*)malloc(sizeof(Node));
        temp->data = carr[c];
        temp->frequency = farr[c];
        temp->left = NULL;
        temp->right = NULL;
        P_insert(temp);
    }
    while(n>1){ // repeatedly merge the two least-frequent subtrees
        temp = (Node*)malloc(sizeof(Node));
        temp->data = '{'; // '{' > 'z' in ASCII, marks internal nodes
        temp->left = P_extract();
        temp->right = P_extract();
        temp->frequency = temp->left->frequency + temp->right->frequency;
        P_insert(temp);
    }
    Node* root = P_extract();
    return root;
}
char output[256][100]; // store output since alphabetical order needed
// Depth-first traversal of the Huffman tree: left edges append '0', right
// edges append '1'; at each leaf the accumulated string `s` is the symbol's
// code word and is stored into the global `output` table.
void traverse(Node* node, char s[]){
    if(isLeaf(node)){
        strcpy(output[node->data],s); // removed the unused `int temp` local
        return;
    }
    if(node->left!=NULL){
        char strleft[100];
        strcpy(strleft,s);
        strcat(strleft,"0");
        traverse(node->left, strleft);
    }
    if(node->right!=NULL){ // made consistent with the left-child check
        char strright[100];
        strcpy(strright,s);
        strcat(strright,"1");
        traverse(node->right, strright);
    }
}
int main()
{
    char charArray[256];
    int charfreq[256];
    int i;
    for(i=0;i<256;i++){ // zero the frequency table; charArray is an identity map
        charfreq[i]=0;
        charArray[i]=i;
    }
    // Count letter frequencies from stdin. getchar() returns an int so that
    // EOF (-1) stays distinguishable from valid bytes; the original stored
    // it in a char, which loops forever where char is unsigned and mistakes
    // byte 0xFF for EOF where it is signed.
    int c = getchar();
    while(c!=EOF){
        if(isalpha(c))
            charfreq[c]++;
        c = getchar();
    }
    char new_charArray[100];
    int new_charfreq[100];
    int new_stuff = 0;
    for(i = 60; i<130; i++){
        if(charfreq[i]==0) continue;
        new_charArray[new_stuff] = charArray[i]; // keep only chars that occurred
        new_charfreq[new_stuff++] = charfreq[i];
    }
    Node* root = huffman(new_charArray, new_charfreq, new_stuff); // build tree
    char s[100] = ""; // accumulator for the 0/1 code string
    traverse(root, s); // fills the global `output` table
    for(i = 60; i<130; i++){ // print codes in ASCII (alphabetical) order
        if(isalpha(i)&&charfreq[i]!=0){
            printf("%c %s\n", i, output[i]);
        }
    }
    return 0;
}
| 0d253f8df4f5881a675e2ca21cb45de6c27d8bb4 | [
"C"
] | 1 | C | Feinw/cs32mp2 | 354d87f7a03f196a0545df7ca34d5f23433af611 | a48fcff2936cfb22bb23a8378fcaede71430c48e |
refs/heads/master | <repo_name>SteveNashH/phpcs-pre-push-hook<file_sep>/src/pre-push
#!/bin/sh
# Pre-push hook: runs PHP_CodeSniffer (PSR2) on the PHP files that differ
# between the local ref being pushed and its remote counterpart. Aborts the
# push (exit 1) when any sniffed file has violations.

PHPCS_BIN=vendor/bin/phpcs
PHPCS_CODING_STANDARD=PSR2
PHPCS_FILE_PATTERN=" ^app.*\.(php)$"

remote=$1
url=$2

# stdin supplies "<local ref> <local sha> <remote ref> <remote sha>" lines;
# remember the last local ref being pushed.
while read local_ref local_sha remote_ref remote_sha
do
    LOCAL_REF=$local_ref
done

branch=`echo $LOCAL_REF | awk -F '/' '{printf $3}'`
origin_branch=`echo $remote/$branch`

# --name-only yields just the changed paths; the original used --stat, whose
# output mixes filenames with histogram noise ("| 3 ++-" etc.).
FILES=$(git diff $LOCAL_REF $origin_branch --name-only)

# POSIX sh string comparison uses a single "=".
if [ "$FILES" = "" ]; then
    exit 0
fi
echo $FILES

# RS must be initialized before the loop: previously it stayed unset when no
# file matched the pattern, making the final test malformed.
RS=1
for FILE in $FILES
do
    echo $FILE | egrep -q $PHPCS_FILE_PATTERN
    RETVAL=$?
    if [ "$RETVAL" -eq "0" ]
    then
        PHPCS_OUTPUT=$($PHPCS_BIN --standard=$PHPCS_CODING_STANDARD --colors --encoding=utf-8 -n $FILE)
        PHPCS_RETVAL=$?
        if [ $PHPCS_RETVAL -ne 0 ];
        then
            echo $PHPCS_OUTPUT
            RS=0
        fi
    fi
done

if [ $RS -eq 0 ]
then
    exit 1
fi
exit 0
<file_sep>/README.md
<h1 align="center"> phpcd-pre-push-hook </h1>
<p align="center"> phpcs-pre-push-hook.</p>
## Installing
```shell
$ composer require stevenash/phpcd-pre-push-hook -vvv
```
Or alternatively, include a dependency for `stevenash/phpcd-pre-push-hook` in your `composer.json` file manually:
{
"require-dev": {
"stevenash/phpcd-pre-push-hook": "*"
}
}
To enable code sniffing, add the installation script to `post-install-cmd` and `post-update-cmd` in `composer.json`:
"scripts": {
"post-install-cmd": [
"Stevenash\\PhpcdPrePushHook\\Installer::postInstall"
],
"post-update-cmd": [
"Stevenash\\PhpcdPrePushHook\\Installer::postInstall"
]
}
## Usage
TODO
## Contributing
You can contribute in one of three ways:
1. File bug reports using the [issue tracker](https://github.com/stevenash/phpcd-pre-push-hook/issues).
2. Answer questions or fix bugs on the [issue tracker](https://github.com/stevenash/phpcd-pre-push-hook/issues).
3. Contribute new features or update the wiki.
_The code contribution process is not very formal. You just need to make sure that you follow the PSR-0, PSR-1, and PSR-2 coding guidelines. Any new code contributions must be accompanied by unit tests where applicable._
## License
MIT<file_sep>/src/setup.sh
#!/bin/sh
# Installs (or refreshes) the phpcs pre-push git hook for this repository.

HOOK_PATH=.git/hooks/pre-push

# Remember whether a hook already existed so we can report
# "installed" vs "updated" afterwards.
if [ -e "$HOOK_PATH" ];
then
    HOOK_EXISTED=1
else
    HOOK_EXISTED=0
fi

# Make sure the hooks directory exists before copying.
if [ ! -d .git/hooks ]; then
    mkdir -p .git/hooks
fi

cp vendor/stevenash/phpcd-pre-push-hook/src/pre-push "$HOOK_PATH"
chmod +x "$HOOK_PATH"

if [ "$HOOK_EXISTED" = 0 ];
then
    echo "Pre-push git hook is installed!"
else
    echo "Pre-push git hook is updated!"
fi
<file_sep>/src/Installer.php
<?php
namespace Stevenash\PhpcdPrePushHook;
/**
 * Composer script hook that installs the pre-push git hook after
 * `composer install` / `composer update`.
 */
class Installer
{
    /**
     * Runs the shell installer that copies the pre-push hook into .git/hooks.
     *
     * NOTE(review): the original kept a commented-out Windows branch here;
     * the dead code has been removed. Windows is currently unsupported.
     */
    public static function postInstall()
    {
        system('sh vendor/stevenash/phpcd-pre-push-hook/src/setup.sh');
    }
}
"Markdown",
"PHP",
"Shell"
] | 4 | Shell | SteveNashH/phpcs-pre-push-hook | f66b4c594b8b892794542b4c8b9a1961c1d2d7dc | 67a1aab292d79665feaf05a6e52c6583e7291797 |
refs/heads/master | <file_sep># Recurrent Attention Model for Image Classification
---
## Description
Convolutional Neural Networks have proved to be an effective mechanism for performing object-classification tasks. Yet, they impose tremendous computational overhead while processing an image, as they need to read the entire image to emit a prediction. [(Mnih et al. 2014)](https://arxiv.org/pdf/1406.6247.pdf) developed a Recurrent Attention Model (RAM) mimicking the retina-like structure that focuses attention only on those parts of the image which provide maximum information to accomplish the classification task. As demonstrated by the authors, this model achieves considerable accuracy when tested on the MNIST dataset in three different settings, namely, ‘Centered Digits’, ‘Non-Centered Digits’ and ‘Cluttered Non-Centered Digits’. In this project, I implement this model as-is and study the results.
## Architecture

The Recurrent Attention Model consists of
1. Glimpse Sensor
2. Glimpse Network
3. Core Network
4. Action Network
5. Location Network
### 1. Glimpse Sensor
Glimpse sensor resembles the structure of the retina of human eye. It produces a glimpse by reading patches of the image at specified location with successively decreasing resolutions.
### 2. Glimpse Network
Glimpse Network is a feedforward network that takes glimpse and its location as input and encodes it in 1-dimensional vector.
### 3. Core Network
Core network is a Recurrent Neural Network that takes glimpse-encoding(produced by Glimpse Network) and previous state as input and produces next state as output. The Recurrent Neural Network is used to make the agent remember its previous decisions.
### 4. Action Network
Action Network is a fully connected layer followed by softmax activation. It takes the state produced by Core Network as input and emits the probability of each class as output. The class with maximum probability is the action taken by the agent.
### 5. Location Network
Location Network decides location of the next glimpse. It is a fully connected layer with linear activation that takes the state produced by Core Network as input and produces 2-dimensional output which is then used as the mean of Gaussian distribution over which the next location is sampled.
## From Reinforcement Learning Perspective
The typical Reinforcement Learning framework consists of
1. The environment and
2. The agent
At any particular timestep, the environment is in a certain state. The agent through its sensors observes this state and based on its policy decides the action. After taking this action, environment goes in the next state and agent receives some reward. The objective is to decide a policy that would maximize this reward. The detailed description of Reinforcement Learning framework can be found at [(<NAME> & <NAME>, 1998)](http://incompleteideas.net/book/bookdraft2017nov5.pdf).
Here, the image data constitutes partially observable environment. Glimpse Sensor, Glimpse Network, Core Network, Action Network and Location Network together form the agent. The class-prediction and next glimpse location together form an action. Ideally, the policy is the joint probability distribution of class-prediction and next glimpse location. However, for simplicity we assume these two events to be independent of each other and treat them separately for training. After taking an action, the agent receives positive reward for correct class-prediction and zero or negative reward for the wrong prediction. We can set negative reward if we want agent to quickly arrive at correct solution.
## Training

--
Unlike other supervised learning problems, here we have two objective functions to optimize. During training, the goal is to minimize standard cross-entropy loss associated with action network while maximizing expected reward associated with the location network. The cross-entropy loss is minimized using back-propagation through Action Network, Core Network and Glimpse Network (red arrow) and the expected reward is maximized using Policy Gradient on Location Network (blue arrow).
<file_sep>import tensorflow as tf
import math
class Cell(object):
    """Base class for all computational cells.

    A cell is lazily "built" on first use: subclasses create their
    variables/shapes inside ``_build`` and set ``_built`` afterwards.
    """

    def __init__(self, descriptor):
        # Descriptor object holding the cell's hyperparameters.
        self._descriptor = descriptor
        # True once the cell's variables/shapes have been created.
        self._built = False

    @property
    def descriptor(self):
        return self._descriptor

    @property
    def built(self):
        return self._built
class SensorCell(Cell):
    """Marker base class for sensor cells (no trainable variables)."""

    def __init__(self, descriptor):
        super(SensorCell, self).__init__(descriptor)
class NetworkCell(Cell):
    """Base class for cells that own trainable TensorFlow variables."""

    def __init__(self, descriptor):
        super(NetworkCell, self).__init__(descriptor)

    @property
    def variable_collection(self):
        # Contents of the first (primary) graph collection this cell
        # registers its variables into.
        return tf.get_collection(self._descriptor.variable_collections[0])

    def _add_variables_to_collections(self):
        # Subclasses must register their variables into the descriptor's
        # collections after building.
        raise NotImplementedError
class GlimpseSensorCell(SensorCell):
    """Sensor mimicking a retina-like structure: extracts multi-resolution
    patches ("glimpses") of an image around a given location."""

    def __init__(self, descriptor):
        """Initializes the sensor.

        Args:
            descriptor: descriptor providing scan_height, scan_width and
                number_of_scales.
        """
        super(GlimpseSensorCell, self).__init__(descriptor)

    @property
    def shapes(self):
        # NOTE(review): only valid after the first __call__; before that
        # _glimpse_shapes_list does not exist and this raises AttributeError.
        return self._glimpse_shapes_list

    def __call__(self, image, location):
        """Glimpse sensor.

        Args:
            image: [batch_size x height x width x channels] tensor of
                input images.
            location: [batch_size x 2] tensor representing the sensor
                location scaled to ([-1, 1], [-1, 1]).

        Returns:
            Per-batch flattened patches of all scales, each resized down
            to the base scan resolution, concatenated along axis 1.
        """
        if not self._built:
            self._build(image, location)
            self._built = True
        return tf.concat([self._create_glimpse(image, size, location) for size in self._glimpse_shapes_list],
                         axis=1)

    def _build(self, image, location):
        # Patch sizes double at every scale: base, 2x, 4x, ...
        self._glimpse_shapes_list = list()
        for i in range(self._descriptor.number_of_scales):
            self._glimpse_shapes_list.append(tf.constant([int(math.pow(2, i) * self._descriptor.scan_height),
                                                          int(math.pow(2, i) * self._descriptor.scan_width)]))

    def _create_glimpse(self, image, size, location):
        # Extract a size-sized patch at `location`, downsample it to the
        # base scan resolution, then flatten per batch element.
        return tf.contrib.layers.flatten(
            tf.image.resize_images(tf.image.extract_glimpse(image, size, location),
                                   tf.constant([int(self._descriptor.scan_height),
                                                int(self._descriptor.scan_width)])))
class GlimpseNetworkCell(NetworkCell):
    """Glimpse network f_g: fuses the retina glimpse and its location into
    one feature vector.

    Two independent hidden layers encode the glimpse pixels (h_g) and the
    2-D location (h_l); their linear projections are summed and passed
    through the output activation.
    """

    def __init__(self, descriptor):
        super(GlimpseNetworkCell, self).__init__(descriptor)

    @property
    def kernel_in_hg(self):
        return self._kernel_in_hg

    @property
    def bias_hg(self):
        return self._bias_hg

    @property
    def kernel_loc_hl(self):
        return self._kernel_loc_hl

    @property
    def bias_hl(self):
        return self._bias_hl

    @property
    def kernel_hg_out(self):
        return self._kernel_hg_out

    @property
    def kernel_hl_out(self):
        return self._kernel_hl_out

    @property
    def bias_out(self):
        return self._bias_out

    def __call__(self, glimpse, location):
        """Returns g = act(W_hg*h_g + W_hl*h_l + b) for a batch.

        Args:
            glimpse: [batch_size x glimpse_dim] flattened glimpse tensor.
            location: [batch_size x 2] glimpse location.
        """
        if not self._built:
            self._build(glimpse, location)
            self._built = True
        # Hidden encoding of the glimpse pixels.
        h_g = self._descriptor.activation_hg(
            tf.add(tf.matmul(glimpse, self._kernel_in_hg),
                   self._bias_hg))
        # Hidden encoding of the location.
        h_l = self._descriptor.activation_hl(
            tf.add(tf.matmul(location, self._kernel_loc_hl),
                   self._bias_hl))
        # Combine both modalities with a single linear layer.
        z_g = tf.add(tf.add(tf.matmul(h_g, self._kernel_hg_out),
                            tf.matmul(h_l, self._kernel_hl_out)),
                     self._bias_out)
        g = self._descriptor.output_activation(z_g)
        return g

    def _build(self, glimpse, location):
        self._kernel_in_hg = tf.get_variable(
            "kernel_in_hg",
            shape=[glimpse.shape[1],
                   self._descriptor.hg_vector_length],
            initializer=self._descriptor.kernel_in_hg_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._bias_hg = tf.get_variable(
            "bias_hg",
            shape=[1,
                   self._descriptor.hg_vector_length],
            initializer=self._descriptor.bias_hg_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._kernel_loc_hl = tf.get_variable(
            "kernel_loc_hl",
            shape=[location.shape[1],
                   self._descriptor.hl_vector_length],
            initializer=self._descriptor.kernel_loc_hl_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._bias_hl = tf.get_variable(
            "bias_hl",
            shape=[1,
                   self._descriptor.hl_vector_length],
            initializer=self._descriptor.bias_hl_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._kernel_hg_out = tf.get_variable(
            "kernel_hg_out",
            shape=[self._descriptor.hg_vector_length,
                   self._descriptor.output_dimensions],
            initializer=self._descriptor.kernel_hg_out_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._kernel_hl_out = tf.get_variable(
            "kernel_hl_out",
            shape=[self._descriptor.hl_vector_length,
                   self._descriptor.output_dimensions],
            initializer=self._descriptor.kernel_hl_out_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._bias_out = tf.get_variable(
            "bias_out",
            shape=[1,
                   self._descriptor.output_dimensions],
            initializer=self._descriptor.bias_out_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._add_variables_to_collections()

    def _add_variables_to_collections(self):
        # Register every variable in all requested graph collections.
        for collection in self._descriptor.variable_collections:
            tf.add_to_collection(collection, self._kernel_in_hg)
            tf.add_to_collection(collection, self._bias_hg)
            tf.add_to_collection(collection, self._kernel_loc_hl)
            tf.add_to_collection(collection, self._bias_hl)
            tf.add_to_collection(collection, self._kernel_hg_out)
            tf.add_to_collection(collection, self._kernel_hl_out)
            # Fixed: the original referenced the non-existent attribute
            # `self._self._bias_out`, raising AttributeError at build time.
            tf.add_to_collection(collection, self._bias_out)
class CoreNetworkCell(NetworkCell):
    """Core network f_h: an LSTM that aggregates glimpse encodings over
    time and carries the agent's internal state."""

    def __init__(self,
                 descriptor):
        super(CoreNetworkCell, self).__init__(descriptor)
        # Learnable initial LSTM state (both c and h start as trainable
        # zero-initialized variables of shape [batch_size, output_dims]).
        self._initial_state = \
            tf.nn.rnn_cell.LSTMStateTuple(\
                tf.Variable(\
                    tf.zeros(\
                        [self._descriptor.batch_size,
                         self._descriptor.output_dimensions])),
                tf.Variable(\
                    tf.zeros(\
                        [self._descriptor.batch_size,
                         self._descriptor.output_dimensions])))

    @property
    def lstm_cell(self):
        return self._lstm_cell

    @property
    def initial_state(self):
        return self._initial_state

    def __call__(self, inputs, state):
        """One LSTM step; returns (activated hidden output, new state)."""
        if not self._built:
            self._build(inputs, state)
            self._built = True
        h, state = self._lstm_cell(inputs, state)
        return self._descriptor.output_activation(h), state

    def _build(self, inputs, state):
        self._lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(self._descriptor.output_dimensions)
        self._add_variables_to_collections()

    def _add_variables_to_collections(self):
        # NOTE(review): only the initial state is registered here; the
        # LSTM's own kernel/bias are not added to these collections —
        # confirm this is intentional.
        for collection in self._descriptor.variable_collections:
            tf.add_to_collection(collection, self._initial_state)
class ActionNetworkCell(NetworkCell):
    """Action head f_a: maps the core state to a probability distribution
    over the class labels (softmax over a fully connected layer)."""

    def __init__(self,
                 descriptor):
        super(ActionNetworkCell, self).__init__(descriptor)

    @property
    def kernel_in_fa(self):
        return self._kernel_in_fa

    @property
    def bias_fa(self):
        return self._bias_fa

    def __call__(self, inputs):
        if not self._built:
            self._build(inputs)
            self._built = True
        # Affine layer -> output_activation -> softmax.
        # NOTE(review): any non-identity output_activation clips/warps the
        # logits before the softmax — confirm this is intentional.
        return tf.nn.softmax(self._descriptor.output_activation(\
            tf.add(tf.matmul(inputs, self._kernel_in_fa), self._bias_fa)))

    def _build(self, inputs):
        self._kernel_in_fa = tf.get_variable("kernel_in_fa",
                                             shape=[inputs.shape[1],
                                                    self._descriptor.output_dimensions],
                                             initializer=self._descriptor.kernel_in_fa_initializer,
                                             trainable=self._descriptor.backprop_trainable)
        self._bias_fa = tf.get_variable("bias_fa",
                                        shape=[1,
                                               self._descriptor.output_dimensions],
                                        initializer=self._descriptor.bias_fa_initializer,
                                        trainable=self._descriptor.backprop_trainable)
        self._add_variables_to_collections()

    def _add_variables_to_collections(self):
        for collection in self._descriptor.variable_collections:
            tf.add_to_collection(collection, self._kernel_in_fa)
            tf.add_to_collection(collection, self._bias_fa)
class LocationNetworkCell(NetworkCell):
    """Location head f_l: linear layer that emits the mean of the Gaussian
    from which the next glimpse location is sampled."""

    def __init__(self,
                 descriptor):
        super(LocationNetworkCell, self).__init__(descriptor)

    @property
    def kernel_in_fl(self):
        return self._kernel_in_fl

    @property
    def bias_fl(self):
        # NOTE(review): no separate bias variable exists; the bias is the
        # first row of kernel_in_fl (see __call__), so this property would
        # raise AttributeError if accessed.
        return self._bias_fl

    def __call__(self, inputs):
        if not self._built:
            self._build(inputs)
            self._built = True
        # A column of ones is prepended to `inputs`, so the first row of
        # kernel_in_fl plays the role of the bias term.
        return self._descriptor.output_activation(\
            tf.matmul(tf.concat([tf.ones([inputs.shape[0], 1]), inputs], 1), self._kernel_in_fl))

    def _build(self, inputs):
        # +1 input dimension to accommodate the implicit bias column.
        self._kernel_in_fl = tf.get_variable("kernel_in_fl",
                                             shape=[inputs.shape[1] + tf.Dimension(1),
                                                    self._descriptor.output_dimensions],
                                             initializer=self._descriptor.kernel_in_fl_initializer,
                                             trainable=self._descriptor.backprop_trainable)
        self._add_variables_to_collections()

    def _add_variables_to_collections(self):
        for collection in self._descriptor.variable_collections:
            tf.add_to_collection(collection, self._kernel_in_fl)
class BaselineNetworkCell(NetworkCell):
    """Baseline head f_b: a fully connected layer predicting the REINFORCE
    baseline (expected reward) from the core network state."""

    def __init__(self,
                 descriptor):
        super(BaselineNetworkCell, self).__init__(descriptor)

    @property
    def kernel_in_fb(self):
        return self._kernel_in_fb

    @property
    def bias_fb(self):
        return self._bias_fb

    def __call__(self, inputs):
        if not self._built:
            self._build(inputs)
            self._built = True
        # Affine projection followed by the descriptor's activation.
        projected = tf.matmul(inputs, self._kernel_in_fb)
        pre_activation = tf.add(projected, self._bias_fb)
        return self._descriptor.output_activation(pre_activation)

    def _build(self, inputs):
        in_width = inputs.shape[1]
        out_width = self._descriptor.output_dimensions
        self._kernel_in_fb = tf.get_variable(
            "kernel_in_fb",
            shape=[in_width, out_width],
            initializer=self._descriptor.kernel_in_fb_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._bias_fb = tf.get_variable(
            "bias_fb",
            shape=[1, out_width],
            initializer=self._descriptor.bias_fb_initializer,
            trainable=self._descriptor.backprop_trainable)
        self._add_variables_to_collections()

    def _add_variables_to_collections(self):
        # Register both variables in every requested graph collection.
        for collection in self._descriptor.variable_collections:
            tf.add_to_collection(collection, self._kernel_in_fb)
            tf.add_to_collection(collection, self._bias_fb)
class ClippedRandomNormalSamplerCell:
    """Samples a point from a diagonal Gaussian centered at the given mean
    and clips it to [min_val, max_val] (the valid location range)."""

    def __init__(self,
                 descriptor):
        self._descriptor = descriptor
        self._built = False

    @property
    def descriptor(self):
        return self._descriptor

    @property
    def built(self):
        return self._built

    def __call__(self, inputs):
        """`inputs` is the per-batch mean; returns one clipped sample."""
        if not self._built:
            self._build(inputs)
            self._built = True
        return tf.clip_by_value(\
            tf.contrib.distributions.MultivariateNormalDiag(\
                inputs, self._scales).sample(),
            self._descriptor.min_val,
            self._descriptor.max_val)

    def _build(self, inputs):
        # Fixed per-dimension stddev of 1.0.
        # NOTE(review): presumably this should be a tunable exploration
        # parameter — confirm against the training code.
        self._scales = tf.ones([self._descriptor.batch_size, inputs.shape[1]])
<file_sep>import tensorflow as tf
from descriptors import GlimpseDescriptor
from descriptors import GlimpseNetworkDescriptor
from descriptors import CoreNetworkDescriptor
from descriptors import ActionNetworkDescriptor
from descriptors import LocationNetworkDescriptor
from descriptors import ClippedRandomNormalSamplerDescriptor
from descriptors import BaselineNetworkDescriptor
from networkcells import GlimpseNetworkCell
from networkcells import GlimpseSensorCell
from networkcells import CoreNetworkCell
from networkcells import ActionNetworkCell
from networkcells import LocationNetworkCell
from networkcells import ClippedRandomNormalSamplerCell
from networkcells import BaselineNetworkCell
class ModelDescriptor(object):
    """Immutable bag of hyperparameters for the Recurrent Attention Model."""

    def __init__(self,
                 sequence_length,
                 number_of_scales,
                 glimpse_width,
                 glimpse_height,
                 num_glimpse_fc,
                 num_loc_fc,
                 glimpse_net_out_dim,
                 core_network_state_units,
                 number_of_actions,
                 location_dimensionality,
                 batch_size):
        # Number of glimpses per image (RNN unroll length).
        self._sequence_length = sequence_length
        # Number of resolutions captured by the glimpse sensor.
        self._num_glimpse_scales = number_of_scales
        # Base glimpse patch width / height in pixels.
        self._glimpse_width = glimpse_width
        self._glimpse_height = glimpse_height
        # Hidden-layer widths of the glimpse network (glimpse and location).
        self._num_glimpse_fc = num_glimpse_fc
        self._num_loc_fc = num_loc_fc
        # Output dimensionality of the glimpse network.
        self._glimpse_net_out_dim = glimpse_net_out_dim
        # LSTM state size of the core network.
        self._core_network_state_units = core_network_state_units
        # Number of classes the action head predicts over.
        self._number_of_actions = number_of_actions
        # Dimensionality of a glimpse location (typically 2).
        self._loc_dim = location_dimensionality
        self._batch_size = batch_size

    @property
    def sequence_length(self):
        return self._sequence_length

    @property
    def num_glimpse_scales(self):
        return self._num_glimpse_scales

    @property
    def glimpse_width(self):
        return self._glimpse_width

    @property
    def glimpse_height(self):
        return self._glimpse_height

    @property
    def num_glimpse_fc(self):
        return self._num_glimpse_fc

    @property
    def num_loc_fc(self):
        return self._num_loc_fc

    @property
    def glimpse_net_out_dim(self):
        return self._glimpse_net_out_dim

    @property
    def core_network_state_units(self):
        return self._core_network_state_units

    @property
    def number_of_actions(self):
        return self._number_of_actions

    @property
    def loc_dim(self):
        return self._loc_dim

    @property
    def batch_size(self):
        return self._batch_size
class Model(object):
    """Recurrent Attention Model (RAM).

    Wires the glimpse sensor, glimpse network, core LSTM, action head,
    location head, baseline head and the Gaussian location sampler into a
    single graph, unrolled for `sequence_length` glimpses per image.
    """

    def __init__(self, descriptor):
        self._descriptor = descriptor
        self._built = False
        # Build sub-network descriptors first, then instantiate the cells.
        self._construct_descriptors()
        self._initialize_network()

    @property
    def built(self):
        return self._built

    @property
    def descriptor(self):
        return self._descriptor

    @property
    def backprop_outputs(self):
        # NOTE(review): self._backprop_outputs is never assigned in this
        # class, so this property raises AttributeError — confirm whether
        # it is dead code or set externally.
        return self._backprop_outputs

    @property
    def glimpse_descriptor(self):
        return self._glimpse_desc

    @property
    def glimpse_network_descriptor(self):
        return self._glimpse_net_desc

    @property
    def core_network_descriptor(self):
        return self._core_net_desc

    @property
    def action_network_descriptor(self):
        return self._act_net_desc

    @property
    def location_network_descriptor(self):
        return self._loc_net_desc

    @property
    def baseline_network_descriptor(self):
        return self._baseline_net_desc

    @property
    def random_sampler_descriptor(self):
        return self._rand_sampler_desc

    @property
    def glimpse_sensor(self):
        return self._glimpse_sensor

    @property
    def glimpse_network(self):
        return self._glimpse_network

    @property
    def core_network(self):
        return self._core_network

    @property
    def action_network(self):
        return self._action_network

    @property
    def location_network(self):
        return self._location_network

    @property
    def baseline_network(self):
        return self._baseline_network

    @property
    def sampler(self):
        return self._sampler

    @property
    def current_core_network_state(self):
        return self._current_core_network_state

    @property
    def glimpse_sensor_output(self):
        return self._glimpse_sensor_output

    @property
    def glimpse_network_output(self):
        return self._glimpse_network_output

    @property
    def core_network_output(self):
        return self._core_network_output

    @property
    def action_network_output(self):
        return self._action_network_output

    @property
    def baseline_network_output(self):
        return self._baseline_network_output

    @property
    def location_network_output(self):
        return self._location_network_output

    @property
    def next_location_output(self):
        return self._next_location_output

    def __call__(self, inputs):
        """Unrolls the model for `sequence_length` glimpses over a batch
        of images and returns the tensors needed for training.

        Returns a dict with the final class probabilities ("ACTIONS"),
        per-step and mean baselines, sampled locations, location means and
        the (bias-augmented) core states.
        """
        if not self._built:
            self._build(inputs)
            self._built = True
        for t in range(1, self._descriptor.sequence_length + 1):
            # Sense at the location chosen on the previous step; the
            # freshly appended element is at index t - 1.
            self._glimpse_sensor_output.append(\
                self._glimpse_sensor(inputs,
                                     self._next_location_output[t - 1]))
            self._glimpse_network_output.append(\
                self._glimpse_network(\
                    self._glimpse_sensor_output[t - 1],
                    self._next_location_output[t - 1]))
            # One LSTM step over the glimpse encoding.
            h, self._current_core_network_state =\
                self._core_network(self._glimpse_network_output[t - 1],
                                   self._current_core_network_state)
            if t < self._descriptor.sequence_length:
                # Not the last glimpse: pick where to look next.
                self._core_network_output.append(h)
                self._location_network_output.append(\
                    self._location_network(h))
                self._next_location_output.append(\
                    self._sampler(self._location_network_output[t]))
            # Per-step class probabilities and baseline predictions.
            self._action_network_output.append(self._action_network(h))
            self._baseline_network_output.append(self._baseline_network(h))
        return {"ACTIONS": self._action_network_output[-1],
                "BASELINES_REDUCED": tf.transpose(\
                    tf.reduce_mean(\
                        tf.concat(self._baseline_network_output, axis=1),
                        axis=0,
                        keep_dims=True)),
                "BASELINES": tf.transpose(\
                    tf.concat(self._baseline_network_output, axis=1)),
                "LOCATIONS": tf.stack(self._next_location_output),
                "MEANS": tf.stack(self._location_network_output),
                "STATES": tf.concat(\
                    [tf.ones([self._descriptor.sequence_length, self._descriptor.batch_size, 1]),
                     tf.stack(self._core_network_output)], axis=2)}

    def _build(self, inputs):
        # Seed the unroll: initial state, and a first sampled location
        # derived from the (learnable) initial hidden state.
        self._current_core_network_state = self._core_network.initial_state
        self._glimpse_sensor_output = list()
        self._glimpse_network_output = list()
        self._core_network_output = [self._current_core_network_state.h]
        self._action_network_output = list()
        self._baseline_network_output = list()
        self._location_network_output =\
            [self._location_network(self._core_network_output[0])]
        self._next_location_output = [self._sampler(self._location_network_output[0])]

    def _construct_descriptors(self):
        """Translates the flat ModelDescriptor into per-cell descriptors."""
        self._glimpse_desc =\
            GlimpseDescriptor(\
                self._descriptor.glimpse_height,
                self._descriptor.glimpse_width,
                self._descriptor.num_glimpse_scales)
        # Glimpse network: ReLU hidden layers, Xavier-initialized weights.
        self._glimpse_net_desc =\
            GlimpseNetworkDescriptor(\
                self._descriptor.glimpse_net_out_dim,
                tf.nn.relu,
                self._descriptor.num_glimpse_fc,
                self._descriptor.num_loc_fc,
                tf.nn.relu,
                tf.nn.relu,
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer())
        self._core_net_desc =\
            CoreNetworkDescriptor(\
                self._descriptor.core_network_state_units,
                tf.identity,
                self._descriptor.batch_size)
        self._act_net_desc =\
            ActionNetworkDescriptor(\
                self._descriptor.number_of_actions,
                tf.nn.relu,
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer())
        # Location and baseline heads get their own variable collections so
        # they can be trained by REINFORCE instead of backprop.
        self._loc_net_desc =\
            LocationNetworkDescriptor(\
                self._descriptor.loc_dim,
                tf.identity,
                tf.contrib.layers.xavier_initializer(),
                tf.contrib.layers.xavier_initializer(),
                variable_collections=["location_network_collection"])
        self._baseline_net_desc =\
            BaselineNetworkDescriptor(1,
                                      tf.nn.relu,
                                      tf.contrib.layers.xavier_initializer(),
                                      tf.contrib.layers.xavier_initializer(),
                                      variable_collections=["baseline_network_collection"])
        self._rand_sampler_desc =\
            ClippedRandomNormalSamplerDescriptor(self._descriptor.batch_size)

    def _initialize_network(self):
        """Instantiates every sub-network cell from its descriptor."""
        self._glimpse_sensor =\
            GlimpseSensorCell(self._glimpse_desc)
        self._glimpse_network =\
            GlimpseNetworkCell(self._glimpse_net_desc)
        self._core_network =\
            CoreNetworkCell(self._core_net_desc)
        self._action_network =\
            ActionNetworkCell(self._act_net_desc)
        self._location_network =\
            LocationNetworkCell(self._loc_net_desc)
        self._baseline_network =\
            BaselineNetworkCell(self._baseline_net_desc)
        self._sampler =\
            ClippedRandomNormalSamplerCell(self._rand_sampler_desc)
<file_sep>import tensorflow as tf
class DataReader(object):
    """Streams shuffled (data, label) batches out of a TFRecord file.

    The read pipeline (filename queue -> TFRecordReader -> feature
    parsing) is assembled eagerly in the constructor; ``read()`` adds the
    shuffle-batching stage and returns the batch tensors.
    """

    def __init__(self,
                 data_path,
                 input_feature_key,
                 target_feature_key,
                 batch_size,
                 filename_queue_epochs = None,
                 one_hot_labels=False,
                 one_hot_depth=None,
                 capacity=30,
                 num_threads=1,
                 min_after_dequeue=10):
        self._data_path = data_path
        # Keys under which the raw input bytes / int64 label are stored
        # in each serialized example.
        self._input_feature_key = input_feature_key
        self._target_feature_key = target_feature_key
        self._batch_size = batch_size
        self._filename_queue_epochs = filename_queue_epochs
        # Optionally expand integer labels to one-hot vectors in read().
        self._one_hot_labels = one_hot_labels
        self._one_hot_depth = one_hot_depth
        # shuffle_batch tuning knobs.
        self._capacity = capacity
        self._num_threads = num_threads
        self._min_after_dequeue = min_after_dequeue
        self._feature =\
            {self._input_feature_key: tf.FixedLenFeature([], tf.string),
             self._target_feature_key: tf.FixedLenFeature([], tf.int64)}
        self._filename_queue = tf.train.string_input_producer(
            [self._data_path], num_epochs=filename_queue_epochs)
        self._reader = tf.TFRecordReader()
        self._read_op_name, self._serialized_example = self._reader.read(\
            self._filename_queue)
        self._read_features = tf.parse_single_example(self._serialized_example,
                                                      features=self._feature)
        # Raw bytes are interpreted as float32; labels as int32.
        self._data = tf.decode_raw(self._read_features[self._input_feature_key],
                                   tf.float32)
        self._label = tf.cast(self._read_features[self._target_feature_key],
                              tf.int32)

    @property
    def data_path(self):
        return self._data_path

    @property
    def input_feature_key(self):
        return self._input_feature_key

    @property
    def target_feature_key(self):
        return self._target_feature_key

    @property
    def filename_queue_epochs(self):
        return self._filename_queue_epochs

    @property
    def one_hot_labels(self):
        return self._one_hot_labels

    @property
    def one_hot_depth(self):
        return self._one_hot_depth

    @property
    def capacity(self):
        return self._capacity

    @property
    def num_threads(self):
        return self._num_threads

    @property
    def min_after_dequeue(self):
        return self._min_after_dequeue

    @property
    def feature(self):
        return self._feature

    @property
    def filename_queue(self):
        return self._filename_queue

    @property
    def reader(self):
        return self._reader

    def read(self):
        """Returns (data, labels) batch tensors; labels are one-hot encoded
        when one_hot_labels was requested (one_hot_depth must be an int).
        """
        data, labels = tf.train.shuffle_batch(\
            [self._data, self._label],
            batch_size=self._batch_size,
            capacity=self._capacity,
            num_threads=self._num_threads,
            min_after_dequeue=self._min_after_dequeue)
        if self._one_hot_labels:
            if isinstance(self._one_hot_depth, int):
                labels = tf.one_hot(labels, depth=self._one_hot_depth)
            else:
                raise Exception("one_hot_depth should be of type int, found " + \
                                str(type(self._one_hot_depth)))
        return data, labels
class ImageDataReader(DataReader):
    """DataReader specialization for image inputs: reshapes the raw
    decoded float32 vector into a fixed image ``shape``.
    """
    def __init__(self,
                 data_path,
                 input_feature_key,
                 target_feature_key,
                 batch_size,
                 shape,
                 filename_queue_epochs = None,
                 one_hot_labels=False,
                 one_hot_depth=None,
                 capacity=30,
                 num_threads=1,
                 min_after_dequeue=10):
        """Same arguments as DataReader plus ``shape``, the per-example
        image shape, e.g. ``(60, 60, 1)``.
        """
        super(ImageDataReader, self).__init__(data_path,
                                              input_feature_key,
                                              target_feature_key,
                                              batch_size,
                                              filename_queue_epochs,
                                              one_hot_labels,
                                              one_hot_depth,
                                              capacity,
                                              num_threads,
                                              min_after_dequeue)
        self._shape = shape
        # Reinterpret the flat decoded vector as an image tensor.
        self._data = tf.reshape(self._data, shape)
    @property
    def shape(self):
        # Per-example image shape passed at construction.
        return self._shape
<file_sep>import tensorflow as tf
import numpy as np
from models import Model, ModelDescriptor
from training import BackpropTrainer, ReinforceTrainer
from objectives import LossFunction, RewardFunction
from readwrite import ImageDataReader
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
# Specify parameters
number_of_glimpses = 12
number_of_scales = 3
glimpse_width = 12
glimpse_height = 12
num_glimpse_fc = 128
num_loc_fc = 128
glimpse_network_output_dimensionality = 256
core_network_state_units = 256
number_of_actions = 10
location_dimensionality = 2
batch_size = 200
data_path = '/share/jproject/fg538/r-006-gpu-3/mnist_cluttered_train_data.tfrecords'
# FIX: the checkpoint-save line near the bottom referenced an undefined
# name `output`, which raised a NameError the first time accuracy
# crossed 0.90 (losing the training run); define the prefix explicitly.
checkpoint_prefix = 'model'
# Create DataReader object
datareader = ImageDataReader(data_path,
                             'train/image',
                             'train/label',
                             batch_size,
                             (60, 60, 1),
                             one_hot_labels=True,
                             one_hot_depth=10)
# Read images and labels
images, labels = datareader.read()
# Create placeholders for input
X = tf.placeholder(tf.float32, [None, 60, 60, 1], name='X')
y = tf.placeholder(tf.float32, [None, 10], name='labels')
# Create Attention Model Descriptor
model_descriptor = ModelDescriptor(number_of_glimpses,
                                   number_of_scales,
                                   glimpse_width,
                                   glimpse_height,
                                   num_glimpse_fc,
                                   num_loc_fc,
                                   glimpse_network_output_dimensionality,
                                   core_network_state_units,
                                   number_of_actions,
                                   location_dimensionality,
                                   batch_size)
# Initialize Attention Model
attention_model = Model(model_descriptor)
# Build Attention Model
model_out = attention_model(X)
# Define Objective Functions: classification loss, REINFORCE reward,
# and a baseline-regression loss (MSE against the rewards).
objective1 = LossFunction(model_out['ACTIONS'], y)
objective2 = RewardFunction(model_out['ACTIONS'], y, batch_size, number_of_glimpses)
objective3 = LossFunction(model_out['BASELINES'], objective2.rewards, tf.losses.mean_squared_error)
# Define Training Operations
bp_train1 = BackpropTrainer(tf.trainable_variables(), objective1.loss, tf.train.GradientDescentOptimizer(0.1))
bp_train2 = BackpropTrainer(attention_model.baseline_network.variable_collection, objective3.loss, tf.train.GradientDescentOptimizer(0.1))
rl_train = ReinforceTrainer(attention_model.location_network.variable_collection,
                            0.1,
                            0.9,
                            batch_size,
                            model_out['MEANS'],
                            model_out['LOCATIONS'],
                            model_out['STATES'],
                            objective2.rewards,
                            model_out['BASELINES_REDUCED'])
# Define Metrics
correct_prediction = tf.equal(tf.argmax(model_out['ACTIONS'], 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
expected_reward = tf.reduce_mean(objective2.rewards)
saver = tf.train.Saver()
# Configure Session (grow GPU memory on demand, cap at 1/3 of the card)
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.gpu_options.per_process_gpu_memory_fraction = 0.33
# Define numpy arrays to store accuracy, loss and rewards for later use
train_acc = np.zeros([5000])
train_reward = np.zeros([5000])
train_loss = np.zeros([5000])
# Start the Session
with tf.Session(config=config) as sess:
    # Initialize global and local variables
    sess.run(tf.global_variables_initializer())
    sess.run(tf.local_variables_initializer())
    # Define summary-writer
    merged_summary = tf.summary.merge_all()
    writer = tf.summary.FileWriter('log', sess.graph)
    # Initialize threads-coordinator for the input queue runners
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    # Start training
    print('Starting training...')
    for i in range(5000):
        batch_x, batch_lbl = sess.run([images, labels])
        sess.run([bp_train1.train_op, bp_train2.train_op, rl_train.train_op], feed_dict={X: batch_x, y: batch_lbl})
        acc, loss, reward, locs = sess.run([accuracy, objective1.loss, expected_reward, model_out['LOCATIONS']],
                                           feed_dict={X: batch_x, y: batch_lbl})
        print(locs)
        train_acc[i] = acc
        train_reward[i] = reward
        train_loss[i] = loss
        s = sess.run(merged_summary, feed_dict={X: batch_x, y: batch_lbl})
        writer.add_summary(s, i)
        if (i+1) % 10 == 0:
            print('Step {}: train_accuracy={}, train_loss={}, expected_reward = {}'.format(i+1, acc, loss, reward))
        if (((i+1) % 10 == 0) and (acc > 0.90)):
            # FIX: `output` was undefined in the original save path.
            params = saver.save(sess, '/N/u/rrrane/Project/model/{}_{}.ckpt'.format(checkpoint_prefix, i+1))
            print('Model saved: {}'.format(params))
    coord.request_stop()
    coord.join(threads)
<file_sep>import tensorflow as tf
class Descriptor(object):
    """Root of the descriptor hierarchy.

    Carries no state of its own; it exists only so every configuration
    descriptor shares one common base type.
    """
    def __init__(self):
        pass
class SensorDescriptor(Descriptor):
    """Describes a generic sensor by the number of elements it reads
    in a single scan."""

    def __init__(self, scan_length):
        super(SensorDescriptor, self).__init__()
        self._scan_length = scan_length

    @property
    def scan_length(self):
        """Number of elements produced by one scan."""
        return self._scan_length
class NetworkDescriptor(Descriptor):
    """Common configuration shared by every network cell.

    Records the output layer size and activation, whether a softmax is
    applied, whether the bias is folded into the kernel, the variable
    collections the weights join, and whether the network is trained
    with REINFORCE (which makes it not backprop-trainable).
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 apply_softmax,
                 include_bias_in_kernel,
                 variable_collections,
                 train_with_reinforce=False):
        super(NetworkDescriptor, self).__init__()
        self._output_dimensions = output_dimensions
        self._output_activation = output_activation
        self._apply_softmax = apply_softmax
        self._include_bias_in_kernel = include_bias_in_kernel
        self._variable_collections = variable_collections
        self._train_with_reinforce = train_with_reinforce
        # A network is backprop-trainable exactly when it is not
        # trained with REINFORCE.
        self._backprop_trainable = not train_with_reinforce

    @property
    def output_dimensions(self):
        """Size of the output layer."""
        return self._output_dimensions

    @property
    def output_activation(self):
        """Activation applied to the output layer."""
        return self._output_activation

    @property
    def apply_softmax(self):
        """Whether a softmax follows the output layer."""
        return self._apply_softmax

    @property
    def include_bias_in_kernel(self):
        """Whether the bias is folded into the kernel matrix."""
        return self._include_bias_in_kernel

    @property
    def variable_collections(self):
        """Collections the network's variables are added to."""
        return self._variable_collections

    @property
    def train_with_reinforce(self):
        """True when the network is updated by REINFORCE."""
        return self._train_with_reinforce

    @property
    def backprop_trainable(self):
        """Inverse of train_with_reinforce."""
        return self._backprop_trainable
class ImageSensorDescriptor(SensorDescriptor):
    """Sensor over a 2-D image patch; the scan length passed to the
    base class is height * width."""

    def __init__(self,
                 scan_height,
                 scan_width):
        super(ImageSensorDescriptor, self).__init__(scan_height * scan_width)
        self._scan_height = scan_height
        self._scan_width = scan_width

    @property
    def scan_height(self):
        """Patch height in pixels."""
        return self._scan_height

    @property
    def scan_width(self):
        """Patch width in pixels."""
        return self._scan_width
class GlimpseDescriptor(ImageSensorDescriptor):
    """Image sensor that extracts several scales per glimpse."""

    def __init__(self,
                 scan_height,
                 scan_width,
                 num_scales):
        super(GlimpseDescriptor, self).__init__(scan_height, scan_width)
        self._num_scales = num_scales

    @property
    def number_of_scales(self):
        """How many concentric scales the glimpse sensor reads."""
        return self._num_scales
class GlimpseNetworkDescriptor(NetworkDescriptor):
    """Configuration for the glimpse network: sizes/activations of the
    two hidden branches (h_g for the glimpse, h_l for the location) and
    the initializers of every kernel and bias in the cell.

    FIX: the original default `variable_collections=list()` was a
    mutable default evaluated once at definition time, so every
    instance constructed without the argument shared one list; a None
    sentinel now yields a fresh list per instance.
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 len_hg,
                 len_hl,
                 activation_hg,
                 activation_hl,
                 kernel_in_hg_initializer,
                 bias_hg_initializer,
                 kernel_loc_hl_initializer,
                 bias_hl_initializer,
                 kernel_hg_out_initializer,
                 kernel_hl_out_initializer,
                 bias_out_initializer,
                 apply_softmax=False,
                 include_bias_in_kernel=False,
                 variable_collections=None,
                 train_with_reinforce = False):
        if variable_collections is None:
            variable_collections = []
        super(GlimpseNetworkDescriptor, self).__init__(output_dimensions,
                                                       output_activation,
                                                       apply_softmax,
                                                       include_bias_in_kernel,
                                                       variable_collections,
                                                       train_with_reinforce)
        self._len_hg = len_hg
        self._len_hl = len_hl
        self._activation_hg = activation_hg
        self._activation_hl = activation_hl
        self._kernel_in_hg_initializer = kernel_in_hg_initializer
        self._bias_hg_initializer = bias_hg_initializer
        self._kernel_loc_hl_initializer = kernel_loc_hl_initializer
        self._bias_hl_initializer = bias_hl_initializer
        self._kernel_hg_out_initializer = kernel_hg_out_initializer
        self._kernel_hl_out_initializer = kernel_hl_out_initializer
        self._bias_out_initializer = bias_out_initializer

    @property
    def hg_vector_length(self):
        """Size of the glimpse hidden branch h_g."""
        return self._len_hg

    @property
    def hl_vector_length(self):
        """Size of the location hidden branch h_l."""
        return self._len_hl

    @property
    def activation_hg(self):
        return self._activation_hg

    @property
    def activation_hl(self):
        return self._activation_hl

    @property
    def kernel_in_hg_initializer(self):
        return self._kernel_in_hg_initializer

    @property
    def bias_hg_initializer(self):
        return self._bias_hg_initializer

    @property
    def kernel_loc_hl_initializer(self):
        return self._kernel_loc_hl_initializer

    @property
    def bias_hl_initializer(self):
        return self._bias_hl_initializer

    @property
    def kernel_hg_out_initializer(self):
        return self._kernel_hg_out_initializer

    @property
    def kernel_hl_out_initializer(self):
        return self._kernel_hl_out_initializer

    @property
    def bias_out_initializer(self):
        return self._bias_out_initializer
class CoreNetworkDescriptor(NetworkDescriptor):
    """Configuration for the core (recurrent) network cell; adds the
    batch size the recurrent state must be allocated for.

    FIX: `variable_collections=list()` was a mutable default shared by
    every instance constructed without the argument; replaced with a
    None sentinel that creates a fresh list per instance.
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 batch_size,
                 apply_softmax=False,
                 include_bias_in_kernel=False,
                 variable_collections=None,
                 train_with_reinforce = False):
        if variable_collections is None:
            variable_collections = []
        super(CoreNetworkDescriptor, self).__init__(output_dimensions,
                                                    output_activation,
                                                    apply_softmax,
                                                    include_bias_in_kernel,
                                                    variable_collections,
                                                    train_with_reinforce)
        self._batch_size = batch_size

    @property
    def batch_size(self):
        """Batch size the recurrent state is built for."""
        return self._batch_size
class ActionNetworkDescriptor(NetworkDescriptor):
    """Configuration for the action (classification) network; softmax
    is applied by default.

    FIX: `variable_collections=list()` was a mutable default shared by
    every instance constructed without the argument; replaced with a
    None sentinel that creates a fresh list per instance.
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 kernel_in_fa_initializer,
                 bias_fa_initializer,
                 apply_softmax=True,
                 include_bias_in_kernel=False,
                 variable_collections=None,
                 train_with_reinforce = False):
        if variable_collections is None:
            variable_collections = []
        super(ActionNetworkDescriptor, self).__init__(output_dimensions,
                                                      output_activation,
                                                      apply_softmax,
                                                      include_bias_in_kernel,
                                                      variable_collections,
                                                      train_with_reinforce)
        self._kernel_in_fa_initializer = kernel_in_fa_initializer
        self._bias_fa_initializer = bias_fa_initializer

    @property
    def kernel_in_fa_initializer(self):
        return self._kernel_in_fa_initializer

    @property
    def bias_fa_initializer(self):
        return self._bias_fa_initializer
class LocationNetworkDescriptor(NetworkDescriptor):
    """Configuration for the location network; trained with REINFORCE
    by default.

    FIX: `variable_collections=list()` was a mutable default shared by
    every instance constructed without the argument; replaced with a
    None sentinel that creates a fresh list per instance.
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 kernel_in_fl_initializer,
                 bias_fl_initializer,
                 apply_softmax=False,
                 include_bias_in_kernel=True,
                 variable_collections=None,
                 train_with_reinforce=True):
        if variable_collections is None:
            variable_collections = []
        super(LocationNetworkDescriptor, self).__init__(output_dimensions,
                                                        output_activation,
                                                        apply_softmax,
                                                        include_bias_in_kernel,
                                                        variable_collections,
                                                        train_with_reinforce)
        self._kernel_in_fl_initializer = kernel_in_fl_initializer
        self._bias_fl_initializer = bias_fl_initializer

    @property
    def kernel_in_fl_initializer(self):
        return self._kernel_in_fl_initializer

    @property
    def bias_fl_initializer(self):
        return self._bias_fl_initializer
class BaselineNetworkDescriptor(NetworkDescriptor):
    """Configuration for the baseline (reward-prediction) network.

    FIX: `variable_collections=list()` was a mutable default shared by
    every instance constructed without the argument; replaced with a
    None sentinel that creates a fresh list per instance.
    """
    def __init__(self,
                 output_dimensions,
                 output_activation,
                 kernel_in_fb_initializer,
                 bias_fb_initializer,
                 apply_softmax=False,
                 include_bias_in_kernel=False,
                 variable_collections=None,
                 train_with_reinforce=False):
        if variable_collections is None:
            variable_collections = []
        super(BaselineNetworkDescriptor, self).__init__(output_dimensions,
                                                        output_activation,
                                                        apply_softmax,
                                                        include_bias_in_kernel,
                                                        variable_collections,
                                                        train_with_reinforce)
        self._kernel_in_fb_initializer = kernel_in_fb_initializer
        self._bias_fb_initializer = bias_fb_initializer

    @property
    def kernel_in_fb_initializer(self):
        return self._kernel_in_fb_initializer

    @property
    def bias_fb_initializer(self):
        return self._bias_fb_initializer
class ClippedRandomNormalSamplerDescriptor:
    """Configuration for the clipped random-normal location sampler:
    one sample per batch element, clipped to [min_val, max_val]."""

    def __init__(self,
                 batch_size,
                 min_val=-1.0,
                 max_val=1.0):
        self._batch_size = batch_size
        self._min_val = min_val
        self._max_val = max_val

    @property
    def batch_size(self):
        """Number of samples drawn per step."""
        return self._batch_size

    @property
    def min_val(self):
        """Lower clipping bound."""
        return self._min_val

    @property
    def max_val(self):
        """Upper clipping bound."""
        return self._max_val
<file_sep>import tensorflow as tf
class ObjectiveFunction(object):
    """Base class pairing model outputs with their targets."""

    def __init__(self, inputs, targets):
        self._inputs = inputs
        self._targets = targets

    @property
    def inputs(self):
        """Model outputs the objective is computed from."""
        return self._inputs

    @property
    def targets(self):
        """Ground-truth values the outputs are compared against."""
        return self._targets
class LossFunction(ObjectiveFunction):
    """Builds a scalar loss tensor from (inputs, targets).

    The default loss, tf.losses.softmax_cross_entropy, expects the
    order (onehot_labels, logits) -- hence targets are passed first
    in the call below.
    """
    def __init__(self,
                 inputs,
                 targets,
                 loss = tf.losses.softmax_cross_entropy):
        super(LossFunction, self).__init__(inputs, targets)
        # The loss op is built eagerly at construction time.
        self._loss = loss(self._targets, self._inputs)
    @property
    def loss(self):
        # Scalar loss tensor.
        return self._loss
class RewardFunction(ObjectiveFunction):
    """REINFORCE reward signal for the glimpse sequence.

    Builds a [sequence_length x batch_size] tensor that is zero for all
    but the last time step; the last row holds 1.0 where the argmax of
    `inputs` matches the argmax of `targets` (correct classification)
    and 0.0 elsewhere.
    """
    def __init__(self,
                 inputs,
                 targets,
                 batch_size,
                 sequence_length):
        super(RewardFunction, self).__init__(inputs, targets)
        # [batch, seq-1] zeros concatenated with a [batch, 1] 0/1
        # correctness column, then transposed to [seq, batch].
        self._rewards = tf.transpose(tf.concat(\
            [tf.zeros([batch_size, sequence_length - 1]),
            tf.expand_dims(tf.cast(tf.equal(tf.argmax(inputs, axis=1),
            tf.argmax(targets, axis=1)),
            tf.float32), axis=1)], axis=1))
    @property
    def rewards(self):
        # [seq, batch] reward tensor described above.
        return self._rewards
<file_sep>import tensorflow as tf
class Trainer(object):
    """Base class for training strategies; holds the collection of
    variables the trainer is allowed to update."""

    def __init__(self, variable_collection):
        self._variable_collection = variable_collection

    @property
    def variable_collection(self):
        """The variables this trainer updates."""
        return self._variable_collection
class BackpropTrainer(Trainer):
    """Trains a variable collection by minimizing a loss with a
    gradient-based optimizer.

    FIXES over the original:
      * `optimizer = tf.train.AdamOptimizer()` was a default evaluated
        once at class-definition time, so every trainer constructed
        without an optimizer shared a single Adam instance (and its
        slot variables); the default optimizer is now created per
        instance via a None sentinel.
      * `train_op` called `optimizer.minimize(...)` on every property
        access, adding a fresh set of ops to the graph each time it was
        referenced (e.g. once per step in a sess.run loop); the op is
        now built once and cached.
    """
    def __init__(self,
                 variable_collection,
                 loss_function,
                 optimizer = None):
        super(BackpropTrainer, self).__init__(variable_collection)
        self._optimizer = optimizer if optimizer is not None else tf.train.AdamOptimizer()
        self._loss_function = loss_function
        self._train_op = None  # built lazily by the train_op property

    @property
    def optimizer(self):
        return self._optimizer

    @property
    def loss_function(self):
        return self._loss_function

    @property
    def train_op(self):
        # Build the minimize op only once; repeated access returns the
        # same graph node.
        if self._train_op is None:
            self._train_op = self._optimizer.minimize(
                self._loss_function, var_list=self._variable_collection)
        return self._train_op
class ReinforceTrainer(Trainer):
    """Updates the location network's parameters with a hand-computed
    REINFORCE policy gradient, applied via tf.assign_add rather than
    an optimizer.

    FIXES over the original:
      * the `samples` property returned the non-existent attribute
        `self._sample` (AttributeError when accessed); it now returns
        `self._samples`.
      * `train_op` rebuilt the assign_add op on every property access,
        growing the graph each time it was referenced; the op is now
        built once and cached.
    """
    def __init__(self,
                 variance,
                 learning_rate,
                 batch_size,
                 means,
                 samples,
                 inputs,
                 rewards,
                 baselines):
        pass

    def __init__(self,
                 variable_collection,
                 variance,
                 learning_rate,
                 batch_size,
                 means,
                 samples,
                 inputs,
                 rewards,
                 baselines):
        super(ReinforceTrainer, self).__init__(variable_collection)
        self._variance = variance
        self._learning_rate = learning_rate
        self._batch_size = batch_size
        self._means = means
        self._samples = samples
        self._inputs = inputs
        self._rewards = rewards
        self._baselines = baselines
        self._train_op = None  # built lazily by the train_op property

    @property
    def variance(self):
        return self._variance

    @property
    def learning_rate(self):
        return self._learning_rate

    @property
    def batch_size(self):
        return self._batch_size

    @property
    def means(self):
        return self._means

    @property
    def samples(self):
        # FIX: was `self._sample` (undefined attribute).
        return self._samples

    @property
    def inputs(self):
        return self._inputs

    @property
    def rewards(self):
        return self._rewards

    @property
    def baselines(self):
        return self._baselines

    @property
    def train_op(self):
        # Build the update op only once; repeated access returns the
        # same graph node.
        if self._train_op is None:
            self._train_op = tf.assign_add(\
                self._variable_collection[0],
                (self._learning_rate / self._variance) * self._policy_gradient())
        return self._train_op

    def _policy_gradient(self):
        """
        Calculates policy gradient
        Args:
            mu: [T x M x 2] tensor
            l: [T x M x 2] tensor
            s: [T x M x 257] tensor
            R: [T x M] tensor
            b_t: [T x 1] tensor
            batch_size: size of the batch
        """
        step1 = tf.subtract(self._rewards, self._baselines) #[T x M]
        step2 = tf.transpose(self._inputs, [2, 0, 1]) #[257 x T x M]
        step3 = tf.multiply(step1, step2) #[257 x T x M]
        step4 = tf.transpose(step3, [0, 2, 1]) #[257 x M x T]
        step5 = tf.subtract(self._samples, self._means) #[T x M x 2]
        step6 = tf.tensordot(step4, step5, axes=[[2], [0]]) #[257 x M x M x 2]
        step7 = tf.transpose(step6, [0, 3, 1, 2]) #[257 x 2 x M x M]
        step8 = tf.multiply(step7, tf.eye(self._batch_size)) #[257 x 2 x M x M]
        gradient = tf.reduce_mean(step8, axis=[2, 3]) #[257 x 2]
        return gradient
| f465c32960c69be0be57a45732142cc7a8138556 | [
"Markdown",
"Python"
] | 8 | Markdown | rrrane/mnist-classification | 574af7617aaf5c662a261a3a9eaf0ba66eb4afca | 37fefc5310d7bceda4b587b4fbdefb9c6d07f7e4 |
refs/heads/master | <repo_name>pvdk276/Game_Rockman<file_sep>/Game_Rockman/Game_Rockman/Input.cpp
/*+===================================================================
File: Input.cpp
Summary: Định nghĩa các phương thức của CInput.
===================================================================+*/
#include "Input.h"
CInput* CInput::s_instance = NULL;
int CInput::Init(HINSTANCE hInstance, HWND hWnd)
{
	// Initializes DirectInput, creates the keyboard device, configures
	// its data format / cooperative level / event buffer, and acquires
	// it. Returns 1 on success, 0 on any failure.
	HRESULT result;
	result = DirectInput8Create(
		hInstance,
		DIRECTINPUT_VERSION,
		IID_IDirectInput8,
		(void**)&m_lpInput,
		NULL);
	if (result != DI_OK)
	{
		return 0;
	}
	//
	// Create the object that works with the keyboard.
	//
	result = m_lpInput->CreateDevice(
		GUID_SysKeyboard,
		&m_lpInputDevice,
		NULL);
	if (result != DI_OK)
	{
		return 0;
	}
	//
	// Set the keyboard data format.
	//
	result = m_lpInputDevice->SetDataFormat(&c_dfDIKeyboard);
	if (result != DI_OK)
	{
		return 0;
	}
	result = m_lpInputDevice->SetCooperativeLevel(hWnd, DISCL_NONEXCLUSIVE | DISCL_FOREGROUND);
	if (result != DI_OK)
	{
		return 0;
	}
	//
	// Install the buffered-event queue for the keyboard.
	//
	DIPROPDWORD dipdw;
	dipdw.diph.dwSize = sizeof(DIPROPDWORD);
	dipdw.diph.dwHeaderSize = sizeof(DIPROPHEADER);
	dipdw.diph.dwObj = 0;
	dipdw.diph.dwHow = DIPH_DEVICE;
	dipdw.dwData = KEYBOARD_BUFFER_SIZE;
	m_lpInputDevice->SetProperty(DIPROP_BUFFERSIZE, &dipdw.diph);
	// FIX: the original never stored Acquire()'s return value, so the
	// failure check below silently tested the stale result of
	// SetCooperativeLevel() and could never detect a failed acquire.
	result = m_lpInputDevice->Acquire();
	if (result != DI_OK)
	{
		return 0;
	}
	return 1;
}
void CInput::ProcessKeyBroad()
{
if (m_lpInputDevice->GetDeviceState(sizeof(m_keyStates), m_keyStates) != DI_OK)
{
m_lpInputDevice->Acquire();
m_lpInputDevice->GetDeviceState(sizeof(m_keyStates), m_keyStates);
}
m_dwElements = KEYBOARD_BUFFER_SIZE;
m_lpInputDevice->GetDeviceData(sizeof(DIDEVICEOBJECTDATA), m_keyEvents, &m_dwElements, 0);
}
int CInput::IsKeyPress(int keyCode)
{
for (DWORD i = 0; i < m_dwElements; i++)
{
int keyCodeBuffer = m_keyEvents[i].dwOfs;
int keyState = m_keyEvents[i].dwData;
if ((keyCode == keyCodeBuffer) && ((keyState & 0x80) > 0))
{
return 1;
}
}
return 0;
}
int CInput::IsKeyRelease(int keyCode)
{
DWORD dwElements = KEYBOARD_BUFFER_SIZE;
HRESULT result = m_lpInputDevice->GetDeviceData(sizeof(DIDEVICEOBJECTDATA), m_keyEvents, &dwElements, 0);
for (DWORD i = 0; i < dwElements; i++)
{
int keyCodeBuffer = m_keyEvents[i].dwOfs;
int keyState = m_keyEvents[i].dwData;
if ((keyCode == keyCodeBuffer) && ((keyState & 0x80) == 0))
{
return 1;
}
}
return 0;
}
int CInput::IsKeyDown(int keyCode)
{
	// 1 while the key is currently held (high bit of its state byte).
	return ((m_keyStates[keyCode] & 0x80) != 0) ? 1 : 0;
}
int CInput::IsKeyUp(int keyCode)
{
	// 1 while the key is currently not held.
	return ((m_keyStates[keyCode] & 0x80) != 0) ? 0 : 1;
}
CInput* CInput::GetInstance()
{
	// Lazily-constructed singleton accessor.
	if (!s_instance)
		s_instance = new CInput();
	return s_instance;
}
CInput::~CInput()
{
	// FIX: m_lpInput / m_lpInputDevice are COM interfaces obtained from
	// DirectInput8Create() / CreateDevice(); they must be released via
	// Release(), never destroyed with `delete` (undefined behavior).
	// The device is unacquired first, then released before its parent.
	if (m_lpInputDevice)
	{
		m_lpInputDevice->Unacquire();
		m_lpInputDevice->Release();
		m_lpInputDevice = NULL;
	}
	if (m_lpInput)
	{
		m_lpInput->Release();
		m_lpInput = NULL;
	}
}
// Releases access to the keyboard device (e.g. when the window loses
// focus); the device must be re-acquired before it can be read again.
void CInput::Unacquire()
{
	m_lpInputDevice->Unacquire();
}<file_sep>/Game_Rockman/Game_Rockman/Sprite.h
/*+===================================================================
File: Sprite.h
Summary: Định nghĩa lớp CSprite .
===================================================================+*/
#pragma once
#include <d3d9.h>
#include <d3dx9.h>
#include "Texture.h"
#include "Graphic.h"
/*C+==================================================================
Class: CSprite
Summary: Draws a frame from a sprite-sheet texture and steps through
         frames over time to play an animation.
==================================================================-C*/
class CSprite
{
private:
	//LPDIRECT3DTEXTURE9 m_lpImage; /*Texture holding the image*/
	//LPD3DXSPRITE m_lpSpriteHandler; /**/
	CTexture* m_pTexture; /*Texture object holding the sprite-sheet image*/
public:
	int m_nIndex; /*Current frame index on the texture*/
	int m_nStartIndex; /*First frame index of the animation range*/
	int m_nEndIndex; /*Last frame index of the animation range*/
	float m_nTimeAnimation; /*Time per animation frame (milliseconds; see Update)*/
	float m_nTimeLocal; /*Accumulated time used to trigger frame switches*/
	/*M+==================================================================
	Method: CSprite::CSprite
	Summary: Constructor; the animation range covers the whole texture.
	Args: texture - Texture holding the sprite sheet
	timeAnimation - Time per animation frame
	==================================================================-M*/
	CSprite(CTexture* texture, float timeAnimation);
	/*M+==================================================================
	Method: CSprite::CSprite
	Summary: Constructor with an explicit frame range.
	Args: texture - Texture holding the sprite sheet
	start - First frame index
	end - Last frame index
	timeAnimation - Time per animation frame
	==================================================================-M*/
	CSprite(CTexture* texture, int start, int end, float timeAnimation);
	/*M+==================================================================
	Method: CSprite::~CSprite
	Summary: Destructor.
	==================================================================-M*/
	~CSprite();
	/*M+==================================================================
	Method: CSprite::SetIndex
	Summary: Sets the first and last frame index of an animation
	sequence.
	==================================================================-M*/
	void SetIndex(int startIndex, int endIndex);
	/*M+==================================================================
	Method: CSprite::Next
	Summary: Advances to the next frame, wrapping to the start frame.
	==================================================================-M*/
	void Next();
	/*M+==================================================================
	Method: CSprite::Reset
	Summary: Resets the current frame back to the start of the
	animation (m_nStartIndex).
	==================================================================-M*/
	void Reset();
	/*M+==================================================================
	Method: CSprite::Update
	Summary: Advances the animation timer and switches frames.
	Args: deltaTime - Elapsed time since the previous update
	==================================================================-M*/
	void Update(float deltaTime);
	/*M+==================================================================
	Method: CSprite::Draw
	Summary: Draws the current frame normally.
	Args: D3DXVECTOR2 pos - Draw position
	angle - Rotation angle
	==================================================================-M*/
	void Draw(D3DXVECTOR2 pos, float angle = 0.0f);
	void DrawFlip(D3DXVECTOR2 pos, float scaleX = 1.0f, float scaleY = 1.0f, float angle = 0.0f);
	/*M+==================================================================
	Method: CSprite::DrawFlipX
	Summary: Draws the current frame mirrored across the X axis.
	Args: D3DXVECTOR2 pos - Draw position
	==================================================================-M*/
	void DrawFlipX(D3DXVECTOR2 pos, float angle = 0.0f);
	/*M+==================================================================
	Method: CSprite::GetWidth
	Summary: Returns the width of one frame.
	==================================================================-M*/
	int GetWidth();
	/*M+==================================================================
	Method: CSprite::GetHeight
	Summary: Returns the height of one frame.
	==================================================================-M*/
	int GetHeight();
};
<file_sep>/Game_Rockman/Game_Rockman/Texture.cpp
/*+========================================================
File: Texture.cpp
Sumary: Hiện thực hóa các phương thức của lớp Texture
========================================================+*/
#include "Texture.h"
CTexture::CTexture(char* fileName, int cols, int rows, int count)
{
	// Record the sprite-sheet layout, then load the image immediately
	// so the texture is ready to draw.
	m_nCols = cols;
	m_nRows = rows;
	m_nCount = count;
	m_szFileName = fileName;
	load();
}
CTexture::~CTexture()
{
	// Release the Direct3D texture resource.
	if (m_lpTexture != NULL)
		m_lpTexture->Release();
	// NOTE(review): this assumes m_szFileName was heap-allocated with
	// scalar `new` and is owned by CTexture -- passing a string literal
	// or a new[]-allocated buffer to the constructor would make this
	// `delete` undefined behavior; confirm the allocation at call sites.
	if (m_szFileName)
		delete m_szFileName;
}
void CTexture::load()
{
	// Loads the image file into a Direct3D texture and derives the
	// per-frame size from the configured grid (m_nCols x m_nRows).
	D3DXIMAGE_INFO info;
	HRESULT result;
	// Read the image dimensions from the file on disk.
	result = D3DXGetImageInfoFromFile(m_szFileName, &info);
	if (result != D3D_OK)
	{
		char str[255];
		sprintf_s(str, "[texture.cpp] Failed to get information from image file [%s]", m_szFileName);
		OutputDebugString(str);
		return;
	}
	RECT s = { 0, 0, info.Width, info.Height };
	this->m_rectSize = s;
	// Size of a single animation frame inside the sprite sheet.
	m_nFrameWidth = info.Width / m_nCols;
	m_nFrameHeight = info.Height / m_nRows;
	LPDIRECT3DDEVICE9 dev = CGraphic::GetInstance()->GetDevice();
	if (!dev)
	{
		OutputDebugString("[texture.cpp] Can not get device to Load Image From File.");
		return;
	}
	// Create the texture from the file.
	result = D3DXCreateTextureFromFileEx(
		dev,
		m_szFileName,
		info.Width,
		info.Height,
		1,
		D3DUSAGE_DYNAMIC,
		D3DFMT_UNKNOWN,
		D3DPOOL_DEFAULT,
		D3DX_DEFAULT,
		D3DX_DEFAULT,
		D3DCOLOR_XRGB(255, 0, 255), // Color key treated as transparent
		&info,
		NULL,
		&m_lpTexture
	);
	if (result != D3D_OK)
	{
		char str[255];
		sprintf_s(str, "[texture.cpp] Failed to create texture from file [%s]", m_szFileName);
		OutputDebugString(str);
		return;
	}
}
void CTexture::Draw(D3DXVECTOR2 pos, bool isCenter)
{
	// Draws the whole texture at `pos`. When isCenter is true the
	// texture is centered on `pos`; otherwise `pos` is its top-left.
	D3DXVECTOR3 position(pos.x, pos.y, 0);
	LPD3DXSPRITE spriteHandler = CGraphic::GetInstance()->GetSpriteHandler();
	if (!spriteHandler)
	{
		OutputDebugString("[texture.cpp] Can not get sprite handler to draw.");
		return;
	}
	if (isCenter){
		D3DXVECTOR3 center = D3DXVECTOR3(
			(m_rectSize.right - m_rectSize.left) / 2.0f,
			(m_rectSize.bottom - m_rectSize.top) / 2.0f,
			0);
		// FIX: "&center" had been corrupted to the HTML-entity residue
		// "¢er", which does not compile.
		spriteHandler->Draw(m_lpTexture, &m_rectSize, &center, &position, 0xFFFFFFFF);
	}
	else
	{
		spriteHandler->Draw(m_lpTexture, &m_rectSize, NULL, &position, 0xFFFFFFFF);
	}
}
void CTexture::DrawRect(D3DXVECTOR2 pos, RECT srcRect, float angle, bool isCenter)
{
	// Draws the sub-rectangle `srcRect` of the texture. With isCenter
	// the rect is centered on `pos` and rotated by `angle` around it;
	// otherwise it is drawn unrotated with `pos` as its top-left.
	D3DXVECTOR3 position(pos.x, pos.y, 0);
	LPD3DXSPRITE spriteHandler = CGraphic::GetInstance()->GetSpriteHandler();
	if (!spriteHandler)
	{
		OutputDebugString("[texture.cpp] Can not get sprite handler to draw.");
		return;
	}
	if (isCenter){
		D3DXVECTOR3 center = D3DXVECTOR3(
			(srcRect.right - srcRect.left) / 2.0f,
			(srcRect.bottom - srcRect.top) / 2.0f,
			0);
		// Apply a temporary rotation transform, draw, then restore the
		// previous transform so other draws are unaffected.
		// FIX: "&center" had been corrupted to "¢er" (HTML-entity
		// residue); also removed the unused local `translationSprite`.
		D3DXMATRIX oldMatrix;
		spriteHandler->GetTransform(&oldMatrix);
		D3DXMATRIX matRotate;
		D3DXMatrixTransformation2D(&matRotate, NULL, NULL, NULL, NULL, angle, &pos);
		spriteHandler->SetTransform(&matRotate);
		spriteHandler->Draw(m_lpTexture, &srcRect, &center, NULL, 0xFFFFFFFF);
		D3DXMatrixRotationZ(&matRotate, 0);
		spriteHandler->SetTransform(&oldMatrix);
	}
	else
	{
		spriteHandler->Draw(m_lpTexture, &srcRect, NULL, &position, 0xFFFFFFFF);
	}
}
void CTexture::DrawRectFlip(D3DXVECTOR2 pos, RECT srcRect, float scaleX, float scaleY, float angle, bool isCenter)
{
	// Draws the sub-rectangle `srcRect` scaled by (scaleX, scaleY) --
	// negative scales mirror the image -- and rotated by `angle`
	// around `pos` when isCenter is true. Otherwise draws unscaled
	// with `pos` as top-left.
	D3DXVECTOR3 position(pos.x, pos.y, 0);
	LPD3DXSPRITE spriteHandler = CGraphic::GetInstance()->GetSpriteHandler();
	if (!spriteHandler)
	{
		OutputDebugString("[texture.cpp] Can not get sprite handler to draw.");
		return;
	}
	D3DXVECTOR2 scaling(scaleX, scaleY);
	if (isCenter){
		D3DXVECTOR3 center = D3DXVECTOR3(
			(srcRect.right - srcRect.left) / 2.0f,
			(srcRect.bottom - srcRect.top) / 2.0f,
			0);
		// Apply a temporary scale+rotation transform, draw, restore.
		D3DXMATRIX oldMatrix;
		spriteHandler->GetTransform(&oldMatrix);
		D3DXMATRIX matRotate;
		D3DXMatrixTransformation2D(&matRotate, NULL, NULL, &scaling, NULL, angle, &pos);
		spriteHandler->SetTransform(&matRotate);
		// FIX: "&center" had been corrupted to the HTML-entity residue
		// "¢er", which does not compile.
		spriteHandler->Draw(m_lpTexture, &srcRect, &center, NULL, 0xFFFFFFFF);
		D3DXMatrixRotationZ(&matRotate, 0);
		spriteHandler->SetTransform(&oldMatrix);
	}
	else
	{
		spriteHandler->Draw(m_lpTexture, &srcRect, NULL, &position, 0xFFFFFFFF);
	}
}
// Accessor for the underlying Direct3D texture object.
LPDIRECT3DTEXTURE9 CTexture::GetTexture()
{
	return m_lpTexture;
}
void CTexture::DrawRectFlipX(D3DXVECTOR2 pos, RECT srcRect, float angle, bool isCenter)
{
	// Draws `srcRect` mirrored horizontally. The original body was a
	// verbatim copy of DrawRectFlip with a hard-coded (-1, 1) scale
	// (and contained the same corrupted "¢er" token in place of
	// "&center"); delegate instead of duplicating the transform logic.
	DrawRectFlip(pos, srcRect, -1.0f, 1.0f, angle, isCenter);
}
<file_sep>/Game_Rockman/Game_Rockman/Game.cpp
#include "Game.h"
// Default constructor; all real setup happens in Init().
CGame::CGame()
{
}
CGame::~CGame()
{
	// NOTE(review): m_pTimer is obtained from CTimer::GetInstance() in
	// Init(), which looks like a shared singleton; deleting it here
	// assumes CGame is its sole owner -- confirm CTimer's ownership
	// model before relying on this.
	if (m_pTimer)
		delete m_pTimer;
}
// Initializes every game subsystem in dependency order: window,
// graphics, input, game-state manager, then the frame timer.
// Returns 1 on success, 0 if any subsystem fails to initialize.
int CGame::Init(HINSTANCE hInstance)
{
	CGameWindow* pGameWindow = CGameWindow::GetInstance();
	// Initialize the game window.
	if (!pGameWindow->Init(hInstance))
	{
		OutputDebugString("[Game.cpp] Cannot init pGameWindow.");
		return 0;
	}
	// Initialize the graphics subsystem.
	if (!CGraphic::GetInstance()->Init(pGameWindow->GetHWND()))
	{
		OutputDebugString("[Game.cpp] Cannot init CGraphic.");
		return 0;
	}
	// Initialize the input subsystem.
	if (!CInput::GetInstance()->Init(pGameWindow->GetHInstance(), pGameWindow->GetHWND()))
	{
		OutputDebugString("[Game.cpp] Cannot init CInput.");
		return 0;
	}
	// Initialize the game-state manager, starting in the menu state.
	if (!CGameStateManager::GetInstance()->Init(new CMenuState()))
	{
		OutputDebugString("[Game.cpp] Cannot init CGameStateManager.");
		return 0;
	}
	// Create the timer object and cap the frame rate.
	m_pTimer = CTimer::GetInstance();
	m_pTimer->SetMaxFps((float)GAME_FPS);
	return 1;
}
// Main loop: pumps Windows messages and, when idle, runs the current
// game state with the frame's elapsed time until WM_QUIT is received.
void CGame::Run()
{
	MSG msg;
	int done = 0;
	m_pTimer->StartCount();
	while (!done)
	{
		if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
		{
			if (msg.message == WM_QUIT) done = 1;
			TranslateMessage(&msg);
			DispatchMessage(&msg);
		}
		else
		{
			// NOTE(review): frames stop running once GetTime() reaches
			// 1.0 and StartCount() is never called again inside the
			// loop -- presumably EndCount() restarts the measurement;
			// confirm against CTimer's implementation.
			if (m_pTimer->GetTime() < 1.0f)
			{
				m_pTimer->EndCount();
				// Run the current game state for this frame.
				CGameStateManager::GetInstance()->GetCurrentState()->Run(m_pTimer->GetTime());
			}
		}
	}
}<file_sep>/Game_Rockman/Game_Rockman/Sprite.cpp
/*+===================================================================
File: Sprite.cpp
Summary: Định nghĩa các phương thức của CSprite.
===================================================================+*/
#include "Sprite.h"
CSprite::CSprite(CTexture* texture, float timeAnimation)
	: m_pTexture(texture),
	  m_nIndex(0),
	  m_nStartIndex(0),
	  m_nTimeAnimation(timeAnimation),
	  m_nTimeLocal(0.0f)
{
	// The animation range spans every frame of the texture.
	m_nEndIndex = m_pTexture->m_nCount - 1;
}
// Constructor with an explicit [start, end] frame range.
CSprite::CSprite(CTexture* texture, int start, int end, float timeAnimation)
	: m_pTexture(texture),
	  m_nIndex(start),
	  m_nStartIndex(start),
	  m_nEndIndex(end),
	  m_nTimeAnimation(timeAnimation),
	  m_nTimeLocal(0.0f)
{
}
CSprite::~CSprite()
{
	// NOTE(review): the sprite deletes the texture it was handed in
	// the constructor, i.e. it assumes exclusive ownership -- confirm
	// that no two sprites are ever constructed over the same CTexture.
	if (m_pTexture)
		delete m_pTexture;
}
void CSprite::SetIndex(int startIndex, int endIndex)
{
	// Define the [startIndex, endIndex] frame range used by Next().
	m_nEndIndex = endIndex;
	m_nStartIndex = startIndex;
}
void CSprite::Next()
{
	// Advance one frame, wrapping back to the start of the range.
	if (++m_nIndex > m_nEndIndex)
		m_nIndex = m_nStartIndex;
}
void CSprite::Reset()
{
m_nIndex = 0;
}
void CSprite::Update(float deltaTime)
{
m_nTimeLocal += deltaTime;
if (m_nTimeLocal >= m_nTimeAnimation / 1000.0f) {
m_nTimeLocal = 0;
this->Next();
}
}
void CSprite::Draw(D3DXVECTOR2 pos, float angle)
{
	// Compute the source rectangle of the current frame inside the
	// sprite sheet, then delegate the actual drawing to the texture.
	const int perRow = m_pTexture->m_nCols;
	const int frameW = m_pTexture->m_nFrameWidth;
	const int frameH = m_pTexture->m_nFrameHeight;

	RECT src;
	src.left = (m_nIndex % perRow) * frameW;
	src.top = (m_nIndex / perRow) * frameH;
	src.right = src.left + frameW;
	src.bottom = src.top + frameH;

	// Bail out with a debug message when the shared sprite handler or
	// the texture resource is unavailable (same checks as before).
	if (!CGraphic::GetInstance()->GetSpriteHandler())
	{
		OutputDebugString("[sprite.cpp] Can not get sprite handler to draw.");
		return;
	}
	if (!m_pTexture->GetTexture())
	{
		OutputDebugString("[sprite.cpp] Can not get texture to draw.");
		return;
	}

	m_pTexture->DrawRect(pos, src, angle);
}
void CSprite::DrawFlip(D3DXVECTOR2 pos, float scaleX, float scaleY, float angle)
{
	// Draws the current frame scaled by (scaleX, scaleY) -- negative
	// scales mirror it -- and rotated by `angle`.
	int spritePerRow = m_pTexture->m_nCols;
	int width = m_pTexture->m_nFrameWidth;
	int height = m_pTexture->m_nFrameHeight;
	RECT rect;
	rect.left = (m_nIndex % spritePerRow) * width;
	rect.top = (m_nIndex / spritePerRow) * height;
	rect.right = rect.left + width;
	rect.bottom = rect.top + height;
	LPD3DXSPRITE spriteHandler = CGraphic::GetInstance()->GetSpriteHandler();
	if (!spriteHandler)
	{
		OutputDebugString("[sprite.cpp] Can not get sprite handler to draw.");
		return;
	}
	LPDIRECT3DTEXTURE9 texture = m_pTexture->GetTexture();
	if (!texture)
	{
		OutputDebugString("[sprite.cpp] Can not get texture to draw.");
		return;
	}
	// FIX: the original dropped the `angle` argument and always drew
	// with the texture's default rotation; forward it like Draw() and
	// DrawFlipX() do.
	m_pTexture->DrawRectFlip(pos, rect, scaleX, scaleY, angle);
}
// Draw the current frame at pos mirrored along the X axis, rotated by
// angle (radians).
void CSprite::DrawFlipX(D3DXVECTOR2 pos, float angle)
{
	LPD3DXSPRITE spriteHandler = CGraphic::GetInstance()->GetSpriteHandler();
	if (!spriteHandler)
	{
		OutputDebugString("[sprite.cpp] Can not get sprite handler to draw.");
		return;
	}
	LPDIRECT3DTEXTURE9 texture = m_pTexture->GetTexture();
	if (!texture)
	{
		OutputDebugString("[sprite.cpp] Can not get texture to draw.");
		return;
	}
	// Source rectangle of the current frame inside the sprite sheet.
	int cols = m_pTexture->m_nCols;
	int frameW = m_pTexture->m_nFrameWidth;
	int frameH = m_pTexture->m_nFrameHeight;
	RECT src;
	src.left = (m_nIndex % cols) * frameW;
	src.top = (m_nIndex / cols) * frameH;
	src.right = src.left + frameW;
	src.bottom = src.top + frameH;
	m_pTexture->DrawRectFlipX(pos, src, angle);
}
// Width in pixels of a single animation frame.
int CSprite::GetWidth()
{
return m_pTexture->m_nFrameWidth;
}
// Height in pixels of a single animation frame.
int CSprite::GetHeight()
{
return m_pTexture->m_nFrameHeight;
}
<file_sep>/Game_Rockman/Game_Rockman/Graphic.h
#pragma once
/*+===================================================================
File: CGraphic.h
Summary: Định nghĩa lớp CGraphic .
===================================================================+*/
#include <d3dx9.h>
#include "Global.h"
/*C+==================================================================
Class: CGraphic
Summary: Lớp CGraphic dùng để tạo ra các đối tượng DirectX sử dụng trong
việc vẽ đồ họa trong game.
==================================================================-C*/
/*C+==================================================================
  Class:   CGraphic
  Summary: Wraps the DirectX objects used for all drawing in the game.
==================================================================-C*/
class CGraphic
{
private:
	LPDIRECT3D9 m_d3d;              /* Direct3D object */
	LPDIRECT3DDEVICE9 m_d3ddv;      /* Direct3D device */
	LPD3DXSPRITE m_lpSpriteHandler; /* Sprite drawing helper */
	LPD3DXFONT m_pFontHandle;       /* Font handle used for text rendering */
	static CGraphic* s_instance;    /* Singleton instance */
public:
	/* Create the Direct3D objects for the given window. */
	int Init(HWND hWnd);
	/* Begin a drawing pass; callers treat a nonzero return as success. */
	int BeginDraw();
	/* End the current drawing pass. */
	void EndDraw();
	/* Accessor for the sprite handler. */
	LPD3DXSPRITE GetSpriteHandler();
	/* Accessor for the unique instance of this class. */
	static CGraphic* GetInstance();
	/* Accessor for the Direct3D device, for methods in other classes. */
	LPDIRECT3DDEVICE9 GetDevice();
	/* Accessor for the DirectX font handle.
	   Fix: removed the extra "CGraphic::" qualification on this in-class
	   member definition - it is ill-formed C++ (only accepted by MSVC as
	   an extension) and rejected by GCC/Clang. */
	LPD3DXFONT* GetFontHandle()
	{
		return &m_pFontHandle;
	}
	/*M+==================================================================
	  Method:  CGraphic::InitPixelShader
	  Summary: Load the pixel shader used for the star/water flicker effect.
	  Params:  hlslFile - HLSL file with the colour lookup/replace data
	==================================================================-M*/
	bool InitPixelShader(char* hlslFile);
	// Shared pixel-shader object.
	static IDirect3DPixelShader9* texPS;
	~CGraphic();
};
<file_sep>/Game_Rockman/Game_Rockman/GameInfo.h
#pragma once
#include "Global.h"
extern int g_nScore;
extern int g_nLives;
extern int g_nCurentLvl;
extern int g_nCurrentBullet;
#define MAX_SCORE_LEVEL_1 20000
#define MAX_SCORE_LEVEL_2 30000
#define MAX_SCORE_LEVEL_3 40000
<file_sep>/Game_Rockman/Game_Rockman/WinMain.cpp
#include <windows.h>
#include "Game.h"
/* Application entry point: create, initialise and run the game.
   Fix: the CGame instance was leaked; it is now released before exit. */
int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow)
{
	// Initialise the game.
	CGame* pGame = new CGame();
	pGame->Init(hInstance);
	// Run the game (returns when the game loop exits).
	pGame->Run();
	// Release the game object before leaving the process.
	delete pGame;
	return 0;
}
<file_sep>/Game_Rockman/Game_Rockman/ResourcesManager.h
/*+========================================================
File: ResourcesManager.h
Sumary: Chứa các khai báo của các lớp quản lý resource
trong game
========================================================+*/
#pragma once
#include "Texture.h"
#include "Sprite.h"
//#include "DSUtil.h"
#include <map>
#include <fstream>
using namespace std;
/*C+==================================================================
Class: CResourcesManager
Summary: Lớp CResourcesManager dùng để quản lý tài nguyên chung
==================================================================-C*/
/*C+==================================================================
  Class:   CResourcesManager
  Summary: Central manager for shared game resources: textures, sprites
           and (currently disabled) sounds, each loaded from a metadata
           file and cached in a map keyed by id.
==================================================================-C*/
class CResourcesManager
{
protected:
	/* Text file listing the paths of all texture files */
	char* m_szMetaTextureFile;
	/* Map of loaded textures, keyed by texture id */
	map<int, CTexture*> m_aTexture;
	// File holding the sprite definitions
	char* m_szSpriteFileData;
	// Map of loaded sprites, keyed by sprite id
	map<int, CSprite*> m_aSprite;
	// File holding the sound definitions
	char* m_szMetaSoundFile;
	//// Map of loaded sounds (sound support currently disabled)
	//map<int, CSound*> m_aSounds;
	//// Sound manager object
	//CSoundManager* m_pSoundManager;
	// Singleton instance of the resource manager
	static CResourcesManager* s_ResManagerInstance;
	/*M+==================================================================
	  Method:  CResourcesManager::CResourcesManager
	  Summary: Protected constructor (singleton - use GetInstance()).
	==================================================================-M*/
	CResourcesManager();
public:
	/*M+==================================================================
	  Method:  CResourcesManager::~CResourcesManager
	  Summary: Destructor.
	==================================================================-M*/
	virtual ~CResourcesManager();
	/*M+==================================================================
	  Method:  CResourcesManager::GetTexture
	  Summary: Return the texture matching the given id.
	  Param(s): textureId - id of the texture to fetch
	==================================================================-M*/
	virtual CTexture* GetTexture(int textureId);
	/*M+==================================================================
	  Method:  CResourcesManager::GetSprite
	  Summary: Return the sprite matching the given id.
	  Param(s): spriteId - id of the sprite to fetch
	==================================================================-M*/
	virtual CSprite* GetSprite(int spriteId);
	/*M+==================================================================
	  Method:  CResourcesManager::GetSound
	  Summary: Return the sound matching the given id (disabled).
	  Param(s): soundId - id of the sound to fetch
	==================================================================-M*/
	//virtual CSound* GetSound(int soundId);
	/*M+==================================================================
	  Method:  CResourcesManager::LoadResources
	  Summary: Load every resource type up front in one pass.
	==================================================================-M*/
	void LoadResources();
	/*M+==================================================================
	  Method:  CResourcesManager::GetInstance
	  Summary: Return the singleton instance of the resource manager.
	==================================================================-M*/
	static CResourcesManager* GetInstance();
};
<file_sep>/Game_Rockman/Game_Rockman/BaseGameState.cpp
/*+===================================================================
File: BaseGameState.cpp
Summary: Định nghĩa các phương thức của CBaseGameState.
===================================================================+*/
#include "BaseGameState.h"
// A freshly created state has no successor state queued.
CBaseGameState::CBaseGameState() : m_pNextState(NULL)
{
}
CBaseGameState::~CBaseGameState()
{
	// delete on a null pointer is a no-op, so no guard is needed.
	delete m_pNextState;
}
/* Execute one iteration of the game loop for this state: update the
   world, then - if the graphics device is ready - process keyboard
   input, render, and present the frame. */
void CBaseGameState::Run(float deltaTime)
{
// Update the world.
Update(deltaTime);
// Draw the world.
if (CGraphic::GetInstance()->BeginDraw())
{
CInput::GetInstance()->ProcessKeyBroad();
Render();
CGraphic::GetInstance()->EndDraw();
}
}<file_sep>/Game_Rockman/Game_Rockman/GameInfo.cpp
#include "GameInfo.h"
// Definitions of the shared game-state globals declared in GameInfo.h.
int g_nScore = 0;  // current score
int g_nLives = 2;  // remaining lives
int g_nCurentLvl = ID_LEVEL1;  // current level id
int g_nCurrentBullet = ID_R_BULLET;<file_sep>/Game_Rockman/Game_Rockman/Global.h
/*+===================================================================
File: Global.h
Summary: File chứa các hằng số và các biến môi trường
===================================================================+*/
#pragma once
#include <stdio.h>
#include <stdarg.h>
#include <windows.h>
#include <cmath>
#define GAME_NAME "Game_Rockman"
#define SCREEN_WIDTH 512
#define SCREEN_HEIGHT 400
#define GAME_FPS 60
#define GRAVITY -600.0f
#define PI 3.14159265f
#pragma region Các hằng số ID kiểu đối tượng game
#define ID_GRASS_CAN_FALL 0
#define ID_GRASS 2
#define ID_WATER 1
#define ID_SBOX 3
#define ID_ROCKMAN 10
#define ID_TILE 11
#define ID_RUNNING_MAN 12
#define ID_ITEM_BLOCK 13
#define ID_ITEM_FLYING 14
#define ID_ENEMY_SNIPER 15
#define ID_ENEMY_SHELTER 16
#define ID_BRIDGE 17
#define ID_GUN_BLOCK 18
#define ID_GUN_MACHINE 19
#define ID_BOSS_ONE 20
#define ID_BOSS_ONE_ENEMY 21
#define ID_BOSS_ONE_GUN_LEFT 22
#define ID_BOSS_ONE_GUN_RIGHT 23
#define ID_BOSS_ONE_HEART 24
#define ID_STONE_FLYING 25
#define ID_STONE_FALLING_POINT 26
#define ID_STONE_FALLING 27
#define ID_FIRE 28
#define ID_ENEMY_DIVER 29
#define ID_ENEMY_MACHINE 30
#define ID_GRENADE 31
#define ID_TANK 32
#define ID_BOSS_THREE 33
#define ID_BOSS_THREE_CHILD 34
#define ID_BOSS_THREE_BULLET 35
#define ID_BOSS_TWO 36
#define ID_BOSS_TWO_BULLET 37
#define ID_BOSS_TWO_ARM_HEAD 38
#define ID_BOSS_TWO_ARM_BODY 39
#define ID_BOSS_THREE_DOOR 40
#define ID_BOSS_TWO_LEFT_HAND 41
#define ID_BOSS_TWO_RIGHT_HAND 42
#define ID_R_BULLET 200
#define ID_M_BULLET 201
#define ID_S_BULLET 202
#define ID_F_BULLET 203
#define ID_L_BULLET 204
#define ID_ENEMY_SMALL_WHITE_BULLET 205
#define ID_ENEMY_SMALL_RED_BULLET 206
#define ID_ENEMY_BIG_BULLET 207
#define ID_BOSS_ONE_BULLET 208
#define ID_ENEMY_DIVER_BULLET 209
#define ID_M_ITEM 300
#define ID_B_ITEM 301
#define ID_F_ITEM 302
#define ID_S_ITEM 303
#define ID_L_ITEM 304
#define ID_R_ITEM 305
#define ID_EAGLE_ITEM 306
#define ID_SMALL_EXPLOSION 400
#define ID_BIG_EXPLOSION 401
#define ID_BULLET_EXPLOSION 402
#define ID_ENEMY_EXPLOSION 403
#define ID_ENEMY_FALL_IN_WATER 404
#pragma endregion
#pragma region Các hằng số ID của sprite
#define ID_SPRITE_ROCKMAN_STAND 0
#define ID_SPRITE_ROCKMAN_STAND_FIRE 1
#define ID_SPRITE_ROCKMAN_PREPARE_RUN 2
#define ID_SPRITE_ROCKMAN_RUN 3
#define ID_SPRITE_ROCKMAN_RUN_FIRE 4
#define ID_SPRITE_ROCKMAN_JUMP 5
#define ID_SPRITE_ROCKMAN_JUMP_FIRE 6
#define ID_SPRITE_ROCKMAN_STAIR 7
#define ID_SPRITE_ROCKMAN_STAIR_FIRE 8
#define ID_SPRITE_ROCKMAN_STAIR_END 9
#define ID_SPRITE_ROCKMAN_START 10
#define ID_SPRITE_BILL_SWIM_HIDE 11
#define ID_SPRITE_BILL_SWIM_STAND 12
#define ID_SPRITE_BILL_SWIM_FIRE 13
#define ID_SPRITE_BILL_SWIM_LOOK_TOP 14
#define ID_SPRITE_BILL_SWIM_LOOK_ABOVE 15
// Fix: removed a stray "sz" token that made this macro expand to "sz 16",
// which would break any expression using ID_SPRITE_BILL_SWIM_GRASS_UP.
#define ID_SPRITE_BILL_SWIM_GRASS_UP 16
#define ID_SPRITE_BILL_DYING 17
#define ID_SPRITE_BILL_DIE 18
#define ID_SPRITE_R_BULLET 19
#define ID_SPRITE_S1_BULLET 20
#define ID_SPRITE_S2_BULLET 21
#define ID_SPRITE_S3_BULLET 22
#define ID_SPRITE_L_BULLET 23
#define ID_SPRITE_F_BULLET 24
#define ID_SPRITE_ENEMY_SMALL_WHITE_BULLET 25
#define ID_SPRITE_BULLET_EXPLOSION 26
#define ID_SPRITE_FLY_ITEM 27
#define ID_SPRITE_M_ITEM 28
#define ID_SPRITE_B_ITEM 29
#define ID_SPRITE_F_ITEM 30
#define ID_SPRITE_S_ITEM 31
#define ID_SPRITE_L_ITEM 32
#define ID_SPRITE_R_ITEM 33
#define ID_SPRITE_SMALL_EXPLOSION 34
#define ID_SPRITE_BIG_EXPLOSION 35
#define ID_SPRITE_ITEM_BLOCK_CLOSE 36
#define ID_SPRITE_ITEM_BLOCK_CHANGING 37
#define ID_SPRITE_ITEM_BLOCK_OPEN 38
#define ID_SPRITE_ENEMY_SNIPER_STAND 39
#define ID_SPRITE_ENEMY_SNIPER_STAND_FIRE 40
#define ID_SPRITE_ENEMY_SNIPER_TOP 41
#define ID_SPRITE_ENEMY_SNIPER_TOP_FIRE 42
#define ID_SPRITE_ENEMY_SNIPER_BOT 43
#define ID_SPRITE_ENEMY_SNIPER_BOT_FIRE 44
#define ID_SPRITE_ENEMY_EXPLOSION 45
#define ID_SPRITE_ENEMY_SHELTER_HIDE 46
#define ID_SPRITE_ENEMY_SHELTER_RISE 47
#define ID_SPRITE_ENEMY_SHELTER_SIT 48
#define ID_SPRITE_ENEMY_SHELTER_STAND 49
#define ID_SPRITE_ENEMY_SHELTER_STAND_FIRE 50
#define ID_SPRITE_ENEMY_SHELTER_BOT 51
#define ID_SPRITE_ENEMY_SHELTER_BOT_FIRE 52
#define ID_SPRITE_ENEMY_SHELTER_DIE 53
#define ID_SPRITE_GUN_BLOCK_GUN 54
#define ID_SPRITE_GUN_BLOCK_BASE_CLOSE 55
#define ID_SPRITE_GUN_BLOCK_BASE_CHANGING 56
#define ID_SPRITE_GUN_BLOCK_BASE_OPEN 57
#define ID_SPRITE_GUN_MACHINE_CHANGE_QUATER 58
#define ID_SPRITE_GUN_MACHINE_CHANGE_HAFT 59
#define ID_SPRITE_GUN_MACHINE_LEFT 60
#define ID_SPRITE_GUN_MACHINE_TOP 61
#define ID_SPRITE_GUN_MACHINE_UP 62
#define ID_SPRITE_BOSS_ONE_HEART 63
#define ID_SPRITE_BOSS_ONE_GUN_LEFT_FIRE 64
#define ID_SPRITE_BOSS_ONE_GUN_LEFT_REFIRE 65
#define ID_SPRITE_BOSS_ONE_GUN_RIGHT_FIRE 66
#define ID_SPRITE_BOSS_ONE_GUN_RIGHT_REFIRE 67
#define ID_SPRITE_BRIDGE1 68
#define ID_SPRITE_BRIDGE2 69
#define ID_SPRITE_BRIDGE3 70
#define ID_SPRITE_BRIDGE4 71
#define ID_SPRITE_BRIDGE5 72
#define ID_SPRITE_ENEMY_RUN 73
#define ID_SPRITE_ENEMY_JUMP 74
#define ID_SPRITE_ENEMY_DIE 75
#define ID_SPRITE_STONE_FLYING 76
#define ID_SPRITE_STONE_FALLING 77
#define ID_SPRITE_FIRE 78
#define ID_SPRITE_ENEMY_DIVER_DIVE 79
#define ID_SPRITE_ENEMY_DIVER_FIRE 80
#define ID_SPRITE_ENEMY_MACHINE_STAND 81
#define ID_SPRITE_ENEMY_MACHINE_FIRE 82
#define ID_SPRITE_GRENADE 83
#define ID_SPRITE_TANK_LEFT_1 84
#define ID_SPRITE_TANK_LEFT_RUN_1 85
#define ID_SPRITE_TANK_BOT_1 86
#define ID_SPRITE_TANK_BOT_RUN_1 87
#define ID_SPRITE_TANK_DOWN_1 88
#define ID_SPRITE_TANK_DOWN_RUN_1 89
#define ID_SPRITE_TANK_LEFT_2 90
#define ID_SPRITE_TANK_LEFT_RUN_2 91
#define ID_SPRITE_TANK_BOT_2 92
#define ID_SPRITE_TANK_BOT_RUN_2 93
#define ID_SPRITE_TANK_DOWN_2 94
#define ID_SPRITE_TANK_DOWN_RUN_2 95
#define ID_SPRITE_TANK_LEFT_3 96
#define ID_SPRITE_TANK_LEFT_RUN_3 97
#define ID_SPRITE_TANK_BOT_3 98
#define ID_SPRITE_TANK_BOT_RUN_3 99
#define ID_SPRITE_TANK_DOWN_3 100
#define ID_SPRITE_TANK_DOWN_RUN_3 101
#define ID_SPRITE_TANK_LEFT_4 102
#define ID_SPRITE_TANK_LEFT_RUN_4 103
#define ID_SPRITE_TANK_BOT_4 104
#define ID_SPRITE_TANK_BOT_RUN_4 105
#define ID_SPRITE_TANK_DOWN_4 106
#define ID_SPRITE_TANK_DOWN_RUN_4 107
#define ID_SPRITE_ENEMY_SMALL_RED_BULLET 108
#define ID_SPRITE_ENEMY_FALL_IN_WATER 109
// Fix: ID_SPRITE_BOSS_THREE_APPEARING was defined twice (benign because
// the values were identical, but redundant); one definition removed.
#define ID_SPRITE_BOSS_THREE_APPEARING 110
#define ID_SPRITE_BOSS_THREE_APPEARED 111
#define ID_SPRITE_BOSS_THREE_OPENING 112
#define ID_SPRITE_BOSS_THREE_OPENED 113
#define ID_SPRITE_BOSS_THREE_CLOSING 114
#define ID_SPRITE_BOSS_THREE_CHILD 115
#define ID_SPRITE_BOSS_TWO_BULLET 116
#define ID_SPRITE_BOSS_TWO_ARM_HEAD 117
#define ID_SPRITE_BOSS_TWO_ARM_BODY 118
#define ID_SPRITE_BOSS_THREE_DOOR 119
#define ID_SPRITE_BOSS_ONE_DIE 120
#define ID_SPRITE_EAGLE_ITEM 121
#define ID_SPRITE_BOSS_THREE_DIE 122
#define ID_SPRITE_BOSS_TWO_MOUTH_CLOSING 123
#define ID_SPRITE_BOSS_TWO_MOUTH_OPENING 124
#define ID_SPRITE_BOSS_TWO_MOUTH_OPENED 125
#define ID_SPRITE_BOSS_TWO_HIDE 126
#define ID_SPRITE_BOSS_TWO_APPEAR 127
#define ID_SPRITE_BOSS_TWO_DIE 128
// Debug box sprite.
#define ID_TEXTURE_BILL_BOX 900
#define ID_TEXTURE_WATER_BOX 901
#define ID_TEXTURE_GROUND_BOX 902
#pragma endregion
#pragma region Các hằng số ID của texture
#define ID_TEXTURE_BILL 0
////Định nghĩa ID của hình chứa tile các map
#define ID_TEXTURE_MAP1_TILES 101
#define ID_TEXTURE_MAP2_TILES 102
#define ID_TEXTURE_MAP3_TILES 103
#define ID_TEXTURE_ICON_ROCK_MAN 6969
#define ID_TEXTURE_ICON_ROCK_MAN_GRAY 6970
#define ID_TEXTURE_BACKGROUND_2 6971
#define ID_TEXTURE_BACKGROUND_3 6972
#define ID_TEXTURE_BACKGROUND_4 6973
#define ID_TEXTURE_GAME_TITLE 6974
#pragma endregion
#pragma region Các hằng số ID của sound
#define ID_SOUND_OP_THEME 0
#define ID_SOUND_LEVEL1_THEME 1
#define ID_SOUND_LEVEL2_THEME 2
#define ID_SOUND_LEVEL3_THEME 3
#define ID_SOUND_END_THEME 4
#define ID_SOUND_CONTRA_FIRE 5
#define ID_SOUND_CONTRA_DEAD 6
#define ID_SOUND_FALL 7
#define ID_SOUND_GUN_UP 8
#define ID_SOUND_BOSS_DEAD 9
#define ID_SOUND_GAME_OVER_THEME 10
#pragma endregion
#pragma region Các hằng số ID của Level
#define ID_LEVEL1 1
#define ID_LEVEL2 2
#define ID_LEVEL3 3
#define ID_END_GAME 4
#pragma endregion
#pragma region Các hằng chuỗi đường dẫn của file map
#define MAP1_PATH "Resources\\Data\\Map1.txt"
#define MAP2_PATH "Resources\\Data\\Map2.txt"
#define MAP3_PATH "Resources\\Data\\Map3.txt"
#pragma endregion
#pragma region Các hằng chuỗi đường dẫn của file resource
#define TEXTURE_PATH "Resources\\Data\\PlayTextureData.txt"
#define SPRITE_PATH "Resources\\Data\\PlaySpriteData.txt"
#define SOUND_PATH "Resources\\Data\\PlaySoundData.txt"
#pragma endregion
#pragma region Các hằng chuỗi đường dẫn của file HLSL
#define HLSL1_PATH "Resources\\Data\\Map1HLSL.fx"
#define HLSL2_PATH "Resources\\Data\\Map2HLSL.fx"
#define HLSL3_PATH "Resources\\Data\\Map3HLSL.fx"
#define HLSL_EAGLE_PATH "Resources\\Data\\ItemEagleHLSL.fx"
#pragma endregion<file_sep>/Game_Rockman/Game_Rockman/Timer.h
#pragma once
#include <Windows.h>
#include "Global.h"
/*C+==================================================================
Class: CTimer
Summary: Lớp CTimer gồm các thuộc tính và phương thức tính toán
và xử lý thời gian trong vòng lặp game cũng như trong
các hàm Update của các đối tượng game
==================================================================-C*/
/*C+==================================================================
  Class:   CTimer
  Summary: High-resolution timer (singleton) used to pace the game loop
           and supply delta times to the Update() calls of game objects.
==================================================================-C*/
class CTimer
{
public:
	// Tick count at the start of the measured interval
	__int64 m_nTimeStart;
	// Tick count at the end of the measured interval
	__int64 m_nTimeEnd;
	static CTimer* GetInstance();
	/*M+==================================================================
	  Method:  CTimer::~CTimer
	  Summary: Destructor.
	==================================================================-M*/
	~CTimer();
	/*M+==================================================================
	  Method:  CTimer::GetSecondPerTick
	  Summary: Compute the length in seconds of one timer tick.
	==================================================================-M*/
	void GetSecondPerTick();
	/*M+==================================================================
	  Method:  CTimer::StartCount
	  Summary: Start measuring an interval.
	==================================================================-M*/
	void StartCount();
	/*M+==================================================================
	  Method:  CTimer::EndCount
	  Summary: Stop measuring the interval.
	==================================================================-M*/
	void EndCount();
	/*M+==================================================================
	  Method:  CTimer::SetMaxFps
	  Summary: Set the frame pacing target.
	  Param:   maxFps - the original comment calls this "frame time";
	           confirm whether it is an FPS cap or a frame duration.
	==================================================================-M*/
	void SetMaxFps(float maxFps);
	/*M+==================================================================
	  Method:  CTimer::GetTime
	  Summary: Return the elapsed time between two consecutive frames.
	==================================================================-M*/
	float GetTime();
private:
	/*M+==================================================================
	  Method:  CTimer::CTimer
	  Summary: Private constructor (singleton - use GetInstance()).
	==================================================================-M*/
	CTimer();
	// Ticks per second of the underlying counter
	__int64 m_nCountPerSec;
	// Seconds per tick (tick-to-seconds conversion factor)
	float m_fTimeScale;
	// Maximum frames per second
	float m_fLockFps;
	// Delta time in seconds between two consecutive frames
	float m_fDeltaTime;
	// Frame counter - original comment says "delta time within one second";
	// confirm intended meaning in Timer.cpp
	int m_nFrameRate;
	static CTimer* s_TimerInstance;
};
<file_sep>/Game_Rockman/Game_Rockman/ViewPort.cpp
#include "Viewport.h"
// Singleton instance, created lazily by GetInstance().
CViewport* CViewport::s_instance = NULL;
// Build the initial view transform: identity with the Y axis negated
// (the game's world Y points up, DirectX screen Y points down), then
// set the initial viewport position.
CViewport::CViewport()
{
	D3DXMatrixIdentity(&m_MatrixTransform);
	m_MatrixTransform._22 = -1;
	SetPosition(D3DXVECTOR2(0, (float)SCREEN_HEIGHT));
}
// Nothing to release: the matrix, position and limits are plain members.
CViewport::~CViewport() {
}
/* Transform a world-space position into screen space via the view matrix.
   Fixes: removed the unused local copy of m_ViewportPos, and replaced the
   address-of-temporary argument (a non-standard MSVC extension) with a
   named lvalue. */
D3DXVECTOR2 CViewport::TransformMatrix(D3DXVECTOR2 pos) {
	D3DXVECTOR4 result;
	D3DXVECTOR3 world(pos.x, pos.y, 0);
	D3DXVec3Transform(&result, &world, &m_MatrixTransform);
	// Truncate to whole pixels, as the original did via static_cast<int>.
	return D3DXVECTOR2(static_cast<int>(result.x), static_cast<int>(result.y));
}
/* Position the viewport in world space.
   When isFollowBill is true, pos is the player's position and the
   viewport is centred on it; otherwise pos is used directly. The result
   is clamped to the current limits, and the left/bottom limits are then
   advanced so the view can never scroll backwards. The clamp and limit
   updates are order-dependent - do not reorder. */
void CViewport::SetPosition(D3DXVECTOR2 pos, bool isFollowBill) {
if (isFollowBill)
{
// Centre the viewport on the player (bill).
m_ViewportPos.x = pos.x - SCREEN_WIDTH / 2.0f;
m_ViewportPos.y = pos.y + SCREEN_HEIGHT / 2.0f;
}
else
{
m_ViewportPos.x = pos.x;
m_ViewportPos.y = pos.y;
}
// Clamp the viewport if it left the allowed area.
m_ViewportPos.x = m_ViewportPos.x >= m_limitX1 ? m_ViewportPos.x : m_limitX1;
m_ViewportPos.x = m_ViewportPos.x >= m_limitX2 ? m_limitX2 : m_ViewportPos.x;
m_ViewportPos.y = m_ViewportPos.y >= m_limitY1 ? m_ViewportPos.y : m_limitY1;
m_ViewportPos.y = m_ViewportPos.y >= m_limitY2 ? m_limitY2 : m_ViewportPos.y;
// Advance the left and bottom limits so bill cannot go backwards.
m_limitX1 = m_ViewportPos.x;
m_limitY1 = m_ViewportPos.y;
// Rebuild the translation part of the view matrix.
m_MatrixTransform._41 = -m_ViewportPos.x;
m_MatrixTransform._42 = m_ViewportPos.y;
}
// Accessor for the current viewport position in world space.
D3DXVECTOR2 CViewport::GetPosition()
{
return m_ViewportPos;
}
// Derive the scrolling limits of the viewport from the map dimensions.
void CViewport::SetLimitPos(int mapWidth, int mapHeight)
{
	// Level 2 keeps a fixed 64-pixel margin on the left edge.
	m_limitX1 = (g_nCurentLvl == ID_LEVEL2) ? 64 : 0;
	m_limitX2 = mapWidth - SCREEN_WIDTH;
	m_limitY1 = SCREEN_HEIGHT;
	// The top limit is the map height, but never less than one screen.
	m_limitY2 = (mapHeight >= SCREEN_HEIGHT) ? mapHeight : SCREEN_HEIGHT;
}
// Lazily create and return the single CViewport instance.
CViewport* CViewport::GetInstance()
{
	if (!s_instance)
		s_instance = new CViewport();
	return s_instance;
}
//void CViewport::SetToPos(D3DXVECTOR2 pos, float timeFrame)
//{
// D3DXVECTOR2 posA(this->GetPosition().x, this->GetPosition().y);
// switch (g_nCurentLvl)
// {
// case ID_LEVEL1:
// case ID_LEVEL3:
// if (posA.x < pos.x)
// posA.x += VIEWPORT_VX * timeFrame;
// else
// posA.x = pos.x;
// this->SetPosition(posA);
// break;
// case ID_LEVEL2:
// posA.y += VIEWPORT_VY * timeFrame;
// if (posA.y > pos.y)
// posA.y = pos.y;
// this->SetPosition(posA);
// break;
// }
//}<file_sep>/Game_Rockman/Game_Rockman/GameWindow.h
#pragma once
#include <Windows.h>
#include "Global.h"
/* Singleton wrapper around the Win32 application window. */
class CGameWindow
{
private:
	HINSTANCE m_hInstance;          /* Application instance handle */
	HWND m_hWnd;                    /* Window handle */
	static CGameWindow* s_instance; /* Singleton instance of the class */
public:
	CGameWindow();
	~CGameWindow();
	/*M+==================================================================
	  Method:  CGameWindow::Init
	  Summary: Register the window class and create/show the game window.
	==================================================================-M*/
	int Init(HINSTANCE hInstance);
	/*M+==================================================================
	  Method:  CGameWindow::WinProc
	  Summary: Window message handler.
	==================================================================-M*/
	static LRESULT CALLBACK WinProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam);
	/*M+==================================================================
	  Method:  CGameWindow::GetHInstance
	  Summary: Accessor for the application instance handle.
	==================================================================-M*/
	HINSTANCE GetHInstance();
	/*M+==================================================================
	  Method:  CGameWindow::GetHWND
	  Summary: Accessor for the window handle.
	==================================================================-M*/
	HWND GetHWND();
	/*M+==================================================================
	  Method:  CGameWindow::GetInstance
	  Summary: Accessor for the unique instance of this class.
	==================================================================-M*/
	static CGameWindow* GetInstance();
};
<file_sep>/Game_Rockman/Game_Rockman/Texture.h
/*+========================================================
File: Texture.h
Sumary: Định nghĩa lớp Texture, quản lý các texture, thực hiện vẽ
========================================================+*/
#pragma once
#include <windows.h>
#include <d3d9.h>
#include <d3dx9.h>
#include <stdio.h>
#include "Graphic.h"
/*C+==================================================================
Class: CTexture
Summary: Lớp CTexure dùng để tạo và sử dụng các Texture
==================================================================-C*/
/*C+==================================================================
  Class:   CTexture
  Summary: Loads an image file into a Direct3D texture treated as a
           sprite sheet of cols x rows frames, and draws frames of it.
==================================================================-C*/
class CTexture
{
public:
	/*M+==================================================================
	  Method:  CTexture::CTexture
	  Summary: Constructor.
	  Params:  fileName - path of the image file to load
	           cols     - number of columns in the sheet
	           rows     - number of rows in the sheet
	           count    - number of frames in the sheet
	==================================================================-M*/
	CTexture(char* fileName, int cols = 1, int rows = 1, int count = 1);
	/*M+==================================================================
	  Method:  CTexture::~CTexture
	  Summary: Destructor.
	==================================================================-M*/
	~CTexture();
	/*M+==================================================================
	  Method:  CTexture::Draw
	  Summary: Draw the whole texture at pos.
	  Params:  pos      - draw position
	           isCenter - whether pos is the centre of the image
	==================================================================-M*/
	void Draw(D3DXVECTOR2 pos, bool isCenter = true);
	/*M+==================================================================
	  Method:  CTexture::DrawRect
	  Summary: Draw the srcRect part of the texture at pos.
	  Params:  pos      - draw position
	           srcRect  - source region to draw
	           angle    - rotation angle (radians)
	           isCenter - whether pos is the centre
	==================================================================-M*/
	void DrawRect(D3DXVECTOR2 pos, RECT srcRect, float angle = 0.0f, bool isCenter = true);
	/* Draw srcRect scaled by (scaleX, scaleY) - presumably negative
	   scales produce the flip; confirm in Texture.cpp. */
	void DrawRectFlip(D3DXVECTOR2 pos, RECT srcRect, float scaleX = 1.0f, float scaleY = 1.0f, float angle = 0.0f, bool isCenter = true);
	/*M+==================================================================
	  Method:  CTexture::DrawRectFlipX
	  Summary: Draw the srcRect part of the texture at pos, mirrored
	           along the X axis.
	  Params:  pos      - draw position
	           srcRect  - source region to draw
	           angle    - rotation angle (radians)
	           isCenter - whether pos is the centre
	==================================================================-M*/
	void DrawRectFlipX(D3DXVECTOR2 pos, RECT srcRect, float angle = 0.0f, bool isCenter = true);
	/*M+==================================================================
	  Method:  CTexture::GetTexture
	  Summary: Accessor for the underlying Direct3D texture.
	==================================================================-M*/
	LPDIRECT3DTEXTURE9 GetTexture();
	// Number of columns in the sheet
	int m_nCols;
	// Number of rows in the sheet
	int m_nRows;
	// Number of frames in the sheet
	int m_nCount;
	// Width of one frame (used as the per-frame width by CSprite)
	int m_nFrameWidth;
	// Height of one frame (used as the per-frame height by CSprite)
	int m_nFrameHeight;
private:
	// Path of the image file
	char* m_szFileName;
	// Size of the image
	RECT m_rectSize;
	// Direct3D texture holding the loaded image
	LPDIRECT3DTEXTURE9 m_lpTexture;
	/*M+==================================================================
	  Method:  CTexture::load
	  Summary: Load the image file into the texture.
	  Notes:   Private helper.
	==================================================================-M*/
	void load();
};<file_sep>/Game_Rockman/Game_Rockman/GameObject.cpp
#include "GameObject.h"
// Default constructor: the class currently declares no members.
CGameObject::CGameObject()
{
}
// NOTE(review): all parameters are ignored and the body is empty - this
// looks like a work-in-progress stub; confirm before relying on it.
CGameObject::CGameObject(D3DXVECTOR2 pos, int id, int typeID, int val1, int val2)
{
}
<file_sep>/Game_Rockman/Game_Rockman/GameWindow.cpp
#include "GameWindow.h"
// Default constructor/destructor: the window members are set up in Init(),
// so there is nothing to do here.
CGameWindow::CGameWindow()
{
}
CGameWindow::~CGameWindow()
{
}
// Singleton instance, created lazily by GetInstance().
CGameWindow* CGameWindow::s_instance = NULL;
/* Register the window class and create/show the game window.
   Returns 1 on success, 0 if CreateWindow failed. */
int CGameWindow::Init(HINSTANCE hInstance)
{
m_hInstance = hInstance;
// Fill in the window-class attributes.
WNDCLASSEX wc;
wc.cbSize = sizeof(WNDCLASSEX);
wc.style = CS_HREDRAW | CS_VREDRAW;
wc.hInstance = m_hInstance;
wc.lpfnWndProc = (WNDPROC)CGameWindow::WinProc;
wc.cbClsExtra = 0;
wc.cbWndExtra = 0;
wc.hIcon = NULL;
wc.hCursor = LoadCursor(NULL, IDC_ARROW);
wc.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH);
wc.lpszMenuName = NULL;
wc.lpszClassName = GAME_NAME; // Must be a persistent string constant. On an LPCWSTR error, set the project character set to multi-byte.
wc.hIconSm = NULL;
// Register the window class.
RegisterClassEx(&wc);
// Grow the rectangle so the CLIENT area is SCREEN_WIDTH x SCREEN_HEIGHT.
RECT clientRect = { 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT };
AdjustWindowRect(&clientRect, WS_OVERLAPPEDWINDOW, FALSE);
// Create the window.
m_hWnd = CreateWindow(
GAME_NAME,
GAME_NAME,
WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT,
CW_USEDEFAULT,
clientRect.right - clientRect.left,
clientRect.bottom - clientRect.top,
NULL,
NULL,
m_hInstance,
NULL);
if (!m_hWnd)
{
//int error = GetLastError();
//trace(L"[ERORR] Failed to create window!!! Error code: %d.", error);
//MessageBox(NULL, "Cannot create game window", "Error", MB_OK);
return 0;
}
ShowWindow(m_hWnd, SW_SHOWNORMAL);
UpdateWindow(m_hWnd);
return 1;
}
// Window message handler: only WM_DESTROY is handled (it quits the
// message loop); everything else goes to the default window procedure.
LRESULT CALLBACK CGameWindow::WinProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
	if (message == WM_DESTROY)
	{
		PostQuitMessage(0);
		return 0;
	}
	return DefWindowProc(hWnd, message, wParam, lParam);
}
// Accessor for the application instance handle.
HINSTANCE CGameWindow::GetHInstance()
{
	return m_hInstance;
}
// Accessor for the window handle created in Init().
HWND CGameWindow::GetHWND()
{
return m_hWnd;
}
// Lazily create and return the single CGameWindow instance.
CGameWindow* CGameWindow::GetInstance()
{
	if (!s_instance)
	{
		s_instance = new CGameWindow();
	}
	return s_instance;
}
<file_sep>/Game_Rockman/Game_Rockman/GameObject.h
#pragma once
/*+========================================================
File: GameObject.h
Sumary: Định nghĩa lớp GameObject tổng quát
========================================================+*/
#include "ResourcesManager.h"
/* Generic base class for all game objects (currently a skeleton with
   no members). */
class CGameObject
{
private:
public:
	// Default constructor.
	CGameObject();
	/*M+==================================================================
	  Method:  CGameObject::CGameObject
	  Summary: Parameterised constructor.
	  Params:  pos    - object position
	           id     - object id
	           typeID - object type id
	           val1, val2 - optional extra values (semantics not visible
	                        here - confirm at the call sites)
	==================================================================-M*/
	CGameObject(D3DXVECTOR2 pos, int id, int typeID, int val1 = 0, int val2 = 0);
};<file_sep>/Game_Rockman/Game_Rockman/ViewPort.h
/*+========================================================
File: Viewport.h
Sumary: Định nghĩa lớp Viewport
========================================================+*/
#pragma once
#include <d3dx9.h>
#include "Global.h"
#include "GameInfo.h"
/*C+==================================================================
Class: CViewport
Summary: Lớp CViewport định nghĩa Camera để vẽ theo tọa độ của
thế giới thật
==================================================================-C*/
#define VIEWPORT_VX 100
#define VIEWPORT_VY 100
class CViewport
{
public:
/*M+==================================================================
Method: CViewport::CViewport
Summary: Constructor.
==================================================================-M*/
CViewport();
/*M+==================================================================
Method: CViewport::~CViewport
Summary: Destructor.
==================================================================-M*/
~CViewport();
/*M+==================================================================
Method: CViewport::TransformMatrix
Summary: Converts a DirectX coordinate into a world coordinate.
==================================================================-M*/
D3DXVECTOR2 TransformMatrix(D3DXVECTOR2 pos);
/*M+==================================================================
Method: CViewport::SetPosition
Summary: Moves the viewport; when isFollowBill is true the viewport
         follows the player.
==================================================================-M*/
void SetPosition(D3DXVECTOR2 pos, bool isFollowBill = true);
/*M+==================================================================
Method: CViewport::GetPosition
Summary: Returns the current viewport position.
==================================================================-M*/
D3DXVECTOR2 GetPosition();
/*M+==================================================================
Method: CViewport::SetLimitPos
Summary: Sets the viewport's left/right/top/bottom bounds from the
         map dimensions.
==================================================================-M*/
void SetLimitPos(int mapWidth, int mapHeight);
// Returns the unique CViewport instance (singleton).
static CViewport* GetInstance();
private:
// Transform matrix applied when drawing.
D3DXMATRIX m_MatrixTransform;
// Position of the viewport.
D3DXVECTOR2 m_ViewportPos;
float m_limitX1; // Left bound.
float m_limitX2; // Right bound.
float m_limitY1; // Bottom bound.
float m_limitY2; // Top bound.
static CViewport* s_instance;
};<file_sep>/Game_Rockman/Game_Rockman/Graphic.cpp
/*+===================================================================
File: Graphic.cpp
Summary: Định nghĩa các phương thức của CGraphic.
===================================================================+*/
#include "Graphic.h"
CGraphic* CGraphic::s_instance = NULL;
IDirect3DPixelShader9* CGraphic::texPS = NULL;
/*M+==================================================================
  Method:  CGraphic::Init
  Summary: Creates the Direct3D object, the rendering device, the
           sprite handler and the UI font.
  Params:  hWnd - handle of the window to render into.
  Returns: 1 on success, 0 on failure.
==================================================================-M*/
int CGraphic::Init(HWND hWnd)
{
	// Create the Direct3D object.
	m_d3d = Direct3DCreate9(D3D_SDK_VERSION);
	// Fix: guard against a NULL Direct3D object (e.g. missing runtime);
	// the original code would dereference it in CreateDevice below.
	if (!m_d3d)
	{
		return 0;
	}
	D3DPRESENT_PARAMETERS d3dpp;
	ZeroMemory(&d3dpp, sizeof(d3dpp));
	// Describe the swap chain / back buffer.
	d3dpp.Windowed = TRUE;
	d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
	d3dpp.BackBufferFormat = D3DFMT_UNKNOWN;
	d3dpp.BackBufferCount = 1;
	d3dpp.BackBufferWidth = SCREEN_WIDTH;
	d3dpp.BackBufferHeight = SCREEN_HEIGHT;
	// Create the Direct3D device.
	m_d3d->CreateDevice(
		D3DADAPTER_DEFAULT,
		D3DDEVTYPE_HAL,
		hWnd,
		D3DCREATE_SOFTWARE_VERTEXPROCESSING,
		&d3dpp,
		&m_d3ddv);
	if (!m_d3ddv)
	{
		/*MessageBox(NULL, L"Failed to create directX device", L"Error", MB_OK);*/
		return 0;
	}
	D3DXCreateSprite(m_d3ddv, &m_lpSpriteHandler);
	//
	// Create the font handle used for on-screen text.
	//
	D3DXCreateFont(m_d3ddv, 18, 0,
		FW_BOLD, 1, FALSE, DEFAULT_CHARSET,
		OUT_DEFAULT_PRECIS, CLEARTYPE_QUALITY,
		DEFAULT_PITCH | FF_DONTCARE,
		"Press Start", &m_pFontHandle);
	return 1;
}
int CGraphic::BeginDraw()
{
if (m_d3ddv->BeginScene())
return 0;
m_d3ddv->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, 0);
m_lpSpriteHandler->Begin(D3DXSPRITE_ALPHABLEND);
return 1;
}
/*M+==================================================================
  Method:  CGraphic::EndDraw
  Summary: Finishes a render pass: closes the sprite batch, ends the
           scene and presents the back buffer. The call order mirrors
           BeginDraw() and must not be changed.
==================================================================-M*/
void CGraphic::EndDraw()
{
m_lpSpriteHandler->End();
m_d3ddv->EndScene();
m_d3ddv->Present(NULL, NULL, NULL, NULL);
}
// Accessor: sprite batch interface used for 2D drawing.
LPD3DXSPRITE CGraphic::GetSpriteHandler()
{
	return this->m_lpSpriteHandler;
}
// Lazily creates and returns the unique CGraphic instance (singleton).
CGraphic* CGraphic::GetInstance()
{
	if (!s_instance)
		s_instance = new CGraphic();
	return s_instance;
}
// Accessor: raw Direct3D 9 device pointer.
LPDIRECT3DDEVICE9 CGraphic::GetDevice()
{
	return this->m_d3ddv;
}
/*M+==================================================================
  Method:  CGraphic::InitPixelShader
  Summary: Compiles the pixel shader entry point "PSMain" (profile
           ps_2_0) from the given HLSL file and creates the shader
           object in texPS.
  Params:  hlslFile - path to the HLSL source file.
  Returns: true on success, false on any compile/create failure.
==================================================================-M*/
bool CGraphic::InitPixelShader(char* hlslFile)
{
	HRESULT hr = 0;
	ID3DXBuffer* shader = 0;
	ID3DXBuffer* errorBuffer = 0;
	ID3DXConstantTable* TextCT = 0;
	// Compile shader from a file.
	hr = D3DXCompileShaderFromFile(hlslFile,
		0,
		0,
		"PSMain", // entry point function name.
		"ps_2_0", // HLSL shader profile.
		D3DXSHADER_DEBUG,
		&shader, // containing the created shader
		&errorBuffer, // containing a listing of errors and warnings
		&TextCT); // used to access shader constants
	if (errorBuffer)
	{
		::MessageBox(0, (char*)errorBuffer->GetBufferPointer(), 0, 0);
		errorBuffer->Release();
		// Fix: release buffers that may have been produced before bailing
		// out (previously leaked on this path).
		if (shader)
			shader->Release();
		if (TextCT)
			TextCT->Release();
		return false;
	}
	if (FAILED(hr))
	{
		::MessageBox(0, "D3DXCompileShaderFromFile() - FAILED", 0, 0);
		if (shader)
			shader->Release();
		if (TextCT)
			TextCT->Release();
		return false;
	}
	// Create the pixel shader from the compiled bytecode.
	hr = m_d3ddv->CreatePixelShader((DWORD*)shader->GetBufferPointer(), &texPS);
	// Fix: the compiled-shader buffer was leaked when CreatePixelShader
	// failed, and the constant table was never released at all.
	shader->Release();
	if (TextCT)
		TextCT->Release();
	if (FAILED(hr))
	{
		// Fix: message previously said "CreateVertexShader" for a pixel shader.
		::MessageBox(0, "CreatePixelShader - FAILED", 0, 0);
		return false;
	}
	return true;
}
/*M+==================================================================
  Method:  CGraphic::~CGraphic
  Summary: Releases all Direct3D COM interfaces.
           Fix: COM objects must be freed with Release(), never with
           C++ delete - the original `delete` calls were undefined
           behaviour on reference-counted interfaces.
==================================================================-M*/
CGraphic::~CGraphic()
{
	if (m_pFontHandle)
		m_pFontHandle->Release();
	if (m_lpSpriteHandler)
		m_lpSpriteHandler->Release();
	if (m_d3ddv)
		m_d3ddv->Release();
	if (m_d3d)
		m_d3d->Release();
}<file_sep>/Game_Rockman/Game_Rockman/GameStateManager.h
/*+===================================================================
File: GameStateManager.h
Summary: Định nghĩa lớp CGameStateManager .
===================================================================+*/
#pragma once
#include "BaseGameState.h"
#include "ResourcesManager.h"
#include "Timer.h"
#include <vector>
#include "MenuState.h"
/*C+==================================================================
Class: CGameStateManager
Summary: Lớp CGameStateManager là lớp singletone chứa phương thức để
chuyển đổi giữa các GameState, lưu trạng thái, get GameState hiện tại.
==================================================================-C*/
class CGameStateManager
{
private:
CBaseGameState* m_pCurrentState; /* Current game state */
static CGameStateManager* s_instance; /* Pointer to the single instance of this class */
// Resource manager for game assets.
CResourcesManager* m_pResourceManager;
// Container holding the game states.
vector<CBaseGameState*> m_aGameState;
public:
CGameStateManager();
~CGameStateManager();
/*M+==================================================================
Method: CGameStateManager::Init
Summary: Initialises the manager with the starting state.
==================================================================-M*/
int Init(CBaseGameState* state);
/*M+==================================================================
Method: CGameStateManager::GetCurrentState
Summary: Returns the current game state.
==================================================================-M*/
CBaseGameState* GetCurrentState();
/*M+==================================================================
Method: CGameStateManager::ChangeState
Summary: Queues the given state as the next game state.
==================================================================-M*/
void ChangeState(CBaseGameState* state);
/*M+==================================================================
Method: CGameStateManager::GetInstance
Summary: Returns the unique instance of this class.
==================================================================-M*/
static CGameStateManager* GetInstance();
// State queued to run next.
CBaseGameState* m_pNextState;
};
<file_sep>/Game_Rockman/Game_Rockman/ResourcesManager.cpp
/*+===================================================================
File: ResourcesManager.cpp
Summary: Định nghĩa các phương thức của lớp CResoucesManager
===================================================================+*/
#include "ResourcesManager.h"
#include "GameWindow.h"
#pragma region Các phương thức của lớp ResourcesManager
CResourcesManager* CResourcesManager::s_ResManagerInstance = NULL;
/*M+==================================================================
  Method:  CResourcesManager::CResourcesManager
  Summary: Stores the metadata file paths for textures, sprites and
           sounds. Sound-system initialisation is currently disabled.
==================================================================-M*/
CResourcesManager::CResourcesManager()
{
// NOTE(review): these *_PATH macros look like path constants; the
// destructor later deletes these pointers - confirm the macros
// allocate with new, otherwise those deletes are undefined behaviour.
m_szMetaTextureFile = TEXTURE_PATH;
m_szSpriteFileData = SPRITE_PATH;
m_szMetaSoundFile = SOUND_PATH;
//m_pSoundManager = new CSoundManager();
HRESULT result;
//result = m_pSoundManager->Initialize(CGameWindow::GetInstance()->GetHWND(), DSSCL_PRIORITY);
/*if (result != DS_OK)
{
OutputDebugString("[ERROR] Can not init sound(ResourcesManager.cpp - Line 78).\n");
}
result = m_pSoundManager->SetPrimaryBufferFormat(2, 22050, 16);
if (result != DS_OK)
{
OutputDebugString("[ERROR] Can not set sound buffer(ResourcesManager.cpp - Line 86).\n");
}*/
}
/*M+==================================================================
  Method:  CResourcesManager::~CResourcesManager
  Summary: Frees the path strings and every cached texture and sprite.
==================================================================-M*/
CResourcesManager::~CResourcesManager()
{
	// NOTE(review): these pointers are assigned from the *_PATH macros in
	// the constructor; if those expand to string literals, deleting them
	// is undefined behaviour - confirm the macros allocate with new.
	if (m_szMetaTextureFile)
		delete m_szMetaTextureFile;
	if (m_szSpriteFileData)
		delete m_szSpriteFileData;
	// Fix: this block previously re-checked and re-deleted
	// m_szSpriteFileData (copy/paste bug causing a double delete);
	// it should free the sound path instead.
	if (m_szMetaSoundFile)
		delete m_szMetaSoundFile;
	// Free every cached texture.
	map<int, CTexture*>::iterator i;
	for (i = m_aTexture.begin(); i != m_aTexture.end(); i++)
	{
		if (i->second != NULL)
			delete i->second;
	}
	m_aTexture.clear();
	// Free every cached sprite.
	map<int, CSprite*>::iterator j;
	for (j = m_aSprite.begin(); j != m_aSprite.end(); j++)
	{
		if (j->second != NULL)
			delete j->second;
	}
	m_aSprite.clear();
	/*map<int, CSound*>::iterator k;
	for (k = m_aSounds.begin(); k != m_aSounds.end(); k++)
	{
		if (k->second != NULL)
			delete k->second;
	}
	m_aSounds.clear();*/
}
/*M+==================================================================
  Method:  CResourcesManager::GetTexture
  Summary: Returns the texture with the given ID, loading it lazily
           from the texture metadata file on first request and caching
           it for later calls.
  Params:  textureId - ID of the texture to fetch.
  Returns: the cached/loaded texture, or NULL when the metadata file
           cannot be opened or does not contain the ID.
==================================================================-M*/
CTexture* CResourcesManager::GetTexture(int textureId){
	map<int, CTexture*>::iterator iTexture;
	// Look the ID up in the cache first.
	iTexture = m_aTexture.find(textureId);
	if (iTexture == m_aTexture.end())
	{
		// Not cached yet: open the texture metadata file.
		ifstream metaTextureStream;
		metaTextureStream.open(m_szMetaTextureFile, ios::in);
		if (!metaTextureStream.is_open())
		{
			OutputDebugString("[resourcesmanager.cpp] Fail to open Meta Texture file.");
			return NULL;
		}
		int ID, rows, cols, count;
		// Scan line by line for the record whose first field matches textureId.
		do
		{
			// Fix: bail out at end-of-file instead of looping forever when
			// the requested ID is missing from the file.
			if (!(metaTextureStream >> ID))
			{
				metaTextureStream.close();
				return NULL;
			}
			if (ID == textureId)
				break;
			// Skip the rest of the line (fix: also stop at EOF so a file
			// without a trailing newline cannot hang this loop).
			while (metaTextureStream.good() && metaTextureStream.get() != '\n');
		} while (1);
		metaTextureStream >> rows >> cols >> count;
		// NOTE(review): fileName is handed to CTexture and never deleted
		// here - verify whether CTexture takes ownership or this leaks.
		char* fileName = new char[100];
		metaTextureStream.get();
		metaTextureStream.getline(fileName, 100);
		// Done with the file.
		metaTextureStream.close();
		metaTextureStream.clear();
		CTexture* tmpTexture = new CTexture(fileName, cols, rows, count);
		m_aTexture.insert(::pair<int, CTexture*>(textureId, tmpTexture));
		iTexture = m_aTexture.find(textureId);
	}
	return iTexture->second;
}
/*M+==================================================================
  Method:  CResourcesManager::GetSprite
  Summary: Returns the sprite with the given ID, loading it lazily
           from the sprite metadata file on first request and caching
           it for later calls.
  Params:  spriteId - ID of the sprite to fetch.
  Returns: the cached/loaded sprite, or NULL when the metadata file
           cannot be opened or does not contain the ID.
==================================================================-M*/
CSprite* CResourcesManager::GetSprite(int spriteId)
{
	map<int, CSprite*>::iterator iSprite;
	// Look the ID up in the cache first.
	iSprite = m_aSprite.find(spriteId);
	if (iSprite == m_aSprite.end()) // not cached yet
	{
		// Open the sprite metadata file.
		fstream spriteData;
		spriteData.open(m_szSpriteFileData, ios::in);
		if (!spriteData.is_open())
		{
			OutputDebugString("[resourcesmanager.cpp] Fail to open Meta Sprite file.");
			return NULL;
		}
		// Fields stored per sprite record.
		int ID, animationTime, startIndex, endIndex, textureID;
		// Scan for the record whose first field matches spriteId.
		do
		{
			// Fix: bail out at end-of-file instead of looping forever when
			// the requested ID is missing from the file.
			if (!(spriteData >> ID))
			{
				spriteData.close();
				return NULL;
			}
			if (spriteId == ID)
				break;
			// Skip the rest of the line (fix: also stop at EOF so a file
			// without a trailing newline cannot hang this loop).
			while (spriteData.good() && spriteData.get() != '\n');
		} while (1);
		spriteData >> animationTime >> startIndex >> endIndex >> textureID;
		// Done with the file.
		spriteData.close();
		spriteData.clear();
		// Build the sprite and cache it.
		CSprite* tempSprite = new CSprite(GetTexture(textureID), startIndex, endIndex, animationTime);
		m_aSprite.insert(pair<int, CSprite*>(spriteId, tempSprite));
		// Return the freshly created sprite.
		return tempSprite;
	}
	return iSprite->second;
}
//CSound* CResourcesManager::GetSound(int soundId)
//{
// map<int, CSound*>::iterator iSound;
//
// // Tìm texture có ID sound ID trong mảng m_aSound
// iSound = m_aSounds.find(soundId);
//
// if (iSound == m_aSounds.end()) // Nếu chưa có sound này
// {
// // Mở file chứa các dữ liệu tương ứng
// fstream soundData;
// soundData.open(m_szMetaSoundFile, ios::in);
//
// if (!soundData.is_open())
// {
// output_print("[ERROR] Failed to open file sound.txt\n");
// return NULL;
// }
//
// // Các thông tin cần lấy
// int ID;
//
// do
// {
// soundData >> ID;
// if (ID == soundId)
// break;
//
// // Xuống hàng
// while (soundData.get() != '\n');
// } while (1);
//
// // Tên file âm thanh
// char fileName[100];
// soundData.get();
// soundData.getline(fileName, 100);
//
// // Đóng file
// soundData.close();
// soundData.clear();
//
//
//
// CSound* tempSound;
// HRESULT result = m_pSoundManager->Create(&tempSound, fileName);
// if (result != S_OK)
// {
// output_print("[ERROR] Failed to load sound from file %s\n", fileName);
// return NULL;
// }
// m_aSounds.insert(::pair<int, CSound*>(soundId, tempSound));
//
// // Lấy về sound vừa thêm vào
// iSound = m_aSounds.find(soundId);
// }
// return iSound->second;
//}
// Lazily creates and returns the unique CResourcesManager instance.
CResourcesManager* CResourcesManager::GetInstance()
{
	if (s_ResManagerInstance == NULL)
	{
		s_ResManagerInstance = new CResourcesManager();
	}
	return s_ResManagerInstance;
}
#pragma endregion<file_sep>/Game_Rockman/Game_Rockman/GameStateManager.cpp
/*+===================================================================
File: GameStateManager.cpp
Summary: Định nghĩa các phương thức của CGameStateManager.
===================================================================+*/
#include "GameStateManager.h"
//#include "PlayState.h"
CGameStateManager* CGameStateManager::s_instance = nullptr;
/*M+==================================================================
  Method:  CGameStateManager::CGameStateManager
  Summary: Default constructor.
           Fix: zero-initialise the owned pointers so the destructor's
           null checks are meaningful even if Init() was never called
           (they were previously left uninitialised).
==================================================================-M*/
CGameStateManager::CGameStateManager()
{
	m_pCurrentState = nullptr;
	m_pNextState = nullptr;
	m_pResourceManager = nullptr;
}
/*M+==================================================================
  Method:  CGameStateManager::~CGameStateManager
  Summary: Frees the owned game states.
           Fix: guard against deleting the same object twice when
           m_pNextState and m_pCurrentState alias each other (they can,
           via ChangeState()/GetCurrentState()).
==================================================================-M*/
CGameStateManager::~CGameStateManager()
{
	if (m_pNextState && m_pNextState != m_pCurrentState)
		delete m_pNextState;
	if (m_pCurrentState)
		delete m_pCurrentState;
	// NOTE(review): m_pResourceManager is the CResourcesManager
	// singleton; deleting it here leaves that class's static instance
	// pointer dangling - confirm the intended shutdown order.
	if (m_pResourceManager)
		delete m_pResourceManager;
}
/*M+==================================================================
  Method:  CGameStateManager::Init
  Summary: Stores the initial game state and grabs the resource
           manager singleton.
  Params:  state - state the game starts in.
  Returns: 1 (always succeeds).
==================================================================-M*/
int CGameStateManager::Init(CBaseGameState* state)
{
m_pResourceManager = CResourcesManager::GetInstance();
m_pCurrentState = state;
return 1;
}
/*M+==================================================================
  Method:  CGameStateManager::GetCurrentState
  Summary: Returns the active state; when the active state has
           finished, switches to the first queued state.
  Returns: the state the game loop should run this frame.
==================================================================-M*/
CBaseGameState* CGameStateManager::GetCurrentState()
{
	// Fix: guard the vector access - indexing an empty vector is
	// undefined behaviour.
	if (m_pCurrentState->m_bFinished && !m_aGameState.empty())
		m_pCurrentState = m_aGameState[0];
	return m_pCurrentState;
}
// Lazily creates and returns the unique CGameStateManager instance.
CGameStateManager* CGameStateManager::GetInstance()
{
	if (!s_instance)
		s_instance = new CGameStateManager();
	return s_instance;
}
// Queues `state` as the next game state: it becomes m_pNextState and
// replaces the top entry of the state container, then the frame timer
// is restarted.
void CGameStateManager::ChangeState(CBaseGameState* state)
{
	m_pNextState = state;
	if (m_aGameState.empty())
		m_aGameState.push_back(state);
	else
		m_aGameState.back() = state;
	CTimer::GetInstance()->StartCount();
}
<file_sep>/Game_Rockman/Game_Rockman/MenuState.cpp
/*+===================================================================
File: MenuState.cpp
Summary: Hiện thực hóa các phương thức của lớp CMenuState
===================================================================+*/
#include "MenuState.h"
#include "Viewport.h"
#include "Input.h"
// Constructor: immediately loads the menu's resources and resets its state.
CMenuState::CMenuState()
{
	Init();
}
/*M+==================================================================
  Method:  CMenuState::~CMenuState
  Summary: Destructor.
           Fix: the textures obtained from
           CResourcesManager::GetTexture() are owned and cached (and
           eventually deleted) by the resource manager; deleting
           m_pBackground here caused a double free and left a dangling
           pointer in the manager's cache.
==================================================================-M*/
CMenuState::~CMenuState()
{
	// Intentionally empty: this state owns none of its textures.
	/*if (m_pSound)
	delete m_pSound;*/
}
void CMenuState::Init()
{
m_pBackground = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_BACKGROUND_3);
m_pRockManIcon = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_ICON_ROCK_MAN);
m_pRockManIconGray = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_ICON_ROCK_MAN_GRAY);
m_pLine1 = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_BACKGROUND_4);
m_pLine2 = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_BACKGROUND_2);
m_pGameTitle = CResourcesManager::GetInstance()->GetTexture(ID_TEXTURE_GAME_TITLE);
//m_pSound = CResourcesManager::GetInstance()->GetSound(ID_SOUND_OP_THEME);
//m_pSound->Play();
m_pos = D3DXVECTOR2(0, 0);
m_v = D3DXVECTOR2(-200.0f, 0.0f);
m_bIsEnterGame = false;
m_bFinished = false;
m_fTimeEnterGame = 2.0f;
}
/*M+==================================================================
  Method:  CMenuState::Update
  Summary: Advances the menu: the first Enter press snaps the icon to
           the screen centre; a second Enter starts the "entering
           game" twinkle and a ~2 s countdown, after which the state
           ends.
  Params:  deltaTime - seconds elapsed since the previous frame.
==================================================================-M*/
void CMenuState::Update(float deltaTime)
{
if (m_bFinished)
return;
// First Enter press: snap the icon to the centre and stop its motion.
if (CInput::GetInstance()->IsKeyPress(DIK_RETURN))
{
m_pos.x = SCREEN_WIDTH / 2;
m_v.x = 0;
}
// Exact float comparison is intentional here: m_pos.x is only ever
// assigned from the same SCREEN_WIDTH / 2 expression above.
if (m_pos.x == SCREEN_WIDTH / 2)
{
// Second Enter press: begin entering the game.
if (CInput::GetInstance()->IsKeyPress(DIK_RETURN))
{
m_bIsEnterGame = true;
}
}
if (m_bIsEnterGame)
{
// Toggle the grey icon every 0.2 s to produce the twinkle effect.
if (m_fTimeTwinkle > 0)
{
m_fTimeTwinkle -= deltaTime;
}
else
{
m_fTimeTwinkle = 0.2f;
m_bIsDraw = !m_bIsDraw;
}
// Count down before leaving the menu state.
if (m_fTimeEnterGame >= 0)
{
m_fTimeEnterGame -= deltaTime;
}
else
{
m_fTimeEnterGame = 2.0f;
this->End();
}
}
}
/*M+==================================================================
  Method:  CMenuState::Render
  Summary: Draws the menu: tiled background, game title, the Rockman
           icon with decorative lines either side, and - while the
           twinkle flag is set - the grey icon overlay. Draw order is
           back-to-front and must be preserved.
==================================================================-M*/
void CMenuState::Render()
{
if (m_bFinished)
return;
// Tile the blue background texture across the whole screen.
for (int i = 0; i <= SCREEN_HEIGHT; i += 15)
{
for (int j = 0; j <= SCREEN_WIDTH; j += 15)
{
m_pBackground->Draw(D3DXVECTOR2(j, i));
}
}
// Draw the game title.
m_pGameTitle->Draw(D3DXVECTOR2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2.5));
// Draw the Rockman icon.
m_pRockManIcon->Draw(D3DXVECTOR2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 1.5));
// Draw line 1, left and right of the icon.
for (int i = 0;i <= SCREEN_WIDTH / 2 - 50 ;i+=5)
{
m_pLine1->Draw(D3DXVECTOR2(i, SCREEN_HEIGHT / 1.5));
}
for (int i = SCREEN_WIDTH;i >= SCREEN_WIDTH / 2 + 50;i -= 5)
{
m_pLine1->Draw(D3DXVECTOR2(i, SCREEN_HEIGHT / 1.5));
}
// Draw line 2, slightly below line 1.
for (int i = 0;i <= SCREEN_WIDTH / 2 - 50;i += 5)
{
m_pLine2->Draw(D3DXVECTOR2(i, SCREEN_HEIGHT / 1.5 + 10));
}
for (int i = SCREEN_WIDTH;i >= SCREEN_WIDTH / 2 + 50;i -= 5)
{
m_pLine2->Draw(D3DXVECTOR2(i, SCREEN_HEIGHT / 1.5 + 10));
}
// While Enter was pressed: overlay the grey icon to twinkle.
if (m_bIsDraw)
{
m_pRockManIconGray->Draw(D3DXVECTOR2(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 1.5));
}
}
void CMenuState::End()
{
m_bFinished = true;
delete m_pNextState;
//m_pNextState = new CPlaysa();
CGameStateManager::GetInstance()->ChangeState(m_pNextState);
}<file_sep>/Game_Rockman/Game_Rockman/Input.h
/*+===================================================================
File: Input.h
Summary: Định nghĩa lớp CInput .
===================================================================+*/
#pragma once
#include <dinput.h>
#define KEYBOARD_BUFFER_SIZE 1024
/*C+==================================================================
Class: CInput
Summary: Lớp CInput dùng để xử lí bàn phím, xác định phím nào được
nhấn.
==================================================================-C*/
class CInput
{
private:
LPDIRECTINPUT8 m_lpInput; /* DirectInput object */
LPDIRECTINPUTDEVICE8 m_lpInputDevice; /* DirectInput keyboard device */
BYTE m_keyStates[256]; /* per-key state snapshot */
DIDEVICEOBJECTDATA m_keyEvents[KEYBOARD_BUFFER_SIZE]; /* buffered key events */
DWORD m_dwElements; /* number of buffered events read */
static CInput* s_instance; /* Pointer to the single instance of this class */
public:
~CInput();
/*M+==================================================================
Method: CInput::Init
Summary: Creates the DirectInput keyboard device.
==================================================================-M*/
int Init(HINSTANCE hInstance, HWND hWnd);
/*M+==================================================================
Method: CInput::ProcessKeyBroad
Summary: Polls the keyboard and refreshes the state buffers.
==================================================================-M*/
void ProcessKeyBroad();
/*M+==================================================================
Method: CInput::IsKeyPress
Summary: Checks whether a key was just pressed (buffered method).
==================================================================-M*/
int IsKeyPress(int keyCode);
/*M+==================================================================
Method: CInput::IsKeyRelease
Summary: Checks whether a key was just released (buffered method).
==================================================================-M*/
int IsKeyRelease(int keyCode);
/*M+==================================================================
Method: CInput::IsKeyDown
Summary: Checks whether a key is currently held down.
==================================================================-M*/
int IsKeyDown(int keyCode);
/*M+==================================================================
Method: CInput::IsKeyUp
Summary: Checks whether a key is currently up.
==================================================================-M*/
int IsKeyUp(int keyCode);
/*M+==================================================================
Method: CInput::GetInstance
Summary: Returns the unique instance of this class.
==================================================================-M*/
static CInput* GetInstance();
/*M+==================================================================
Method: CInput::Unacquire
Summary: Unacquires the keyboard device.
==================================================================-M*/
void Unacquire();
};
<file_sep>/Game_Rockman/Game_Rockman/Game.h
#ifndef _GAME_H_
#define _GAME_H_
#include "d3d9.h"
#include "GameWindow.h"
#include "Timer.h"
#include "Graphic.h"
#include "Input.h"
#include "GameStateManager.h"
// Top-level game object: owns the frame timer and drives the main loop.
class CGame
{
private:
CTimer* m_pTimer; // frame timer (implementation elsewhere)
public:
CGame();
~CGame();
// Initialises the game's subsystems; returns nonzero on success
// (definition elsewhere).
int Init(HINSTANCE hInstance);
// Runs the main loop until the game exits (definition elsewhere).
void Run();
};
#endif<file_sep>/Game_Rockman/Game_Rockman/MenuState.h
/*+===================================================================
File: MenuState.h
Summary: Định nghĩa lớp CMenuState.
===================================================================+*/
#pragma once
#include "ResourcesManager.h"
#include "GameStateManager.h"
#include "Global.h"
#include "Sprite.h"
//#include "ScoreState.h"
//#include "DSUtil.h"
/*C+==================================================================
Class: CMenuState
Summary: Lớp CMenuState chứa các thuộc tính và phương thức của menu
==================================================================-C*/
class CMenuState : public CBaseGameState
{
private:
CTexture* m_pBackground; // tiled background texture
CTexture* m_pRockManIcon; // normal title icon
CTexture* m_pRockManIconGray; // grey icon used for the twinkle effect
CTexture* m_pLine1; // decorative line texture 1
CTexture* m_pLine2; // decorative line texture 2
CTexture* m_pGameTitle; // game title texture
D3DXVECTOR2 m_pos; // position used to gate the second Enter press
D3DXVECTOR2 m_v; // velocity (x is zeroed on the first Enter press)
//CSound* m_pSound;
bool m_bIsEnterGame; // true once Enter confirmed entering the game
bool m_bIsDraw; // toggled to make the grey icon twinkle
float m_fTimeEnterGame; // countdown before leaving the menu
float m_fTimeTwinkle; // countdown between twinkle toggles
RECT playerRect; // NOTE(review): appears unused in the visible .cpp
public:
CMenuState();
~CMenuState();
void Init();
void Update(float deltaTime);
void Render();
void End();
};<file_sep>/Game_Rockman/Game_Rockman/BaseGameState.h
#pragma once
#include "Graphic.h"
#include "Input.h"
/*C+==================================================================
  Class:   CBaseGameState
  Summary: Abstract base class for all game states (menu, play, ...).
           States are deleted through CBaseGameState* elsewhere, so
           the destructor must be virtual.
==================================================================-C*/
class CBaseGameState
{
protected:
	/*M+==================================================================
	Method: CBaseGameState::Update
	Summary: Pure virtual - advances the state's world by deltaTime.
	==================================================================-M*/
	virtual void Update(float deltaTime) = 0;
	/*M+==================================================================
	Method: CBaseGameState::Render
	Summary: Pure virtual - draws the state's world.
	==================================================================-M*/
	virtual void Render() = 0;
public:
	CBaseGameState();
	// Fix: the destructor must be virtual because states are deleted
	// polymorphically (e.g. by CGameStateManager); without it derived
	// destructors never run.
	virtual ~CBaseGameState();
	/*M+==================================================================
	Method: CBaseGameState::Init
	Summary: Pure virtual - initialises the state.
	==================================================================-M*/
	virtual void Init() = 0;
	/*M+==================================================================
	Method: CBaseGameState::Run
	Summary: Runs one frame of the state.
	==================================================================-M*/
	void Run(float deltaTime);
	// True once the state has finished and can be replaced.
	// NOTE(review): not initialised here - confirm the constructor
	// (defined elsewhere) zeroes m_bFinished and m_pNextState.
	bool m_bFinished;
	// Pointer to the state that should follow this one.
	CBaseGameState* m_pNextState;
};
<file_sep>/Game_Rockman/Game_Rockman/PlayState.h
#pragma once
#include "BaseGameState.h"
// Skeleton for the in-game (play) state. No members yet; note that the
// pure virtual methods of CBaseGameState are not overridden here, so
// this class is still abstract and cannot be instantiated as-is.
class CPlayState : public CBaseGameState
{
private:
public:
};
"C",
"C++"
] | 30 | C++ | pvdk276/Game_Rockman | ca05d222169cd4ea97eb94b588db929371ec338b | 9811e0d74efd742da0549d68c66c59f2fca83a7a |
refs/heads/master | <repo_name>aakylbekov/DNA<file_sep>/UnmanagedCode/Source.cpp
#pragma once
#include "Header.h"
#include<cmath>
LowLevel3DArithmetic::LowLevel3DArithmetic(double baseOx, double baseOy, double baseOz) {
_baseOx = baseOx;
_baseOy = baseOy;
_baseOz = baseOz;
_zeroOx = _zeroOy = _zeroOz = 0.0;
}
double LowLevel3DArithmetic::DistanceFromZeroToBasePoint() {
return sqrt(pow(_zeroOx - _baseOx, 2) +
(pow(_zeroOy - _baseOy, 2)) +
(pow(_zeroOz - _baseOz, 2)));
}
<file_sep>/UnmanagedCode/Header.h
#pragma once
class LowLevel3DArithmetic {
public:
LowLevel3DArithmetic(double baseOx, double baseOy, double baseOz);
double DistanceFromZeroToBasePoint();
private:
double _baseOx, _baseOy, _baseOz;
double _zeroOx, _zeroOy, _zeroOz;
};
<file_sep>/Bridge/Bridge.cpp
#include <string>
#include "C:\Users\Iskander\source\repos\Solution2\UnmanagedCode\Header.h"
#include "C:\Users\Iskander\source\repos\Solution2\UnmanagedCode\Source.cpp"
extern "C" __declspec(dllexport)
double DistanceFromZeroToBasePoint(double baseOx, double baseOy, double baseOz) {
LowLevel3DArithmetic ll3da(baseOx, baseOy, baseOz);
return ll3da.DistanceFromZeroToBasePoint();
}
extern "C" __declspec(dllexport)
int * GetSomeRandomPointer() {
int randomValue = 777;
return &randomValue;
}
extern "C" __declspec(dllexport)
void GetSomeString(char * str, int len) {
str = new char [len] {'A', 'B', 'C', 'D'};
}
extern "C" __declspec(dllexport)
int * GetIntArray() {
int * arr = new int [5]{ 1, 2, 3, 4, 5 };
return arr;
}<file_sep>/ClientApi/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace ClientApi
{
[Flags]
public enum AccessRight : long
{
DESKTOP_READOBJECTS = 0x00000001,
DESKTOP_CREATEWINDOW = 0x00000002,
DESKTOP_CREATEMENU = 0x00000004,
DESKTOP_HOOKCONTROL = 0x00000008,
DESKTOP_JOURNALRECORD = 0x00000010,
DESKTOP_JOURNALPLAYBACK = 0x00000020,
DESKTOP_ENUMERATE = 0x00000040,
DESKTOP_WRITEOBJECTS = 0x00000080,
DESKTOP_SWITCHDESKTOP = 0x00000100,
GENERIC_ALL = (DESKTOP_READOBJECTS | DESKTOP_CREATEWINDOW | DESKTOP_CREATEMENU |
DESKTOP_HOOKCONTROL | DESKTOP_JOURNALRECORD | DESKTOP_JOURNALPLAYBACK |
DESKTOP_ENUMERATE | DESKTOP_WRITEOBJECTS | DESKTOP_SWITCHDESKTOP)
};
static class UnmanagedCodeCall
{
[DllImport("Bridge.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern double DistanceFromZeroToBasePoint
(double baseOx, double baseOy, double baseOz);
[DllImport("Bridge.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr GetSomeRandomPointer();
[DllImport("Bridge.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr GetSomeString(StringBuilder str, int len);
[DllImport("Bridge.dll", CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr GetIntArray();
[DllImport("wininet.dll", SetLastError = true)]
public static extern bool InternetCheckConnection(string lpszUrl, int dwFlags, int dwReserved);
[DllImport("Advapi32.dll")]
public static extern bool GetUserName(StringBuilder lpBuffer, ref int nSize);
[DllImport("user32.dll", SetLastError = true)]
public static extern bool LockWorkStation();
}
class Program
{
static void Main(string[] args)
{
UnmanagedCodeCall.LockWorkStation();
Console.ReadLine();
}
}
}
| 11ca7d09a82aa7a9982ff0a2d43813c32e78781f | [
"C#",
"C++"
] | 4 | C++ | aakylbekov/DNA | 73c6ae557848c0fede282fdf9745f42f5c0aceaa | f3a95f0c3494151e1aac89efe32b0566f743c50e |
refs/heads/master | <repo_name>mattboran/Watermarker<file_sep>/Watermark2/Watermarker.h
/*
Watermarker.h : Class definition and function declarations for the Watermarker class.
Methods are implemented in Watermarker.cpp
This header file contains the definition for the Watermarker class
*/
#pragma once
#include "stdafx.h";
//For working with COM
#define RELEASE_INTERFACE(pi) \
{ \
if (pi) \
{ \
pi->Release(); \
pi = NULL; \
} \
}
#define DELETE_POINTER(p) \
{ \
if (p) \
{ \
delete p; \
p = NULL; \
} \
}
class Watermarker{
public:
Watermarker(){
::CoInitialize(NULL);
m_pFactory = nullptr;
m_pDecoder = nullptr;
m_pSourceImg = nullptr;
m_pSourceWmrk = nullptr;
m_pBitmapImg = nullptr;
m_pBitmapWmrk = nullptr;
m_pScaler = nullptr;
m_pConverter = nullptr;
//m_pWatermarkBuffer = nullptr;
//m_pOriginalImgBuffer = nullptr;
}
~Watermarker(){
RELEASE_INTERFACE(m_pFactory);
RELEASE_INTERFACE(m_pDecoder);
RELEASE_INTERFACE(m_pSourceImg);
RELEASE_INTERFACE(m_pSourceWmrk);
RELEASE_INTERFACE(m_pBitmapImg);
RELEASE_INTERFACE(m_pBitmapWmrk);
RELEASE_INTERFACE(m_pScaler);
RELEASE_INTERFACE(m_pConverter);
RELEASE_INTERFACE(m_pImgLock);
RELEASE_INTERFACE(m_pWmrkLock);
::CoUninitialize();
}
void Initialize();
void GetWatermarkSize(UINT &p_X, UINT &p_Y);
HRESULT LoadImageFromFile(std::wstring filepath);
HRESULT LoadWatermarkFromFile(std::wstring filepath);
HRESULT SaveImageBufferToFile(std::wstring filepath);
IWICBitmapLock *m_pImgLock;
IWICBitmapLock *m_pWmrkLock;
private:
IWICImagingFactory *m_pFactory;
IWICBitmapDecoder *m_pDecoder;
IWICBitmapFrameDecode *m_pSourceImg;
IWICBitmapFrameDecode *m_pSourceWmrk;
IWICBitmap *m_pBitmapImg;
IWICBitmap *m_pBitmapWmrk;
IWICBitmapScaler *m_pScaler;
IWICFormatConverter *m_pConverter;
//BYTE *m_pWatermarkBuffer;
//BYTE *m_pOriginalImgBuffer;
UINT xPixelsImg, yPixelsImg;
double xDpiImg, yDpiImg;
};<file_sep>/Watermark2/Watermark_main.cpp
// Watermark_main.cpp : Defines the entry point for the console application.
// By <NAME>
// Watermarking app - command line Windows application that uses the WIC interface to lay one image over another as a watermark
// Section 1 - String Manipulation Functions
// Section 2 - main():
// -Section 2A - Parse Command Line Arguments
// -Section 2B - Load image and watermark from file, converting them to the same format
// -Section 2C - Modify pixel information and assemble output buffer
// -Section 2D - Save output buffer to output.png
// -Section 2F - Cleanup and close
#include "stdafx.h"
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Functions for string manipulation
//
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
template<typename Out>
void split(const std::string &s, char delim, Out result){
std::stringstream ss;
ss.str(s);
std::string item;
while (std::getline(ss, item, delim)){
*(result++) = item;
}
}
//This function creates and returns a new vector of strings that represent the pieces of split string
std::vector<std::string> split(const std::string &s, char delim){
std::vector<std::string> elems;
split(s, delim, std::back_inserter(elems));
return elems;
}
//This function converts string to utf16
std::wstring get_utf16(const std::string &str, int codepage){
if (str.empty()) return std::wstring();
int size = MultiByteToWideChar(codepage, 0, &str[0], (int)str.size(), 0, 0);
std::wstring res(size, 0);
MultiByteToWideChar(codepage, 0, &str[0], (int)str.size(), &res[0], size);
return res;
}
//This function prints a GUID to console for debugging of WIC Pixel format types
void printf_guid(GUID guid) {
printf("Guid = {%08lX-%04hX-%04hX-%02hhX%02hhX-%02hhX%02hhX%02hhX%02hhX%02hhX%02hhX}",
guid.Data1, guid.Data2, guid.Data3,
guid.Data4[0], guid.Data4[1], guid.Data4[2], guid.Data4[3],
guid.Data4[4], guid.Data4[5], guid.Data4[6], guid.Data4[7]);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Functions for looking at image buffers, modifying image buffers
//
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//This function returns true if bitmap is all one color: R G B
bool check4bppBitmapIsAllOneColor(BYTE *img, UINT stride, UINT height, UINT R, UINT G, UINT B){
bool empty = true;
for (int i = 0; i < stride * height; i++){
UINT iR = img[i];
UINT iG = img[i + 1];
UINT iB = img[i + 2];
if (iR != R && iG != G && iB != B){
empty = false;
}
}
return empty;
}
int main(int argc, char* argv[])
{
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Parsing command line parameters/arguments
//
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
std::string img_name, watermark_name;
std::string img_type, watermark_type;
bool same_filetypes = false;
int default_color_RGB[3] = { 255, 255, 255 };
bool execute = false;
if (argc == 1)
{
printf("You have not entered any arguments! Please enter the name of\n the image to be watermarked, and then the name of the\n");
printf("image file containing the watermark. Use -help to get a list \nof commands, instructions, and information about the app.\n");
printf("Example:\nwatermarker.exe -help\n\nwatermarker.exe image.png watermark.png\n");
}
else if (argc == 2)
{
if (std::strcmp(argv[1], "-help") == 0 || std::strcmp(argv[1], "-h") == 0)
{
printf("Please enter the name of the image to be watermarked, and then the name of\n the image file containing the watermark.\n");
printf("This app supports the following file formats:\n .png, .tiff\n");
printf("The second image will be layed on top of the first image, with the watermark being scaled to fix.\n");
printf("This program uses the Windows Image Component Library to load and manipulate images.\n");
printf("The output is saved in the same directory as the application as 'output.png'.\n");
printf("Information:\n");
printf("watermarker.exe -help : display help information\n");
printf("watermarker.exe -version : show version information\n");
/*printf("\nArguments:\n");
printf("-c, -color: specify the background color of the watermark (default white)\n");*/
printf("Example:\nwatermarker.exe -help\n\nwatermarker.exe image.png watermark.png\n");
}
if (std::strcmp(argv[1], "-version") == 0)
{
printf("Watermarker Tool\n\nVersion: 1.0\n\nAuthor:\n\n<NAME>\n\n");
}
}
else
{
//input is valid, execute = true: parse input
execute = true;
std::vector<std::string> split_string;
split_string = split(argv[1], '.');
img_name = split_string[0];
img_type = split_string[1];
std::transform(img_name.begin(), img_name.end(), img_name.begin(), ::tolower);
std::transform(img_type.begin(), img_type.end(), img_type.begin(), ::tolower);
if (std::strcmp(img_type.c_str(), "png") != 0 && std::strcmp(img_type.c_str(), "tif") != 0 && std::strcmp(img_type.c_str(), "tiff") != 0)
{
printf("The first image is in a format not useable with this program. Please supply images in either .png or .tif (.tif, .tiff) formats.\n");
printf("You supplied filetype:\n .%s\n\n", img_type.c_str());
execute = false;
}
split_string = split(argv[2], '.');
watermark_name = split_string[0];
watermark_type = split_string[1];
std::transform(watermark_name.begin(), watermark_name.end(), watermark_name.begin(), ::tolower);
std::transform(watermark_type.begin(), watermark_type.end(), watermark_type.begin(), ::tolower);
if (std::strcmp(watermark_type.c_str(), "png") != 0 && std::strcmp(watermark_type.c_str(), "tif") != 0 && std::strcmp(watermark_type.c_str(), "tiff") != 0)
{
printf("The second image is in a format not useable with this program. Please supply images in either .png or .tif (.tif, .tiff) formats.\n");
printf("You supplied filetype: \n .%s\n\n", watermark_type.c_str());
execute = false;
}
//check to see if filetypes are the same
if (std::strcmp(img_type.c_str(), watermark_type.c_str()) == 0)
{
same_filetypes = true;
//check if same image was supplied twice
if (std::strcmp(img_name.c_str(), watermark_name.c_str()) == 0)
{
printf("You supplied the same image twice. Please specify two different images.\n");
execute = false;
}
}
}
if (!execute)
{
printf("Please correct these errors and try the application again.\n");
return 0;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Load image and apply watermark
//
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
HRESULT hr = S_OK;
std::string img_path = img_name + "." + img_type;
std::wstring w_img_path = get_utf16(img_path, CP_UTF8);
std::string watermark_path = watermark_name + "." + watermark_type;
std::wstring w_watermark_path = get_utf16(watermark_path, CP_UTF8);
BYTE *output_bytes = nullptr;
BYTE *wmrk_bytes = nullptr;
//As defined in Watermarker.cpp
Watermarker marker = Watermarker();
UINT cbWidth, cbStride, cbHeight, cbBufferSize;
UINT wmrkWidth, wmrkHeight;
marker.Initialize();
marker.LoadImageFromFile(w_img_path);
marker.m_pImgLock->GetSize(&cbWidth, &cbHeight);
marker.m_pImgLock->GetStride(&cbStride);
cbBufferSize = cbStride * cbHeight;
output_bytes = new BYTE[cbBufferSize];
wmrk_bytes = new BYTE[cbBufferSize];
hr = marker.m_pImgLock->GetDataPointer(NULL, &output_bytes);
hr = marker.LoadWatermarkFromFile(w_watermark_path);
hr = marker.m_pWmrkLock->GetDataPointer(NULL, &wmrk_bytes);
marker.m_pWmrkLock->GetSize(&wmrkWidth, &wmrkHeight);
for (int i = 0; i < cbBufferSize; i += 4)
{
UINT R = wmrk_bytes[i];
UINT G = wmrk_bytes[i + 1];
UINT B = wmrk_bytes[i + 2];
UINT A = wmrk_bytes[i + 3];
//if RGB is not white (255, 255, 255) and non-transparent
if ((R + G + B) != (255 * 3) && A > 0)
{
output_bytes[i] = R;
output_bytes[i + 1] = G;
output_bytes[i + 2] = B;
output_bytes[i + 3] = A;
}
else
{
UINT temp = output_bytes[i];
output_bytes[i] = output_bytes[i + 2];
output_bytes[i + 2] = temp;
}
}
hr = marker.SaveImageBufferToFile(L"output.png");
if (SUCCEEDED(hr))
{
//image buffer saved successfully
printf("Saved image buffer successfully to output.png");
}
RELEASE_INTERFACE(marker.m_pImgLock);
RELEASE_INTERFACE(marker.m_pWmrkLock);
return 0;
}
<file_sep>/Watermark2/ReadMe.txt
========================================================================
Windows Console Application : Watermark2 using WIC
========================================================================
Version 1.2.0
Author: <NAME>
Date: Jan 26, 2017
This is a simple command-line application that uses the Windows Imaging Component to
load two images of type either .png or .tif/.tiff, then apply the 2nd image as a
watermark to the 1st. Assuming white = transparent, this program applies all non-white
non-transparent pixels of the watermark to the original image. The watermark is first
scaled to be the full size of the original image, converted into a common format,
modified, then saved using the native Windows Imaging Component codecs
Dependencies: windowscodecs.lib (windowscodecs.dll): Native to Windows XP and up
Usage:
To execute this program in Windows XP +, run it from the command line like so:
watermark2.exe [base_image] [watermark_image]
Where [base_image] and [watermark_image] are both paths to either .tif or .png images.
The paths are relative to the program's location in the directory.
To get information about the program, use flags -version for version and -help for help like so:
watermark2.exe -help
watermark2.exe -version
Watermark_main.cpp
This is the main Watermarking file. Command line arguments are parsed here, and the
Watermarker class is used to load the two images, apply watermark, and save the output
as output.png
Watermarker.h
This is the class declaration for the Watermarker class. There are also two macros for
releasing COM interfaces safely.
Watermarker.cpp
This file contains the class definition and function definitions for the Watermarker
class declared in Watermarker.h
Stdafx.h
This file contains #includes for the required standard library header files as well as
imports from WIC.
Targetver.h
File created as part of the Visual Studio project; it is included by stdafx.h and
specifies different codepaths for different target OS versions.
Stdafx.cpp
File created as part of Visual Studio project to implement functions in stdafx.h
<file_sep>/Watermark2/stdafx.h
#pragma once
#include "targetver.h"
//Standard Library Includes
//headers required for this .h file
#include <algorithm>
#include <tchar.h>
#include <iostream>
#include <iterator>
#include <stdio.h>
#include <string>
#include <sstream>
#include <vector>
//Windows Image Component
#include <wincodec.h>
#include <wincodecsdk.h>
#include <dwrite.h>
//Watermarker Helper
#include "Watermarker.h"
<file_sep>/Watermark2/Watermarker.cpp
// Watermarker.cpp : Functions for the Watermarker Class can be found in this file
// Class and member function definitions are in Watermarker.h
//
// By <NAME>
//
//
#include "stdafx.h";
/*
Lazily creates the shared WIC imaging factory used by every other Watermarker
method. Safe to call more than once: the factory is only created on the first
call, and subsequent calls return immediately.
NOTE(review): the HRESULT of CoCreateInstance is discarded here -- on failure
m_pFactory stays null and later methods would dereference it; confirm callers
guard against that.
*/
void Watermarker::Initialize()
{
	if (m_pFactory)
	{
		return;
	}
	::CoCreateInstance(CLSID_WICImagingFactory,
		nullptr,
		CLSCTX_INPROC_SERVER,
		IID_IWICImagingFactory,
		(LPVOID*)(&m_pFactory));
}
/*
This method loads the image from a given filepath, converts it to 32bppRGBA,
and locks the resulting bitmap for writing via m_pImgLock. The image's pixel
dimensions and DPI are cached in xPixelsImg/yPixelsImg and xDpiImg/yDpiImg for
later use by the watermark scaler and the output encoder.
@param= std::wstring filepath - the path + filename of the image to load
@return= HRESULT of the first failing WIC call (S_OK on success)
*/
HRESULT Watermarker::LoadImageFromFile(std::wstring filepath){
	//Decode image, get image properties
	HRESULT hr = m_pFactory->CreateDecoderFromFilename(filepath.c_str(), nullptr, GENERIC_READ, WICDecodeMetadataCacheOnLoad, &m_pDecoder);
	if (SUCCEEDED(hr))
	{
		hr = m_pDecoder->GetFrame(0, &m_pSourceImg);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pSourceImg->GetSize(&xPixelsImg, &yPixelsImg);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pSourceImg->GetResolution(&xDpiImg, &yDpiImg);
	}
	//Convert format to 32bppRGBA. The original ignored CreateFormatConverter's
	//HRESULT and ran it even after an earlier step had failed; guard and
	//propagate it like every other step in the chain.
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateFormatConverter(&m_pConverter);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pConverter->Initialize(m_pSourceImg,
			GUID_WICPixelFormat32bppRGBA,
			WICBitmapDitherTypeNone,
			nullptr,
			0.f,
			WICBitmapPaletteTypeMedianCut);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateBitmapFromSource(m_pConverter, WICBitmapNoCache, &m_pBitmapImg);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pBitmapImg->Lock(NULL, WICBitmapLockWrite, &m_pImgLock);
	}
	//The intermediate COM objects are no longer needed once the bitmap is locked
	RELEASE_INTERFACE(m_pDecoder);
	RELEASE_INTERFACE(m_pConverter);
	RELEASE_INTERFACE(m_pSourceImg);
	return hr;
}
/*
This method loads the watermark from a given filepath, scales it to the size of
the previously-loaded base image (xPixelsImg x yPixelsImg), converts it to
32bppRGBA, and locks the resulting bitmap for writing via m_pWmrkLock.
LoadImageFromFile must have been called first so the target size is known.
@param= std::wstring filepath - the path + filename of the watermark image to load
@return= HRESULT of the first failing WIC call (S_OK on success)
*/
HRESULT Watermarker::LoadWatermarkFromFile(std::wstring filepath){
	//Decode image, get image properties
	HRESULT hr = m_pFactory->CreateDecoderFromFilename(filepath.c_str(), nullptr, GENERIC_READ, WICDecodeMetadataCacheOnLoad, &m_pDecoder);
	if (SUCCEEDED(hr))
	{
		hr = m_pDecoder->GetFrame(0, &m_pSourceImg);
	}
	//Create scaler and scale the watermark to the base image's size. The
	//original assigned CreateBitmapScaler's HRESULT unconditionally, silently
	//clobbering a failure from the decode steps above.
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateBitmapScaler(&m_pScaler);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pScaler->Initialize(m_pSourceImg, xPixelsImg, yPixelsImg, WICBitmapInterpolationModeFant);
	}
	//Convert format to 32bppRGBA, guarding and propagating the result (the
	//original discarded CreateFormatConverter's HRESULT here too)
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateFormatConverter(&m_pConverter);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pConverter->Initialize(m_pScaler,
			GUID_WICPixelFormat32bppRGBA,
			WICBitmapDitherTypeNone,
			nullptr,
			0.f,
			WICBitmapPaletteTypeMedianCut);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateBitmapFromSource(m_pConverter, WICBitmapNoCache, &m_pBitmapWmrk);
	}
	if (SUCCEEDED(hr))
	{
		hr = m_pBitmapWmrk->Lock(NULL, WICBitmapLockWrite, &m_pWmrkLock);
	}
	//Release the intermediates, consistent with LoadImageFromFile (the
	//original leaked the decoder, source frame, scaler and converter here)
	RELEASE_INTERFACE(m_pDecoder);
	RELEASE_INTERFACE(m_pScaler);
	RELEASE_INTERFACE(m_pConverter);
	RELEASE_INTERFACE(m_pSourceImg);
	return hr;
}
/*
This method encodes the locked, watermarked image bitmap (m_pImgLock) as a PNG
and writes it to the file specified by filepath.
@param= std::wstring filepath - the path to and name of the file to save the image as
@return= HRESULT of the first failing WIC/stream call (S_OK on success)
*/
HRESULT Watermarker::SaveImageBufferToFile(std::wstring filepath)
{
	IWICStream *pFileStream = nullptr;
	IWICBitmapEncoder *pEncoder = nullptr;
	IWICBitmapFrameEncode *pFrameEncodeOut = nullptr;
	WICPixelFormatGUID outputPixelFormat = GUID_NULL;
	UINT cbWidth = 0, cbHeight = 0, cbStride = 0, cbBufferSize = 0;
	BYTE *output = nullptr;
	m_pImgLock->GetSize(&cbWidth, &cbHeight);
	m_pImgLock->GetStride(&cbStride);
	cbBufferSize = cbStride * cbHeight;
	//GetDataPointer hands back a pointer into the lock's own memory; no
	//allocation is needed (the original new[]'d a buffer here and then leaked
	//it by overwriting the pointer).
	HRESULT hr = m_pImgLock->GetDataPointer(NULL, &output);
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateStream(&pFileStream);
	}
	if (SUCCEEDED(hr))
	{
		hr = pFileStream->InitializeFromFilename(filepath.c_str(), GENERIC_WRITE);
	}
	//Create encoder for output .png and initialize with pFileStream
	if (SUCCEEDED(hr))
	{
		hr = m_pFactory->CreateEncoder(GUID_ContainerFormatPng, nullptr, &pEncoder);
	}
	if (SUCCEEDED(hr))
	{
		hr = pEncoder->Initialize(pFileStream, WICBitmapEncoderNoCache);
	}
	//Create new frame, set new frame properties (resolution, size, image type)
	if (SUCCEEDED(hr))
	{
		hr = pEncoder->CreateNewFrame(&pFrameEncodeOut, nullptr);
	}
	if (SUCCEEDED(hr))
	{
		hr = pFrameEncodeOut->Initialize(NULL);
	}
	if (SUCCEEDED(hr))
	{
		hr = pFrameEncodeOut->SetSize(cbWidth, cbHeight);
	}
	if (SUCCEEDED(hr))
	{
		hr = pFrameEncodeOut->SetResolution(xDpiImg, yDpiImg);
	}
	if (SUCCEEDED(hr))
	{
		//The buffer was converted to 32bppRGBA at load time, so the frame must
		//be told that PIXEL format. The original passed GUID_ContainerFormatPng
		//here, which is a container GUID, not a pixel-format GUID.
		outputPixelFormat = GUID_WICPixelFormat32bppRGBA;
		hr = pFrameEncodeOut->SetPixelFormat(&outputPixelFormat);
	}
	//Write modified image buffer -> frame encoder, using the lock's actual
	//stride and buffer size (the original hard-coded 4*width, which breaks if
	//the stride includes row padding)
	if (SUCCEEDED(hr))
	{
		hr = pFrameEncodeOut->WritePixels(cbHeight, cbStride, cbBufferSize, output);
	}
	if (SUCCEEDED(hr))
	{
		hr = pFrameEncodeOut->Commit();
	}
	if (SUCCEEDED(hr))
	{
		hr = pEncoder->Commit();
	}
	//Release local COM objects (the original leaked all three)
	RELEASE_INTERFACE(pFrameEncodeOut);
	RELEASE_INTERFACE(pEncoder);
	RELEASE_INTERFACE(pFileStream);
	return hr;
}
"Text",
"C++"
] | 5 | C++ | mattboran/Watermarker | 9a270dc5ff61068692147103e974b0f9a4b8b139 | 1102ef1b73a577d6ff220593c05afecb30a13e70 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.